mirror of
https://github.com/kossakovsky/n8n-install.git
synced 2026-03-07 22:33:11 +00:00
Add installation and setup scripts for automated environment configuration
- Introduced a series of scripts to automate system preparation, Docker installation, secret generation, service execution, and final reporting.
- Implemented logging functions for better visibility during script execution.
- Ensured checks for required files and user inputs to enhance robustness.
- Added functionality for managing Docker services and generating a comprehensive installation summary.
This commit is contained in:
49
scripts/01_system_preparation.sh
Executable file
49
scripts/01_system_preparation.sh
Executable file
@@ -0,0 +1,49 @@
|
||||
#!/bin/bash
# 01_system_preparation.sh — update the OS, install base tooling, and harden
# the host: UFW firewall, Fail2Ban, and unattended security upgrades.
# Must run as root (uses apt, ufw, systemctl directly).

set -e

# Source the shared logging helpers (log_info, log_error, ...)
source "$(dirname "$0")/utils.sh"

export DEBIAN_FRONTEND=noninteractive

# --- System Update ---
log_info "Updating package list and upgrading the system..."
apt update -y && apt upgrade -y

# --- Basic Utilities ---
log_info "Installing standard CLI tools..."
apt install -y \
  htop git curl make unzip ufw fail2ban python3 psmisc \
  build-essential ca-certificates gnupg lsb-release openssl \
  debian-keyring debian-archive-keyring apt-transport-https

# --- Firewall (UFW) ---
# NOTE: all rules are configured BEFORE the firewall is enabled. The original
# enabled UFW first, which applies "default deny incoming" before the ssh
# allow rule exists and can drop the active SSH session mid-install.
log_info "Configuring firewall (UFW)..."
ufw --force reset              # --force replaces the fragile 'echo y | ufw reset'
ufw default deny incoming
ufw default allow outgoing
ufw allow ssh
ufw allow http
ufw allow https
ufw --force enable
ufw reload
ufw status

# --- Fail2Ban ---
log_info "Enabling brute-force protection (Fail2Ban)..."
systemctl enable fail2ban
sleep 1
systemctl start fail2ban
sleep 1
fail2ban-client status
sleep 1
fail2ban-client status sshd

# --- Automatic Security Updates ---
log_info "Enabling automatic security updates..."
apt install -y unattended-upgrades
# Automatic confirmation for dpkg-reconfigure
echo "y" | dpkg-reconfigure --priority=low unattended-upgrades

exit 0
|
||||
127
scripts/02_install_docker.sh
Executable file
127
scripts/02_install_docker.sh
Executable file
@@ -0,0 +1,127 @@
|
||||
#!/bin/bash
# 02_install_docker.sh — install Docker Engine and the Compose plugin from the
# official Docker apt repository, retrying around apt/dpkg lock contention.
# Idempotent: exits early (after sanity checks) when Docker is already present.

set -e

# Source the shared logging helpers (log_info, log_error, ...)
source "$(dirname "$0")/utils.sh"

# 1. Preparing the environment
export DEBIAN_FRONTEND=noninteractive
APT_OPTIONS="-o Dpkg::Options::=--force-confold -o Dpkg::Options::=--force-confdef -y"
log_info "Preparing Docker installation..."

# Run an apt-get command, waiting out any concurrent dpkg/apt lock holders.
# Arguments are forwarded verbatim to apt-get.
# Returns 0 on success, apt-get's exit code (or 1) after exhausting retries.
run_apt_with_retry() {
  local cmd_str="$*"   # human-readable command, for logging only
  local retries=10
  local wait_time=10   # seconds between attempts

  for ((i = 1; i <= retries; i++)); do
    # Check for dpkg locks using fuser; wait and retry if held.
    if fuser /var/lib/dpkg/lock >/dev/null 2>&1 || fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1; then
      sleep "$wait_time"
      continue
    fi
    # Check for apt locks using fuser; wait and retry if held.
    if fuser /var/lib/apt/lists/lock >/dev/null 2>&1 || fuser /var/cache/apt/archives/lock >/dev/null 2>&1; then
      sleep "$wait_time"
      continue
    fi

    # No lock detected — run apt-get directly. "$@" already keeps every
    # argument intact; the original's 'eval apt-get "$@"' re-split and
    # re-evaluated the arguments for no benefit (shell-injection hazard).
    if apt-get "$@"; then
      return 0
    else
      local exit_code=$?
      if [ "$i" -lt "$retries" ]; then
        sleep "$wait_time"
      else
        return "$exit_code"   # failed after the final attempt
      fi
    fi
  done

  # log_error matches the helpers used everywhere else in these scripts
  # (the original called an undefined 'log_message' here).
  log_error "Failed to acquire lock or run command after $retries attempts: apt-get $cmd_str"
  return 1
}

# Check if Docker is already installed
if command -v docker &> /dev/null; then
  log_info "Docker is already installed."
  docker --version
  # Check for Docker Compose plugin
  if docker compose version &> /dev/null; then
    docker compose version
  else
    log_error "Docker Compose plugin not found. Consider reinstalling or checking the installation."
    exit 1
  fi

  # Get the original user who invoked sudo
  ORIGINAL_USER=${SUDO_USER:-$(whoami)}
  # Skip user operations if we're root and SUDO_USER is not set
  if [ "$ORIGINAL_USER" != "root" ] && id "$ORIGINAL_USER" &>/dev/null; then
    # Check docker group membership
    if groups "$ORIGINAL_USER" | grep &> /dev/null '\bdocker\b'; then
      log_info "User '$ORIGINAL_USER' is already in the docker group."
    else
      log_info "Adding user '$ORIGINAL_USER' to the docker group..."
      usermod -aG docker "$ORIGINAL_USER"
    fi
  else
    log_warning "Could not identify a non-root user. Docker will only be available for the root user."
  fi

  exit 0
fi

# 2. Updating and installing dependencies
log_info "Installing necessary dependencies..."
run_apt_with_retry update -qq
run_apt_with_retry install -qq $APT_OPTIONS \
  ca-certificates \
  curl \
  gnupg \
  lsb-release || { log_error "Failed to install dependencies."; exit 1; }

# 3. Adding Docker's GPG key
log_info "Adding Docker's GPG key..."
install -m 0755 -d /etc/apt/keyrings
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg
chmod a+r /etc/apt/keyrings/docker.gpg

# 4. Adding the Docker repository
log_info "Adding the official Docker repository..."
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
  tee /etc/apt/sources.list.d/docker.list > /dev/null

# 5. Installing Docker and Docker Compose
log_info "Installing Docker Engine and Compose Plugin..."
run_apt_with_retry update -qq
run_apt_with_retry install -qq $APT_OPTIONS \
  docker-ce \
  docker-ce-cli \
  containerd.io \
  docker-buildx-plugin \
  docker-compose-plugin || { log_error "Failed to install Docker packages."; exit 1; }

# 6. Adding the user to the Docker group
# Use SUDO_USER to get the original user who invoked sudo
ORIGINAL_USER=${SUDO_USER:-$(whoami)}
log_info "Adding user '$ORIGINAL_USER' to the docker group..."
if id "$ORIGINAL_USER" &>/dev/null; then
  usermod -aG docker "$ORIGINAL_USER"
fi

# 7. Verifying the installation
log_info "Verifying Docker installation..."
docker --version
docker compose version

exit 0
|
||||
403
scripts/03_generate_secrets.sh
Executable file
403
scripts/03_generate_secrets.sh
Executable file
@@ -0,0 +1,403 @@
|
||||
#!/bin/bash
# 03_generate_secrets.sh — interactively collect deployment inputs (domain,
# email, optional OpenAI key) and generate a .env file with random secrets
# from the .env.example template. Caddy is installed temporarily (for bcrypt
# password hashing) and removed again at the end of the script.

set -e

# Source the shared logging helpers (log_info, log_error, ...)
source "$(dirname "$0")/utils.sh"

# openssl is required for secret generation — fail fast if it is missing.
if ! command -v openssl &> /dev/null; then
  log_error "openssl could not be found. Please ensure it is installed and available in your PATH." >&2
  exit 1
fi

# --- Configuration ---
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_ROOT="$( cd "$SCRIPT_DIR/.." &> /dev/null && pwd )"
TEMPLATE_FILE="$PROJECT_ROOT/.env.example"
OUTPUT_FILE="$PROJECT_ROOT/.env"
DOMAIN_PLACEHOLDER="yourdomain.com"

# Variables to generate: varName="type:length"
# Types: password (alphanum), secret (base64), hex, base64, alphanum
declare -A VARS_TO_GENERATE=(
  ["FLOWISE_PASSWORD"]="password:32"
  ["N8N_ENCRYPTION_KEY"]="secret:64" # base64 encoded, 48 bytes -> 64 chars
  ["N8N_USER_MANAGEMENT_JWT_SECRET"]="secret:64" # base64 encoded, 48 bytes -> 64 chars
  ["POSTGRES_PASSWORD"]="password:32"
  ["POSTGRES_NON_ROOT_PASSWORD"]="password:32"
  ["JWT_SECRET"]="base64:64" # 48 bytes -> 64 chars
  ["DASHBOARD_PASSWORD"]="password:32" # Supabase Dashboard
  ["CLICKHOUSE_PASSWORD"]="password:32"
  ["MINIO_ROOT_PASSWORD"]="password:32"
  ["LANGFUSE_SALT"]="secret:64" # base64 encoded, 48 bytes -> 64 chars
  ["NEXTAUTH_SECRET"]="secret:64" # base64 encoded, 48 bytes -> 64 chars
  ["ENCRYPTION_KEY"]="hex:64" # Langfuse Encryption Key (32 bytes -> 64 hex chars)
  ["GRAFANA_ADMIN_PASSWORD"]="password:32"
  # From MD file (ensure they are in template if needed)
  ["SECRET_KEY_BASE"]="base64:64" # 48 bytes -> 64 chars
  ["VAULT_ENC_KEY"]="alphanum:32"
  ["LOGFLARE_LOGGER_BACKEND_API_KEY"]="secret:64" # base64 encoded, 48 bytes -> 64 chars
  ["LOGFLARE_API_KEY"]="secret:64" # base64 encoded, 48 bytes -> 64 chars
  ["PROMETHEUS_PASSWORD"]="password:32" # Added Prometheus password
  ["SEARXNG_PASSWORD"]="password:32" # Added SearXNG admin password
)

# Check if .env file already exists — skip generation for idempotent re-runs
if [ -f "$OUTPUT_FILE" ]; then
  log_info "$OUTPUT_FILE already exists. Skipping generation."
  exit 0
fi

# Install Caddy (used below only to bcrypt-hash passwords).
# NOTE: the original script exited if caddy was missing BEFORE this install
# step — which could never pass on a fresh host. The check now runs AFTER
# the installation instead.
log_info "Installing Caddy..."
curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/gpg.key' | gpg --yes --dearmor -o /usr/share/keyrings/caddy-stable-archive-keyring.gpg
curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/debian.deb.txt' | tee /etc/apt/sources.list.d/caddy-stable.list
apt update   # refresh indexes so the newly added Caddy repo is visible
apt install -y caddy

# Verify the installation produced a usable caddy binary.
if ! command -v caddy &> /dev/null; then
  log_error "caddy could not be found after installation. Please check the Caddy repository setup." >&2
  exit 1
fi

# Prompt for the domain name
while true; do
  read -p "Enter the primary domain name for your services (e.g., example.com): " DOMAIN

  # Validate domain input
  if [[ -z "$DOMAIN" ]]; then
    log_error "Domain name cannot be empty." >&2
    continue # Ask again
  fi

  # Basic check for likely invalid domain characters (very permissive)
  if [[ "$DOMAIN" =~ [^a-zA-Z0-9.-] ]]; then
    log_warning "Warning: Domain name contains potentially invalid characters: '$DOMAIN'" >&2
  fi

  read -p "Are you sure '$DOMAIN' is correct? (y/N): " confirm_domain
  if [[ "$confirm_domain" =~ ^[Yy]$ ]]; then
    break # Confirmed, exit loop
  else
    log_info "Please try entering the domain name again."
  fi
done

# Prompt for user email
echo "Please enter your email address. This email will be used for:"
echo " - Login to Flowise"
echo " - Login to Supabase"
echo " - Login to SearXNG"
echo " - Login to Grafana"
echo " - Login to Prometheus"
echo " - SSL certificate generation with Let's Encrypt"
while true; do
  read -p "Email: " USER_EMAIL

  # Validate email input
  if [[ -z "$USER_EMAIL" ]]; then
    log_error "Email cannot be empty." >&2
    continue # Ask again
  fi

  # Basic email format validation (warning only, not a hard failure)
  if [[ ! "$USER_EMAIL" =~ ^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$ ]]; then
    log_warning "Warning: Email format appears to be invalid: '$USER_EMAIL'" >&2
  fi

  read -p "Are you sure '$USER_EMAIL' is correct? (y/N): " confirm_email
  if [[ "$confirm_email" =~ ^[Yy]$ ]]; then
    break # Confirmed, exit loop
  else
    log_info "Please try entering the email address again."
  fi
done

# Prompt for OpenAI API key (optional)
echo "OpenAI API Key (optional). This key will be used for:"
echo " - Supabase: AI services to help with writing SQL queries, statements, and policies"
echo " - Crawl4AI: Default LLM configuration for web crawling capabilities"
echo " You can skip this by leaving it empty."
read -p "OpenAI API Key: " OPENAI_API_KEY

# Ask if user wants to import ready-made workflows for n8n
echo "Do you want to import 300 ready-made workflows for n8n? This process may take about 30 minutes to complete."
read -p "Import workflows? (y/n): " import_workflow
if [[ "$import_workflow" =~ ^[Yy]$ ]]; then
  RUN_N8N_IMPORT="true"
else
  RUN_N8N_IMPORT="false"
fi
|
||||
|
||||
log_info "Generating secrets and creating .env file..."

# --- Helper Functions ---

# Usage: gen_random <length> <characters>
# Emits <length> random characters drawn from the tr-style set <characters>,
# sourced from /dev/urandom. No trailing newline.
gen_random() {
  local length="$1"
  local characters="$2"
  head /dev/urandom | tr -dc "$characters" | head -c "$length"
}

# Usage: gen_password <length>
# Random alphanumeric string of the given length.
gen_password() {
  gen_random "$1" 'A-Za-z0-9'
}

# Usage: gen_hex <length> (length = number of hex characters)
gen_hex() {
  local length="$1"
  local bytes=$(( (length + 1) / 2 )) # Calculate bytes needed (ceil of length/2)
  openssl rand -hex "$bytes" | head -c "$length"
}

# Usage: gen_base64 <length> (length = number of base64 characters)
gen_base64() {
  local length="$1"
  # Estimate bytes needed: base64 encodes 3 bytes to 4 chars.
  # So, we need length * 3 / 4 bytes. Use ceil division.
  local bytes=$(( (length * 3 + 3) / 4 ))
  openssl rand -base64 "$bytes" | head -c "$length" # Truncate just in case
}

# --- Main Logic ---

if [ ! -f "$TEMPLATE_FILE" ]; then
  log_error "Template file not found at $TEMPLATE_FILE" >&2
  exit 1
fi

# Associative array to store generated values (also used by the second
# substitution pass further below)
declare -A generated_values

# Store user input values collected by the prompts above
generated_values["FLOWISE_USERNAME"]="$USER_EMAIL"
generated_values["DASHBOARD_USERNAME"]="$USER_EMAIL"
generated_values["LETSENCRYPT_EMAIL"]="$USER_EMAIL"
generated_values["RUN_N8N_IMPORT"]="$RUN_N8N_IMPORT"
generated_values["PROMETHEUS_USERNAME"]="$USER_EMAIL"
generated_values["SEARXNG_USERNAME"]="$USER_EMAIL"
if [[ -n "$OPENAI_API_KEY" ]]; then
  generated_values["OPENAI_API_KEY"]="$OPENAI_API_KEY"
fi

# Create a temporary file for processing
TMP_ENV_FILE=$(mktemp)
# Ensure temp file is cleaned up on exit
trap 'rm -f "$TMP_ENV_FILE"' EXIT

# Track whether our custom variables were found in the template, so any
# missing ones can be appended after the main loop.
declare -A found_vars
found_vars["FLOWISE_USERNAME"]=0
found_vars["DASHBOARD_USERNAME"]=0
found_vars["LETSENCRYPT_EMAIL"]=0
found_vars["RUN_N8N_IMPORT"]=0
found_vars["PROMETHEUS_USERNAME"]=0
found_vars["SEARXNG_USERNAME"]=0
found_vars["OPENAI_API_KEY"]=0

# Read template, substitute domain, generate initial values.
# '|| [[ -n "$line" ]]' also processes a final line without trailing newline.
while IFS= read -r line || [[ -n "$line" ]]; do
  # Substitute domain placeholder
  processed_line=$(echo "$line" | sed "s/$DOMAIN_PLACEHOLDER/$DOMAIN/g")

  # Check if it's a variable assignment line (non-empty, not comment, contains '=')
  if [[ -n "$processed_line" && ! "$processed_line" =~ ^\s*# && "$processed_line" == *"="* ]]; then
    varName=$(echo "$processed_line" | cut -d'=' -f1 | xargs) # Trim whitespace
    currentValue=$(echo "$processed_line" | cut -d'=' -f2-)

    # Check if this is one of our custom (user-supplied) variables
    if [[ "$varName" == "FLOWISE_USERNAME" || "$varName" == "DASHBOARD_USERNAME" ||
          "$varName" == "LETSENCRYPT_EMAIL" || "$varName" == "RUN_N8N_IMPORT" ||
          "$varName" == "PROMETHEUS_USERNAME" ||
          "$varName" == "SEARXNG_USERNAME" || "$varName" == "OPENAI_API_KEY" ]]; then

      found_vars["$varName"]=1

      # If we have a value for this variable, use it
      if [[ -v generated_values["$varName"] ]]; then
        processed_line="${varName}=\"${generated_values[$varName]}\"" # Ensure quoting
      fi
    # Check if variable needs generation
    elif [[ -v VARS_TO_GENERATE["$varName"] ]]; then # Always generate if in VARS_TO_GENERATE
      IFS=':' read -r type length <<< "${VARS_TO_GENERATE[$varName]}"
      newValue=""
      case "$type" in
        password|alphanum) newValue=$(gen_password "$length") ;;
        secret|base64) newValue=$(gen_base64 "$length") ;;
        hex) newValue=$(gen_hex "$length") ;;
        *) log_warning "Unknown generation type '$type' for $varName" ;;
      esac

      if [[ -n "$newValue" ]]; then
        processed_line="${varName}=\"${newValue}\"" # Quote generated values
        generated_values["$varName"]="$newValue"
      else
        # Keep original line structure but ensure value is empty
        processed_line="${varName}="
      fi
    else
      # Store existing value if it might be needed for substitution later
      # Trim potential quotes for storage, add them back during substitution
      trimmed_value=$(echo "$currentValue" | sed -e 's/^"//' -e 's/"$//')
      if [[ -n "$varName" && -n "$trimmed_value" && "$trimmed_value" != "\${"* ]]; then
        generated_values["$varName"]="$trimmed_value"
      fi
    fi
  fi
  echo "$processed_line" >> "$TMP_ENV_FILE"
done < "$TEMPLATE_FILE"

# Generate placeholder Supabase keys (always generate these)
log_info "Generating Supabase JWT keys..."

# Create an HS256 JWT for the given role, signed with the supplied secret.
# Arguments: $1 - role claim (e.g. "anon"), $2 - HMAC signing secret.
# Outputs the token on stdout (no trailing newline). 10-year expiry.
create_jwt() {
  local role=$1
  local jwt_secret=$2
  local now=$(date +%s)
  local exp=$((now + 315360000)) # 10 years from now (seconds)

  # Create header (alg=HS256, typ=JWT)
  local header='{"alg":"HS256","typ":"JWT"}'
  # Create payload with role, issued at time, and expiry
  local payload="{\"role\":\"$role\",\"iss\":\"supabase\",\"iat\":$now,\"exp\":$exp}"

  # Base64url encode header and payload ('/+' -> '_-', padding stripped)
  local b64_header=$(echo -n "$header" | base64 -w 0 | tr '/+' '_-' | tr -d '=')
  local b64_payload=$(echo -n "$payload" | base64 -w 0 | tr '/+' '_-' | tr -d '=')

  # Create signature over "header.payload"
  local signature_input="$b64_header.$b64_payload"
  local signature=$(echo -n "$signature_input" | openssl dgst -sha256 -hmac "$jwt_secret" -binary | base64 -w 0 | tr '/+' '_-' | tr -d '=')

  # Combine to form JWT
  echo -n "$b64_header.$b64_payload.$signature" # Use echo -n to avoid trailing newline
}

# Get JWT secret from previously generated values
JWT_SECRET="${generated_values["JWT_SECRET"]}"

# Generate the actual JWT tokens using the JWT_SECRET
generated_values["ANON_KEY"]=$(create_jwt "anon" "$JWT_SECRET")
generated_values["SERVICE_ROLE_KEY"]=$(create_jwt "service_role" "$JWT_SECRET")

# Add any custom variables that weren't found in the template
for var in "FLOWISE_USERNAME" "DASHBOARD_USERNAME" "LETSENCRYPT_EMAIL" "RUN_N8N_IMPORT" "OPENAI_API_KEY" "PROMETHEUS_USERNAME" "SEARXNG_USERNAME"; do
  if [[ ${found_vars["$var"]} -eq 0 && -v generated_values["$var"] ]]; then
    echo "${var}=\"${generated_values[$var]}\"" >> "$TMP_ENV_FILE" # Ensure quoting
  fi
done
|
||||
|
||||
# Second pass: Substitute generated values referenced like ${VAR}
# We process the substitutions line by line to avoid escaping issues.

# Copy the temporary file to the output
cp "$TMP_ENV_FILE" "$OUTPUT_FILE"

log_info "Applying variable substitutions..."

# Process each generated value
for key in "${!generated_values[@]}"; do
  value="${generated_values[$key]}"

  # Stage the value in a temporary file to sidestep shell-escaping issues
  value_file=$(mktemp)
  echo -n "$value" > "$value_file"

  # Create a new temporary file for the rewritten output
  new_output=$(mktemp)

  # Process each line in the file
  while IFS= read -r line; do
    # Replace ${KEY} format. The pattern in ${line//...} is quoted so it is
    # matched literally rather than as a glob.
    if [[ "$line" == *"\${$key}"* ]]; then
      placeholder="\${$key}"
      replacement=$(cat "$value_file")
      line="${line//"$placeholder"/$replacement}"
    fi

    # Replace $KEY format
    if [[ "$line" == *"\$$key"* ]]; then
      placeholder="\$$key"
      replacement=$(cat "$value_file")
      line="${line//"$placeholder"/$replacement}"
    fi

    # Handle specific cases: force-assign (quoted) the Supabase-related keys
    if [[ "$key" == "ANON_KEY" && "$line" == "ANON_KEY="* ]]; then
      line="ANON_KEY=\"$(cat "$value_file")\""
    fi

    if [[ "$key" == "SERVICE_ROLE_KEY" && "$line" == "SERVICE_ROLE_KEY="* ]]; then
      line="SERVICE_ROLE_KEY=\"$(cat "$value_file")\""
    fi

    if [[ "$key" == "ANON_KEY" && "$line" == "SUPABASE_ANON_KEY="* ]]; then
      line="SUPABASE_ANON_KEY=\"$(cat "$value_file")\""
    fi

    if [[ "$key" == "SERVICE_ROLE_KEY" && "$line" == "SUPABASE_SERVICE_ROLE_KEY="* ]]; then
      line="SUPABASE_SERVICE_ROLE_KEY=\"$(cat "$value_file")\""
    fi

    if [[ "$key" == "JWT_SECRET" && "$line" == "SUPABASE_JWT_SECRET="* ]]; then
      line="SUPABASE_JWT_SECRET=\"$(cat "$value_file")\""
    fi

    if [[ "$key" == "POSTGRES_PASSWORD" && "$line" == "SUPABASE_POSTGRES_PASSWORD="* ]]; then
      line="SUPABASE_POSTGRES_PASSWORD=\"$(cat "$value_file")\""
    fi

    # Write the processed line to the new file
    echo "$line" >> "$new_output"
  done < "$OUTPUT_FILE"

  # Replace the output file with the new version
  mv "$new_output" "$OUTPUT_FILE"

  # Clean up
  rm -f "$value_file"
done

# Hash passwords using caddy with bcrypt
log_info "Hashing passwords with caddy using bcrypt..."
PROMETHEUS_PLAIN_PASS="${generated_values["PROMETHEUS_PASSWORD"]}"
SEARXNG_PLAIN_PASS="${generated_values["SEARXNG_PASSWORD"]}"

if [[ -n "$PROMETHEUS_PLAIN_PASS" ]]; then
  # '|| true' so a caddy failure reaches the warning below instead of
  # aborting the whole script via 'set -e' (which made the original
  # fallback branch unreachable).
  PROMETHEUS_HASH=$(caddy hash-password --algorithm bcrypt --plaintext "$PROMETHEUS_PLAIN_PASS" 2>/dev/null) || true
  if [[ -n "$PROMETHEUS_HASH" ]]; then
    echo "PROMETHEUS_PASSWORD_HASH='$PROMETHEUS_HASH'" >> "$OUTPUT_FILE"
  else
    log_warning "Failed to hash Prometheus password using caddy."
  fi
else
  log_warning "Prometheus password was not generated, skipping hash."
fi

if [[ -n "$SEARXNG_PLAIN_PASS" ]]; then
  SEARXNG_HASH=$(caddy hash-password --algorithm bcrypt --plaintext "$SEARXNG_PLAIN_PASS" 2>/dev/null) || true
  if [[ -n "$SEARXNG_HASH" ]]; then
    echo "SEARXNG_PASSWORD_HASH='$SEARXNG_HASH'" >> "$OUTPUT_FILE"
  else
    log_warning "Failed to hash SearXNG password using caddy."
  fi
else
  log_warning "SearXNG password was not generated, skipping hash."
fi

# Generation is complete at this point. (The original's 'if [ $? -eq 0 ]'
# here only tested the status of the preceding 'if' statement — never the
# .env generation — so it was always true and has been removed.)
log_success ".env file generated successfully in the project root ($OUTPUT_FILE)."

# Uninstall caddy — it was only needed for password hashing
log_info "Uninstalling caddy..."
apt remove -y caddy

exit 0
|
||||
47
scripts/04_run_services.sh
Executable file
47
scripts/04_run_services.sh
Executable file
@@ -0,0 +1,47 @@
|
||||
#!/bin/bash
# 04_run_services.sh — sanity-check required files and the Docker daemon,
# then launch the stack via start_services.py.

set -e

# Source the shared logging helpers (log_info, log_error, ...)
source "$(dirname "$0")/utils.sh"

# Run from the project root so the relative file checks below are reliable
# regardless of the caller's working directory (the original depended on
# being invoked from the project root despite its messages saying
# "project root"). Same path convention as the other scripts.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_ROOT="$( cd "$SCRIPT_DIR/.." &> /dev/null && pwd )"
cd "$PROJECT_ROOT"

# 1. Check for .env file (produced by 03_generate_secrets.sh)
if [ ! -f ".env" ]; then
  log_error ".env file not found in project root." >&2
  exit 1
fi

# 2. Check for docker-compose.yml file
if [ ! -f "docker-compose.yml" ]; then
  log_error "docker-compose.yml file not found in project root." >&2
  exit 1
fi

# 3. Check for Caddyfile (optional but recommended for reverse proxy).
#    Warn and continue — the original exited 1 here, contradicting its own
#    "optional" comment and its use of log_warning.
if [ ! -f "Caddyfile" ]; then
  log_warning "Caddyfile not found in project root. Reverse proxy might not work as expected." >&2
fi

# 4. Check if Docker daemon is running
if ! docker info > /dev/null 2>&1; then
  log_error "Docker daemon is not running. Please start Docker and try again." >&2
  exit 1
fi

# 5. Check if start_services.py exists and is executable
if [ ! -f "start_services.py" ]; then
  log_error "start_services.py file not found in project root." >&2
  exit 1
fi

if [ ! -x "start_services.py" ]; then
  log_warning "start_services.py is not executable. Making it executable..."
  chmod +x "start_services.py"
fi

log_info "Launching services using start_services.py..."
# Execute start_services.py
./start_services.py

exit 0
|
||||
117
scripts/05_final_report.sh
Executable file
117
scripts/05_final_report.sh
Executable file
@@ -0,0 +1,117 @@
|
||||
#!/bin/bash
# 05_final_report.sh — print an installation summary and the access
# credentials for every deployed service, read from the generated .env file.

set -e

# Source the utilities file (log_info, log_error, log_success, ...)
source "$(dirname "$0")/utils.sh"

# Get the directory where the script resides
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_ROOT="$( cd "$SCRIPT_DIR/.." &> /dev/null && pwd )"
ENV_FILE="$PROJECT_ROOT/.env"

# Check if .env file exists (it is produced by 03_generate_secrets.sh)
if [ ! -f "$ENV_FILE" ]; then
  log_error "The .env file ('$ENV_FILE') was not found."
  exit 1
fi

# Load environment variables from .env file
# Use set -a to export all variables read from the file
set -a
source "$ENV_FILE"
set +a

# --- Installation Summary ---
log_info "Installation Summary. The following steps were performed by the scripts:"
log_success "- System updated and basic utilities installed"
log_success "- Firewall (UFW) configured and enabled"
log_success "- Fail2Ban activated for brute-force protection"
log_success "- Automatic security updates enabled"
log_success "- Docker and Docker Compose installed"
log_success "- '.env' generated with secure passwords and secrets"
log_success "- Services launched via Docker Compose"

# --- Service Access Credentials ---
# Each ${VAR:-<fallback>} below degrades gracefully when a variable is
# missing from the .env file instead of printing an empty string.
echo
echo "======================================================================="
echo
log_info "Service Access Credentials. Save this information securely!"
echo
echo "================================= n8n ================================="
echo
echo "Host: ${N8N_HOSTNAME:-<hostname_not_set>}"
echo
echo "================================= Langfuse ============================"
echo
echo "Host: ${LANGFUSE_HOSTNAME:-<hostname_not_set>}"
echo
echo "================================= WebUI ==============================="
echo
echo "Host: ${WEBUI_HOSTNAME:-<hostname_not_set>}"
echo
echo "================================= Flowise ============================="
echo
echo "Host: ${FLOWISE_HOSTNAME:-<hostname_not_set>}"
echo "User: ${FLOWISE_USERNAME:-<not_set_in_env>}"
echo "Password: ${FLOWISE_PASSWORD:-<not_set_in_env>}"
echo
echo "================================= Supabase ============================"
echo
echo "External Host (via Caddy): ${SUPABASE_HOSTNAME:-<hostname_not_set>}"
echo "Internal API Gateway: http://kong:8000"
echo "Studio User: ${DASHBOARD_USERNAME:-<not_set_in_env>}"
echo "Studio Password: ${DASHBOARD_PASSWORD:-<not_set_in_env>}"
echo
echo "================================= Grafana ============================="
echo
echo "Host: ${GRAFANA_HOSTNAME:-<hostname_not_set>}"
echo "User: admin"
echo "Password: ${GRAFANA_ADMIN_PASSWORD:-<not_set_in_env>}"
echo
echo "================================= Prometheus =========================="
echo
echo "Host: ${PROMETHEUS_HOSTNAME:-<hostname_not_set>}"
echo "User: ${PROMETHEUS_USERNAME:-<not_set_in_env>}"
echo "Password: ${PROMETHEUS_PASSWORD:-<not_set_in_env>}"
echo
echo "================================= Searxng ============================="
echo
echo "Host: ${SEARXNG_HOSTNAME:-<hostname_not_set>}"
echo "User: ${SEARXNG_USERNAME:-<not_set_in_env>}"
echo "Password: ${SEARXNG_PASSWORD:-<not_set_in_env>}"
echo
echo "================================= Qdrant =============================="
echo
echo "Internal gRPC Access (e.g., from backend): qdrant:6333"
echo "Internal REST API Access (e.g., from backend): http://qdrant:6334"
echo "(Note: Not exposed externally via Caddy by default)"
echo
echo "================================= Crawl4AI ============================"
echo
echo "Internal Access (e.g., from n8n): http://crawl4ai:11235"
echo "(Note: Not exposed externally via Caddy by default)"
echo
echo "======================================================================="
echo

# --- Update Script Info (Placeholder) ---
log_info "To update the services, run the 'update.sh' script: bash ./scripts/update.sh"

echo
echo "======================================================================"
echo
echo "Next Steps:"
echo "1. Review the credentials above and store them safely."
echo "2. Access the services via their respective URLs (check \`docker compose ps\` if needed)."
echo "3. Configure services as needed (e.g., first-run setup for n8n)."
echo
echo "======================================================================"
echo
log_info "Thank you for using this installer setup!"
echo

exit 0
|
||||
79
scripts/install.sh
Executable file
79
scripts/install.sh
Executable file
@@ -0,0 +1,79 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
# Source the utilities file
|
||||
source "$(dirname "$0")/utils.sh"
|
||||
|
||||
# Get the directory where this script is located (which is the scripts directory)
|
||||
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
|
||||
|
||||
# Check if all required scripts exist and are executable in the current directory
|
||||
required_scripts=(
|
||||
"01_system_preparation.sh"
|
||||
"02_install_docker.sh"
|
||||
"03_generate_secrets.sh"
|
||||
"04_run_services.sh"
|
||||
"05_final_report.sh"
|
||||
)
|
||||
|
||||
missing_scripts=()
|
||||
non_executable_scripts=()
|
||||
|
||||
for script in "${required_scripts[@]}"; do
|
||||
# Check directly in the current directory (SCRIPT_DIR)
|
||||
script_path="$SCRIPT_DIR/$script"
|
||||
if [ ! -f "$script_path" ]; then
|
||||
missing_scripts+=("$script")
|
||||
elif [ ! -x "$script_path" ]; then
|
||||
non_executable_scripts+=("$script")
|
||||
fi
|
||||
done
|
||||
|
||||
if [ ${#missing_scripts[@]} -gt 0 ]; then
|
||||
# Update error message to reflect current directory check
|
||||
log_error "The following required scripts are missing in $SCRIPT_DIR:"
|
||||
printf " - %s\n" "${missing_scripts[@]}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Attempt to make scripts executable if they are not
|
||||
if [ ${#non_executable_scripts[@]} -gt 0 ]; then
|
||||
log_warning "The following scripts were not executable and will be made executable:"
|
||||
printf " - %s\n" "${non_executable_scripts[@]}"
|
||||
# Make all .sh files in the current directory executable
|
||||
chmod +x "$SCRIPT_DIR"/*.sh
|
||||
# Re-check after chmod
|
||||
for script in "${non_executable_scripts[@]}"; do
|
||||
script_path="$SCRIPT_DIR/$script"
|
||||
if [ ! -x "$script_path" ]; then
|
||||
# Update error message
|
||||
log_error "Failed to make '$script' in $SCRIPT_DIR executable. Please check permissions."
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
log_success "Scripts successfully made executable."
|
||||
fi
|
||||
|
||||
# Run installation steps sequentially using their full paths
|
||||
log_info "Step 1: System Preparation..."
|
||||
bash "$SCRIPT_DIR/01_system_preparation.sh" || { log_error "System Preparation failed"; exit 1; }
|
||||
log_success "System preparation complete!"
|
||||
|
||||
log_info "Step 2: Installing Docker..."
|
||||
bash "$SCRIPT_DIR/02_install_docker.sh" || { log_error "Docker Installation failed"; exit 1; }
|
||||
log_success "Docker installation complete!"
|
||||
|
||||
log_info "Step 3: Generating Secrets and Configuration..."
|
||||
bash "$SCRIPT_DIR/03_generate_secrets.sh" || { log_error "Secret/Config Generation failed"; exit 1; }
|
||||
log_success "Secret/Config Generation complete!"
|
||||
|
||||
log_info "Step 4: Running Services..."
|
||||
bash "$SCRIPT_DIR/04_run_services.sh" || { log_error "Running Services failed"; exit 1; }
|
||||
log_success "Running Services complete!"
|
||||
|
||||
log_info "Step 5: Generating Final Report..."
|
||||
bash "$SCRIPT_DIR/05_final_report.sh" || { log_error "Final Report Generation failed"; exit 1; }
|
||||
log_success "Final Report Generation complete!"
|
||||
|
||||
exit 0
|
||||
76
scripts/update.sh
Executable file
76
scripts/update.sh
Executable file
@@ -0,0 +1,76 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
# Source the utilities file
|
||||
source "$(dirname "$0")/utils.sh"
|
||||
|
||||
# Set the compose command explicitly to use docker compose subcommand
|
||||
COMPOSE_CMD="docker compose"
|
||||
log_info "Using $COMPOSE_CMD as compose command"
|
||||
|
||||
# Navigate to the directory where this script is located
|
||||
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
|
||||
# Project root directory (one level up from scripts)
|
||||
PROJECT_ROOT="$( cd "$SCRIPT_DIR/.." &> /dev/null && pwd )"
|
||||
# Path to the 04_run_services.sh script
|
||||
RUN_SERVICES_SCRIPT="$SCRIPT_DIR/04_run_services.sh"
|
||||
# Compose files
|
||||
MAIN_COMPOSE_FILE="$PROJECT_ROOT/docker-compose.yml"
|
||||
SUPABASE_COMPOSE_FILE="$PROJECT_ROOT/supabase/docker/docker-compose.yml"
|
||||
|
||||
|
||||
# Check if run services script exists
|
||||
if [ ! -f "$RUN_SERVICES_SCRIPT" ]; then
|
||||
log_error "$RUN_SERVICES_SCRIPT not found."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log_info "Starting update process..."
|
||||
|
||||
# Pull the latest repository changes
|
||||
log_info "Pulling latest repository changes..."
|
||||
# Check if git is installed
|
||||
if ! command -v git &> /dev/null; then
|
||||
log_warning "'git' command not found. Skipping repository update."
|
||||
else
|
||||
# Since script is run from root, just do git pull in current directory
|
||||
git pull || { log_warning "Failed to pull latest repository changes. Continuing with update..."; }
|
||||
fi
|
||||
|
||||
cd "$PROJECT_ROOT"
|
||||
|
||||
# Stop all services
|
||||
log_info "Stopping all services..."
|
||||
$COMPOSE_CMD down || {
|
||||
log_warning "Failed to stop containers with 'docker compose down'. Continuing with update anyway...";
|
||||
}
|
||||
|
||||
# Pull latest versions of all containers
|
||||
log_info "Pulling latest versions of all containers..."
|
||||
$COMPOSE_CMD pull || { log_error "Failed to pull Docker images. Check network connection and Docker Hub status."; exit 1; }
|
||||
|
||||
# Ask user about n8n import and modify .env file
|
||||
ENV_FILE="$PROJECT_ROOT/.env"
|
||||
|
||||
if [ -f "$ENV_FILE" ]; then
|
||||
read -p "Import n8n workflow? (y/n): " import_choice
|
||||
case "$import_choice" in
|
||||
[yY] | [yY][eE][sS] )
|
||||
sed -i 's/^RUN_N8N_IMPORT=.*/RUN_N8N_IMPORT=true/' "$ENV_FILE" || log_error "Failed to set RUN_N8N_IMPORT in $ENV_FILE. Check permissions."
|
||||
;;
|
||||
* )
|
||||
sed -i 's/^RUN_N8N_IMPORT=.*/RUN_N8N_IMPORT=false/' "$ENV_FILE" || log_error "Failed to set RUN_N8N_IMPORT in $ENV_FILE. Check permissions."
|
||||
;;
|
||||
esac
|
||||
else
|
||||
log_warning "$ENV_FILE not found. Cannot configure RUN_N8N_IMPORT."
|
||||
fi
|
||||
|
||||
# Start services using the 04_run_services.sh script
|
||||
log_info "Running Services..."
|
||||
bash "$RUN_SERVICES_SCRIPT" || { log_error "Failed to start services. Check logs for details."; exit 1; }
|
||||
|
||||
log_success "Update completed successfully!"
|
||||
|
||||
exit 0
|
||||
51
scripts/utils.sh
Normal file
51
scripts/utils.sh
Normal file
@@ -0,0 +1,51 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Logging function that frames a message with a border and adds a timestamp
|
||||
log_message() {
|
||||
local message="$1"
|
||||
local combined_message="${message}"
|
||||
local length=${#combined_message}
|
||||
local border_length=$((length + 4))
|
||||
|
||||
# Create the top border
|
||||
local border=""
|
||||
for ((i=0; i<border_length; i++)); do
|
||||
border="${border}─"
|
||||
done
|
||||
|
||||
# Display the framed message with timestamp
|
||||
echo "╭${border}╮"
|
||||
echo "│ ${combined_message} │"
|
||||
echo "╰${border}╯"
|
||||
}
|
||||
|
||||
# Example usage:
|
||||
# log_message "This is a test message"
|
||||
|
||||
log_success() {
|
||||
local message="$1"
|
||||
local timestamp=$(date +%H:%M:%S)
|
||||
local combined_message="[SUCCESS] ${timestamp}: ${message}"
|
||||
log_message "${combined_message}"
|
||||
}
|
||||
|
||||
log_error() {
|
||||
local message="$1"
|
||||
local timestamp=$(date +%H:%M:%S)
|
||||
local combined_message="[ERROR] ${timestamp}: ${message}"
|
||||
log_message "${combined_message}"
|
||||
}
|
||||
|
||||
log_warning() {
|
||||
local message="$1"
|
||||
local timestamp=$(date +%H:%M:%S)
|
||||
local combined_message="[WARNING] ${timestamp}: ${message}"
|
||||
log_message "${combined_message}"
|
||||
}
|
||||
|
||||
log_info() {
|
||||
local message="$1"
|
||||
local timestamp=$(date +%H:%M:%S)
|
||||
local combined_message="[INFO] ${timestamp}: ${message}"
|
||||
log_message "${combined_message}"
|
||||
}
|
||||
Reference in New Issue
Block a user