diff --git a/deployment/docker-compose-hub.yaml b/deployment/docker-compose-hub.yaml
new file mode 100644
index 00000000..ddfaf26c
--- /dev/null
+++ b/deployment/docker-compose-hub.yaml
@@ -0,0 +1,74 @@
+name: docsgpt-oss
+services:
+
+ frontend:
+ image: arc53/docsgpt-fe:develop
+ environment:
+ - VITE_API_HOST=http://localhost:7091
+ - VITE_API_STREAMING=$VITE_API_STREAMING
+ ports:
+ - "5173:5173"
+ depends_on:
+ - backend
+
+
+ backend:
+ user: root
+ image: arc53/docsgpt:develop
+ environment:
+ - API_KEY=$API_KEY
+ - EMBEDDINGS_KEY=$API_KEY
+ - LLM_PROVIDER=$LLM_PROVIDER
+ - LLM_NAME=$LLM_NAME
+ - CELERY_BROKER_URL=redis://redis:6379/0
+ - CELERY_RESULT_BACKEND=redis://redis:6379/1
+ - MONGO_URI=mongodb://mongo:27017/docsgpt
+ - CACHE_REDIS_URL=redis://redis:6379/2
+ - OPENAI_BASE_URL=$OPENAI_BASE_URL
+ ports:
+ - "7091:7091"
+ volumes:
+ - ../application/indexes:/app/indexes
+ - ../application/inputs:/app/inputs
+ - ../application/vectors:/app/vectors
+ depends_on:
+ - redis
+ - mongo
+
+
+ worker:
+ user: root
+ image: arc53/docsgpt:develop
+ command: celery -A application.app.celery worker -l INFO -B
+ environment:
+ - API_KEY=$API_KEY
+ - EMBEDDINGS_KEY=$API_KEY
+ - LLM_PROVIDER=$LLM_PROVIDER
+ - LLM_NAME=$LLM_NAME
+ - CELERY_BROKER_URL=redis://redis:6379/0
+ - CELERY_RESULT_BACKEND=redis://redis:6379/1
+ - MONGO_URI=mongodb://mongo:27017/docsgpt
+ - API_URL=http://backend:7091
+ - CACHE_REDIS_URL=redis://redis:6379/2
+ volumes:
+ - ../application/indexes:/app/indexes
+ - ../application/inputs:/app/inputs
+ - ../application/vectors:/app/vectors
+ depends_on:
+ - redis
+ - mongo
+
+ redis:
+ image: redis:6-alpine
+ ports:
+      - "6379:6379"
+
+ mongo:
+ image: mongo:6
+ ports:
+      - "27017:27017"
+ volumes:
+ - mongodb_data_container:/data/db
+
+volumes:
+ mongodb_data_container:
diff --git a/frontend/index.html b/frontend/index.html
index 30faadc9..a9aa68c3 100644
--- a/frontend/index.html
+++ b/frontend/index.html
@@ -5,6 +5,8 @@
+
+
DocsGPT
diff --git a/setup.ps1 b/setup.ps1
index 8572484f..d675536e 100644
--- a/setup.ps1
+++ b/setup.ps1
@@ -9,7 +9,9 @@ $ErrorActionPreference = "Stop"
# Get current script directory
$SCRIPT_DIR = Split-Path -Parent $MyInvocation.MyCommand.Definition
-$COMPOSE_FILE = Join-Path -Path $SCRIPT_DIR -ChildPath "deployment\docker-compose.yaml"
+$COMPOSE_FILE_HUB = Join-Path -Path $SCRIPT_DIR -ChildPath "deployment\docker-compose-hub.yaml"
+$COMPOSE_FILE_LOCAL = Join-Path -Path $SCRIPT_DIR -ChildPath "deployment\docker-compose.yaml"
+$COMPOSE_FILE = $COMPOSE_FILE_HUB
$ENV_FILE = Join-Path -Path $SCRIPT_DIR -ChildPath ".env"
# Function to write colored text
@@ -223,12 +225,15 @@ function Prompt-MainMenu {
Write-Host ""
Write-ColorText "Welcome to DocsGPT Setup!" -ForegroundColor "White" -Bold
Write-ColorText "How would you like to proceed?" -ForegroundColor "White"
- Write-ColorText "1) Use DocsGPT Public API Endpoint (simple and free)" -ForegroundColor "Yellow"
+ Write-ColorText "1) Use DocsGPT Public API Endpoint (simple and free, uses pre-built Docker images from Docker Hub for fastest setup)" -ForegroundColor "Yellow"
Write-ColorText "2) Serve Local (with Ollama)" -ForegroundColor "Yellow"
Write-ColorText "3) Connect Local Inference Engine" -ForegroundColor "Yellow"
Write-ColorText "4) Connect Cloud API Provider" -ForegroundColor "Yellow"
+ Write-ColorText "5) Advanced: Build images locally (for developers)" -ForegroundColor "Yellow"
Write-Host ""
- $script:main_choice = Read-Host "Choose option (1-4)"
+ Write-ColorText "By default, DocsGPT uses pre-built images from Docker Hub for a fast, reliable, and consistent experience. This avoids local build errors and speeds up onboarding. Advanced users can choose to build images locally if needed." -ForegroundColor "White"
+ Write-Host ""
+ $script:main_choice = Read-Host "Choose option (1-5)"
}
# Function to prompt for Local Inference Engine options
@@ -304,9 +309,9 @@ function Use-DocsPublicAPIEndpoint {
# Run Docker compose commands
try {
- & docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" build
+        & docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" pull --ignore-buildable
if ($LASTEXITCODE -ne 0) {
- throw "Docker compose build failed with exit code $LASTEXITCODE"
+ throw "Docker compose pull failed with exit code $LASTEXITCODE"
}
& docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d
@@ -415,10 +420,10 @@ function Serve-LocalOllama {
Write-Host ""
Write-ColorText "Starting Docker Compose with Ollama ($docker_compose_file_suffix)..." -ForegroundColor "White"
- # Build the containers
- & docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" -f "$optional_compose" build
+ # Pull the containers
+ & docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" -f "$optional_compose" pull
if ($LASTEXITCODE -ne 0) {
- throw "Docker compose build failed with exit code $LASTEXITCODE"
+ throw "Docker compose pull failed with exit code $LASTEXITCODE"
}
# Start the containers
@@ -575,10 +580,10 @@ function Connect-LocalInferenceEngine {
Write-Host ""
Write-ColorText "Starting Docker Compose..." -ForegroundColor "White"
- # Build the containers
- & docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" build
+ # Pull the containers
+ & docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" pull
if ($LASTEXITCODE -ne 0) {
- throw "Docker compose build failed with exit code $LASTEXITCODE"
+ throw "Docker compose pull failed with exit code $LASTEXITCODE"
}
# Start the containers
@@ -706,10 +711,12 @@ function Connect-CloudAPIProvider {
Write-ColorText "Starting Docker Compose..." -ForegroundColor "White"
# Run Docker compose commands
- & docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d --build
+ & docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" pull
if ($LASTEXITCODE -ne 0) {
- throw "Docker compose build or up failed with exit code $LASTEXITCODE"
+ throw "Docker compose pull failed with exit code $LASTEXITCODE"
}
+
+ & docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d
Write-Host ""
Write-ColorText "DocsGPT is now configured to use $provider_name on http://localhost:5173" -ForegroundColor "Green"
@@ -735,13 +742,13 @@ while ($true) {
switch ($main_choice) {
"1" {
+ $COMPOSE_FILE = $COMPOSE_FILE_HUB
Use-DocsPublicAPIEndpoint
$exitLoop = $true # Set flag to true on completion
break
}
"2" {
Serve-LocalOllama
- # Only exit the loop if user didn't press "b" to go back
if ($ollama_choice -ne "b" -and $ollama_choice -ne "B") {
$exitLoop = $true
}
@@ -749,7 +756,6 @@ while ($true) {
}
"3" {
Connect-LocalInferenceEngine
- # Only exit the loop if user didn't press "b" to go back
if ($engine_choice -ne "b" -and $engine_choice -ne "B") {
$exitLoop = $true
}
@@ -757,20 +763,25 @@ while ($true) {
}
"4" {
Connect-CloudAPIProvider
- # Only exit the loop if user didn't press "b" to go back
if ($provider_choice -ne "b" -and $provider_choice -ne "B") {
$exitLoop = $true
}
break
}
+ "5" {
+ Write-Host ""
+ Write-ColorText "You have selected to build images locally. This is recommended for developers or if you want to test local changes." -ForegroundColor "Yellow"
+ $COMPOSE_FILE = $COMPOSE_FILE_LOCAL
+ Use-DocsPublicAPIEndpoint
+ $exitLoop = $true
+ break
+ }
default {
Write-Host ""
- Write-ColorText "Invalid choice. Please choose 1-4." -ForegroundColor "Red"
+ Write-ColorText "Invalid choice. Please choose 1-5." -ForegroundColor "Red"
Start-Sleep -Seconds 1
}
}
-
- # Only break out of the loop if a function completed successfully
if ($exitLoop) {
break
}
diff --git a/setup.sh b/setup.sh
index b072d546..23aeb717 100755
--- a/setup.sh
+++ b/setup.sh
@@ -9,7 +9,8 @@ NC='\033[0m'
BOLD='\033[1m'
# Base Compose file (relative to script location)
-COMPOSE_FILE="$(dirname "$(readlink -f "$0")")/deployment/docker-compose.yaml"
+COMPOSE_FILE="$(dirname "$(readlink -f "$0")")/deployment/docker-compose-hub.yaml"
+COMPOSE_FILE_LOCAL="$(dirname "$(readlink -f "$0")")/deployment/docker-compose.yaml"
ENV_FILE="$(dirname "$(readlink -f "$0")")/.env"
# Animation function
@@ -111,12 +112,15 @@ check_and_start_docker() {
prompt_main_menu() {
echo -e "\n${DEFAULT_FG}${BOLD}Welcome to DocsGPT Setup!${NC}"
echo -e "${DEFAULT_FG}How would you like to proceed?${NC}"
- echo -e "${YELLOW}1) Use DocsGPT Public API Endpoint (simple and free)${NC}"
+ echo -e "${YELLOW}1) Use DocsGPT Public API Endpoint (simple and free, uses pre-built Docker images from Docker Hub for fastest setup)${NC}"
echo -e "${YELLOW}2) Serve Local (with Ollama)${NC}"
echo -e "${YELLOW}3) Connect Local Inference Engine${NC}"
echo -e "${YELLOW}4) Connect Cloud API Provider${NC}"
+ echo -e "${YELLOW}5) Advanced: Build images locally (for developers)${NC}"
echo
- read -p "$(echo -e "${DEFAULT_FG}Choose option (1-4): ${NC}")" main_choice
+ echo -e "${DEFAULT_FG}By default, DocsGPT uses pre-built images from Docker Hub for a fast, reliable, and consistent experience. This avoids local build errors and speeds up onboarding. Advanced users can choose to build images locally if needed.${NC}"
+ echo
+ read -p "$(echo -e "${DEFAULT_FG}Choose option (1-5): ${NC}")" main_choice
}
# Function to prompt for Local Inference Engine options
@@ -176,7 +180,7 @@ use_docs_public_api_endpoint() {
check_and_start_docker
echo -e "\n${NC}Starting Docker Compose...${NC}"
- docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" build && docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" up -d
+    docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" pull --ignore-buildable && docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" up -d
docker_compose_status=$? # Capture exit status of docker compose
echo "Docker Compose Exit Status: $docker_compose_status"
@@ -252,7 +256,7 @@ serve_local_ollama() {
)
echo -e "\n${NC}Starting Docker Compose with Ollama (${docker_compose_file_suffix})...${NC}"
- docker compose --env-file "${ENV_FILE}" "${compose_files[@]}" build
+ docker compose --env-file "${ENV_FILE}" "${compose_files[@]}" pull
docker compose --env-file "${ENV_FILE}" "${compose_files[@]}" up -d
docker_compose_status=$?
@@ -360,7 +364,7 @@ connect_local_inference_engine() {
check_and_start_docker
echo -e "\n${NC}Starting Docker Compose...${NC}"
- docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" build && docker compose -f "${COMPOSE_FILE}" up -d
+    docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" pull && docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" up -d
docker_compose_status=$?
echo "Docker Compose Exit Status: $docker_compose_status" # Debug output
@@ -449,7 +453,7 @@ connect_cloud_api_provider() {
check_and_start_docker
echo -e "\n${NC}Starting Docker Compose...${NC}"
- docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" up -d --build
+ docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" pull && docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" up -d
docker_compose_status=$?
echo "Docker Compose Exit Status: $docker_compose_status" # Debug output
@@ -468,12 +472,14 @@ connect_cloud_api_provider() {
# Main script execution
animate_dino
+
while true; do # Main menu loop
clear # Clear screen before showing main menu again
prompt_main_menu
case $main_choice in
- 1) # Use DocsGPT Public API Endpoint
+ 1) # Use DocsGPT Public API Endpoint (Docker Hub images)
+ COMPOSE_FILE="$(dirname "$(readlink -f "$0")")/deployment/docker-compose-hub.yaml"
use_docs_public_api_endpoint
break ;;
2) # Serve Local (with Ollama)
@@ -485,8 +491,13 @@ while true; do # Main menu loop
4) # Connect Cloud API Provider
connect_cloud_api_provider
break ;;
+ 5) # Advanced: Build images locally
+ echo -e "\n${YELLOW}You have selected to build images locally. This is recommended for developers or if you want to test local changes.${NC}"
+ COMPOSE_FILE="$COMPOSE_FILE_LOCAL"
+ use_docs_public_api_endpoint
+ break ;;
*)
- echo -e "\n${RED}Invalid choice. Please choose 1-4.${NC}" ; sleep 1 ;;
+ echo -e "\n${RED}Invalid choice. Please choose 1-5.${NC}" ; sleep 1 ;;
esac
done