This commit is contained in:
Alex
2023-10-01 17:20:47 +01:00
parent b47ecab1a9
commit 9bbf4044e0
7 changed files with 137 additions and 56 deletions

106
setup.sh
View File

@@ -1,45 +1,77 @@
#!/bin/bash
# DocsGPT setup script: builds the container images and launches the app
# either with a locally downloaded model or against the OpenAI API,
# depending on the choice the user makes below.
# Always operate relative to the script's own directory.
cd "$(dirname "$0")" || exit

# Create the host directories that get mounted into the containers.
# mkdir -p is a no-op when the directory already exists, so no
# existence test is needed.
mkdir -p ./application/indexes ./application/inputs ./application/vectors
# Ask the user how they want to run the app.
# Stores the raw answer in the global variable "choice", which is
# consumed by the dispatch logic at the bottom of the script.
prompt_user() {
  printf '%s\n' \
    "Do you want to:" \
    "1. Download the language model locally (12GB)" \
    "2. Use the OpenAI API"
  read -p "Enter your choice (1/2): " choice
}
# Build the frontend (web UI) and backend (API) container images from
# their respective build contexts before either launch path runs.
# NOTE(review): the docker-compose invocations further down also build
# images — confirm these explicit tags are actually consumed there.
docker build -t frontend_image ./frontend
docker build -t backend_image ./application
# Path 1: run everything with a locally downloaded llama.cpp model.
# Writes .env, starts the redis and mongo containers, downloads the
# GGUF model if it is not already present, brings up the local compose
# stack, then runs the Flask backend from a virtualenv in the
# foreground (Ctrl+C stops it).
download_locally() {
  # Configure the app for the local model via .env.
  echo "LLM_NAME=llama.cpp" > .env
  echo "VITE_API_STREAMING=true" >> .env
  echo "EMBEDDINGS_NAME=huggingface_sentence-transformers/all-mpnet-base-v2" >> .env
  echo "The .env file has been created with LLM_NAME set to llama.cpp."

  # Services required by the backend and the celery worker.
  docker run -d --name redis -p 6379:6379 redis:6-alpine
  docker run -d --name mongo -p 27017:27017 -v mongodb_data_container:/data/db mongo:6

  # Download the model into ./models only when missing — it is a 12GB
  # file, so never re-download. (The unconditional duplicate
  # "Downloading the model..." message that preceded this check has
  # been removed; the message is printed inside the branch instead.)
  mkdir -p models
  if [ ! -f models/docsgpt-7b-f16.gguf ]; then
    echo "Downloading the model..."
    wget -P models https://docsgpt.s3.eu-west-1.amazonaws.com/models/docsgpt-7b-f16.gguf
    echo "Model downloaded to models directory."
  else
    echo "Model already exists."
  fi

  # Bring up the rest of the local-mode stack.
  docker-compose -f docker-compose-local.yaml build && docker-compose -f docker-compose-local.yaml up -d

  # Run the Flask backend from a local virtualenv, in the foreground.
  python -m venv venv
  source venv/bin/activate
  pip install -r application/requirements.txt
  pip install llama-cpp-python
  export FLASK_APP=application/app.py
  export FLASK_DEBUG=true
  echo "The application is now running on http://localhost:5173"
  echo "You can stop the application by running the following command:"
  echo "Ctrl + C and then"
  echo "docker-compose down"
  flask run --host=0.0.0.0 --port=7091
}
# Start the API backend container, linked to redis and mongo, with the
# host data directories mounted in. The API key comes from the caller's
# OPENAI_API_KEY environment variable.
# Expansions are quoted so a working directory containing spaces does
# not split the -v mount arguments.
docker run -d --name backend -p 7091:7091 \
  --link redis:redis --link mongo:mongo \
  -v "$(pwd)/application/indexes:/app/indexes" \
  -v "$(pwd)/application/inputs:/app/inputs" \
  -v "$(pwd)/application/vectors:/app/vectors" \
  -e API_KEY="$OPENAI_API_KEY" \
  -e EMBEDDINGS_KEY="$OPENAI_API_KEY" \
  -e CELERY_BROKER_URL=redis://redis:6379/0 \
  -e CELERY_RESULT_BACKEND=redis://redis:6379/1 \
  -e MONGO_URI=mongodb://mongo:27017/docsgpt \
  backend_image
# Path 2: use the OpenAI API with a key supplied interactively.
# Writes .env (consumed by docker-compose), starts the celery worker
# and frontend containers, and brings up the compose stack.
use_openai() {
  # -r keeps any backslashes in the entered key literal.
  read -r -p "Please enter your OpenAI API key: " api_key
  echo "API_KEY=$api_key" > .env
  echo "LLM_NAME=openai" >> .env
  echo "VITE_API_STREAMING=true" >> .env
  echo "The .env file has been created with API_KEY set to your provided key."

  # Celery worker container. Pass the key the user just entered
  # (previously this read $OPENAI_API_KEY, which may be unset here,
  # inconsistent with the .env written above).
  docker run -d --name worker \
    --link redis:redis --link mongo:mongo \
    -e API_KEY="$api_key" \
    -e EMBEDDINGS_KEY="$api_key" \
    -e CELERY_BROKER_URL=redis://redis:6379/0 \
    -e CELERY_RESULT_BACKEND=redis://redis:6379/1 \
    -e MONGO_URI=mongodb://mongo:27017/docsgpt \
    -e API_URL=http://backend:7091 \
    backend_image \
    celery -A app.celery worker -l INFO

  docker-compose build && docker-compose up -d

  # Frontend container for the web UI.
  docker run -d --name frontend -p 5173:5173 \
    -e VITE_API_HOST=http://localhost:7091 \
    frontend_image

  # Typo fixed: was "The application is will runn on ...".
  echo "The application will run on http://localhost:5173"
  echo "You can stop the application by running the following command:"
  echo "docker-compose down"
}
# Ask the user which mode to use, then dispatch on the answer.
prompt_user
# Literal comparison, equivalent to the former case statement:
# "1" and "2" match exactly; anything else falls through to the error.
if [ "$choice" = "1" ]; then
  download_locally
elif [ "$choice" = "2" ]; then
  use_openai
else
  echo "Invalid choice. Please choose either 1 or 2."
fi