diff --git a/application/llm/openai.py b/application/llm/openai.py
index f8a38ed0..5e11a072 100644
--- a/application/llm/openai.py
+++ b/application/llm/openai.py
@@ -137,17 +137,17 @@ class OpenAILLM(BaseLLM):
 
 class AzureOpenAILLM(OpenAILLM):
 
     def __init__(
-        self, openai_api_key, openai_api_base, openai_api_version, deployment_name
+        self, api_key, user_api_key, *args, **kwargs
     ):
-        super().__init__(openai_api_key)
+
+        super().__init__(api_key)
         self.api_base = (settings.OPENAI_API_BASE,)
         self.api_version = (settings.OPENAI_API_VERSION,)
         self.deployment_name = (settings.AZURE_DEPLOYMENT_NAME,)
         from openai import AzureOpenAI
         self.client = AzureOpenAI(
-            api_key=openai_api_key,
+            api_key=api_key,
             api_version=settings.OPENAI_API_VERSION,
-            api_base=settings.OPENAI_API_BASE,
-            deployment_name=settings.AZURE_DEPLOYMENT_NAME,
+            azure_endpoint=settings.OPENAI_API_BASE
         )
diff --git a/setup.sh b/setup.sh
index 31ed3e42..479def9b 100755
--- a/setup.sh
+++ b/setup.sh
@@ -450,7 +450,7 @@ connect_cloud_api_provider() {
 
     check_and_start_docker
 
     echo -e "\n${NC}Starting Docker Compose...${NC}"
-    docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" build && docker compose -f "${COMPOSE_FILE}" up -d
+    docker compose --env-file "${ENV_FILE}" -f "${COMPOSE_FILE}" up -d --build
     docker_compose_status=$?
     echo "Docker Compose Exit Status: $docker_compose_status" # Debug output
@@ -476,16 +476,16 @@ while true; do # Main menu loop
 
     case $main_choice in
         1) # Use DocsGPT Public API Endpoint
            use_docs_public_api_endpoint
-            ;;
+            break ;;
         2) # Serve Local (with Ollama)
            serve_local_ollama
-            ;;
+            break ;;
         3) # Connect Local Inference Engine
            connect_local_inference_engine
-            ;;
+            break ;;
         4) # Connect Cloud API Provider
            connect_cloud_api_provider
-            ;;
+            break ;;
         *) echo -e "\n${RED}Invalid choice. Please choose 1-4.${NC}" ; sleep 1 ;;
     esac