From dbb822f6b078a7d3fdd21be2aa9b3209e223eaf8 Mon Sep 17 00:00:00 2001
From: Pavel
Date: Thu, 15 May 2025 13:50:08 +0400
Subject: [PATCH] fix for OPENAI_BASE_URL + ollama can't connect to container

- fix for OpenAI trying to use base_url=""
- fix for ollama container error: `Error code: 404 - {'error': {'message': 'model "MODEL_NAME" not found, try pulling it first', 'type': 'api_error', 'param': None, 'code': None}}`
---
 application/llm/openai.py | 5 +++--
 setup.sh                  | 2 +-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/application/llm/openai.py b/application/llm/openai.py
index c918768d..2a6d8f98 100644
--- a/application/llm/openai.py
+++ b/application/llm/openai.py
@@ -13,10 +13,11 @@ class OpenAILLM(BaseLLM):
         from openai import OpenAI
 
         super().__init__(*args, **kwargs)
-        if settings.OPENAI_BASE_URL:
+        if isinstance(settings.OPENAI_BASE_URL, str) and settings.OPENAI_BASE_URL.strip():
             self.client = OpenAI(api_key=api_key, base_url=settings.OPENAI_BASE_URL)
         else:
-            self.client = OpenAI(api_key=api_key)
+            DEFAULT_OPENAI_API_BASE = f"https://api.openai.com/v1"
+            self.client = OpenAI(api_key=api_key, base_url=DEFAULT_OPENAI_API_BASE)
         self.api_key = api_key
         self.user_api_key = user_api_key
         self.storage = StorageCreator.get_storage()
diff --git a/setup.sh b/setup.sh
index 479def9b..5cf013fc 100755
--- a/setup.sh
+++ b/setup.sh
@@ -240,7 +240,7 @@ serve_local_ollama() {
         echo "LLM_NAME=openai" >> .env
         echo "MODEL_NAME=$model_name" >> .env
         echo "VITE_API_STREAMING=true" >> .env
-        echo "OPENAI_BASE_URL=http://host.docker.internal:11434/v1" >> .env
+        echo "OPENAI_BASE_URL=http://ollama:11434/v1" >> .env
         echo "EMBEDDINGS_NAME=huggingface_sentence-transformers/all-mpnet-base-v2" >> .env
         echo -e "${GREEN}.env file configured for Ollama ($(echo "$docker_compose_file_suffix" | tr '[:lower:]' '[:upper:]')${NC}${GREEN}).${NC}"
         echo -e "${YELLOW}Note: MODEL_NAME is set to '${BOLD}$model_name${NC}${YELLOW}'. You can change it later in the .env file.${NC}"