From 9526ed02584b9946e4c3b9e33dc29e4268a68232 Mon Sep 17 00:00:00 2001
From: Alex
Date: Mon, 2 Sep 2024 19:46:25 +0100
Subject: [PATCH 1/4] feat: added easy way to proxy
---
application/core/settings.py | 1 +
application/llm/openai.py | 28 +++++++++++-----------------
2 files changed, 12 insertions(+), 17 deletions(-)
diff --git a/application/core/settings.py b/application/core/settings.py
index 6ae5475c..bbd62fe4 100644
--- a/application/core/settings.py
+++ b/application/core/settings.py
@@ -29,6 +29,7 @@ class Settings(BaseSettings):
OPENAI_API_VERSION: Optional[str] = None # azure openai api version
AZURE_DEPLOYMENT_NAME: Optional[str] = None # azure deployment name for answering
AZURE_EMBEDDINGS_DEPLOYMENT_NAME: Optional[str] = None # azure deployment name for embeddings
+ OPENAI_BASE_URL: Optional[str] = None # openai base url for OpenAI-compatible models
# elasticsearch
ELASTIC_CLOUD_ID: Optional[str] = None # cloud id for elasticsearch
diff --git a/application/llm/openai.py b/application/llm/openai.py
index b1574dd1..73a0c3d1 100644
--- a/application/llm/openai.py
+++ b/application/llm/openai.py
@@ -1,26 +1,25 @@
from application.llm.base import BaseLLM
from application.core.settings import settings
+import logging
+
class OpenAILLM(BaseLLM):
def __init__(self, api_key=None, user_api_key=None, *args, **kwargs):
- global openai
from openai import OpenAI
super().__init__(*args, **kwargs)
- self.client = OpenAI(
- api_key=api_key,
- )
+ if settings.OPENAI_BASE_URL:
+ self.client = OpenAI(
+ api_key=api_key,
+ base_url=settings.OPENAI_BASE_URL
+ )
+ else:
+ self.client = OpenAI(api_key=api_key)
self.api_key = api_key
self.user_api_key = user_api_key
- def _get_openai(self):
- # Import openai when needed
- import openai
-
- return openai
-
def _raw_gen(
self,
baseself,
@@ -29,7 +28,7 @@ class OpenAILLM(BaseLLM):
stream=False,
engine=settings.AZURE_DEPLOYMENT_NAME,
**kwargs
- ):
+ ):
response = self.client.chat.completions.create(
model=model, messages=messages, stream=stream, **kwargs
)
@@ -44,7 +43,7 @@ class OpenAILLM(BaseLLM):
stream=True,
engine=settings.AZURE_DEPLOYMENT_NAME,
**kwargs
- ):
+ ):
response = self.client.chat.completions.create(
model=model, messages=messages, stream=stream, **kwargs
)
@@ -73,8 +72,3 @@ class AzureOpenAILLM(OpenAILLM):
api_base=settings.OPENAI_API_BASE,
deployment_name=settings.AZURE_DEPLOYMENT_NAME,
)
-
- def _get_openai(self):
- openai = super()._get_openai()
-
- return openai
From 5246d85f118b7a704853bcf9907755bd7ed2f53c Mon Sep 17 00:00:00 2001
From: Alex
Date: Mon, 2 Sep 2024 20:00:24 +0100
Subject: [PATCH 2/4] fix: ruff
---
application/llm/openai.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/application/llm/openai.py b/application/llm/openai.py
index 73a0c3d1..f85de6ea 100644
--- a/application/llm/openai.py
+++ b/application/llm/openai.py
@@ -1,6 +1,5 @@
from application.llm.base import BaseLLM
from application.core.settings import settings
-import logging
From 2de1e5f71a1deb1962b7b6aabf0453fbb1c6020b Mon Sep 17 00:00:00 2001
From: Alex
Date: Mon, 2 Sep 2024 20:09:16 +0100
Subject: [PATCH 3/4] chore: document OpenAI-compatible endpoints
---
docs/pages/Guides/How-to-use-different-LLM.mdx | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/docs/pages/Guides/How-to-use-different-LLM.mdx b/docs/pages/Guides/How-to-use-different-LLM.mdx
index 908ddb28..274660be 100644
--- a/docs/pages/Guides/How-to-use-different-LLM.mdx
+++ b/docs/pages/Guides/How-to-use-different-LLM.mdx
@@ -36,6 +36,15 @@ List of latest supported LLMs are https://github.com/arc53/DocsGPT/blob/main/app
Visit application/llm and select the file of your selected llm and there you will find the speicifc requirements needed to be filled in order to use it,i.e API key of that llm.
+### For OpenAI-Compatible Endpoints:
+DocsGPT supports the use of OpenAI-compatible endpoints through base URL substitution. This feature allows you to use alternative AI models or services that implement the OpenAI API interface.
+
+
+Set the OPENAI_BASE_URL in your environment. You can change .env file with OPENAI_BASE_URL with the desired base URL or docker-compose.yml file and add the environment variable to the backend container.
+
+> [!Note]
+> Make sure you have the right API_KEY and correct LLM_NAME.
+
From 817fc5d4b3448417c12e2e8ed78375b41314e873 Mon Sep 17 00:00:00 2001
From: Alex
Date: Mon, 2 Sep 2024 20:11:31 +0100
Subject: [PATCH 4/4] fix: little nextra edit
---
docs/pages/Guides/How-to-use-different-LLM.mdx | 1 -
1 file changed, 1 deletion(-)
diff --git a/docs/pages/Guides/How-to-use-different-LLM.mdx b/docs/pages/Guides/How-to-use-different-LLM.mdx
index 274660be..7df77742 100644
--- a/docs/pages/Guides/How-to-use-different-LLM.mdx
+++ b/docs/pages/Guides/How-to-use-different-LLM.mdx
@@ -42,7 +42,6 @@ DocsGPT supports the use of OpenAI-compatible endpoints through base URL substit
Set the OPENAI_BASE_URL in your environment. You can change .env file with OPENAI_BASE_URL with the desired base URL or docker-compose.yml file and add the environment variable to the backend container.
-> [!Note]
> Make sure you have the right API_KEY and correct LLM_NAME.