diff --git a/README.md b/README.md
index f1942dc1..ee9a1af6 100644
--- a/README.md
+++ b/README.md
@@ -35,7 +35,8 @@ We're eager to provide personalized assistance when deploying your DocsGPT to a
[Send Email :email:](mailto:contact@arc53.com?subject=DocsGPT%20support%2Fsolutions)
-
+
+
## Roadmap
diff --git a/application/api/answer/routes.py b/application/api/answer/routes.py
index 9a22db84..17eb5cc3 100644
--- a/application/api/answer/routes.py
+++ b/application/api/answer/routes.py
@@ -292,6 +292,7 @@ class Stream(Resource):
def post(self):
data = request.get_json()
required_fields = ["question"]
+
missing_fields = check_required_fields(data, required_fields)
if missing_fields:
return missing_fields
@@ -422,7 +423,7 @@ class Answer(Resource):
@api.doc(description="Provide an answer based on the question and retriever")
def post(self):
data = request.get_json()
- required_fields = ["question"]
+ required_fields = ["question"]
missing_fields = check_required_fields(data, required_fields)
if missing_fields:
return missing_fields
diff --git a/application/api/user/routes.py b/application/api/user/routes.py
index c409e69a..feee91cc 100644
--- a/application/api/user/routes.py
+++ b/application/api/user/routes.py
@@ -7,7 +7,7 @@ from bson.binary import Binary, UuidRepresentation
from bson.dbref import DBRef
from bson.objectid import ObjectId
from flask import Blueprint, jsonify, make_response, request
-from flask_restx import fields, Namespace, Resource
+from flask_restx import inputs, fields, Namespace, Resource
from pymongo import MongoClient
from werkzeug.utils import secure_filename
@@ -802,7 +802,7 @@ class ShareConversation(Resource):
if missing_fields:
return missing_fields
- is_promptable = request.args.get("isPromptable")
+ is_promptable = request.args.get("isPromptable", type=inputs.boolean)
if is_promptable is None:
return make_response(
jsonify({"success": False, "message": "isPromptable is required"}), 400
@@ -831,7 +831,7 @@ class ShareConversation(Resource):
uuid.uuid4(), UuidRepresentation.STANDARD
)
- if is_promptable.lower() == "true":
+ if is_promptable:
prompt_id = data.get("prompt_id", "default")
chunks = data.get("chunks", "2")
@@ -859,7 +859,7 @@ class ShareConversation(Resource):
"conversation_id": DBRef(
"conversations", ObjectId(conversation_id)
),
- "isPromptable": is_promptable.lower() == "true",
+ "isPromptable": is_promptable,
"first_n_queries": current_n_queries,
"user": user,
"api_key": api_uuid,
@@ -883,7 +883,7 @@ class ShareConversation(Resource):
"$ref": "conversations",
"$id": ObjectId(conversation_id),
},
- "isPromptable": is_promptable.lower() == "true",
+ "isPromptable": is_promptable,
"first_n_queries": current_n_queries,
"user": user,
"api_key": api_uuid,
@@ -918,7 +918,7 @@ class ShareConversation(Resource):
"$ref": "conversations",
"$id": ObjectId(conversation_id),
},
- "isPromptable": is_promptable.lower() == "true",
+ "isPromptable": is_promptable,
"first_n_queries": current_n_queries,
"user": user,
"api_key": api_uuid,
@@ -939,7 +939,7 @@ class ShareConversation(Resource):
"conversation_id": DBRef(
"conversations", ObjectId(conversation_id)
),
- "isPromptable": is_promptable.lower() == "false",
+ "isPromptable": not is_promptable,
"first_n_queries": current_n_queries,
"user": user,
}
@@ -962,7 +962,7 @@ class ShareConversation(Resource):
"$ref": "conversations",
"$id": ObjectId(conversation_id),
},
- "isPromptable": is_promptable.lower() == "false",
+ "isPromptable": not is_promptable,
"first_n_queries": current_n_queries,
"user": user,
}
diff --git a/application/cache.py b/application/cache.py
new file mode 100644
index 00000000..33022e45
--- /dev/null
+++ b/application/cache.py
@@ -0,0 +1,93 @@
+import redis
+import time
+import json
+import logging
+from threading import Lock
+from application.core.settings import settings
+from application.utils import get_hash
+
+logger = logging.getLogger(__name__)
+
+_redis_instance = None
+_instance_lock = Lock()
+
+def get_redis_instance():
+ global _redis_instance
+ if _redis_instance is None:
+ with _instance_lock:
+ if _redis_instance is None:
+ try:
+ _redis_instance = redis.Redis.from_url(settings.CACHE_REDIS_URL, socket_connect_timeout=2)
+ except redis.ConnectionError as e:
+ logger.error(f"Redis connection error: {e}")
+ _redis_instance = None
+ return _redis_instance
+
+def gen_cache_key(*messages, model="docgpt"):
+ if not all(isinstance(msg, dict) for msg in messages):
+ raise ValueError("All messages must be dictionaries.")
+ messages_str = json.dumps(list(messages), sort_keys=True)
+ combined = f"{model}_{messages_str}"
+ cache_key = get_hash(combined)
+ return cache_key
+
+def gen_cache(func):
+ def wrapper(self, model, messages, *args, **kwargs):
+ try:
+ cache_key = gen_cache_key(*messages, model=model)
+ redis_client = get_redis_instance()
+ if redis_client:
+ try:
+ cached_response = redis_client.get(cache_key)
+ if cached_response:
+ return cached_response.decode('utf-8')
+ except redis.ConnectionError as e:
+ logger.error(f"Redis connection error: {e}")
+
+ result = func(self, model, messages, *args, **kwargs)
+ if redis_client:
+ try:
+ redis_client.set(cache_key, result, ex=1800)
+ except redis.ConnectionError as e:
+ logger.error(f"Redis connection error: {e}")
+
+ return result
+ except ValueError as e:
+ logger.error(e)
+ return "Error: No user message found in the conversation to generate a cache key."
+ return wrapper
+
+def stream_cache(func):
+ def wrapper(self, model, messages, stream, *args, **kwargs):
+ cache_key = gen_cache_key(*messages, model=model)
+ logger.info(f"Stream cache key: {cache_key}")
+
+ redis_client = get_redis_instance()
+ if redis_client:
+ try:
+ cached_response = redis_client.get(cache_key)
+ if cached_response:
+ logger.info(f"Cache hit for stream key: {cache_key}")
+ cached_response = json.loads(cached_response.decode('utf-8'))
+ for chunk in cached_response:
+ yield chunk
+ time.sleep(0.03)
+ return
+ except redis.ConnectionError as e:
+ logger.error(f"Redis connection error: {e}")
+
+ result = func(self, model, messages, stream, *args, **kwargs)
+ stream_cache_data = []
+
+ for chunk in result:
+ stream_cache_data.append(chunk)
+ yield chunk
+
+ if redis_client:
+ try:
+ redis_client.set(cache_key, json.dumps(stream_cache_data), ex=1800)
+ logger.info(f"Stream cache saved for key: {cache_key}")
+ except redis.ConnectionError as e:
+ logger.error(f"Redis connection error: {e}")
+
+ return wrapper
\ No newline at end of file
diff --git a/application/core/settings.py b/application/core/settings.py
index e6173be4..7346da08 100644
--- a/application/core/settings.py
+++ b/application/core/settings.py
@@ -21,6 +21,9 @@ class Settings(BaseSettings):
VECTOR_STORE: str = "faiss" # "faiss" or "elasticsearch" or "qdrant" or "milvus"
RETRIEVERS_ENABLED: list = ["classic_rag", "duckduck_search"] # also brave_search
+ # LLM Cache
+ CACHE_REDIS_URL: str = "redis://localhost:6379/2"
+
API_URL: str = "http://localhost:7091" # backend url for celery worker
API_KEY: Optional[str] = None # LLM api key
diff --git a/application/llm/base.py b/application/llm/base.py
index 475b7937..1caab5d3 100644
--- a/application/llm/base.py
+++ b/application/llm/base.py
@@ -1,28 +1,29 @@
from abc import ABC, abstractmethod
from application.usage import gen_token_usage, stream_token_usage
+from application.cache import stream_cache, gen_cache
class BaseLLM(ABC):
def __init__(self):
self.token_usage = {"prompt_tokens": 0, "generated_tokens": 0}
- def _apply_decorator(self, method, decorator, *args, **kwargs):
- return decorator(method, *args, **kwargs)
+ def _apply_decorator(self, method, decorators, *args, **kwargs):
+ for decorator in decorators:
+ method = decorator(method)
+ return method(self, *args, **kwargs)
@abstractmethod
def _raw_gen(self, model, messages, stream, *args, **kwargs):
pass
def gen(self, model, messages, stream=False, *args, **kwargs):
- return self._apply_decorator(self._raw_gen, gen_token_usage)(
- self, model=model, messages=messages, stream=stream, *args, **kwargs
- )
+ decorators = [gen_token_usage, gen_cache]
+ return self._apply_decorator(self._raw_gen, decorators=decorators, model=model, messages=messages, stream=stream, *args, **kwargs)
@abstractmethod
def _raw_gen_stream(self, model, messages, stream, *args, **kwargs):
pass
def gen_stream(self, model, messages, stream=True, *args, **kwargs):
- return self._apply_decorator(self._raw_gen_stream, stream_token_usage)(
- self, model=model, messages=messages, stream=stream, *args, **kwargs
- )
+ decorators = [stream_token_usage, stream_cache]
+ return self._apply_decorator(self._raw_gen_stream, decorators=decorators, model=model, messages=messages, stream=stream, *args, **kwargs)
\ No newline at end of file
diff --git a/application/requirements.txt b/application/requirements.txt
index 6a57dd12..6ea1d1ba 100644
--- a/application/requirements.txt
+++ b/application/requirements.txt
@@ -4,7 +4,7 @@ beautifulsoup4==4.12.3
celery==5.3.6
dataclasses-json==0.6.7
docx2txt==0.8
-duckduckgo-search==6.2.6
+duckduckgo-search==6.3.0
ebooklib==0.18
elastic-transport==8.15.0
elasticsearch==8.15.1
@@ -54,7 +54,7 @@ pathable==0.4.3
pillow==10.4.0
portalocker==2.10.1
prance==23.6.21.0
-primp==0.6.2
+primp==0.6.3
prompt-toolkit==3.0.47
protobuf==5.28.2
py==1.11.0
diff --git a/application/utils.py b/application/utils.py
index f0802c39..1fc9e329 100644
--- a/application/utils.py
+++ b/application/utils.py
@@ -1,6 +1,8 @@
import tiktoken
+import hashlib
from flask import jsonify, make_response
+
_encoding = None
@@ -39,3 +41,8 @@ def check_required_fields(data, required_fields):
400,
)
return None
+
+
+def get_hash(data):
+ return hashlib.md5(data.encode()).hexdigest()
+
diff --git a/docker-compose.yaml b/docker-compose.yaml
index f3b8a363..d3f3421a 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -20,6 +20,7 @@ services:
- CELERY_BROKER_URL=redis://redis:6379/0
- CELERY_RESULT_BACKEND=redis://redis:6379/1
- MONGO_URI=mongodb://mongo:27017/docsgpt
+ - CACHE_REDIS_URL=redis://redis:6379/2
ports:
- "7091:7091"
volumes:
@@ -41,6 +42,7 @@ services:
- CELERY_RESULT_BACKEND=redis://redis:6379/1
- MONGO_URI=mongodb://mongo:27017/docsgpt
- API_URL=http://backend:7091
+ - CACHE_REDIS_URL=redis://redis:6379/2
depends_on:
- redis
- mongo
diff --git a/frontend/signal-desktop-keyring.gpg b/frontend/signal-desktop-keyring.gpg
new file mode 100644
index 00000000..b5e68a04
Binary files /dev/null and b/frontend/signal-desktop-keyring.gpg differ
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index 1455f495..ba0a4bd7 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -19,7 +19,7 @@ function MainLayout() {
+ const [, , componentMounted] = useDarkTheme();
+ if (!componentMounted) {
+ return
;
}
return (
diff --git a/frontend/src/Navigation.tsx b/frontend/src/Navigation.tsx
index c4543085..58c68186 100644
--- a/frontend/src/Navigation.tsx
+++ b/frontend/src/Navigation.tsx
@@ -2,17 +2,15 @@ import { useEffect, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
import { NavLink, useNavigate } from 'react-router-dom';
-
import conversationService from './api/services/conversationService';
import userService from './api/services/userService';
import Add from './assets/add.svg';
import openNewChat from './assets/openNewChat.svg';
+import Hamburger from './assets/hamburger.svg';
import DocsGPT3 from './assets/cute_docsgpt3.svg';
import Discord from './assets/discord.svg';
import Expand from './assets/expand.svg';
import Github from './assets/github.svg';
-import Hamburger from './assets/hamburger.svg';
-import Info from './assets/info.svg';
import SettingGear from './assets/settingGear.svg';
import Twitter from './assets/TwitterX.svg';
import UploadIcon from './assets/upload.svg';
@@ -43,6 +41,7 @@ import {
} from './preferences/preferenceSlice';
import { selectQueries } from './conversation/conversationSlice';
import Upload from './upload/Upload';
+import Help from './components/Help';
interface NavigationProps {
navOpen: boolean;
@@ -76,7 +75,6 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) {
const [isDarkTheme] = useDarkTheme();
const [isDocsListOpen, setIsDocsListOpen] = useState(false);
const { t } = useTranslation();
-
const isApiKeySet = useSelector(selectApiKeyStatus);
const [apiKeyModalState, setApiKeyModalState] =
useState
('INACTIVE');
@@ -304,7 +302,10 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) {
{t('newChat')}
-
+
{conversations && conversations.length > 0 ? (
@@ -333,7 +334,6 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) {
<>>
)}
-
@@ -388,68 +388,51 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) {
-
-
{
- if (isMobile) {
- setNavOpen(!navOpen);
- }
- resetConversation();
- }}
- to="/about"
- className={({ isActive }) =>
- `my-auto mx-4 flex h-9 cursor-pointer gap-4 rounded-3xl hover:bg-gray-100 dark:hover:bg-[#28292E] ${
- isActive ? 'bg-gray-3000 dark:bg-[#28292E]' : ''
- }`
- }
- >
-
- {t('about')}
-
-
diff --git a/frontend/src/assets/documentation-dark.svg b/frontend/src/assets/documentation-dark.svg
index 78440206..5cbde1b1 100644
--- a/frontend/src/assets/documentation-dark.svg
+++ b/frontend/src/assets/documentation-dark.svg
@@ -1,3 +1,4 @@
-
diff --git a/frontend/src/conversation/Conversation.tsx b/frontend/src/conversation/Conversation.tsx
index 4341407e..a8326da7 100644
--- a/frontend/src/conversation/Conversation.tsx
+++ b/frontend/src/conversation/Conversation.tsx
@@ -5,7 +5,6 @@ import newChatIcon from '../assets/openNewChat.svg';
import ArrowDown from '../assets/arrow-down.svg';
import Send from '../assets/send.svg';
import SendDark from '../assets/send_dark.svg';
-import ShareIcon from '../assets/share.svg';
import SpinnerDark from '../assets/spinner-dark.svg';
import Spinner from '../assets/spinner.svg';
import RetryIcon from '../components/RetryIcon';
@@ -19,6 +18,7 @@ import { AppDispatch } from '../store';
import ConversationBubble from './ConversationBubble';
import { handleSendFeedback } from './conversationHandlers';
import { FEEDBACK, Query } from './conversationModels';
+import ShareIcon from '../assets/share.svg';
import {
addQuery,
fetchAnswer,
diff --git a/frontend/src/conversation/ConversationTile.tsx b/frontend/src/conversation/ConversationTile.tsx
index 0193d161..dd3e6e1d 100644
--- a/frontend/src/conversation/ConversationTile.tsx
+++ b/frontend/src/conversation/ConversationTile.tsx
@@ -1,4 +1,10 @@
-import { SyntheticEvent, useEffect, useRef, useState } from 'react';
+import {
+ SyntheticEvent,
+ useEffect,
+ useRef,
+ useState,
+ useCallback,
+} from 'react';
import { useSelector } from 'react-redux';
import Edit from '../assets/edit.svg';
import Exit from '../assets/exit.svg';
@@ -75,6 +81,36 @@ export default function ConversationTile({
document.removeEventListener('mousedown', handleClickOutside);
};
}, []);
+
+ const preventScroll = useCallback((event: WheelEvent | TouchEvent) => {
+ event.preventDefault();
+ }, []);
+
+ useEffect(() => {
+ const conversationsMainDiv = document.getElementById(
+ 'conversationsMainDiv',
+ );
+
+ if (conversationsMainDiv) {
+ if (isOpen) {
+ conversationsMainDiv.addEventListener('wheel', preventScroll, {
+ passive: false,
+ });
+ conversationsMainDiv.addEventListener('touchmove', preventScroll, {
+ passive: false,
+ });
+ } else {
+ conversationsMainDiv.removeEventListener('wheel', preventScroll);
+ conversationsMainDiv.removeEventListener('touchmove', preventScroll);
+ }
+
+ return () => {
+ conversationsMainDiv.removeEventListener('wheel', preventScroll);
+ conversationsMainDiv.removeEventListener('touchmove', preventScroll);
+ };
+ }
+ }, [isOpen]);
+
function onClear() {
setConversationsName(conversation.name);
setIsEdit(false);
@@ -147,7 +183,7 @@ export default function ConversationTile({