Merge branch 'arc53:main' into AidanComponentEditChat

This commit is contained in:
aidanbennettjones
2025-02-28 10:35:17 -05:00
committed by GitHub
16 changed files with 307 additions and 757 deletions

View File

@@ -63,6 +63,12 @@ We're eager to provide personalized assistance when deploying your DocsGPT to a
[Send Email :email:](mailto:support@docsgpt.cloud?subject=DocsGPT%20support%2Fsolutions)
## Join the Lighthouse Program 🌟
Calling all developers and GenAI innovators! The **DocsGPT Lighthouse Program** connects technical leaders actively deploying or extending DocsGPT in real-world scenarios. Collaborate directly with our team to shape the roadmap, access priority support, and build enterprise-ready solutions with exclusive community insights.
[Learn More & Apply →](https://docs.google.com/forms/d/1KAADiJinUJ8EMQyfTXUIGyFbqINNClNR3jBNWq7DgTE)
## QuickStart

View File

@@ -115,8 +115,9 @@ def is_azure_configured():
def save_conversation(
conversation_id, question, response, source_log_docs, tool_calls, llm, index=None
conversation_id, question, response, source_log_docs, tool_calls, llm, index=None, api_key=None
):
current_time = datetime.datetime.now(datetime.timezone.utc)
if conversation_id is not None and index is not None:
conversations_collection.update_one(
{"_id": ObjectId(conversation_id), f"queries.{index}": {"$exists": True}},
@@ -126,6 +127,7 @@ def save_conversation(
f"queries.{index}.response": response,
f"queries.{index}.sources": source_log_docs,
f"queries.{index}.tool_calls": tool_calls,
f"queries.{index}.timestamp": current_time
}
},
)
@@ -144,6 +146,7 @@ def save_conversation(
"response": response,
"sources": source_log_docs,
"tool_calls": tool_calls,
"timestamp": current_time
}
}
},
@@ -168,21 +171,25 @@ def save_conversation(
]
completion = llm.gen(model=gpt_model, messages=messages_summary, max_tokens=30)
conversation_id = conversations_collection.insert_one(
{
"user": "local",
"date": datetime.datetime.utcnow(),
"name": completion,
"queries": [
{
"prompt": question,
"response": response,
"sources": source_log_docs,
"tool_calls": tool_calls,
}
],
}
).inserted_id
conversation_data = {
"user": "local",
"date": datetime.datetime.utcnow(),
"name": completion,
"queries": [
{
"prompt": question,
"response": response,
"sources": source_log_docs,
"tool_calls": tool_calls,
"timestamp": current_time
}
],
}
if api_key:
api_key_doc = api_key_collection.find_one({"key": api_key})
if api_key_doc:
conversation_data["api_key"] = api_key_doc["key"]
conversation_id = conversations_collection.insert_one(conversation_data).inserted_id
return conversation_id
@@ -197,11 +204,15 @@ def get_prompt(prompt_id):
prompt = prompts_collection.find_one({"_id": ObjectId(prompt_id)})["content"]
return prompt
def complete_stream(
question, retriever, conversation_id, user_api_key, isNoneDoc=False, index=None
question,
retriever,
conversation_id,
user_api_key,
isNoneDoc=False,
index=None,
should_save_conversation=True
):
try:
response_full = ""
source_log_docs = []
@@ -232,9 +243,12 @@ def complete_stream(
doc["source"] = "None"
llm = LLMCreator.create_llm(
settings.LLM_NAME, api_key=settings.API_KEY, user_api_key=user_api_key
settings.LLM_NAME,
api_key=settings.API_KEY,
user_api_key=user_api_key
)
if user_api_key is None:
if should_save_conversation:
conversation_id = save_conversation(
conversation_id,
question,
@@ -243,10 +257,14 @@ def complete_stream(
tool_calls,
llm,
index,
api_key=user_api_key
)
# send data.type = "id" with the conversation id as JSON so the client can reference the saved conversation
data = json.dumps({"type": "id", "id": str(conversation_id)})
yield f"data: {data}\n\n"
else:
conversation_id = None
# conversation was not saved: still emit data.type = "id" (with a null id) as JSON so the client protocol stays consistent
data = json.dumps({"type": "id", "id": str(conversation_id)})
yield f"data: {data}\n\n"
retriever_params = retriever.get_params()
user_logs_collection.insert_one(
@@ -309,6 +327,9 @@ class Stream(Resource):
"index": fields.Integer(
required=False, description="The position where query is to be updated"
),
"save_conversation": fields.Boolean(
required=False, default=True, description="Flag to save conversation"
),
},
)
@@ -323,6 +344,8 @@ class Stream(Resource):
if missing_fields:
return missing_fields
save_conv = data.get("save_conversation", True)
try:
question = data["question"]
history = limit_chat_history(
@@ -381,6 +404,7 @@ class Stream(Resource):
user_api_key=user_api_key,
isNoneDoc=data.get("isNoneDoc"),
index=index,
should_save_conversation=save_conv,
),
mimetype="text/event-stream",
)

View File

@@ -106,11 +106,14 @@ class DeleteAllConversations(Resource):
@user_ns.route("/api/get_conversations")
class GetConversations(Resource):
@api.doc(
description="Retrieve a list of the latest 30 conversations",
description="Retrieve a list of the latest 30 conversations (excluding API key conversations)",
)
def get(self):
try:
conversations = conversations_collection.find().sort("date", -1).limit(30)
conversations = conversations_collection.find(
{"api_key": {"$exists": False}}
).sort("date", -1).limit(30)
list_conversations = [
{"id": str(conversation["_id"]), "name": conversation["name"]}
for conversation in conversations
@@ -213,17 +216,34 @@ class SubmitFeedback(Resource):
return missing_fields
try:
conversations_collection.update_one(
{
"_id": ObjectId(data["conversation_id"]),
f"queries.{data['question_index']}": {"$exists": True},
},
{
"$set": {
f"queries.{data['question_index']}.feedback": data["feedback"]
}
},
)
if data["feedback"] is None:
# Remove feedback and feedback_timestamp if feedback is null
conversations_collection.update_one(
{
"_id": ObjectId(data["conversation_id"]),
f"queries.{data['question_index']}": {"$exists": True},
},
{
"$unset": {
f"queries.{data['question_index']}.feedback": "",
f"queries.{data['question_index']}.feedback_timestamp": ""
}
},
)
else:
# Set feedback and feedback_timestamp if feedback has a value
conversations_collection.update_one(
{
"_id": ObjectId(data["conversation_id"]),
f"queries.{data['question_index']}": {"$exists": True},
},
{
"$set": {
f"queries.{data['question_index']}.feedback": data["feedback"],
f"queries.{data['question_index']}.feedback_timestamp": datetime.datetime.now(datetime.timezone.utc)
}
},
)
except Exception as err:
current_app.logger.error(f"Error submitting feedback: {err}")
@@ -1186,21 +1206,12 @@ class GetMessageAnalytics(Resource):
get_message_analytics_model = api.model(
"GetMessageAnalyticsModel",
{
"api_key_id": fields.String(
required=False,
description="API Key ID",
),
"api_key_id": fields.String(required=False, description="API Key ID"),
"filter_option": fields.String(
required=False,
description="Filter option for analytics",
default="last_30_days",
enum=[
"last_hour",
"last_24_hour",
"last_7_days",
"last_15_days",
"last_30_days",
],
enum=["last_hour", "last_24_hour", "last_7_days", "last_15_days", "last_30_days"],
),
},
)
@@ -1221,42 +1232,21 @@ class GetMessageAnalytics(Resource):
except Exception as err:
current_app.logger.error(f"Error getting API key: {err}")
return make_response(jsonify({"success": False}), 400)
end_date = datetime.datetime.now(datetime.timezone.utc)
if filter_option == "last_hour":
start_date = end_date - datetime.timedelta(hours=1)
group_format = "%Y-%m-%d %H:%M:00"
group_stage = {
"$group": {
"_id": {
"minute": {
"$dateToString": {"format": group_format, "date": "$date"}
}
},
"total_messages": {"$sum": 1},
}
}
elif filter_option == "last_24_hour":
start_date = end_date - datetime.timedelta(hours=24)
group_format = "%Y-%m-%d %H:00"
group_stage = {
"$group": {
"_id": {
"hour": {
"$dateToString": {"format": group_format, "date": "$date"}
}
},
"total_messages": {"$sum": 1},
}
}
else:
if filter_option in ["last_7_days", "last_15_days", "last_30_days"]:
filter_days = (
6
if filter_option == "last_7_days"
else (14 if filter_option == "last_15_days" else 29)
6 if filter_option == "last_7_days"
else 14 if filter_option == "last_15_days"
else 29
)
else:
return make_response(
@@ -1264,36 +1254,44 @@ class GetMessageAnalytics(Resource):
)
start_date = end_date - datetime.timedelta(days=filter_days)
start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
end_date = end_date.replace(
hour=23, minute=59, second=59, microsecond=999999
)
end_date = end_date.replace(hour=23, minute=59, second=59, microsecond=999999)
group_format = "%Y-%m-%d"
group_stage = {
"$group": {
"_id": {
"day": {
"$dateToString": {"format": group_format, "date": "$date"}
}
},
"total_messages": {"$sum": 1},
}
}
try:
match_stage = {
"$match": {
"date": {"$gte": start_date, "$lte": end_date},
}
}
if api_key:
match_stage["$match"]["api_key"] = api_key
message_data = conversations_collection.aggregate(
[
match_stage,
group_stage,
{"$sort": {"_id": 1}},
]
)
pipeline = [
# Initial match for API key if provided
{
"$match": {
"api_key": api_key if api_key else {"$exists": False}
}
},
{"$unwind": "$queries"},
# Match queries within the time range
{
"$match": {
"queries.timestamp": {
"$gte": start_date,
"$lte": end_date
}
}
},
# Group by formatted timestamp
{
"$group": {
"_id": {
"$dateToString": {
"format": group_format,
"date": "$queries.timestamp"
}
},
"count": {"$sum": 1}
}
},
# Sort by timestamp
{"$sort": {"_id": 1}}
]
message_data = conversations_collection.aggregate(pipeline)
if filter_option == "last_hour":
intervals = generate_minute_range(start_date, end_date)
@@ -1305,12 +1303,7 @@ class GetMessageAnalytics(Resource):
daily_messages = {interval: 0 for interval in intervals}
for entry in message_data:
if filter_option == "last_hour":
daily_messages[entry["_id"]["minute"]] = entry["total_messages"]
elif filter_option == "last_24_hour":
daily_messages[entry["_id"]["hour"]] = entry["total_messages"]
else:
daily_messages[entry["_id"]["day"]] = entry["total_messages"]
daily_messages[entry["_id"]] = entry["count"]
except Exception as err:
current_app.logger.error(f"Error getting message analytics: {err}")
@@ -1358,6 +1351,7 @@ class GetTokenAnalytics(Resource):
except Exception as err:
current_app.logger.error(f"Error getting API key: {err}")
return make_response(jsonify({"success": False}), 400)
end_date = datetime.datetime.now(datetime.timezone.utc)
if filter_option == "last_hour":
@@ -1378,7 +1372,6 @@ class GetTokenAnalytics(Resource):
},
}
}
elif filter_option == "last_24_hour":
start_date = end_date - datetime.timedelta(hours=24)
group_format = "%Y-%m-%d %H:00"
@@ -1397,7 +1390,6 @@ class GetTokenAnalytics(Resource):
},
}
}
else:
if filter_option in ["last_7_days", "last_15_days", "last_30_days"]:
filter_days = (
@@ -1439,6 +1431,8 @@ class GetTokenAnalytics(Resource):
}
if api_key:
match_stage["$match"]["api_key"] = api_key
else:
match_stage["$match"]["api_key"] = {"$exists": False}
token_usage_data = token_usage_collection.aggregate(
[
@@ -1517,11 +1511,11 @@ class GetFeedbackAnalytics(Resource):
if filter_option == "last_hour":
start_date = end_date - datetime.timedelta(hours=1)
group_format = "%Y-%m-%d %H:%M:00"
date_field = {"$dateToString": {"format": group_format, "date": "$date"}}
date_field = {"$dateToString": {"format": group_format, "date": "$queries.feedback_timestamp"}}
elif filter_option == "last_24_hour":
start_date = end_date - datetime.timedelta(hours=24)
group_format = "%Y-%m-%d %H:00"
date_field = {"$dateToString": {"format": group_format, "date": "$date"}}
date_field = {"$dateToString": {"format": group_format, "date": "$queries.feedback_timestamp"}}
else:
if filter_option in ["last_7_days", "last_15_days", "last_30_days"]:
filter_days = (
@@ -1539,17 +1533,19 @@ class GetFeedbackAnalytics(Resource):
hour=23, minute=59, second=59, microsecond=999999
)
group_format = "%Y-%m-%d"
date_field = {"$dateToString": {"format": group_format, "date": "$date"}}
date_field = {"$dateToString": {"format": group_format, "date": "$queries.feedback_timestamp"}}
try:
match_stage = {
"$match": {
"date": {"$gte": start_date, "$lte": end_date},
"queries": {"$exists": True, "$ne": []},
"queries.feedback_timestamp": {"$gte": start_date, "$lte": end_date},
"queries.feedback": {"$exists": True}
}
}
if api_key:
match_stage["$match"]["api_key"] = api_key
else:
match_stage["$match"]["api_key"] = {"$exists": False}
# Unwind the queries array to process each query separately
pipeline = [

View File

@@ -22,7 +22,7 @@ class GoogleLLM(BaseLLM):
parts = []
if role and content is not None:
if isinstance(content, str):
parts = [types.Part.from_text(content)]
parts = [types.Part.from_text(text=content)]
elif isinstance(content, list):
for item in content:
if "text" in item:

View File

@@ -7,14 +7,14 @@ docx2txt==0.8
duckduckgo-search==7.4.2
ebooklib==0.18
elastic-transport==8.17.0
elasticsearch==8.17.0
elasticsearch==8.17.1
escodegen==1.0.11
esprima==4.0.1
esutils==1.0.1
Flask==3.1.0
faiss-cpu==1.9.0.post1
flask-restx==1.3.0
google-genai==0.5.0
google-genai==1.3.0
google-generativeai==0.8.3
gTTS==2.5.4
gunicorn==23.0.0
@@ -37,7 +37,7 @@ langchain-openai==0.3.0
langchain-text-splitters==0.3.5
langsmith==0.2.10
lazy-object-proxy==1.10.0
lxml==5.3.0
lxml==5.3.1
markupsafe==3.0.2
marshmallow==3.26.1
mpmath==1.3.0
@@ -46,7 +46,7 @@ mypy-extensions==1.0.0
networkx==3.4.2
numpy==2.2.1
openai==1.59.5
openapi-schema-validator==0.6.2
openapi-schema-validator==0.6.3
openapi-spec-validator==0.6.0
openapi3-parser==1.1.19
orjson==3.10.14
@@ -62,7 +62,7 @@ prompt-toolkit==3.0.50
protobuf==5.29.3
psycopg2-binary==2.9.10
py==1.11.0
pydantic==2.10.4
pydantic==2.10.6
pydantic-core==2.27.2
pydantic-settings==2.7.1
pymongo==4.10.1
@@ -70,7 +70,7 @@ pypdf==5.2.0
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
python-pptx==1.0.2
qdrant-client==1.12.2
qdrant-client==1.13.2
redis==5.2.1
referencing==0.30.2
regex==2024.11.6
@@ -81,7 +81,7 @@ tiktoken==0.8.0
tokenizers==0.21.0
torch==2.5.1
tqdm==4.67.1
transformers==4.48.0
transformers==4.49.0
typing-extensions==4.12.2
typing-inspect==0.9.0
tzdata==2024.2

546
docs/package-lock.json generated
View File

@@ -7,7 +7,7 @@
"license": "MIT",
"dependencies": {
"@vercel/analytics": "^1.1.1",
"docsgpt-react": "^0.4.11",
"docsgpt-react": "^0.5.0",
"next": "^14.2.22",
"nextra": "^2.13.2",
"nextra-theme-docs": "^2.13.2",
@@ -1177,407 +1177,6 @@
"node": ">=8"
}
},
"node_modules/@parcel/core": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/core/-/core-2.13.2.tgz",
"integrity": "sha512-1zC5Au4z9or5XyP6ipfvJqHktuB0jD7WuxMcV1CWAZGARHKylLe+0ccl+Wx7HN5O+xAvfCDtTlKrATY8qyrIyw==",
"peer": true,
"dependencies": {
"@mischnic/json-sourcemap": "^0.1.0",
"@parcel/cache": "2.13.2",
"@parcel/diagnostic": "2.13.2",
"@parcel/events": "2.13.2",
"@parcel/feature-flags": "2.13.2",
"@parcel/fs": "2.13.2",
"@parcel/graph": "3.3.2",
"@parcel/logger": "2.13.2",
"@parcel/package-manager": "2.13.2",
"@parcel/plugin": "2.13.2",
"@parcel/profiler": "2.13.2",
"@parcel/rust": "2.13.2",
"@parcel/source-map": "^2.1.1",
"@parcel/types": "2.13.2",
"@parcel/utils": "2.13.2",
"@parcel/workers": "2.13.2",
"base-x": "^3.0.8",
"browserslist": "^4.6.6",
"clone": "^2.1.1",
"dotenv": "^16.4.5",
"dotenv-expand": "^11.0.6",
"json5": "^2.2.0",
"msgpackr": "^1.9.9",
"nullthrows": "^1.1.1",
"semver": "^7.5.2"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/cache": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/cache/-/cache-2.13.2.tgz",
"integrity": "sha512-Y0nWlCMWDSp1lxiPI5zCWTGD0InnVZ+IfqeyLWmROAqValYyd0QZCvnSljKJ144jWTr0jXxDveir+DVF8sAYaA==",
"peer": true,
"dependencies": {
"@parcel/fs": "2.13.2",
"@parcel/logger": "2.13.2",
"@parcel/utils": "2.13.2",
"lmdb": "2.8.5"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"peerDependencies": {
"@parcel/core": "^2.13.2"
}
},
"node_modules/@parcel/core/node_modules/@parcel/codeframe": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/codeframe/-/codeframe-2.13.2.tgz",
"integrity": "sha512-qFMiS14orb6QSQj5/J/QN+gJElUfedVAKBTNkp9QB4i8ObdLHDqHRUzFb55ZQJI3G4vsxOOWAOUXGirtLwrxGQ==",
"peer": true,
"dependencies": {
"chalk": "^4.1.2"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/diagnostic": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/diagnostic/-/diagnostic-2.13.2.tgz",
"integrity": "sha512-6Au0JEJ5SY2gYrY0/m0i0sTuqTvK0k2E9azhBJR+zzCREbUxLiDdLZ+vXAfLW7t/kPAcWtdNU0Bj7pnZcMiMXg==",
"peer": true,
"dependencies": {
"@mischnic/json-sourcemap": "^0.1.0",
"nullthrows": "^1.1.1"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/events": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/events/-/events-2.13.2.tgz",
"integrity": "sha512-BVB9hW1RGh/tMaDHfpa+uIgz5PMULorCnjmWr/KvrlhdUSUQoaPYfRcTDYrKhoKuNIKsWSnTGvXrxE53L5qo0w==",
"peer": true,
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/fs": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/fs/-/fs-2.13.2.tgz",
"integrity": "sha512-bdeIMuAXhMnROvqV55JWRUmjD438/T7h3r3NsFnkq+Mp4z2nuAn0STxbqDNxIgTMJHNunSDzncqRNMT7xJCe8A==",
"peer": true,
"dependencies": {
"@parcel/feature-flags": "2.13.2",
"@parcel/rust": "2.13.2",
"@parcel/types-internal": "2.13.2",
"@parcel/utils": "2.13.2",
"@parcel/watcher": "^2.0.7",
"@parcel/workers": "2.13.2"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"peerDependencies": {
"@parcel/core": "^2.13.2"
}
},
"node_modules/@parcel/core/node_modules/@parcel/logger": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/logger/-/logger-2.13.2.tgz",
"integrity": "sha512-SFVABAMqaT9jIDn4maPgaQQauPDz8fpoKUGEuLF44Q0aQFbBUy7vX7KYs/EvYSWZo4VyJcUDHvIInBlepA0/ZQ==",
"peer": true,
"dependencies": {
"@parcel/diagnostic": "2.13.2",
"@parcel/events": "2.13.2"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/markdown-ansi": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/markdown-ansi/-/markdown-ansi-2.13.2.tgz",
"integrity": "sha512-MIEoetfT/snk1GqWzBI3AhifV257i2xke9dvyQl14PPiMl+TlVhwnbQyA09WJBvDor+MuxZypHL7xoFdW8ff3A==",
"peer": true,
"dependencies": {
"chalk": "^4.1.2"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/node-resolver-core": {
"version": "3.4.2",
"resolved": "https://registry.npmjs.org/@parcel/node-resolver-core/-/node-resolver-core-3.4.2.tgz",
"integrity": "sha512-SwnKLcZRG1VdB5JeM/Ax5VMWWh2QfXufmMQCKKx0/Kk41nUpie+aIZKj3LH6Z/fJsnKig/vXpeWoxGhmG523qg==",
"peer": true,
"dependencies": {
"@mischnic/json-sourcemap": "^0.1.0",
"@parcel/diagnostic": "2.13.2",
"@parcel/fs": "2.13.2",
"@parcel/rust": "2.13.2",
"@parcel/utils": "2.13.2",
"nullthrows": "^1.1.1",
"semver": "^7.5.2"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/package-manager": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/package-manager/-/package-manager-2.13.2.tgz",
"integrity": "sha512-6HjfbdJUjHyNKzYB7GSYnOCtLwqCGW7yT95GlnnTKyFffvXYsqvBSyepMuPRlbX0mFUm4S9l2DH3OVZrk108AA==",
"peer": true,
"dependencies": {
"@parcel/diagnostic": "2.13.2",
"@parcel/fs": "2.13.2",
"@parcel/logger": "2.13.2",
"@parcel/node-resolver-core": "3.4.2",
"@parcel/types": "2.13.2",
"@parcel/utils": "2.13.2",
"@parcel/workers": "2.13.2",
"@swc/core": "^1.7.26",
"semver": "^7.5.2"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"peerDependencies": {
"@parcel/core": "^2.13.2"
}
},
"node_modules/@parcel/core/node_modules/@parcel/plugin": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/plugin/-/plugin-2.13.2.tgz",
"integrity": "sha512-Q+RIENS1B185yLPhrGdzBK1oJrZmh/RXrYMnzJs78Tog8SpihjeNBNR6z4PT85o2F+Gy2y1S9A26fpiGq161qQ==",
"peer": true,
"dependencies": {
"@parcel/types": "2.13.2"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/profiler": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/profiler/-/profiler-2.13.2.tgz",
"integrity": "sha512-fur6Oq2HkX6AiM8rtqmDvldH5JWz0sqXA1ylz8cE3XOiDZIuvCulZmQ+hH+4odaNH6QocI1MwfV+GDh3HlQoCA==",
"peer": true,
"dependencies": {
"@parcel/diagnostic": "2.13.2",
"@parcel/events": "2.13.2",
"@parcel/types-internal": "2.13.2",
"chrome-trace-event": "^1.0.2"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/rust": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/rust/-/rust-2.13.2.tgz",
"integrity": "sha512-XFIewSwxkrDYOnnSP/XZ1LDLdXTs7L9CjQUWtl46Vir5Pq/rinemwLJeKGIwKLHy7fhUZQjYxquH6fBL+AY8DA==",
"peer": true,
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/types": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/types/-/types-2.13.2.tgz",
"integrity": "sha512-6ixqjk2pjKELn4sQ/jdvpbCVTeH6xXQTdotkN8Wzk68F2K2MtSPIRAEocumlexScfffbRQplr2MdIf1JJWLogA==",
"peer": true,
"dependencies": {
"@parcel/types-internal": "2.13.2",
"@parcel/workers": "2.13.2"
}
},
"node_modules/@parcel/core/node_modules/@parcel/utils": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/utils/-/utils-2.13.2.tgz",
"integrity": "sha512-BkFtRo5xenmonwnBy+X4sVbHIRrx+ZHMPpS/6hFqyTvoUUFq2yTFQnfRGVVOOvscVUxpGom+kewnrTG3HHbZoA==",
"peer": true,
"dependencies": {
"@parcel/codeframe": "2.13.2",
"@parcel/diagnostic": "2.13.2",
"@parcel/logger": "2.13.2",
"@parcel/markdown-ansi": "2.13.2",
"@parcel/rust": "2.13.2",
"@parcel/source-map": "^2.1.1",
"chalk": "^4.1.2",
"nullthrows": "^1.1.1"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/core/node_modules/@parcel/workers": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/workers/-/workers-2.13.2.tgz",
"integrity": "sha512-P78BpH0yTT9KK09wgK4eabtlb5OlcWAmZebOToN5UYuwWEylKt0gWZx1+d+LPQupvK84/iZ+AutDScsATjgUMw==",
"peer": true,
"dependencies": {
"@parcel/diagnostic": "2.13.2",
"@parcel/logger": "2.13.2",
"@parcel/profiler": "2.13.2",
"@parcel/types-internal": "2.13.2",
"@parcel/utils": "2.13.2",
"nullthrows": "^1.1.1"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
},
"peerDependencies": {
"@parcel/core": "^2.13.2"
}
},
"node_modules/@parcel/core/node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"peer": true,
"dependencies": {
"color-convert": "^2.0.1"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/@parcel/core/node_modules/chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
"peer": true,
"dependencies": {
"ansi-styles": "^4.1.0",
"supports-color": "^7.1.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/@parcel/core/node_modules/color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"peer": true,
"dependencies": {
"color-name": "~1.1.4"
},
"engines": {
"node": ">=7.0.0"
}
},
"node_modules/@parcel/core/node_modules/color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"peer": true
},
"node_modules/@parcel/core/node_modules/has-flag": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
"peer": true,
"engines": {
"node": ">=8"
}
},
"node_modules/@parcel/core/node_modules/semver": {
"version": "7.6.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
"integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
"peer": true,
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/@parcel/core/node_modules/supports-color": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
"peer": true,
"dependencies": {
"has-flag": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/@parcel/diagnostic": {
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/@parcel/diagnostic/-/diagnostic-2.12.0.tgz",
@@ -1606,19 +1205,6 @@
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/feature-flags": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/feature-flags/-/feature-flags-2.13.2.tgz",
"integrity": "sha512-cCwDAKD4Er24EkuQ+loVZXSURpM0gAGRsLJVoBtFiCSbB3nmIJJ6FLRwSBI/5OsOUExiUXDvSpfUCA5ldGTzbw==",
"peer": true,
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/fs": {
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/@parcel/fs/-/fs-2.12.0.tgz",
@@ -1641,23 +1227,6 @@
"@parcel/core": "^2.12.0"
}
},
"node_modules/@parcel/graph": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@parcel/graph/-/graph-3.3.2.tgz",
"integrity": "sha512-aAysQLRr8SOonSHWqdKHMJzfcrDFXKK8IYZEurlOzosiSgZXrAK7q8b8JcaJ4r84/jlvQYNYneNZeFQxKjHXkA==",
"peer": true,
"dependencies": {
"@parcel/feature-flags": "2.13.2",
"nullthrows": "^1.1.1"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/logger": {
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/@parcel/logger/-/logger-2.12.0.tgz",
@@ -2007,35 +1576,6 @@
"utility-types": "^3.10.0"
}
},
"node_modules/@parcel/types-internal": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/types-internal/-/types-internal-2.13.2.tgz",
"integrity": "sha512-j0zb3WNM8O/+d8CArll7/4w4AyBED3Jbo32/unz89EPVN0VklmgBrRCAI5QXDKuJAGdAZSL5/a8bNYbwl7/Wxw==",
"peer": true,
"dependencies": {
"@parcel/diagnostic": "2.13.2",
"@parcel/feature-flags": "2.13.2",
"@parcel/source-map": "^2.1.1",
"utility-types": "^3.10.0"
}
},
"node_modules/@parcel/types-internal/node_modules/@parcel/diagnostic": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/@parcel/diagnostic/-/diagnostic-2.13.2.tgz",
"integrity": "sha512-6Au0JEJ5SY2gYrY0/m0i0sTuqTvK0k2E9azhBJR+zzCREbUxLiDdLZ+vXAfLW7t/kPAcWtdNU0Bj7pnZcMiMXg==",
"peer": true,
"dependencies": {
"@mischnic/json-sourcemap": "^0.1.0",
"nullthrows": "^1.1.1"
},
"engines": {
"node": ">= 16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/parcel"
}
},
"node_modules/@parcel/utils": {
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/@parcel/utils/-/utils-2.12.0.tgz",
@@ -3193,15 +2733,6 @@
"url": "https://github.com/sponsors/wooorm"
}
},
"node_modules/base-x": {
"version": "3.0.10",
"resolved": "https://registry.npmjs.org/base-x/-/base-x-3.0.10.tgz",
"integrity": "sha512-7d0s06rR9rYaIWHkpfLIFICM/tkSVdoPC9qYAQRpxn9DdKNWNsKC0uk++akckyLq16Tx2WIinnZ6WRriAt6njQ==",
"peer": true,
"dependencies": {
"safe-buffer": "^5.0.1"
}
},
"node_modules/boolbase": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
@@ -3416,15 +2947,6 @@
"node": ">=4"
}
},
"node_modules/clone": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz",
"integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==",
"peer": true,
"engines": {
"node": ">=0.8"
}
},
"node_modules/clsx": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz",
@@ -4064,9 +3586,9 @@
}
},
"node_modules/docsgpt-react": {
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/docsgpt-react/-/docsgpt-react-0.4.11.tgz",
"integrity": "sha512-0n+SgC4wtBL+xV6sWMh8hzvfnDTvGQ+kqUM2bvblVqAWd041c4K1ZPZKpTu/099DLeF9Y8K06ACZYCoNsZO9AA==",
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/docsgpt-react/-/docsgpt-react-0.5.0.tgz",
"integrity": "sha512-5tDfFxBHG9432URaE8rQaYmBE8tbEUg74L85ykg/WbcoL84U3ixrt0tG7T0SfoTfxQT46H3afliYdv1rDmFGLw==",
"license": "Apache-2.0",
"dependencies": {
"@babel/plugin-transform-flow-strip-types": "^7.23.3",
@@ -4154,33 +3676,6 @@
"url": "https://github.com/fb55/domutils?sponsor=1"
}
},
"node_modules/dotenv": {
"version": "16.4.7",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
"integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==",
"peer": true,
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://dotenvx.com"
}
},
"node_modules/dotenv-expand": {
"version": "11.0.7",
"resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-11.0.7.tgz",
"integrity": "sha512-zIHwmZPRshsCdpMDyVsqGmgyP0yT8GAgXUnkdAoJisxvf33k7yO6OuoKmcTGuXPWSsm8Oh88nZicRLA9Y0rUeA==",
"peer": true,
"dependencies": {
"dotenv": "^16.4.5"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://dotenvx.com"
}
},
"node_modules/electron-to-chromium": {
"version": "1.4.693",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.693.tgz",
@@ -10114,26 +9609,6 @@
"node": ">=6"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"peer": true
},
"node_modules/safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
@@ -10478,19 +9953,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/typescript": {
"version": "5.7.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz",
"integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==",
"peer": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/uc.micro": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz",

View File

@@ -7,7 +7,7 @@
"license": "MIT",
"dependencies": {
"@vercel/analytics": "^1.1.1",
"docsgpt-react": "^0.4.11",
"docsgpt-react": "^0.5.0",
"next": "^14.2.22",
"nextra": "^2.13.2",
"nextra-theme-docs": "^2.13.2",

View File

@@ -84,11 +84,11 @@ There are two Ollama optional files:
**CPU:**
```bash
docker compose -f deployment/docker-compose.yaml -f deployment/optional/docker-compose.optional.ollama-cpu.yaml up -d
docker compose --env-file .env -f deployment/docker-compose.yaml -f deployment/optional/docker-compose.optional.ollama-cpu.yaml up -d
```
**GPU:**
```bash
docker compose -f deployment/docker-compose.yaml -f deployment/optional/docker-compose.optional.ollama-gpu.yaml up -d
docker compose --env-file .env -f deployment/docker-compose.yaml -f deployment/optional/docker-compose.optional.ollama-gpu.yaml up -d
```
3. **Pull the Ollama Model:**
@@ -132,4 +132,4 @@ Whenever you modify the `.env` file or any Docker Compose files, you need to res
## Further Configuration
This guide covers the basic Docker deployment of DocsGPT. For detailed information on configuring various aspects of DocsGPT, such as LLM providers, models, vector stores, and more, please refer to the comprehensive [DocsGPT Settings Guide](/Deploying/DocsGPT-Settings).
This guide covers the basic Docker deployment of DocsGPT. For detailed information on configuring various aspects of DocsGPT, such as LLM providers, models, vector stores, and more, please refer to the comprehensive [DocsGPT Settings Guide](/Deploying/DocsGPT-Settings).

View File

@@ -1,12 +1,12 @@
{
"name": "docsgpt",
"version": "0.4.9",
"version": "0.5.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "docsgpt",
"version": "0.4.9",
"version": "0.5.0",
"license": "Apache-2.0",
"dependencies": {
"@babel/plugin-transform-flow-strip-types": "^7.23.3",

View File

@@ -1,6 +1,6 @@
{
"name": "docsgpt",
"version": "0.4.9",
"version": "0.5.0",
"private": false,
"description": "DocsGPT 🦖 is an innovative open-source tool designed to simplify the retrieval of information from project documentation using advanced GPT models 🤖.",
"source": "./src/index.html",

View File

@@ -8,6 +8,15 @@ cp package-lock.json package-lock_original.json
# Store the latest version after publishing
LATEST_VERSION=""
# Check if a specific version was provided
if [ "$1" ]; then
VERSION_UPDATE_TYPE="$1"
echo "Using custom version update: $VERSION_UPDATE_TYPE"
else
VERSION_UPDATE_TYPE="patch"
echo "No version specified, defaulting to patch update"
fi
publish_package() {
PACKAGE_NAME=$1
BUILD_COMMAND=$2
@@ -34,27 +43,24 @@ publish_package() {
rm -rf dist
fi
# update version and store it
LATEST_VERSION=$(npm version patch)
# Update version based on input parameter or default to patch
if [[ "$VERSION_UPDATE_TYPE" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
# If full version number is provided (e.g., 0.5.0)
LATEST_VERSION=$(npm version "$VERSION_UPDATE_TYPE" --no-git-tag-version)
else
# If update type is provided (patch, minor, major)
LATEST_VERSION=$(npm version "$VERSION_UPDATE_TYPE" --no-git-tag-version)
fi
echo "New version: ${LATEST_VERSION}"
# Build package
npm run "$BUILD_COMMAND"
# Replace npm publish with npm pack for testing
# Publish package
npm publish
echo "Successfully packaged ${PACKAGE_NAME}"
# Log the bundle size
TARBALL="${PACKAGE_NAME}-${LATEST_VERSION#v}.tgz"
if [ -f "$TARBALL" ]; then
BUNDLE_SIZE=$(du -h "$TARBALL" | cut -f1)
echo "Bundle size for ${PACKAGE_NAME}: ${BUNDLE_SIZE}"
else
echo "Error: ${TARBALL} not found."
exit 1
fi
echo "Successfully published ${PACKAGE_NAME} version ${LATEST_VERSION}"
}
# First publish docsgpt (HTML bundle)
@@ -70,7 +76,7 @@ cp package-lock_original.json package-lock.json
# Update the version in the final package.json
jq --arg version "${LATEST_VERSION#v}" '.version=$version' package.json > temp.json && mv temp.json package.json
# Run npm install to update package-lock.json with the new version
# Run npm install to update package-lock-only
npm install --package-lock-only
# Cleanup backup files
@@ -81,5 +87,4 @@ rm -f temp.json
echo "---Process completed---"
echo "Final version in package.json: $(jq -r '.version' package.json)"
echo "Final version in package-lock.json: $(jq -r '.version' package-lock.json)"
echo "Generated test packages:"
ls *.tgz

View File

@@ -1,13 +1,13 @@
"use client";
import React, { useRef } from 'react'
import React, { useRef, useState, useEffect } from 'react'
import DOMPurify from 'dompurify';
import styled, { keyframes, css } from 'styled-components';
import { PaperPlaneIcon, RocketIcon, ExclamationTriangleIcon, Cross2Icon } from '@radix-ui/react-icons';
import { FEEDBACK, MESSAGE_TYPE, Query, Status, WidgetCoreProps, WidgetProps } from '../types/index';
import { fetchAnswerStreaming, sendFeedback } from '../requests/streamingApi';
import { ThemeProvider } from 'styled-components';
import Like from "../assets/like.svg"
import Dislike from "../assets/dislike.svg"
import Like from '../assets/like.svg';
import Dislike from '../assets/dislike.svg';
import MarkdownIt from 'markdown-it';
const themes = {
@@ -591,10 +591,10 @@ export const DocsGPTWidget = (props: WidgetProps) => {
</>
)
}
export const WidgetCore = ({
apiHost = 'https://gptcloud.arc53.com',
apiKey = "74039c6d-bff7-44ce-ae55-2973cbf13837",
//apiKey = '82962c9a-aa77-4152-94e5-a4f84fd44c6a',
apiKey = "82962c9a-aa77-4152-94e5-a4f84fd44c6a",
avatar = 'https://d3dg1063dc54p9.cloudfront.net/cute-docsgpt.png',
title = 'Get AI assistance',
description = 'DocsGPT\'s AI Chatbot is here to help',
@@ -614,8 +614,10 @@ export const WidgetCore = ({
const [queries, setQueries] = React.useState<Query[]>([]);
const [conversationId, setConversationId] = React.useState<string | null>(null);
const [eventInterrupt, setEventInterrupt] = React.useState<boolean>(false); //click or scroll by user while autoScrolling
const [hasScrolledToLast, setHasScrolledToLast] = useState(true);
const isBubbleHovered = useRef<boolean>(false);
const conversationRef = useRef<HTMLDivElement | null>(null);
const endMessageRef = React.useRef<HTMLDivElement | null>(null);
const md = new MarkdownIt();
@@ -632,55 +634,94 @@ export const WidgetCore = ({
}
}, [isOpen]);
const handleUserInterrupt = () => {
(status === 'loading') && setEventInterrupt(true);
if (!eventInterrupt && status === 'loading') setEventInterrupt(true);
}
const scrollToBottom = (element: Element | null) => {
//recursive function to scroll to the last child of the last child ...
// to get to the bottom most element
if (!element) return;
if (element?.children.length === 0) {
element?.scrollIntoView({
const scrollIntoView = () => {
if (!conversationRef?.current || eventInterrupt) return;
if (status === 'idle' || !queries.length || !queries[queries.length - 1].response) {
conversationRef.current.scrollTo({
behavior: 'smooth',
block: 'start',
top: conversationRef.current.scrollHeight,
});
} else {
conversationRef.current.scrollTop = conversationRef.current.scrollHeight;
}
const lastChild = element?.children?.[element.children.length - 1]
lastChild && scrollToBottom(lastChild)
setHasScrolledToLast(true);
};
const checkScroll = () => {
const el = conversationRef.current;
if (!el) return;
const isBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 10;
setHasScrolledToLast(isBottom);
};
React.useEffect(() => {
!eventInterrupt && scrollToBottom(endMessageRef.current);
!eventInterrupt && scrollIntoView();
conversationRef.current?.addEventListener('scroll', checkScroll);
return () => {
conversationRef.current?.removeEventListener('scroll', checkScroll);
};
}, [queries.length, queries[queries.length - 1]?.response]);
async function handleFeedback(feedback: FEEDBACK, index: number) {
let query = queries[index]
if (!query.response)
let query = queries[index];
if (!query.response || !conversationId) {
console.log("Cannot submit feedback: missing response or conversation ID");
return;
if (query.feedback != feedback) {
sendFeedback({
}
// If clicking the same feedback button that's already active, remove the feedback by sending null
if (query.feedback === feedback) {
try {
const response = await sendFeedback({
question: query.prompt,
answer: query.response,
feedback: null,
apikey: apiKey,
conversation_id: conversationId,
question_index: index,
}, apiHost);
if (response.status === 200) {
const updatedQuery = { ...query };
delete updatedQuery.feedback;
setQueries((prev: Query[]) =>
prev.map((q, i) => (i === index ? updatedQuery : q))
);
}
} catch (err) {
console.error("Failed to submit feedback:", err);
}
return;
}
try {
const response = await sendFeedback({
question: query.prompt,
answer: query.response,
feedback: feedback,
apikey: apiKey
}, apiHost)
.then(res => {
if (res.status == 200) {
query.feedback = feedback;
setQueries((prev: Query[]) => {
return prev.map((q, i) => (i === index ? query : q));
});
}
})
.catch(err => console.log("Connection failed", err))
}
else {
delete query.feedback;
setQueries((prev: Query[]) => {
return prev.map((q, i) => (i === index ? query : q));
});
apikey: apiKey,
conversation_id: conversationId,
question_index: index,
}, apiHost);
if (response.status === 200) {
setQueries((prev: Query[]) => {
return prev.map((q, i) => {
if (i === index) {
return { ...q, feedback: feedback };
}
return q;
});
});
}
} catch (err) {
console.error("Failed to submit feedback:", err);
}
}
@@ -777,7 +818,11 @@ export const WidgetCore = ({
</ContentWrapper>
</Header>
</div>
<Conversation onWheel={handleUserInterrupt} onTouchMove={handleUserInterrupt}>
<Conversation
ref={conversationRef}
onWheel={handleUserInterrupt}
onTouchMove={handleUserInterrupt}
>
{
queries.length > 0 ? queries?.map((query, index) => {
return (
@@ -808,20 +853,34 @@ export const WidgetCore = ({
{collectFeedback &&
<Feedback>
<button
style={{backgroundColor:'transparent', border:'none',cursor:'pointer'}}
onClick={(e) => {
e.stopPropagation()
handleFeedback("LIKE", index)}
}>
<Like
style={{
stroke: query.feedback == 'LIKE' ? '#8860DB' : '#c0c0c0',
visibility: query.feedback == 'LIKE' ? 'visible' : 'hidden'
}}
fill='none'
onClick={() => handleFeedback("LIKE", index)} />
/>
</button>
<button
style={{backgroundColor:'transparent', border:'none',cursor:'pointer'}}
onClick={(e) => {
e.stopPropagation()
handleFeedback("DISLIKE", index)}
}>
<Dislike
style={{
stroke: query.feedback == 'DISLIKE' ? '#ed8085' : '#c0c0c0',
visibility: query.feedback == 'DISLIKE' ? 'visible' : 'hidden'
}}
fill='none'
onClick={() => handleFeedback("DISLIKE", index)} />
/>
</button>
</Feedback>}
</MessageBubble>
: <div>

View File

@@ -15,11 +15,13 @@ interface FetchAnswerStreamingProps {
onEvent?: (event: MessageEvent) => void;
}
interface FeedbackPayload {
question: string;
answer: string;
apikey: string;
feedback: FEEDBACK;
export interface FeedbackPayload {
question?: string;
answer?: string;
feedback: string | null;
apikey?: string;
conversation_id: string;
question_index: number;
}
export function fetchAnswerStreaming({
@@ -94,7 +96,7 @@ export function fetchAnswerStreaming({
}
export const sendFeedback = (payload: FeedbackPayload,apiHost:string): Promise<Response> => {
export const sendFeedback = (payload: FeedbackPayload, apiHost: string): Promise<Response> => {
return fetch(`${apiHost}/api/feedback`, {
method: 'POST',
headers: {
@@ -104,7 +106,9 @@ export const sendFeedback = (payload: FeedbackPayload,apiHost:string): Promise<R
question: payload.question,
answer: payload.answer,
feedback: payload.feedback,
api_key:payload.apikey
api_key: payload.apikey,
conversation_id: payload.conversation_id,
question_index: payload.question_index
}),
});
};
};

View File

@@ -460,13 +460,11 @@ const ConversationBubble = forwardRef<
<>
<div
className={`relative mr-2 flex items-center justify-center ${
!isLikeClicked ? 'lg:invisible' : ''
} ${
feedback === 'LIKE' || type !== 'ERROR'
? 'group-hover:lg:visible'
: ''
}
${feedback === 'DISLIKE' && type !== 'ERROR' ? 'hidden' : ''}`}
feedback === 'LIKE' || isLikeClicked
? 'visible'
: 'lg:invisible'
} ${type !== 'ERROR' ? 'group-hover:lg:visible' : ''}
${feedback === 'DISLIKE' && type !== 'ERROR' ? 'hidden' : ''}`}
>
<div>
<div
@@ -500,14 +498,14 @@ const ConversationBubble = forwardRef<
</div>
</div>
</div>
<div
className={`mr-13 relative flex items-center justify-center ${
!isDislikeClicked ? 'lg:invisible' : ''
} ${
feedback === 'DISLIKE' || type !== 'ERROR'
? 'group-hover:lg:visible'
: ''
} ${feedback === 'LIKE' && type !== 'ERROR' ? ' hidden' : ''} `}
className={`relative mr-2 flex items-center justify-center ${
feedback === 'DISLIKE' || isLikeClicked
? 'visible'
: 'lg:invisible'
} ${type !== 'ERROR' ? 'group-hover:lg:visible' : ''}
${feedback === 'LIKE' && type !== 'ERROR' ? 'hidden' : ''}`}
>
<div>
<div

View File

@@ -262,6 +262,7 @@ export function handleFetchSharedAnswerStreaming( //for shared conversations
question: question,
history: JSON.stringify(history),
api_key: apiKey,
save_conversation: false,
};
conversationService
.answerStream(payload, signal)

View File

@@ -159,14 +159,9 @@ export const sharedConversationSlice = createSlice({
action: PayloadAction<{ index: number; query: Partial<Query> }>,
) {
const { index, query } = action.payload;
if (query.response != undefined) {
if (query.response !== undefined) {
state.queries[index].response =
(state.queries[index].response || '') + query.response;
} else {
state.queries[index] = {
...state.queries[index],
...query,
};
}
},
updateToolCalls(