diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5d620820..2ea8961f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,10 +1,8 @@ name: Build and push DocsGPT Docker image on: - workflow_dispatch: - push: - branches: - - main + release: + types: [published] jobs: deploy: @@ -43,5 +41,7 @@ jobs: context: ./application push: true tags: | - ${{ secrets.DOCKER_USERNAME }}/docsgpt:latest - ghcr.io/${{ github.repository_owner }}/docsgpt:latest + ${{ secrets.DOCKER_USERNAME }}/docsgpt:${{ github.event.release.tag_name }},${{ secrets.DOCKER_USERNAME }}/docsgpt:latest + ghcr.io/${{ github.repository_owner }}/docsgpt:${{ github.event.release.tag_name }},ghcr.io/${{ github.repository_owner }}/docsgpt:latest + cache-from: type=registry,ref=${{ secrets.DOCKER_USERNAME }}/docsgpt:latest + cache-to: type=inline diff --git a/.github/workflows/cife.yml b/.github/workflows/cife.yml index 67aadfbb..73a97755 100644 --- a/.github/workflows/cife.yml +++ b/.github/workflows/cife.yml @@ -1,10 +1,8 @@ name: Build and push DocsGPT-FE Docker image on: - workflow_dispatch: - push: - branches: - - main + release: + types: [published] jobs: deploy: @@ -44,5 +42,7 @@ jobs: context: ./frontend push: true tags: | - ${{ secrets.DOCKER_USERNAME }}/docsgpt-fe:latest - ghcr.io/${{ github.repository_owner }}/docsgpt-fe:latest + ${{ secrets.DOCKER_USERNAME }}/docsgpt-fe:${{ github.event.release.tag_name }},${{ secrets.DOCKER_USERNAME }}/docsgpt-fe:latest + ghcr.io/${{ github.repository_owner }}/docsgpt-fe:${{ github.event.release.tag_name }},ghcr.io/${{ github.repository_owner }}/docsgpt-fe:latest + cache-from: type=registry,ref=${{ secrets.DOCKER_USERNAME }}/docsgpt-fe:latest + cache-to: type=inline diff --git a/.github/workflows/docker-develop-build.yml b/.github/workflows/docker-develop-build.yml new file mode 100644 index 00000000..5edc69d7 --- /dev/null +++ b/.github/workflows/docker-develop-build.yml @@ -0,0 +1,49 @@ +name: Build and push DocsGPT 
Docker image for development + +on: + workflow_dispatch: + push: + branches: + - main + +jobs: + deploy: + if: github.repository == 'arc53/DocsGPT' + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - uses: actions/checkout@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Login to DockerHub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to ghcr.io + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push Docker images to docker.io and ghcr.io + uses: docker/build-push-action@v4 + with: + file: './application/Dockerfile' + platforms: linux/amd64 + context: ./application + push: true + tags: | + ${{ secrets.DOCKER_USERNAME }}/docsgpt:develop + ghcr.io/${{ github.repository_owner }}/docsgpt:develop + cache-from: type=registry,ref=${{ secrets.DOCKER_USERNAME }}/docsgpt:develop + cache-to: type=inline diff --git a/.github/workflows/docker-develop-fe-build.yml b/.github/workflows/docker-develop-fe-build.yml new file mode 100644 index 00000000..29ad4524 --- /dev/null +++ b/.github/workflows/docker-develop-fe-build.yml @@ -0,0 +1,49 @@ +name: Build and push DocsGPT FE Docker image for development + +on: + workflow_dispatch: + push: + branches: + - main + +jobs: + deploy: + if: github.repository == 'arc53/DocsGPT' + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - uses: actions/checkout@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Login to DockerHub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to ghcr.io + 
uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push Docker images to docker.io and ghcr.io + uses: docker/build-push-action@v4 + with: + file: './frontend/Dockerfile' + platforms: linux/amd64 + context: ./frontend + push: true + tags: | + ${{ secrets.DOCKER_USERNAME }}/docsgpt-fe:develop + ghcr.io/${{ github.repository_owner }}/docsgpt-fe:develop + cache-from: type=registry,ref=${{ secrets.DOCKER_USERNAME }}/docsgpt-fe:develop + cache-to: type=inline diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index f191efe3..c6615e56 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -6,7 +6,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.11"] steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -21,7 +21,7 @@ jobs: if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Test with pytest and generate coverage report run: | - python -m pytest --cov=application --cov=scripts --cov=extensions --cov-report=xml + python -m pytest --cov=application --cov-report=xml - name: Upload coverage reports to Codecov if: github.event_name == 'pull_request' && matrix.python-version == '3.11' uses: codecov/codecov-action@v3 diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..fc4b8128 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,16 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Docker Debug Frontend", + "request": "launch", + "type": "chrome", + "preLaunchTask": "docker-compose: debug:frontend", + "url": "http://127.0.0.1:5173", + "webRoot": "${workspaceFolder}/frontend", + "skipFiles": [ + "/**" + ] + } + ] +} \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 00000000..ea0974bd --- 
/dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,21 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "type": "docker-compose", + "label": "docker-compose: debug:frontend", + "dockerCompose": { + "up": { + "detached": true, + "services": [ + "frontend" + ], + "build": true + }, + "files": [ + "${workspaceFolder}/docker-compose.yaml" + ] + } + } + ] +} \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5200794b..1b0567e4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -6,7 +6,7 @@ Thank you for choosing to contribute to DocsGPT! We are all very grateful! 📣 **Discussions** - Engage in conversations, start new topics, or help answer questions. -🐞 **Issues** - This is where we keep track of tasks. It could be bugs,fixes or suggestions for new features. +🐞 **Issues** - This is where we keep track of tasks. It could be bugs, fixes or suggestions for new features. 🛠️ **Pull requests** - Suggest changes to our repository, either by working on existing issues or adding new features. @@ -21,8 +21,9 @@ Thank you for choosing to contribute to DocsGPT! We are all very grateful! - If you're interested in contributing code, here are some important things to know: - We have a frontend built on React (Vite) and a backend in Python. -======= -Before creating issues, please check out how the latest version of our app looks and works by launching it via [Quickstart](https://github.com/arc53/DocsGPT#quickstart) the version on our live demo is slightly modified with login. Your issues should relate to the version that you can launch via [Quickstart](https://github.com/arc53/DocsGPT#quickstart). + + +Before creating issues, please check out how the latest version of our app looks and works by launching it via [Quickstart](https://github.com/arc53/DocsGPT#quickstart) the version on our live demo is slightly modified with login. Your issues should relate to the version you can launch via [Quickstart](https://github.com/arc53/DocsGPT#quickstart). 
### 👨‍💻 If you're interested in contributing code, here are some important things to know: @@ -43,7 +44,7 @@ Please try to follow the guidelines. ### 🖥 If you are looking to contribute to Backend (🐍 Python): -- Review our issues and contribute to [`/application`](https://github.com/arc53/DocsGPT/tree/main/application) or [`/scripts`](https://github.com/arc53/DocsGPT/tree/main/scripts) (please disregard old [`ingest_rst.py`](https://github.com/arc53/DocsGPT/blob/main/scripts/old/ingest_rst.py) [`ingest_rst_sphinx.py`](https://github.com/arc53/DocsGPT/blob/main/scripts/old/ingest_rst_sphinx.py) files; they will be deprecated soon). +- Review our issues and contribute to [`/application`](https://github.com/arc53/DocsGPT/tree/main/application) or [`/scripts`](https://github.com/arc53/DocsGPT/tree/main/scripts) (please disregard old [`ingest_rst.py`](https://github.com/arc53/DocsGPT/blob/main/scripts/old/ingest_rst.py) [`ingest_rst_sphinx.py`](https://github.com/arc53/DocsGPT/blob/main/scripts/old/ingest_rst_sphinx.py) files; these will be deprecated soon). - All new code should be covered with unit tests ([pytest](https://github.com/pytest-dev/pytest)). Please find tests under [`/tests`](https://github.com/arc53/DocsGPT/tree/main/tests) folder. - Before submitting your Pull Request, ensure it can be queried after ingesting some test data. @@ -125,4 +126,4 @@ Thank you for considering contributing to DocsGPT! 🙏 ## Questions/collaboration Feel free to join our [Discord](https://discord.gg/n5BX8dh8rU). We're very friendly and welcoming to new contributors, so don't hesitate to reach out. -# Thank you so much for considering to contribute DocsGPT!🙏 +# Thank you so much for considering contributing to DocsGPT!🙏 diff --git a/HACKTOBERFEST.md b/HACKTOBERFEST.md new file mode 100644 index 00000000..8656bd84 --- /dev/null +++ b/HACKTOBERFEST.md @@ -0,0 +1,41 @@ +# **🎉 Join the Hacktoberfest with DocsGPT and win a Free T-shirt and other prizes! 🎉** + +Welcome, contributors! 
We're excited to announce that DocsGPT is participating in Hacktoberfest. Get involved by submitting meaningful pull requests. + +All contributors with accepted PRs will receive a cool Holopin! 🤩 (Watch out for a reply in your PR to collect it). + +### 🏆 Top 50 contributors will receive a special T-shirt + +### 🏆 [LLM Document analysis by LexEU competition](https://github.com/arc53/DocsGPT/blob/main/lexeu-competition.md): +A separate competition is available for those who submit new retrieval / workflow method that will analyze a Document using EU laws. +With 200$, 100$, 50$ prize for 1st, 2nd and 3rd place respectively. +You can find more information [here](https://github.com/arc53/DocsGPT/blob/main/lexeu-competition.md) + +## 📜 Here's How to Contribute: +```text +🛠️ Code: This is the golden ticket! Make meaningful contributions through PRs. + +🧩 API extension: Build an app utilising DocsGPT API. We prefer submissions that showcase original ideas and turn the API into an AI agent. +They can be a completely separate repos. +For example: +https://github.com/arc53/tg-bot-docsgpt-extenstion or +https://github.com/arc53/DocsGPT-cli + +Non-Code Contributions: + +📚 Wiki: Improve our documentation, create a guide or change existing documentation. + +🖥️ Design: Improve the UI/UX or design a new feature. + +📝 Blogging or Content Creation: Write articles or create videos to showcase DocsGPT or highlight your contributions! +``` + +### 📝 Guidelines for Pull Requests: +- Familiarize yourself with the current contributions and our [Roadmap](https://github.com/orgs/arc53/projects/2). +- Before contributing we highly advise that you check existing [issues](https://github.com/arc53/DocsGPT/issues) or [create](https://github.com/arc53/DocsGPT/issues/new/choose) an issue and wait to get assigned. +- Once you are finished with your contribution, please fill in this [form](https://airtable.com/appikMaJwdHhC1SDP/pagoblCJ9W29wf6Hf/form). 
+- Refer to the [Documentation](https://docs.docsgpt.cloud/). +- Feel free to join our [Discord](https://discord.gg/n5BX8dh8rU) server. We're here to help newcomers, so don't hesitate to jump in! Join us [here](https://discord.gg/n5BX8dh8rU). + +Thank you very much for considering contributing to DocsGPT during Hacktoberfest! 🙏 Your contributions (not just simple typos) could earn you a stylish new t-shirt and other prizes as a token of our appreciation. 🎁 Join us, and let's code together! 🚀 + diff --git a/README.md b/README.md index 7f7c56a5..ee9a1af6 100644 --- a/README.md +++ b/README.md @@ -23,14 +23,20 @@ Say goodbye to time-consuming manual searches, and let + Let's chat + -![video-example-of-docs-gpt](https://d3dg1063dc54p9.cloudfront.net/videos/demov3.gif) +[Send Email :email:](mailto:contact@arc53.com?subject=DocsGPT%20support%2Fsolutions) + + +video-example-of-docs-gpt ## Roadmap diff --git a/application/api/answer/routes.py b/application/api/answer/routes.py index a809b4ef..17eb5cc3 100644 --- a/application/api/answer/routes.py +++ b/application/api/answer/routes.py @@ -1,29 +1,38 @@ import asyncio +import datetime +import json +import logging import os import sys -from flask import Blueprint, request, Response, current_app -import json -import datetime -import logging import traceback -from pymongo import MongoClient +from bson.dbref import DBRef from bson.objectid import ObjectId +from flask import Blueprint, current_app, make_response, request, Response +from flask_restx import fields, Namespace, Resource + +from pymongo import MongoClient from application.core.settings import settings +from application.error import bad_request +from application.extensions import api from application.llm.llm_creator import LLMCreator from application.retriever.retriever_creator import RetrieverCreator -from application.error import bad_request +from application.utils import check_required_fields logger = logging.getLogger(__name__) mongo = 
MongoClient(settings.MONGO_URI) db = mongo["docsgpt"] conversations_collection = db["conversations"] -vectors_collection = db["vectors"] +sources_collection = db["sources"] prompts_collection = db["prompts"] api_key_collection = db["api_keys"] +user_logs_collection = db["user_logs"] + answer = Blueprint("answer", __name__) +answer_ns = Namespace("answer", description="Answer related operations", path="/") +api.add_namespace(answer_ns) gpt_model = "" # to have some kind of default behaviour @@ -31,6 +40,8 @@ if settings.LLM_NAME == "openai": gpt_model = "gpt-3.5-turbo" elif settings.LLM_NAME == "anthropic": gpt_model = "claude-2" +elif settings.LLM_NAME == "groq": + gpt_model = "llama3-8b-8192" if settings.MODEL_NAME: # in case there is particular model name configured gpt_model = settings.MODEL_NAME @@ -74,27 +85,29 @@ def run_async_chain(chain, question, chat_history): def get_data_from_api_key(api_key): data = api_key_collection.find_one({"key": api_key}) - # # Raise custom exception if the API key is not found if data is None: raise Exception("Invalid API Key, please generate new key", 401) + + if "retriever" not in data: + data["retriever"] = None + + if "source" in data and isinstance(data["source"], DBRef): + source_doc = db.dereference(data["source"]) + data["source"] = str(source_doc["_id"]) + if "retriever" in source_doc: + data["retriever"] = source_doc["retriever"] + else: + data["source"] = {} return data -def get_vectorstore(data): - if "active_docs" in data: - if data["active_docs"].split("/")[0] == "default": - vectorstore = "" - elif data["active_docs"].split("/")[0] == "local": - vectorstore = "indexes/" + data["active_docs"] - else: - vectorstore = "vectors/" + data["active_docs"] - if data["active_docs"] == "default": - vectorstore = "" - else: - vectorstore = "" - vectorstore = os.path.join("application", vectorstore) - return vectorstore +def get_retriever(source_id: str): + doc = sources_collection.find_one({"_id": ObjectId(source_id)}) + if 
doc is None: + raise Exception("Source document does not exist", 404) + retriever_name = None if "retriever" not in doc else doc["retriever"] + return retriever_name def is_azure_configured(): @@ -180,6 +193,13 @@ def complete_stream( response_full = "" source_log_docs = [] answer = retriever.gen() + sources = retriever.search() + for source in sources: + if "text" in source: + source["text"] = source["text"][:100].strip() + "..." + if len(sources) > 0: + data = json.dumps({"type": "source", "source": sources}) + yield f"data: {data}\n\n" for line in answer: if "answer" in line: response_full += str(line["answer"]) @@ -203,6 +223,20 @@ def complete_stream( data = json.dumps({"type": "id", "id": str(conversation_id)}) yield f"data: {data}\n\n" + retriever_params = retriever.get_params() + user_logs_collection.insert_one( + { + "action": "stream_answer", + "level": "info", + "user": "local", + "api_key": user_api_key, + "question": question, + "response": response_full, + "sources": source_log_docs, + "retriever_params": retriever_params, + "timestamp": datetime.datetime.now(datetime.timezone.utc), + } + ) data = json.dumps({"type": "end"}) yield f"data: {data}\n\n" except Exception as e: @@ -218,106 +252,134 @@ def complete_stream( return -@answer.route("/stream", methods=["POST"]) -def stream(): - try: - data = request.get_json() - question = data["question"] - if "history" not in data: - history = [] - else: - history = data["history"] - history = json.loads(history) - if "conversation_id" not in data: - conversation_id = None - else: - conversation_id = data["conversation_id"] - if "prompt_id" in data: - prompt_id = data["prompt_id"] - else: - prompt_id = "default" - if "selectedDocs" in data and data["selectedDocs"] is None: - chunks = 0 - elif "chunks" in data: - chunks = int(data["chunks"]) - else: - chunks = 2 - if "token_limit" in data: - token_limit = data["token_limit"] - else: - token_limit = settings.DEFAULT_MAX_HISTORY - - # check if active_docs or 
api_key is set - - if "api_key" in data: - data_key = get_data_from_api_key(data["api_key"]) - chunks = int(data_key["chunks"]) - prompt_id = data_key["prompt_id"] - source = {"active_docs": data_key["source"]} - user_api_key = data["api_key"] - elif "active_docs" in data: - source = {"active_docs": data["active_docs"]} - user_api_key = None - else: - source = {} - user_api_key = None - - if source["active_docs"].split("/")[0] in ["default", "local"]: - retriever_name = "classic" - else: - retriever_name = source["active_docs"] - - current_app.logger.info(f"/stream - request_data: {data}, source: {source}", - extra={"data": json.dumps({"request_data": data, "source": source})} - ) - - prompt = get_prompt(prompt_id) - - retriever = RetrieverCreator.create_retriever( - retriever_name, - question=question, - source=source, - chat_history=history, - prompt=prompt, - chunks=chunks, - token_limit=token_limit, - gpt_model=gpt_model, - user_api_key=user_api_key, - ) - - return Response( - complete_stream( - question=question, - retriever=retriever, - conversation_id=conversation_id, - user_api_key=user_api_key, - isNoneDoc=data.get("isNoneDoc"), +@answer_ns.route("/stream") +class Stream(Resource): + stream_model = api.model( + "StreamModel", + { + "question": fields.String( + required=True, description="Question to be asked" ), - mimetype="text/event-stream", - ) + "history": fields.List( + fields.String, required=False, description="Chat history" + ), + "conversation_id": fields.String( + required=False, description="Conversation ID" + ), + "prompt_id": fields.String( + required=False, default="default", description="Prompt ID" + ), + "selectedDocs": fields.String( + required=False, description="Selected documents" + ), + "chunks": fields.Integer( + required=False, default=2, description="Number of chunks" + ), + "token_limit": fields.Integer(required=False, description="Token limit"), + "retriever": fields.String(required=False, description="Retriever type"), + 
"api_key": fields.String(required=False, description="API key"), + "active_docs": fields.String( + required=False, description="Active documents" + ), + "isNoneDoc": fields.Boolean( + required=False, description="Flag indicating if no document is used" + ), + }, + ) - except ValueError: - message = "Malformed request body" - print("\033[91merr", str(message), file=sys.stderr) - return Response( - error_stream_generate(message), - status=400, - mimetype="text/event-stream", - ) - except Exception as e: - current_app.logger.error(f"/stream - error: {str(e)} - traceback: {traceback.format_exc()}", - extra={"error": str(e), "traceback": traceback.format_exc()} - ) - message = e.args[0] - status_code = 400 - # # Custom exceptions with two arguments, index 1 as status code - if len(e.args) >= 2: - status_code = e.args[1] - return Response( - error_stream_generate(message), - status=status_code, - mimetype="text/event-stream", - ) + @api.expect(stream_model) + @api.doc(description="Stream a response based on the question and retriever") + def post(self): + data = request.get_json() + required_fields = ["question"] + + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + try: + question = data["question"] + history = data.get("history", []) + history = json.loads(history) + conversation_id = data.get("conversation_id") + prompt_id = data.get("prompt_id", "default") + if "selectedDocs" in data and data["selectedDocs"] is None: + chunks = 0 + else: + chunks = int(data.get("chunks", 2)) + token_limit = data.get("token_limit", settings.DEFAULT_MAX_HISTORY) + retriever_name = data.get("retriever", "classic") + + if "api_key" in data: + data_key = get_data_from_api_key(data["api_key"]) + chunks = int(data_key.get("chunks", 2)) + prompt_id = data_key.get("prompt_id", "default") + source = {"active_docs": data_key.get("source")} + retriever_name = data_key.get("retriever", retriever_name) + user_api_key = data["api_key"] + 
+ elif "active_docs" in data: + source = {"active_docs": data["active_docs"]} + retriever_name = get_retriever(data["active_docs"]) or retriever_name + user_api_key = None + + else: + source = {} + user_api_key = None + + current_app.logger.info( + f"/stream - request_data: {data}, source: {source}", + extra={"data": json.dumps({"request_data": data, "source": source})}, + ) + + prompt = get_prompt(prompt_id) + + retriever = RetrieverCreator.create_retriever( + retriever_name, + question=question, + source=source, + chat_history=history, + prompt=prompt, + chunks=chunks, + token_limit=token_limit, + gpt_model=gpt_model, + user_api_key=user_api_key, + ) + + return Response( + complete_stream( + question=question, + retriever=retriever, + conversation_id=conversation_id, + user_api_key=user_api_key, + isNoneDoc=data.get("isNoneDoc"), + ), + mimetype="text/event-stream", + ) + + except ValueError: + message = "Malformed request body" + print("\033[91merr", str(message), file=sys.stderr) + return Response( + error_stream_generate(message), + status=400, + mimetype="text/event-stream", + ) + except Exception as e: + current_app.logger.error( + f"/stream - error: {str(e)} - traceback: {traceback.format_exc()}", + extra={"error": str(e), "traceback": traceback.format_exc()}, + ) + message = e.args[0] + status_code = 400 + # Custom exceptions with two arguments, index 1 as status code + if len(e.args) >= 2: + status_code = e.args[1] + return Response( + error_stream_generate(message), + status=status_code, + mimetype="text/event-stream", + ) def error_stream_generate(err_response): @@ -325,143 +387,235 @@ def error_stream_generate(err_response): yield f"data: {data}\n\n" -@answer.route("/api/answer", methods=["POST"]) -def api_answer(): - data = request.get_json() - question = data["question"] - if "history" not in data: - history = [] - else: - history = data["history"] - if "conversation_id" not in data: - conversation_id = None - else: - conversation_id = 
data["conversation_id"] - print("-" * 5) - if "prompt_id" in data: - prompt_id = data["prompt_id"] - else: - prompt_id = "default" - if "chunks" in data: - chunks = int(data["chunks"]) - else: - chunks = 2 - if "token_limit" in data: - token_limit = data["token_limit"] - else: - token_limit = settings.DEFAULT_MAX_HISTORY - - try: - # check if the vectorstore is set - if "api_key" in data: - data_key = get_data_from_api_key(data["api_key"]) - chunks = int(data_key["chunks"]) - prompt_id = data_key["prompt_id"] - source = {"active_docs": data_key["source"]} - user_api_key = data["api_key"] - else: - source = data - user_api_key = None - - if source["active_docs"].split("/")[0] in ["default", "local"]: - retriever_name = "classic" - else: - retriever_name = source["active_docs"] - - prompt = get_prompt(prompt_id) - - current_app.logger.info(f"/api/answer - request_data: {data}, source: {source}", - extra={"data": json.dumps({"request_data": data, "source": source})} - ) - - retriever = RetrieverCreator.create_retriever( - retriever_name, - question=question, - source=source, - chat_history=history, - prompt=prompt, - chunks=chunks, - token_limit=token_limit, - gpt_model=gpt_model, - user_api_key=user_api_key, - ) - source_log_docs = [] - response_full = "" - for line in retriever.gen(): - if "source" in line: - source_log_docs.append(line["source"]) - elif "answer" in line: - response_full += line["answer"] - - if data.get("isNoneDoc"): - for doc in source_log_docs: - doc["source"] = "None" - - llm = LLMCreator.create_llm( - settings.LLM_NAME, api_key=settings.API_KEY, user_api_key=user_api_key - ) - - result = {"answer": response_full, "sources": source_log_docs} - result["conversation_id"] = save_conversation( - conversation_id, question, response_full, source_log_docs, llm - ) - - return result - except Exception as e: - current_app.logger.error(f"/api/answer - error: {str(e)} - traceback: {traceback.format_exc()}", - extra={"error": str(e), "traceback": 
traceback.format_exc()} - ) - return bad_request(500, str(e)) - - -@answer.route("/api/search", methods=["POST"]) -def api_search(): - data = request.get_json() - question = data["question"] - if "chunks" in data: - chunks = int(data["chunks"]) - else: - chunks = 2 - if "api_key" in data: - data_key = get_data_from_api_key(data["api_key"]) - chunks = int(data_key["chunks"]) - source = {"active_docs": data_key["source"]} - user_api_key = data["api_key"] - elif "active_docs" in data: - source = {"active_docs": data["active_docs"]} - user_api_key = None - else: - source = {} - user_api_key = None - - if source["active_docs"].split("/")[0] in ["default", "local"]: - retriever_name = "classic" - else: - retriever_name = source["active_docs"] - if "token_limit" in data: - token_limit = data["token_limit"] - else: - token_limit = settings.DEFAULT_MAX_HISTORY - - current_app.logger.info(f"/api/answer - request_data: {data}, source: {source}", - extra={"data": json.dumps({"request_data": data, "source": source})} +@answer_ns.route("/api/answer") +class Answer(Resource): + answer_model = api.model( + "AnswerModel", + { + "question": fields.String( + required=True, description="The question to answer" + ), + "history": fields.List( + fields.String, required=False, description="Conversation history" + ), + "conversation_id": fields.String( + required=False, description="Conversation ID" + ), + "prompt_id": fields.String( + required=False, default="default", description="Prompt ID" + ), + "chunks": fields.Integer( + required=False, default=2, description="Number of chunks" + ), + "token_limit": fields.Integer(required=False, description="Token limit"), + "retriever": fields.String(required=False, description="Retriever type"), + "api_key": fields.String(required=False, description="API key"), + "active_docs": fields.String( + required=False, description="Active documents" + ), + "isNoneDoc": fields.Boolean( + required=False, description="Flag indicating if no document is used" 
+ ), + }, ) - retriever = RetrieverCreator.create_retriever( - retriever_name, - question=question, - source=source, - chat_history=[], - prompt="default", - chunks=chunks, - token_limit=token_limit, - gpt_model=gpt_model, - user_api_key=user_api_key, + @api.expect(answer_model) + @api.doc(description="Provide an answer based on the question and retriever") + def post(self): + data = request.get_json() + required_fields = ["question"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + try: + question = data["question"] + history = data.get("history", []) + conversation_id = data.get("conversation_id") + prompt_id = data.get("prompt_id", "default") + chunks = int(data.get("chunks", 2)) + token_limit = data.get("token_limit", settings.DEFAULT_MAX_HISTORY) + retriever_name = data.get("retriever", "classic") + + if "api_key" in data: + data_key = get_data_from_api_key(data["api_key"]) + chunks = int(data_key.get("chunks", 2)) + prompt_id = data_key.get("prompt_id", "default") + source = {"active_docs": data_key.get("source")} + retriever_name = data_key.get("retriever", retriever_name) + user_api_key = data["api_key"] + elif "active_docs" in data: + source = {"active_docs": data["active_docs"]} + retriever_name = get_retriever(data["active_docs"]) or retriever_name + user_api_key = None + else: + source = {} + user_api_key = None + + prompt = get_prompt(prompt_id) + + current_app.logger.info( + f"/api/answer - request_data: {data}, source: {source}", + extra={"data": json.dumps({"request_data": data, "source": source})}, + ) + + retriever = RetrieverCreator.create_retriever( + retriever_name, + question=question, + source=source, + chat_history=history, + prompt=prompt, + chunks=chunks, + token_limit=token_limit, + gpt_model=gpt_model, + user_api_key=user_api_key, + ) + + source_log_docs = [] + response_full = "" + for line in retriever.gen(): + if "source" in line: + source_log_docs.append(line["source"]) + 
elif "answer" in line: + response_full += line["answer"] + + if data.get("isNoneDoc"): + for doc in source_log_docs: + doc["source"] = "None" + + llm = LLMCreator.create_llm( + settings.LLM_NAME, api_key=settings.API_KEY, user_api_key=user_api_key + ) + + result = {"answer": response_full, "sources": source_log_docs} + result["conversation_id"] = str( + save_conversation( + conversation_id, question, response_full, source_log_docs, llm + ) + ) + retriever_params = retriever.get_params() + user_logs_collection.insert_one( + { + "action": "api_answer", + "level": "info", + "user": "local", + "api_key": user_api_key, + "question": question, + "response": response_full, + "sources": source_log_docs, + "retriever_params": retriever_params, + "timestamp": datetime.datetime.now(datetime.timezone.utc), + } + ) + + except Exception as e: + current_app.logger.error( + f"/api/answer - error: {str(e)} - traceback: {traceback.format_exc()}", + extra={"error": str(e), "traceback": traceback.format_exc()}, + ) + return bad_request(500, str(e)) + + return make_response(result, 200) + + +@answer_ns.route("/api/search") +class Search(Resource): + search_model = api.model( + "SearchModel", + { + "question": fields.String( + required=True, description="The question to search" + ), + "chunks": fields.Integer( + required=False, default=2, description="Number of chunks" + ), + "api_key": fields.String( + required=False, description="API key for authentication" + ), + "active_docs": fields.String( + required=False, description="Active documents for retrieval" + ), + "retriever": fields.String(required=False, description="Retriever type"), + "token_limit": fields.Integer( + required=False, description="Limit for tokens" + ), + "isNoneDoc": fields.Boolean( + required=False, description="Flag indicating if no document is used" + ), + }, ) - docs = retriever.search() - if data.get("isNoneDoc"): - for doc in docs: - doc["source"] = "None" + @api.expect(search_model) + @api.doc( + 
description="Search for relevant documents based on the question and retriever" + ) + def post(self): + data = request.get_json() + required_fields = ["question"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields - return docs + try: + question = data["question"] + chunks = int(data.get("chunks", 2)) + token_limit = data.get("token_limit", settings.DEFAULT_MAX_HISTORY) + retriever_name = data.get("retriever", "classic") + + if "api_key" in data: + data_key = get_data_from_api_key(data["api_key"]) + chunks = int(data_key.get("chunks", 2)) + source = {"active_docs": data_key.get("source")} + user_api_key = data["api_key"] + elif "active_docs" in data: + source = {"active_docs": data["active_docs"]} + user_api_key = None + else: + source = {} + user_api_key = None + + current_app.logger.info( + f"/api/answer - request_data: {data}, source: {source}", + extra={"data": json.dumps({"request_data": data, "source": source})}, + ) + + retriever = RetrieverCreator.create_retriever( + retriever_name, + question=question, + source=source, + chat_history=[], + prompt="default", + chunks=chunks, + token_limit=token_limit, + gpt_model=gpt_model, + user_api_key=user_api_key, + ) + + docs = retriever.search() + retriever_params = retriever.get_params() + + user_logs_collection.insert_one( + { + "action": "api_search", + "level": "info", + "user": "local", + "api_key": user_api_key, + "question": question, + "sources": docs, + "retriever_params": retriever_params, + "timestamp": datetime.datetime.now(datetime.timezone.utc), + } + ) + + if data.get("isNoneDoc"): + for doc in docs: + doc["source"] = "None" + + except Exception as e: + current_app.logger.error( + f"/api/search - error: {str(e)} - traceback: {traceback.format_exc()}", + extra={"error": str(e), "traceback": traceback.format_exc()}, + ) + return bad_request(500, str(e)) + + return make_response(docs, 200) diff --git a/application/api/internal/routes.py 
b/application/api/internal/routes.py index 6039ecdf..6ecb4346 100755 --- a/application/api/internal/routes.py +++ b/application/api/internal/routes.py @@ -3,18 +3,23 @@ import datetime from flask import Blueprint, request, send_from_directory from pymongo import MongoClient from werkzeug.utils import secure_filename - +from bson.objectid import ObjectId from application.core.settings import settings + mongo = MongoClient(settings.MONGO_URI) db = mongo["docsgpt"] conversations_collection = db["conversations"] -vectors_collection = db["vectors"] +sources_collection = db["sources"] -current_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +current_dir = os.path.dirname( + os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +) + + +internal = Blueprint("internal", __name__) -internal = Blueprint('internal', __name__) @internal.route("/api/download", methods=["get"]) def download_file(): user = secure_filename(request.args.get("user")) @@ -24,7 +29,6 @@ def download_file(): return send_from_directory(save_dir, filename, as_attachment=True) - @internal.route("/api/upload_index", methods=["POST"]) def upload_index_files(): """Upload two files(index.faiss, index.pkl) to the user's folder.""" @@ -35,7 +39,13 @@ def upload_index_files(): return {"status": "no name"} job_name = secure_filename(request.form["name"]) tokens = secure_filename(request.form["tokens"]) - save_dir = os.path.join(current_dir, "indexes", user, job_name) + retriever = secure_filename(request.form["retriever"]) + id = secure_filename(request.form["id"]) + type = secure_filename(request.form["type"]) + remote_data = request.form["remote_data"] if "remote_data" in request.form else None + sync_frequency = secure_filename(request.form["sync_frequency"]) if "sync_frequency" in request.form else None + + save_dir = os.path.join(current_dir, "indexes", str(id)) if settings.VECTOR_STORE == "faiss": if "file_faiss" not in request.files: print("No file part") @@ -50,22 
+60,45 @@ def upload_index_files(): if file_pkl.filename == "": return {"status": "no file name"} # saves index files - + if not os.path.exists(save_dir): os.makedirs(save_dir) file_faiss.save(os.path.join(save_dir, "index.faiss")) file_pkl.save(os.path.join(save_dir, "index.pkl")) - # create entry in vectors_collection - vectors_collection.insert_one( - { - "user": user, - "name": job_name, - "language": job_name, - "location": save_dir, - "date": datetime.datetime.now().strftime("%d/%m/%Y %H:%M:%S"), - "model": settings.EMBEDDINGS_NAME, - "type": "local", - "tokens": tokens - } - ) - return {"status": "ok"} \ No newline at end of file + + existing_entry = sources_collection.find_one({"_id": ObjectId(id)}) + if existing_entry: + sources_collection.update_one( + {"_id": ObjectId(id)}, + { + "$set": { + "user": user, + "name": job_name, + "language": job_name, + "date": datetime.datetime.now(), + "model": settings.EMBEDDINGS_NAME, + "type": type, + "tokens": tokens, + "retriever": retriever, + "remote_data": remote_data, + "sync_frequency": sync_frequency, + } + }, + ) + else: + sources_collection.insert_one( + { + "_id": ObjectId(id), + "user": user, + "name": job_name, + "language": job_name, + "date": datetime.datetime.now(), + "model": settings.EMBEDDINGS_NAME, + "type": type, + "tokens": tokens, + "retriever": retriever, + "remote_data": remote_data, + "sync_frequency": sync_frequency, + } + ) + return {"status": "ok"} diff --git a/application/api/user/routes.py b/application/api/user/routes.py index 91b90d6a..feee91cc 100644 --- a/application/api/user/routes.py +++ b/application/api/user/routes.py @@ -1,558 +1,916 @@ +import datetime import os -import uuid import shutil -from flask import Blueprint, request, jsonify -from urllib.parse import urlparse -import requests -from pymongo import MongoClient -from bson.objectid import ObjectId +import uuid + from bson.binary import Binary, UuidRepresentation -from werkzeug.utils import secure_filename from bson.dbref 
import DBRef +from bson.objectid import ObjectId +from flask import Blueprint, jsonify, make_response, request +from flask_restx import inputs, fields, Namespace, Resource +from pymongo import MongoClient +from werkzeug.utils import secure_filename + from application.api.user.tasks import ingest, ingest_remote from application.core.settings import settings +from application.extensions import api +from application.utils import check_required_fields from application.vectorstore.vector_creator import VectorCreator mongo = MongoClient(settings.MONGO_URI) db = mongo["docsgpt"] conversations_collection = db["conversations"] -vectors_collection = db["vectors"] +sources_collection = db["sources"] prompts_collection = db["prompts"] feedback_collection = db["feedback"] api_key_collection = db["api_keys"] +token_usage_collection = db["token_usage"] shared_conversations_collections = db["shared_conversations"] +user_logs_collection = db["user_logs"] user = Blueprint("user", __name__) +user_ns = Namespace("user", description="User related operations", path="/") +api.add_namespace(user_ns) current_dir = os.path.dirname( os.path.dirname(os.path.dirname(os.path.abspath(__file__))) ) -@user.route("/api/delete_conversation", methods=["POST"]) -def delete_conversation(): - # deletes a conversation from the database - conversation_id = request.args.get("id") - # write to mongodb - conversations_collection.delete_one( - { - "_id": ObjectId(conversation_id), - } +def generate_minute_range(start_date, end_date): + return { + (start_date + datetime.timedelta(minutes=i)).strftime("%Y-%m-%d %H:%M:00"): 0 + for i in range(int((end_date - start_date).total_seconds() // 60) + 1) + } + + +def generate_hourly_range(start_date, end_date): + return { + (start_date + datetime.timedelta(hours=i)).strftime("%Y-%m-%d %H:00"): 0 + for i in range(int((end_date - start_date).total_seconds() // 3600) + 1) + } + + +def generate_date_range(start_date, end_date): + return { + (start_date + 
datetime.timedelta(days=i)).strftime("%Y-%m-%d"): 0 + for i in range((end_date - start_date).days + 1) + } + + +@user_ns.route("/api/delete_conversation") +class DeleteConversation(Resource): + @api.doc( + description="Deletes a conversation by ID", + params={"id": "The ID of the conversation to delete"}, ) + def post(self): + conversation_id = request.args.get("id") + if not conversation_id: + return make_response( + jsonify({"success": False, "message": "ID is required"}), 400 + ) - return {"status": "ok"} - - -@user.route("/api/delete_all_conversations", methods=["GET"]) -def delete_all_conversations(): - user_id = "local" - conversations_collection.delete_many({"user": user_id}) - return {"status": "ok"} - - -@user.route("/api/get_conversations", methods=["get"]) -def get_conversations(): - # provides a list of conversations - conversations = conversations_collection.find().sort("date", -1).limit(30) - list_conversations = [] - for conversation in conversations: - list_conversations.append( - {"id": str(conversation["_id"]), "name": conversation["name"]} - ) - - # list_conversations = [{"id": "default", "name": "default"}, {"id": "jeff", "name": "jeff"}] - - return jsonify(list_conversations) - - -@user.route("/api/get_single_conversation", methods=["get"]) -def get_single_conversation(): - # provides data for a conversation - conversation_id = request.args.get("id") - conversation = conversations_collection.find_one({"_id": ObjectId(conversation_id)}) - return jsonify(conversation["queries"]) - - -@user.route("/api/update_conversation_name", methods=["POST"]) -def update_conversation_name(): - # update data for a conversation - data = request.get_json() - id = data["id"] - name = data["name"] - conversations_collection.update_one({"_id": ObjectId(id)}, {"$set": {"name": name}}) - return {"status": "ok"} - - -@user.route("/api/feedback", methods=["POST"]) -def api_feedback(): - data = request.get_json() - question = data["question"] - answer = data["answer"] - 
feedback = data["feedback"] - - feedback_collection.insert_one( - { - "question": question, - "answer": answer, - "feedback": feedback, - } - ) - return {"status": "ok"} - - -@user.route("/api/delete_by_ids", methods=["get"]) -def delete_by_ids(): - """Delete by ID. These are the IDs in the vectorstore""" - - ids = request.args.get("path") - if not ids: - return {"status": "error"} - - if settings.VECTOR_STORE == "faiss": - result = vectors_collection.delete_index(ids=ids) - if result: - return {"status": "ok"} - return {"status": "error"} - - -@user.route("/api/delete_old", methods=["get"]) -def delete_old(): - """Delete old indexes.""" - import shutil - - path = request.args.get("path") - dirs = path.split("/") - dirs_clean = [] - for i in range(0, len(dirs)): - dirs_clean.append(secure_filename(dirs[i])) - # check that path strats with indexes or vectors - - if dirs_clean[0] not in ["indexes", "vectors"]: - return {"status": "error"} - path_clean = "/".join(dirs_clean) - vectors_collection.delete_one({"name": dirs_clean[-1], "user": dirs_clean[-2]}) - if settings.VECTOR_STORE == "faiss": try: - shutil.rmtree(os.path.join(current_dir, path_clean)) + conversations_collection.delete_one({"_id": ObjectId(conversation_id)}) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + return make_response(jsonify({"success": True}), 200) + + +@user_ns.route("/api/delete_all_conversations") +class DeleteAllConversations(Resource): + @api.doc( + description="Deletes all conversations for a specific user", + ) + def get(self): + user_id = "local" + try: + conversations_collection.delete_many({"user": user_id}) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + return make_response(jsonify({"success": True}), 200) + + +@user_ns.route("/api/get_conversations") +class GetConversations(Resource): + @api.doc( + description="Retrieve a list of the latest 30 conversations", + ) + 
def get(self): + try: + conversations = conversations_collection.find().sort("date", -1).limit(30) + list_conversations = [ + {"id": str(conversation["_id"]), "name": conversation["name"]} + for conversation in conversations + ] + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + return make_response(jsonify(list_conversations), 200) + + +@user_ns.route("/api/get_single_conversation") +class GetSingleConversation(Resource): + @api.doc( + description="Retrieve a single conversation by ID", + params={"id": "The conversation ID"}, + ) + def get(self): + conversation_id = request.args.get("id") + if not conversation_id: + return make_response( + jsonify({"success": False, "message": "ID is required"}), 400 + ) + + try: + conversation = conversations_collection.find_one( + {"_id": ObjectId(conversation_id)} + ) + if not conversation: + return make_response(jsonify({"status": "not found"}), 404) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + return make_response(jsonify(conversation["queries"]), 200) + + +@user_ns.route("/api/update_conversation_name") +class UpdateConversationName(Resource): + @api.expect( + api.model( + "UpdateConversationModel", + { + "id": fields.String(required=True, description="Conversation ID"), + "name": fields.String( + required=True, description="New name of the conversation" + ), + }, + ) + ) + @api.doc( + description="Updates the name of a conversation", + ) + def post(self): + data = request.get_json() + required_fields = ["id", "name"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + try: + conversations_collection.update_one( + {"_id": ObjectId(data["id"])}, {"$set": {"name": data["name"]}} + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"success": True}), 200) + + 
+@user_ns.route("/api/feedback") +class SubmitFeedback(Resource): + @api.expect( + api.model( + "FeedbackModel", + { + "question": fields.String( + required=True, description="The user question" + ), + "answer": fields.String(required=True, description="The AI answer"), + "feedback": fields.String(required=True, description="User feedback"), + "api_key": fields.String(description="Optional API key"), + }, + ) + ) + @api.doc( + description="Submit feedback for a conversation", + ) + def post(self): + data = request.get_json() + required_fields = ["question", "answer", "feedback"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + new_doc = { + "question": data["question"], + "answer": data["answer"], + "feedback": data["feedback"], + "timestamp": datetime.datetime.now(datetime.timezone.utc), + } + + if "api_key" in data: + new_doc["api_key"] = data["api_key"] + + try: + feedback_collection.insert_one(new_doc) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"success": True}), 200) + + +@user_ns.route("/api/delete_by_ids") +class DeleteByIds(Resource): + @api.doc( + description="Deletes documents from the vector store by IDs", + params={"path": "Comma-separated list of IDs"}, + ) + def get(self): + ids = request.args.get("path") + if not ids: + return make_response( + jsonify({"success": False, "message": "Missing required fields"}), 400 + ) + + try: + result = sources_collection.delete_index(ids=ids) + if result: + return make_response(jsonify({"success": True}), 200) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"success": False}), 400) + + +@user_ns.route("/api/delete_old") +class DeleteOldIndexes(Resource): + @api.doc( + description="Deletes old indexes", + params={"source_id": "The source ID to delete"}, + ) + def get(self): 
+ source_id = request.args.get("source_id") + if not source_id: + return make_response( + jsonify({"success": False, "message": "Missing required fields"}), 400 + ) + + try: + doc = sources_collection.find_one( + {"_id": ObjectId(source_id), "user": "local"} + ) + if not doc: + return make_response(jsonify({"status": "not found"}), 404) + + if settings.VECTOR_STORE == "faiss": + shutil.rmtree(os.path.join(current_dir, "indexes", str(doc["_id"]))) + else: + vectorstore = VectorCreator.create_vectorstore( + settings.VECTOR_STORE, source_id=str(doc["_id"]) + ) + vectorstore.delete_index() + + sources_collection.delete_one({"_id": ObjectId(source_id)}) except FileNotFoundError: pass - else: - vetorstore = VectorCreator.create_vectorstore( - settings.VECTOR_STORE, path=os.path.join(current_dir, path_clean) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"success": True}), 200) + + +@user_ns.route("/api/upload") +class UploadFile(Resource): + @api.expect( + api.model( + "UploadModel", + { + "user": fields.String(required=True, description="User ID"), + "name": fields.String(required=True, description="Job name"), + "file": fields.Raw(required=True, description="File(s) to upload"), + }, ) - vetorstore.delete_index() - - return {"status": "ok"} - - -@user.route("/api/upload", methods=["POST"]) -def upload_file(): - """Upload a file to get vectorized and indexed.""" - if "user" not in request.form: - return {"status": "no user"} - user = secure_filename(request.form["user"]) - if "name" not in request.form: - return {"status": "no name"} - job_name = secure_filename(request.form["name"]) - # check if the post request has the file part - files = request.files.getlist("file") - - if not files or all(file.filename == "" for file in files): - return {"status": "no file name"} - - # Directory where files will be saved - save_dir = os.path.join(current_dir, settings.UPLOAD_FOLDER, user, 
job_name) - os.makedirs(save_dir, exist_ok=True) - - if len(files) > 1: - # Multiple files; prepare them for zip - temp_dir = os.path.join(save_dir, "temp") - os.makedirs(temp_dir, exist_ok=True) - - for file in files: - filename = secure_filename(file.filename) - file.save(os.path.join(temp_dir, filename)) - - # Use shutil.make_archive to zip the temp directory - zip_path = shutil.make_archive( - base_name=os.path.join(save_dir, job_name), format="zip", root_dir=temp_dir - ) - final_filename = os.path.basename(zip_path) - - # Clean up the temporary directory after zipping - shutil.rmtree(temp_dir) - else: - # Single file - file = files[0] - final_filename = secure_filename(file.filename) - file_path = os.path.join(save_dir, final_filename) - file.save(file_path) - - # Call ingest with the single file or zipped file - task = ingest.delay( - settings.UPLOAD_FOLDER, - [".rst", ".md", ".pdf", ".txt", ".docx", ".csv", ".epub", ".html", ".mdx"], - job_name, - final_filename, - user, ) + @api.doc( + description="Uploads a file to be vectorized and indexed", + ) + def post(self): + data = request.form + files = request.files.getlist("file") + required_fields = ["user", "name"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields or not files or all(file.filename == "" for file in files): + return make_response( + jsonify( + { + "status": "error", + "message": "Missing required fields or files", + } + ), + 400, + ) - return {"status": "ok", "task_id": task.id} + user = secure_filename(request.form["user"]) + job_name = secure_filename(request.form["name"]) + try: + save_dir = os.path.join(current_dir, settings.UPLOAD_FOLDER, user, job_name) + os.makedirs(save_dir, exist_ok=True) + if len(files) > 1: + temp_dir = os.path.join(save_dir, "temp") + os.makedirs(temp_dir, exist_ok=True) -@user.route("/api/remote", methods=["POST"]) -def upload_remote(): - """Upload a remote source to get vectorized and indexed.""" - if "user" not in request.form: - 
return {"status": "no user"} - user = secure_filename(request.form["user"]) - if "source" not in request.form: - return {"status": "no source"} - source = secure_filename(request.form["source"]) - if "name" not in request.form: - return {"status": "no name"} - job_name = secure_filename(request.form["name"]) - if "data" not in request.form: - print("No data") - return {"status": "no data"} - source_data = request.form["data"] + for file in files: + filename = secure_filename(file.filename) + file.save(os.path.join(temp_dir, filename)) - if source_data: - task = ingest_remote.delay( - source_data=source_data, job_name=job_name, user=user, loader=source - ) - task_id = task.id - return {"status": "ok", "task_id": task_id} - else: - return {"status": "error"} - - -@user.route("/api/task_status", methods=["GET"]) -def task_status(): - """Get celery job status.""" - task_id = request.args.get("task_id") - from application.celery_init import celery - - task = celery.AsyncResult(task_id) - task_meta = task.info - return {"status": task.status, "result": task_meta} - - -@user.route("/api/combine", methods=["GET"]) -def combined_json(): - user = "local" - """Provide json file with combined available indexes.""" - # get json from https://d3dg1063dc54p9.cloudfront.net/combined.json - - data = [ - { - "name": "default", - "language": "default", - "version": "", - "description": "default", - "fullName": "default", - "date": "default", - "docLink": "default", - "model": settings.EMBEDDINGS_NAME, - "location": "remote", - "tokens": "", - } - ] - # structure: name, language, version, description, fullName, date, docLink - # append data from vectors_collection in sorted order in descending order of date - for index in vectors_collection.find({"user": user}).sort("date", -1): - data.append( - { - "name": index["name"], - "language": index["language"], - "version": "", - "description": index["name"], - "fullName": index["name"], - "date": index["date"], - "docLink": 
index["location"], - "model": settings.EMBEDDINGS_NAME, - "location": "local", - "tokens": index["tokens"] if ("tokens" in index.keys()) else "", - } - ) - if settings.VECTOR_STORE == "faiss": - data_remote = requests.get( - "https://d3dg1063dc54p9.cloudfront.net/combined.json" - ).json() - for index in data_remote: - index["location"] = "remote" - data.append(index) - if "duckduck_search" in settings.RETRIEVERS_ENABLED: - data.append( - { - "name": "DuckDuckGo Search", - "language": "en", - "version": "", - "description": "duckduck_search", - "fullName": "DuckDuckGo Search", - "date": "duckduck_search", - "docLink": "duckduck_search", - "model": settings.EMBEDDINGS_NAME, - "location": "custom", - "tokens": "", - } - ) - if "brave_search" in settings.RETRIEVERS_ENABLED: - data.append( - { - "name": "Brave Search", - "language": "en", - "version": "", - "description": "brave_search", - "fullName": "Brave Search", - "date": "brave_search", - "docLink": "brave_search", - "model": settings.EMBEDDINGS_NAME, - "location": "custom", - "tokens": "", - } - ) - - return jsonify(data) - - -@user.route("/api/docs_check", methods=["POST"]) -def check_docs(): - # check if docs exist in a vectorstore folder - data = request.get_json() - # split docs on / and take first part - if data["docs"].split("/")[0] == "local": - return {"status": "exists"} - vectorstore = "vectors/" + secure_filename(data["docs"]) - base_path = "https://raw.githubusercontent.com/arc53/DocsHUB/main/" - if os.path.exists(vectorstore) or data["docs"] == "default": - return {"status": "exists"} - else: - file_url = urlparse(base_path + vectorstore + "index.faiss") - - if ( - file_url.scheme in ["https"] - and file_url.netloc == "raw.githubusercontent.com" - and file_url.path.startswith("/arc53/DocsHUB/main/") - ): - r = requests.get(file_url.geturl()) - if r.status_code != 200: - return {"status": "null"} + zip_path = shutil.make_archive( + base_name=os.path.join(save_dir, job_name), + format="zip", + 
root_dir=temp_dir, + ) + final_filename = os.path.basename(zip_path) + shutil.rmtree(temp_dir) else: - if not os.path.exists(vectorstore): - os.makedirs(vectorstore) - with open(vectorstore + "index.faiss", "wb") as f: - f.write(r.content) + file = files[0] + final_filename = secure_filename(file.filename) + file_path = os.path.join(save_dir, final_filename) + file.save(file_path) - r = requests.get(base_path + vectorstore + "index.pkl") - with open(vectorstore + "index.pkl", "wb") as f: - f.write(r.content) - else: - return {"status": "null"} + task = ingest.delay( + settings.UPLOAD_FOLDER, + [ + ".rst", + ".md", + ".pdf", + ".txt", + ".docx", + ".csv", + ".epub", + ".html", + ".mdx", + ], + job_name, + final_filename, + user, + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) - return {"status": "loaded"} + return make_response(jsonify({"success": True, "task_id": task.id}), 200) -@user.route("/api/create_prompt", methods=["POST"]) -def create_prompt(): - data = request.get_json() - content = data["content"] - name = data["name"] - if name == "": - return {"status": "error"} - user = "local" - resp = prompts_collection.insert_one( - { - "name": name, - "content": content, - "user": user, - } - ) - new_id = str(resp.inserted_id) - return {"id": new_id} - - -@user.route("/api/get_prompts", methods=["GET"]) -def get_prompts(): - user = "local" - prompts = prompts_collection.find({"user": user}) - list_prompts = [] - list_prompts.append({"id": "default", "name": "default", "type": "public"}) - list_prompts.append({"id": "creative", "name": "creative", "type": "public"}) - list_prompts.append({"id": "strict", "name": "strict", "type": "public"}) - for prompt in prompts: - list_prompts.append( - {"id": str(prompt["_id"]), "name": prompt["name"], "type": "private"} - ) - - return jsonify(list_prompts) - - -@user.route("/api/get_single_prompt", methods=["GET"]) -def get_single_prompt(): - prompt_id = 
request.args.get("id") - if prompt_id == "default": - with open( - os.path.join(current_dir, "prompts", "chat_combine_default.txt"), "r" - ) as f: - chat_combine_template = f.read() - return jsonify({"content": chat_combine_template}) - elif prompt_id == "creative": - with open( - os.path.join(current_dir, "prompts", "chat_combine_creative.txt"), "r" - ) as f: - chat_reduce_creative = f.read() - return jsonify({"content": chat_reduce_creative}) - elif prompt_id == "strict": - with open( - os.path.join(current_dir, "prompts", "chat_combine_strict.txt"), "r" - ) as f: - chat_reduce_strict = f.read() - return jsonify({"content": chat_reduce_strict}) - - prompt = prompts_collection.find_one({"_id": ObjectId(prompt_id)}) - return jsonify({"content": prompt["content"]}) - - -@user.route("/api/delete_prompt", methods=["POST"]) -def delete_prompt(): - data = request.get_json() - id = data["id"] - prompts_collection.delete_one( - { - "_id": ObjectId(id), - } - ) - return {"status": "ok"} - - -@user.route("/api/update_prompt", methods=["POST"]) -def update_prompt_name(): - data = request.get_json() - id = data["id"] - name = data["name"] - content = data["content"] - # check if name is null - if name == "": - return {"status": "error"} - prompts_collection.update_one( - {"_id": ObjectId(id)}, {"$set": {"name": name, "content": content}} - ) - return {"status": "ok"} - - -@user.route("/api/get_api_keys", methods=["GET"]) -def get_api_keys(): - user = "local" - keys = api_key_collection.find({"user": user}) - list_keys = [] - for key in keys: - list_keys.append( +@user_ns.route("/api/remote") +class UploadRemote(Resource): + @api.expect( + api.model( + "RemoteUploadModel", { - "id": str(key["_id"]), - "name": key["name"], - "key": key["key"][:4] + "..." 
+ key["key"][-4:], - "source": key["source"], - "prompt_id": key["prompt_id"], - "chunks": key["chunks"], + "user": fields.String(required=True, description="User ID"), + "source": fields.String( + required=True, description="Source of the data" + ), + "name": fields.String(required=True, description="Job name"), + "data": fields.String(required=True, description="Data to process"), + "repo_url": fields.String(description="GitHub repository URL"), + }, + ) + ) + @api.doc( + description="Uploads remote source for vectorization", + ) + def post(self): + data = request.form + required_fields = ["user", "source", "name", "data"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + try: + if "repo_url" in data: + source_data = data["repo_url"] + loader = "github" + else: + source_data = data["data"] + loader = data["source"] + + task = ingest_remote.delay( + source_data=source_data, + job_name=data["name"], + user=data["user"], + loader=loader, + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"success": True, "task_id": task.id}), 200) + + +@user_ns.route("/api/task_status") +class TaskStatus(Resource): + task_status_model = api.model( + "TaskStatusModel", + {"task_id": fields.String(required=True, description="Task ID")}, + ) + + @api.expect(task_status_model) + @api.doc(description="Get celery job status") + def get(self): + task_id = request.args.get("task_id") + if not task_id: + return make_response( + jsonify({"success": False, "message": "Task ID is required"}), 400 + ) + + try: + from application.celery_init import celery + + task = celery.AsyncResult(task_id) + task_meta = task.info + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"status": task.status, "result": task_meta}), 200) + + +@user_ns.route("/api/combine") +class 
CombinedJson(Resource): + @api.doc(description="Provide JSON file with combined available indexes") + def get(self): + user = "local" + data = [ + { + "name": "default", + "date": "default", + "model": settings.EMBEDDINGS_NAME, + "location": "remote", + "tokens": "", + "retriever": "classic", } - ) - return jsonify(list_keys) + ] + + try: + for index in sources_collection.find({"user": user}).sort("date", -1): + data.append( + { + "id": str(index["_id"]), + "name": index.get("name"), + "date": index.get("date"), + "model": settings.EMBEDDINGS_NAME, + "location": "local", + "tokens": index.get("tokens", ""), + "retriever": index.get("retriever", "classic"), + "syncFrequency": index.get("sync_frequency", ""), + } + ) + + if "duckduck_search" in settings.RETRIEVERS_ENABLED: + data.append( + { + "name": "DuckDuckGo Search", + "date": "duckduck_search", + "model": settings.EMBEDDINGS_NAME, + "location": "custom", + "tokens": "", + "retriever": "duckduck_search", + } + ) + + if "brave_search" in settings.RETRIEVERS_ENABLED: + data.append( + { + "name": "Brave Search", + "language": "en", + "date": "brave_search", + "model": settings.EMBEDDINGS_NAME, + "location": "custom", + "tokens": "", + "retriever": "brave_search", + } + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify(data), 200) -@user.route("/api/create_api_key", methods=["POST"]) -def create_api_key(): - data = request.get_json() - name = data["name"] - source = data["source"] - prompt_id = data["prompt_id"] - chunks = data["chunks"] - key = str(uuid.uuid4()) - user = "local" - resp = api_key_collection.insert_one( - { - "name": name, - "key": key, - "source": source, - "user": user, - "prompt_id": prompt_id, - "chunks": chunks, - } +@user_ns.route("/api/docs_check") +class CheckDocs(Resource): + check_docs_model = api.model( + "CheckDocsModel", + {"docs": fields.String(required=True, description="Document name")}, ) - new_id 
= str(resp.inserted_id) - return {"id": new_id, "key": key} - -@user.route("/api/delete_api_key", methods=["POST"]) -def delete_api_key(): - data = request.get_json() - id = data["id"] - api_key_collection.delete_one( - { - "_id": ObjectId(id), - } - ) - return {"status": "ok"} - - -# route to share conversation -##isPromptable should be passed through queries -@user.route("/api/share", methods=["POST"]) -def share_conversation(): - try: + @api.expect(check_docs_model) + @api.doc(description="Check if document exists") + def post(self): data = request.get_json() - user = "local" if "user" not in data else data["user"] - conversation_id = data["conversation_id"] - isPromptable = request.args.get("isPromptable").lower() == "true" + required_fields = ["docs"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields - conversation = conversations_collection.find_one( - {"_id": ObjectId(conversation_id)} - ) - current_n_queries = len(conversation["queries"]) + try: + vectorstore = "vectors/" + secure_filename(data["docs"]) + if os.path.exists(vectorstore) or data["docs"] == "default": + return {"status": "exists"}, 200 + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) - ##generate binary representation of uuid - explicit_binary = Binary.from_uuid(uuid.uuid4(), UuidRepresentation.STANDARD) + return make_response(jsonify({"status": "not found"}), 404) - if isPromptable: - source = "default" if "source" not in data else data["source"] - prompt_id = "default" if "prompt_id" not in data else data["prompt_id"] - chunks = "2" if "chunks" not in data else data["chunks"] - name = conversation["name"] + "(shared)" - pre_existing_api_document = api_key_collection.find_one( +@user_ns.route("/api/create_prompt") +class CreatePrompt(Resource): + create_prompt_model = api.model( + "CreatePromptModel", + { + "content": fields.String( + required=True, description="Content of the 
prompt" + ), + "name": fields.String(required=True, description="Name of the prompt"), + }, + ) + + @api.expect(create_prompt_model) + @api.doc(description="Create a new prompt") + def post(self): + data = request.get_json() + required_fields = ["content", "name"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + user = "local" + try: + + resp = prompts_collection.insert_one( { - "prompt_id": prompt_id, - "chunks": chunks, - "source": source, + "name": data["name"], + "content": data["content"], "user": user, } ) - api_uuid = str(uuid.uuid4()) - if pre_existing_api_document: - api_uuid = pre_existing_api_document["key"] - pre_existing = shared_conversations_collections.find_one( + new_id = str(resp.inserted_id) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"id": new_id}), 200) + + +@user_ns.route("/api/get_prompts") +class GetPrompts(Resource): + @api.doc(description="Get all prompts for the user") + def get(self): + user = "local" + try: + prompts = prompts_collection.find({"user": user}) + list_prompts = [ + {"id": "default", "name": "default", "type": "public"}, + {"id": "creative", "name": "creative", "type": "public"}, + {"id": "strict", "name": "strict", "type": "public"}, + ] + + for prompt in prompts: + list_prompts.append( { - "conversation_id": DBRef( - "conversations", ObjectId(conversation_id) - ), - "isPromptable": isPromptable, - "first_n_queries": current_n_queries, - "user": user, - "api_key": api_uuid, + "id": str(prompt["_id"]), + "name": prompt["name"], + "type": "private", } ) - if pre_existing is not None: - return ( - jsonify( - { - "success": True, - "identifier": str(pre_existing["uuid"].as_uuid()), - } - ), - 200, - ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify(list_prompts), 200) + + 
+@user_ns.route("/api/get_single_prompt") +class GetSinglePrompt(Resource): + @api.doc(params={"id": "ID of the prompt"}, description="Get a single prompt by ID") + def get(self): + prompt_id = request.args.get("id") + if not prompt_id: + return make_response( + jsonify({"success": False, "message": "ID is required"}), 400 + ) + + try: + if prompt_id == "default": + with open( + os.path.join(current_dir, "prompts", "chat_combine_default.txt"), + "r", + ) as f: + chat_combine_template = f.read() + return make_response(jsonify({"content": chat_combine_template}), 200) + + elif prompt_id == "creative": + with open( + os.path.join(current_dir, "prompts", "chat_combine_creative.txt"), + "r", + ) as f: + chat_reduce_creative = f.read() + return make_response(jsonify({"content": chat_reduce_creative}), 200) + + elif prompt_id == "strict": + with open( + os.path.join(current_dir, "prompts", "chat_combine_strict.txt"), "r" + ) as f: + chat_reduce_strict = f.read() + return make_response(jsonify({"content": chat_reduce_strict}), 200) + + prompt = prompts_collection.find_one({"_id": ObjectId(prompt_id)}) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"content": prompt["content"]}), 200) + + +@user_ns.route("/api/delete_prompt") +class DeletePrompt(Resource): + delete_prompt_model = api.model( + "DeletePromptModel", + {"id": fields.String(required=True, description="Prompt ID to delete")}, + ) + + @api.expect(delete_prompt_model) + @api.doc(description="Delete a prompt by ID") + def post(self): + data = request.get_json() + required_fields = ["id"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + try: + prompts_collection.delete_one({"_id": ObjectId(data["id"])}) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"success": True}), 200) + + 
+@user_ns.route("/api/update_prompt") +class UpdatePrompt(Resource): + update_prompt_model = api.model( + "UpdatePromptModel", + { + "id": fields.String(required=True, description="Prompt ID to update"), + "name": fields.String(required=True, description="New name of the prompt"), + "content": fields.String( + required=True, description="New content of the prompt" + ), + }, + ) + + @api.expect(update_prompt_model) + @api.doc(description="Update an existing prompt") + def post(self): + data = request.get_json() + required_fields = ["id", "name", "content"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + try: + prompts_collection.update_one( + {"_id": ObjectId(data["id"])}, + {"$set": {"name": data["name"], "content": data["content"]}}, + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"success": True}), 200) + + +@user_ns.route("/api/get_api_keys") +class GetApiKeys(Resource): + @api.doc(description="Retrieve API keys for the user") + def get(self): + user = "local" + try: + keys = api_key_collection.find({"user": user}) + list_keys = [] + for key in keys: + if "source" in key and isinstance(key["source"], DBRef): + source = db.dereference(key["source"]) + if source is None: + continue + source_name = source["name"] + elif "retriever" in key: + source_name = key["retriever"] else: + continue + + list_keys.append( + { + "id": str(key["_id"]), + "name": key["name"], + "key": key["key"][:4] + "..." 
+ key["key"][-4:], + "source": source_name, + "prompt_id": key["prompt_id"], + "chunks": key["chunks"], + } + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + return make_response(jsonify(list_keys), 200) + + +@user_ns.route("/api/create_api_key") +class CreateApiKey(Resource): + create_api_key_model = api.model( + "CreateApiKeyModel", + { + "name": fields.String(required=True, description="Name of the API key"), + "prompt_id": fields.String(required=True, description="Prompt ID"), + "chunks": fields.Integer(required=True, description="Chunks count"), + "source": fields.String(description="Source ID (optional)"), + "retriever": fields.String(description="Retriever (optional)"), + }, + ) + + @api.expect(create_api_key_model) + @api.doc(description="Create a new API key") + def post(self): + data = request.get_json() + required_fields = ["name", "prompt_id", "chunks"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + user = "local" + try: + key = str(uuid.uuid4()) + new_api_key = { + "name": data["name"], + "key": key, + "user": user, + "prompt_id": data["prompt_id"], + "chunks": data["chunks"], + } + if "source" in data and ObjectId.is_valid(data["source"]): + new_api_key["source"] = DBRef("sources", ObjectId(data["source"])) + if "retriever" in data: + new_api_key["retriever"] = data["retriever"] + + resp = api_key_collection.insert_one(new_api_key) + new_id = str(resp.inserted_id) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"id": new_id, "key": key}), 201) + + +@user_ns.route("/api/delete_api_key") +class DeleteApiKey(Resource): + delete_api_key_model = api.model( + "DeleteApiKeyModel", + {"id": fields.String(required=True, description="API Key ID to delete")}, + ) + + @api.expect(delete_api_key_model) + @api.doc(description="Delete an API key by ID") + 
def post(self): + data = request.get_json() + required_fields = ["id"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + try: + result = api_key_collection.delete_one({"_id": ObjectId(data["id"])}) + if result.deleted_count == 0: + return {"success": False, "message": "API Key not found"}, 404 + except Exception as err: + return {"success": False, "error": str(err)}, 400 + + return {"success": True}, 200 + + +@user_ns.route("/api/share") +class ShareConversation(Resource): + share_conversation_model = api.model( + "ShareConversationModel", + { + "conversation_id": fields.String( + required=True, description="Conversation ID" + ), + "user": fields.String(description="User ID (optional)"), + "prompt_id": fields.String(description="Prompt ID (optional)"), + "chunks": fields.Integer(description="Chunks count (optional)"), + }, + ) + + @api.expect(share_conversation_model) + @api.doc(description="Share a conversation") + def post(self): + data = request.get_json() + required_fields = ["conversation_id"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + is_promptable = request.args.get("isPromptable", type=inputs.boolean) + if is_promptable is None: + return make_response( + jsonify({"success": False, "message": "isPromptable is required"}), 400 + ) + + user = data.get("user", "local") + conversation_id = data["conversation_id"] + + try: + conversation = conversations_collection.find_one( + {"_id": ObjectId(conversation_id)} + ) + if conversation is None: + return make_response( + jsonify( + { + "status": "error", + "message": "Conversation does not exist", + } + ), + 404, + ) + + current_n_queries = len(conversation["queries"]) + explicit_binary = Binary.from_uuid( + uuid.uuid4(), UuidRepresentation.STANDARD + ) + + if is_promptable: + prompt_id = data.get("prompt_id", "default") + chunks = data.get("chunks", "2") + + name = conversation["name"] 
+ "(shared)" + new_api_key_data = { + "prompt_id": prompt_id, + "chunks": chunks, + "user": user, + } + + if "source" in data and ObjectId.is_valid(data["source"]): + new_api_key_data["source"] = DBRef( + "sources", ObjectId(data["source"]) + ) + if "retriever" in data: + new_api_key_data["retriever"] = data["retriever"] + + pre_existing_api_document = api_key_collection.find_one( + new_api_key_data + ) + if pre_existing_api_document: + api_uuid = pre_existing_api_document["key"] + pre_existing = shared_conversations_collections.find_one( + { + "conversation_id": DBRef( + "conversations", ObjectId(conversation_id) + ), + "isPromptable": is_promptable, + "first_n_queries": current_n_queries, + "user": user, + "api_key": api_uuid, + } + ) + if pre_existing is not None: + return make_response( + jsonify( + { + "success": True, + "identifier": str(pre_existing["uuid"].as_uuid()), + } + ), + 200, + ) + else: + shared_conversations_collections.insert_one( + { + "uuid": explicit_binary, + "conversation_id": { + "$ref": "conversations", + "$id": ObjectId(conversation_id), + }, + "isPromptable": is_promptable, + "first_n_queries": current_n_queries, + "user": user, + "api_key": api_uuid, + } + ) + return make_response( + jsonify( + { + "success": True, + "identifier": str(explicit_binary.as_uuid()), + } + ), + 201, + ) + else: + api_uuid = str(uuid.uuid4()) + new_api_key_data["key"] = api_uuid + new_api_key_data["name"] = name + + if "source" in data and ObjectId.is_valid(data["source"]): + new_api_key_data["source"] = DBRef( + "sources", ObjectId(data["source"]) + ) + if "retriever" in data: + new_api_key_data["retriever"] = data["retriever"] + + api_key_collection.insert_one(new_api_key_data) shared_conversations_collections.insert_one( { "uuid": explicit_binary, @@ -560,107 +918,98 @@ def share_conversation(): "$ref": "conversations", "$id": ObjectId(conversation_id), }, - "isPromptable": isPromptable, + "isPromptable": is_promptable, "first_n_queries": 
current_n_queries, "user": user, "api_key": api_uuid, } ) - return jsonify( - {"success": True, "identifier": str(explicit_binary.as_uuid())} + return make_response( + jsonify( + { + "success": True, + "identifier": str(explicit_binary.as_uuid()), + } + ), + 201, ) + + pre_existing = shared_conversations_collections.find_one( + { + "conversation_id": DBRef( + "conversations", ObjectId(conversation_id) + ), + "isPromptable": not is_promptable, + "first_n_queries": current_n_queries, + "user": user, + } + ) + if pre_existing is not None: + return make_response( + jsonify( + { + "success": True, + "identifier": str(pre_existing["uuid"].as_uuid()), + } + ), + 200, + ) else: - api_key_collection.insert_one( + shared_conversations_collections.insert_one( { - "name": name, - "key": api_uuid, - "source": source, + "uuid": explicit_binary, + "conversation_id": { + "$ref": "conversations", + "$id": ObjectId(conversation_id), + }, + "isPromptable": not is_promptable, + "first_n_queries": current_n_queries, "user": user, - "prompt_id": prompt_id, - "chunks": chunks, } ) - shared_conversations_collections.insert_one( - { - "uuid": explicit_binary, - "conversation_id": { - "$ref": "conversations", - "$id": ObjectId(conversation_id), - }, - "isPromptable": isPromptable, - "first_n_queries": current_n_queries, - "user": user, - "api_key": api_uuid, - } - ) - ## Identifier as route parameter in frontend - return ( - jsonify( - {"success": True, "identifier": str(explicit_binary.as_uuid())} - ), - 201, - ) - - ##isPromptable = False - pre_existing = shared_conversations_collections.find_one( - { - "conversation_id": DBRef("conversations", ObjectId(conversation_id)), - "isPromptable": isPromptable, - "first_n_queries": current_n_queries, - "user": user, - } - ) - if pre_existing is not None: - return ( - jsonify( - {"success": True, "identifier": str(pre_existing["uuid"].as_uuid())} - ), - 200, - ) - else: - shared_conversations_collections.insert_one( - { - "uuid": explicit_binary, 
- "conversation_id": { - "$ref": "conversations", - "$id": ObjectId(conversation_id), - }, - "isPromptable": isPromptable, - "first_n_queries": current_n_queries, - "user": user, - } - ) - ## Identifier as route parameter in frontend - return ( - jsonify( - {"success": True, "identifier": str(explicit_binary.as_uuid())} - ), - 201, - ) - except Exception as err: - print(err) - return jsonify({"success": False, "error": str(err)}), 400 + return make_response( + jsonify( + {"success": True, "identifier": str(explicit_binary.as_uuid())} + ), + 201, + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) -# route to get publicly shared conversations -@user.route("/api/shared_conversation/", methods=["GET"]) -def get_publicly_shared_conversations(identifier: str): - try: - query_uuid = Binary.from_uuid( - uuid.UUID(identifier), UuidRepresentation.STANDARD - ) - shared = shared_conversations_collections.find_one({"uuid": query_uuid}) - conversation_queries = [] - if ( - shared - and "conversation_id" in shared - and isinstance(shared["conversation_id"], DBRef) - ): - # Resolve the DBRef - conversation_ref = shared["conversation_id"] - conversation = db.dereference(conversation_ref) - if conversation is None: - return ( +@user_ns.route("/api/shared_conversation/") +class GetPubliclySharedConversations(Resource): + @api.doc(description="Get publicly shared conversations by identifier") + def get(self, identifier: str): + try: + query_uuid = Binary.from_uuid( + uuid.UUID(identifier), UuidRepresentation.STANDARD + ) + shared = shared_conversations_collections.find_one({"uuid": query_uuid}) + conversation_queries = [] + + if ( + shared + and "conversation_id" in shared + and isinstance(shared["conversation_id"], DBRef) + ): + conversation_ref = shared["conversation_id"] + conversation = db.dereference(conversation_ref) + if conversation is None: + return make_response( + jsonify( + { + "sucess": False, + "error": "might have 
broken url or the conversation does not exist", + } + ), + 404, + ) + conversation_queries = conversation["queries"][ + : (shared["first_n_queries"]) + ] + else: + return make_response( jsonify( { "sucess": False, @@ -669,31 +1018,646 @@ def get_publicly_shared_conversations(identifier: str): ), 404, ) - conversation_queries = conversation["queries"][ - : (shared["first_n_queries"]) - ] - for query in conversation_queries: - query.pop("sources") ## avoid exposing sources - else: - return ( - jsonify( - { - "sucess": False, - "error": "might have broken url or the conversation does not exist", - } - ), - 404, + date = conversation["_id"].generation_time.isoformat() + res = { + "success": True, + "queries": conversation_queries, + "title": conversation["name"], + "timestamp": date, + } + if shared["isPromptable"] and "api_key" in shared: + res["api_key"] = shared["api_key"] + return make_response(jsonify(res), 200) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + +@user_ns.route("/api/get_message_analytics") +class GetMessageAnalytics(Resource): + get_message_analytics_model = api.model( + "GetMessageAnalyticsModel", + { + "api_key_id": fields.String( + required=False, + description="API Key ID", + ), + "filter_option": fields.String( + required=False, + description="Filter option for analytics", + default="last_30_days", + enum=[ + "last_hour", + "last_24_hour", + "last_7_days", + "last_15_days", + "last_30_days", + ], + ), + }, + ) + + @api.expect(get_message_analytics_model) + @api.doc(description="Get message analytics based on filter option") + def post(self): + data = request.get_json() + api_key_id = data.get("api_key_id") + filter_option = data.get("filter_option", "last_30_days") + + try: + api_key = ( + api_key_collection.find_one({"_id": ObjectId(api_key_id)})["key"] + if api_key_id + else None ) - date = conversation["_id"].generation_time.isoformat() - res = { - "success": True, - "queries": 
conversation_queries, - "title": conversation["name"], - "timestamp": date, - } - if shared["isPromptable"] and "api_key" in shared: - res["api_key"] = shared["api_key"] - return jsonify(res), 200 - except Exception as err: - print(err) - return jsonify({"success": False, "error": str(err)}), 400 + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + end_date = datetime.datetime.now(datetime.timezone.utc) + + if filter_option == "last_hour": + start_date = end_date - datetime.timedelta(hours=1) + group_format = "%Y-%m-%d %H:%M:00" + group_stage = { + "$group": { + "_id": { + "minute": { + "$dateToString": {"format": group_format, "date": "$date"} + } + }, + "total_messages": {"$sum": 1}, + } + } + + elif filter_option == "last_24_hour": + start_date = end_date - datetime.timedelta(hours=24) + group_format = "%Y-%m-%d %H:00" + group_stage = { + "$group": { + "_id": { + "hour": { + "$dateToString": {"format": group_format, "date": "$date"} + } + }, + "total_messages": {"$sum": 1}, + } + } + + else: + if filter_option in ["last_7_days", "last_15_days", "last_30_days"]: + filter_days = ( + 6 + if filter_option == "last_7_days" + else (14 if filter_option == "last_15_days" else 29) + ) + else: + return make_response( + jsonify({"success": False, "message": "Invalid option"}), 400 + ) + start_date = end_date - datetime.timedelta(days=filter_days) + start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0) + end_date = end_date.replace( + hour=23, minute=59, second=59, microsecond=999999 + ) + group_format = "%Y-%m-%d" + group_stage = { + "$group": { + "_id": { + "day": { + "$dateToString": {"format": group_format, "date": "$date"} + } + }, + "total_messages": {"$sum": 1}, + } + } + + try: + match_stage = { + "$match": { + "date": {"$gte": start_date, "$lte": end_date}, + } + } + if api_key: + match_stage["$match"]["api_key"] = api_key + message_data = conversations_collection.aggregate( + [ + match_stage, 
+ group_stage, + {"$sort": {"_id": 1}}, + ] + ) + + if filter_option == "last_hour": + intervals = generate_minute_range(start_date, end_date) + elif filter_option == "last_24_hour": + intervals = generate_hourly_range(start_date, end_date) + else: + intervals = generate_date_range(start_date, end_date) + + daily_messages = {interval: 0 for interval in intervals} + + for entry in message_data: + if filter_option == "last_hour": + daily_messages[entry["_id"]["minute"]] = entry["total_messages"] + elif filter_option == "last_24_hour": + daily_messages[entry["_id"]["hour"]] = entry["total_messages"] + else: + daily_messages[entry["_id"]["day"]] = entry["total_messages"] + + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response( + jsonify({"success": True, "messages": daily_messages}), 200 + ) + + +@user_ns.route("/api/get_token_analytics") +class GetTokenAnalytics(Resource): + get_token_analytics_model = api.model( + "GetTokenAnalyticsModel", + { + "api_key_id": fields.String(required=False, description="API Key ID"), + "filter_option": fields.String( + required=False, + description="Filter option for analytics", + default="last_30_days", + enum=[ + "last_hour", + "last_24_hour", + "last_7_days", + "last_15_days", + "last_30_days", + ], + ), + }, + ) + + @api.expect(get_token_analytics_model) + @api.doc(description="Get token analytics data") + def post(self): + data = request.get_json() + api_key_id = data.get("api_key_id") + filter_option = data.get("filter_option", "last_30_days") + + try: + api_key = ( + api_key_collection.find_one({"_id": ObjectId(api_key_id)})["key"] + if api_key_id + else None + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + end_date = datetime.datetime.now(datetime.timezone.utc) + + if filter_option == "last_hour": + start_date = end_date - datetime.timedelta(hours=1) + group_format = "%Y-%m-%d %H:%M:00" + 
group_stage = { + "$group": { + "_id": { + "minute": { + "$dateToString": { + "format": group_format, + "date": "$timestamp", + } + } + }, + "total_tokens": { + "$sum": {"$add": ["$prompt_tokens", "$generated_tokens"]} + }, + } + } + + elif filter_option == "last_24_hour": + start_date = end_date - datetime.timedelta(hours=24) + group_format = "%Y-%m-%d %H:00" + group_stage = { + "$group": { + "_id": { + "hour": { + "$dateToString": { + "format": group_format, + "date": "$timestamp", + } + } + }, + "total_tokens": { + "$sum": {"$add": ["$prompt_tokens", "$generated_tokens"]} + }, + } + } + + else: + if filter_option in ["last_7_days", "last_15_days", "last_30_days"]: + filter_days = ( + 6 + if filter_option == "last_7_days" + else (14 if filter_option == "last_15_days" else 29) + ) + else: + return make_response( + jsonify({"success": False, "message": "Invalid option"}), 400 + ) + start_date = end_date - datetime.timedelta(days=filter_days) + start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0) + end_date = end_date.replace( + hour=23, minute=59, second=59, microsecond=999999 + ) + group_format = "%Y-%m-%d" + group_stage = { + "$group": { + "_id": { + "day": { + "$dateToString": { + "format": group_format, + "date": "$timestamp", + } + } + }, + "total_tokens": { + "$sum": {"$add": ["$prompt_tokens", "$generated_tokens"]} + }, + } + } + + try: + match_stage = { + "$match": { + "timestamp": {"$gte": start_date, "$lte": end_date}, + } + } + if api_key: + match_stage["$match"]["api_key"] = api_key + + token_usage_data = token_usage_collection.aggregate( + [ + match_stage, + group_stage, + {"$sort": {"_id": 1}}, + ] + ) + + if filter_option == "last_hour": + intervals = generate_minute_range(start_date, end_date) + elif filter_option == "last_24_hour": + intervals = generate_hourly_range(start_date, end_date) + else: + intervals = generate_date_range(start_date, end_date) + + daily_token_usage = {interval: 0 for interval in intervals} + + for 
entry in token_usage_data: + if filter_option == "last_hour": + daily_token_usage[entry["_id"]["minute"]] = entry["total_tokens"] + elif filter_option == "last_24_hour": + daily_token_usage[entry["_id"]["hour"]] = entry["total_tokens"] + else: + daily_token_usage[entry["_id"]["day"]] = entry["total_tokens"] + + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response( + jsonify({"success": True, "token_usage": daily_token_usage}), 200 + ) + + +@user_ns.route("/api/get_feedback_analytics") +class GetFeedbackAnalytics(Resource): + get_feedback_analytics_model = api.model( + "GetFeedbackAnalyticsModel", + { + "api_key_id": fields.String(required=False, description="API Key ID"), + "filter_option": fields.String( + required=False, + description="Filter option for analytics", + default="last_30_days", + enum=[ + "last_hour", + "last_24_hour", + "last_7_days", + "last_15_days", + "last_30_days", + ], + ), + }, + ) + + @api.expect(get_feedback_analytics_model) + @api.doc(description="Get feedback analytics data") + def post(self): + data = request.get_json() + api_key_id = data.get("api_key_id") + filter_option = data.get("filter_option", "last_30_days") + + try: + api_key = ( + api_key_collection.find_one({"_id": ObjectId(api_key_id)})["key"] + if api_key_id + else None + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + end_date = datetime.datetime.now(datetime.timezone.utc) + + if filter_option == "last_hour": + start_date = end_date - datetime.timedelta(hours=1) + group_format = "%Y-%m-%d %H:%M:00" + group_stage_1 = { + "$group": { + "_id": { + "minute": { + "$dateToString": { + "format": group_format, + "date": "$timestamp", + } + }, + "feedback": "$feedback", + }, + "count": {"$sum": 1}, + } + } + group_stage_2 = { + "$group": { + "_id": "$_id.minute", + "likes": { + "$sum": { + "$cond": [ + {"$eq": ["$_id.feedback", "LIKE"]}, + "$count", + 
0, + ] + } + }, + "dislikes": { + "$sum": { + "$cond": [ + {"$eq": ["$_id.feedback", "DISLIKE"]}, + "$count", + 0, + ] + } + }, + } + } + + elif filter_option == "last_24_hour": + start_date = end_date - datetime.timedelta(hours=24) + group_format = "%Y-%m-%d %H:00" + group_stage_1 = { + "$group": { + "_id": { + "hour": { + "$dateToString": { + "format": group_format, + "date": "$timestamp", + } + }, + "feedback": "$feedback", + }, + "count": {"$sum": 1}, + } + } + group_stage_2 = { + "$group": { + "_id": "$_id.hour", + "likes": { + "$sum": { + "$cond": [ + {"$eq": ["$_id.feedback", "LIKE"]}, + "$count", + 0, + ] + } + }, + "dislikes": { + "$sum": { + "$cond": [ + {"$eq": ["$_id.feedback", "DISLIKE"]}, + "$count", + 0, + ] + } + }, + } + } + + else: + if filter_option in ["last_7_days", "last_15_days", "last_30_days"]: + filter_days = ( + 6 + if filter_option == "last_7_days" + else (14 if filter_option == "last_15_days" else 29) + ) + else: + return make_response( + jsonify({"success": False, "message": "Invalid option"}), 400 + ) + start_date = end_date - datetime.timedelta(days=filter_days) + start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0) + end_date = end_date.replace( + hour=23, minute=59, second=59, microsecond=999999 + ) + group_format = "%Y-%m-%d" + group_stage_1 = { + "$group": { + "_id": { + "day": { + "$dateToString": { + "format": group_format, + "date": "$timestamp", + } + }, + "feedback": "$feedback", + }, + "count": {"$sum": 1}, + } + } + group_stage_2 = { + "$group": { + "_id": "$_id.day", + "likes": { + "$sum": { + "$cond": [ + {"$eq": ["$_id.feedback", "LIKE"]}, + "$count", + 0, + ] + } + }, + "dislikes": { + "$sum": { + "$cond": [ + {"$eq": ["$_id.feedback", "DISLIKE"]}, + "$count", + 0, + ] + } + }, + } + } + + try: + match_stage = { + "$match": { + "timestamp": {"$gte": start_date, "$lte": end_date}, + } + } + if api_key: + match_stage["$match"]["api_key"] = api_key + + feedback_data = feedback_collection.aggregate( + 
[ + match_stage, + group_stage_1, + group_stage_2, + {"$sort": {"_id": 1}}, + ] + ) + + if filter_option == "last_hour": + intervals = generate_minute_range(start_date, end_date) + elif filter_option == "last_24_hour": + intervals = generate_hourly_range(start_date, end_date) + else: + intervals = generate_date_range(start_date, end_date) + + daily_feedback = { + interval: {"positive": 0, "negative": 0} for interval in intervals + } + + for entry in feedback_data: + daily_feedback[entry["_id"]] = { + "positive": entry["likes"], + "negative": entry["dislikes"], + } + + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response( + jsonify({"success": True, "feedback": daily_feedback}), 200 + ) + + +@user_ns.route("/api/get_user_logs") +class GetUserLogs(Resource): + get_user_logs_model = api.model( + "GetUserLogsModel", + { + "page": fields.Integer( + required=False, + description="Page number for pagination", + default=1, + ), + "api_key_id": fields.String(required=False, description="API Key ID"), + "page_size": fields.Integer( + required=False, + description="Number of logs per page", + default=10, + ), + }, + ) + + @api.expect(get_user_logs_model) + @api.doc(description="Get user logs with pagination") + def post(self): + data = request.get_json() + page = int(data.get("page", 1)) + api_key_id = data.get("api_key_id") + page_size = int(data.get("page_size", 10)) + skip = (page - 1) * page_size + + try: + api_key = ( + api_key_collection.find_one({"_id": ObjectId(api_key_id)})["key"] + if api_key_id + else None + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + query = {} + if api_key: + query = {"api_key": api_key} + + items_cursor = ( + user_logs_collection.find(query) + .sort("timestamp", -1) + .skip(skip) + .limit(page_size + 1) + ) + items = list(items_cursor) + + results = [ + { + "id": str(item.get("_id")), + "action": 
item.get("action"), + "level": item.get("level"), + "user": item.get("user"), + "question": item.get("question"), + "sources": item.get("sources"), + "retriever_params": item.get("retriever_params"), + "timestamp": item.get("timestamp"), + } + for item in items[:page_size] + ] + + has_more = len(items) > page_size + + return make_response( + jsonify( + { + "success": True, + "logs": results, + "page": page, + "page_size": page_size, + "has_more": has_more, + } + ), + 200, + ) + + +@user_ns.route("/api/manage_sync") +class ManageSync(Resource): + manage_sync_model = api.model( + "ManageSyncModel", + { + "source_id": fields.String(required=True, description="Source ID"), + "sync_frequency": fields.String( + required=True, + description="Sync frequency (never, daily, weekly, monthly)", + ), + }, + ) + + @api.expect(manage_sync_model) + @api.doc(description="Manage sync frequency for sources") + def post(self): + data = request.get_json() + required_fields = ["source_id", "sync_frequency"] + missing_fields = check_required_fields(data, required_fields) + if missing_fields: + return missing_fields + + source_id = data["source_id"] + sync_frequency = data["sync_frequency"] + + if sync_frequency not in ["never", "daily", "weekly", "monthly"]: + return make_response( + jsonify({"success": False, "message": "Invalid frequency"}), 400 + ) + + update_data = {"$set": {"sync_frequency": sync_frequency}} + try: + sources_collection.update_one( + { + "_id": ObjectId(source_id), + "user": "local", + }, + update_data, + ) + except Exception as err: + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + return make_response(jsonify({"success": True}), 200) diff --git a/application/api/user/tasks.py b/application/api/user/tasks.py index 862b6dcd..73ad716e 100644 --- a/application/api/user/tasks.py +++ b/application/api/user/tasks.py @@ -1,12 +1,38 @@ -from application.worker import ingest_worker, remote_worker +from datetime import timedelta + from 
application.celery_init import celery +from application.worker import ingest_worker, remote_worker, sync_worker + @celery.task(bind=True) def ingest(self, directory, formats, name_job, filename, user): resp = ingest_worker(self, directory, formats, name_job, filename, user) return resp + @celery.task(bind=True) def ingest_remote(self, source_data, job_name, user, loader): resp = remote_worker(self, source_data, job_name, user, loader) return resp + + +@celery.task(bind=True) +def schedule_syncs(self, frequency): + resp = sync_worker(self, frequency) + return resp + + +@celery.on_after_configure.connect +def setup_periodic_tasks(sender, **kwargs): + sender.add_periodic_task( + timedelta(days=1), + schedule_syncs.s("daily"), + ) + sender.add_periodic_task( + timedelta(weeks=1), + schedule_syncs.s("weekly"), + ) + sender.add_periodic_task( + timedelta(days=30), + schedule_syncs.s("monthly"), + ) diff --git a/application/app.py b/application/app.py index 87d9d42f..d7727001 100644 --- a/application/app.py +++ b/application/app.py @@ -1,15 +1,19 @@ import platform + import dotenv -from application.celery_init import celery -from flask import Flask, request, redirect -from application.core.settings import settings -from application.api.user.routes import user +from flask import Flask, redirect, request + from application.api.answer.routes import answer from application.api.internal.routes import internal +from application.api.user.routes import user +from application.celery_init import celery from application.core.logging_config import setup_logging +from application.core.settings import settings +from application.extensions import api if platform.system() == "Windows": import pathlib + pathlib.PosixPath = pathlib.WindowsPath dotenv.load_dotenv() @@ -23,16 +27,19 @@ app.config.update( UPLOAD_FOLDER="inputs", CELERY_BROKER_URL=settings.CELERY_BROKER_URL, CELERY_RESULT_BACKEND=settings.CELERY_RESULT_BACKEND, - MONGO_URI=settings.MONGO_URI + MONGO_URI=settings.MONGO_URI, ) 
celery.config_from_object("application.celeryconfig") +api.init_app(app) + @app.route("/") def home(): - if request.remote_addr in ('0.0.0.0', '127.0.0.1', 'localhost', '172.18.0.1'): - return redirect('http://localhost:5173') + if request.remote_addr in ("0.0.0.0", "127.0.0.1", "localhost", "172.18.0.1"): + return redirect("http://localhost:5173") else: - return 'Welcome to DocsGPT Backend!' + return "Welcome to DocsGPT Backend!" + @app.after_request def after_request(response): @@ -41,6 +48,6 @@ def after_request(response): response.headers.add("Access-Control-Allow-Methods", "GET,PUT,POST,DELETE,OPTIONS") return response + if __name__ == "__main__": app.run(debug=settings.FLASK_DEBUG_MODE, port=7091) - diff --git a/application/cache.py b/application/cache.py new file mode 100644 index 00000000..33022e45 --- /dev/null +++ b/application/cache.py @@ -0,0 +1,93 @@ +import redis +import time +import json +import logging +from threading import Lock +from application.core.settings import settings +from application.utils import get_hash + +logger = logging.getLogger(__name__) + +_redis_instance = None +_instance_lock = Lock() + +def get_redis_instance(): + global _redis_instance + if _redis_instance is None: + with _instance_lock: + if _redis_instance is None: + try: + _redis_instance = redis.Redis.from_url(settings.CACHE_REDIS_URL, socket_connect_timeout=2) + except redis.ConnectionError as e: + logger.error(f"Redis connection error: {e}") + _redis_instance = None + return _redis_instance + +def gen_cache_key(*messages, model="docgpt"): + if not all(isinstance(msg, dict) for msg in messages): + raise ValueError("All messages must be dictionaries.") + messages_str = json.dumps(list(messages), sort_keys=True) + combined = f"{model}_{messages_str}" + cache_key = get_hash(combined) + return cache_key + +def gen_cache(func): + def wrapper(self, model, messages, *args, **kwargs): + try: + cache_key = gen_cache_key(*messages) + redis_client = get_redis_instance() + if 
redis_client: + try: + cached_response = redis_client.get(cache_key) + if cached_response: + return cached_response.decode('utf-8') + except redis.ConnectionError as e: + logger.error(f"Redis connection error: {e}") + + result = func(self, model, messages, *args, **kwargs) + if redis_client: + try: + redis_client.set(cache_key, result, ex=1800) + except redis.ConnectionError as e: + logger.error(f"Redis connection error: {e}") + + return result + except ValueError as e: + logger.error(e) + return "Error: No user message found in the conversation to generate a cache key." + return wrapper + +def stream_cache(func): + def wrapper(self, model, messages, stream, *args, **kwargs): + cache_key = gen_cache_key(*messages) + logger.info(f"Stream cache key: {cache_key}") + + redis_client = get_redis_instance() + if redis_client: + try: + cached_response = redis_client.get(cache_key) + if cached_response: + logger.info(f"Cache hit for stream key: {cache_key}") + cached_response = json.loads(cached_response.decode('utf-8')) + for chunk in cached_response: + yield chunk + time.sleep(0.03) + return + except redis.ConnectionError as e: + logger.error(f"Redis connection error: {e}") + + result = func(self, model, messages, stream, *args, **kwargs) + stream_cache_data = [] + + for chunk in result: + stream_cache_data.append(chunk) + yield chunk + + if redis_client: + try: + redis_client.set(cache_key, json.dumps(stream_cache_data), ex=1800) + logger.info(f"Stream cache saved for key: {cache_key}") + except redis.ConnectionError as e: + logger.error(f"Redis connection error: {e}") + + return wrapper \ No newline at end of file diff --git a/application/core/settings.py b/application/core/settings.py index c8fe9c4a..d4b02481 100644 --- a/application/core/settings.py +++ b/application/core/settings.py @@ -21,6 +21,9 @@ class Settings(BaseSettings): VECTOR_STORE: str = "faiss" # "faiss" or "elasticsearch" or "qdrant" or "milvus" or "lancedb" RETRIEVERS_ENABLED: list = ["classic_rag", 
"duckduck_search"] # also brave_search + # LLM Cache + CACHE_REDIS_URL: str = "redis://localhost:6379/2" + API_URL: str = "http://localhost:7091" # backend url for celery worker API_KEY: Optional[str] = None # LLM api key diff --git a/application/extensions.py b/application/extensions.py new file mode 100644 index 00000000..b6f52893 --- /dev/null +++ b/application/extensions.py @@ -0,0 +1,7 @@ +from flask_restx import Api + +api = Api( + version="1.0", + title="DocsGPT API", + description="API for DocsGPT", +) diff --git a/application/llm/base.py b/application/llm/base.py index 475b7937..1caab5d3 100644 --- a/application/llm/base.py +++ b/application/llm/base.py @@ -1,28 +1,29 @@ from abc import ABC, abstractmethod from application.usage import gen_token_usage, stream_token_usage +from application.cache import stream_cache, gen_cache class BaseLLM(ABC): def __init__(self): self.token_usage = {"prompt_tokens": 0, "generated_tokens": 0} - def _apply_decorator(self, method, decorator, *args, **kwargs): - return decorator(method, *args, **kwargs) + def _apply_decorator(self, method, decorators, *args, **kwargs): + for decorator in decorators: + method = decorator(method) + return method(self, *args, **kwargs) @abstractmethod def _raw_gen(self, model, messages, stream, *args, **kwargs): pass def gen(self, model, messages, stream=False, *args, **kwargs): - return self._apply_decorator(self._raw_gen, gen_token_usage)( - self, model=model, messages=messages, stream=stream, *args, **kwargs - ) + decorators = [gen_token_usage, gen_cache] + return self._apply_decorator(self._raw_gen, decorators=decorators, model=model, messages=messages, stream=stream, *args, **kwargs) @abstractmethod def _raw_gen_stream(self, model, messages, stream, *args, **kwargs): pass def gen_stream(self, model, messages, stream=True, *args, **kwargs): - return self._apply_decorator(self._raw_gen_stream, stream_token_usage)( - self, model=model, messages=messages, stream=stream, *args, **kwargs - ) + 
decorators = [stream_cache, stream_token_usage] + return self._apply_decorator(self._raw_gen_stream, decorators=decorators, model=model, messages=messages, stream=stream, *args, **kwargs) \ No newline at end of file diff --git a/application/llm/groq.py b/application/llm/groq.py new file mode 100644 index 00000000..b5731a90 --- /dev/null +++ b/application/llm/groq.py @@ -0,0 +1,45 @@ +from application.llm.base import BaseLLM + + + +class GroqLLM(BaseLLM): + + def __init__(self, api_key=None, user_api_key=None, *args, **kwargs): + from openai import OpenAI + + super().__init__(*args, **kwargs) + self.client = OpenAI(api_key=api_key, base_url="https://api.groq.com/openai/v1") + self.api_key = api_key + self.user_api_key = user_api_key + + def _raw_gen( + self, + baseself, + model, + messages, + stream=False, + **kwargs + ): + response = self.client.chat.completions.create( + model=model, messages=messages, stream=stream, **kwargs + ) + + return response.choices[0].message.content + + def _raw_gen_stream( + self, + baseself, + model, + messages, + stream=True, + **kwargs + ): + response = self.client.chat.completions.create( + model=model, messages=messages, stream=stream, **kwargs + ) + + for line in response: + # import sys + # print(line.choices[0].delta.content, file=sys.stderr) + if line.choices[0].delta.content is not None: + yield line.choices[0].delta.content diff --git a/application/llm/llm_creator.py b/application/llm/llm_creator.py index 7960778b..6a19de10 100644 --- a/application/llm/llm_creator.py +++ b/application/llm/llm_creator.py @@ -1,3 +1,4 @@ +from application.llm.groq import GroqLLM from application.llm.openai import OpenAILLM, AzureOpenAILLM from application.llm.sagemaker import SagemakerAPILLM from application.llm.huggingface import HuggingFaceLLM @@ -17,6 +18,7 @@ class LLMCreator: "anthropic": AnthropicLLM, "docsgpt": DocsGPTAPILLM, "premai": PremAILLM, + "groq": GroqLLM } @classmethod diff --git a/application/parser/file/bulk.py 
b/application/parser/file/bulk.py index aec6c8c1..79fc2c45 100644 --- a/application/parser/file/bulk.py +++ b/application/parser/file/bulk.py @@ -10,13 +10,14 @@ from application.parser.file.epub_parser import EpubParser from application.parser.file.html_parser import HTMLParser from application.parser.file.markdown_parser import MarkdownParser from application.parser.file.rst_parser import RstParser -from application.parser.file.tabular_parser import PandasCSVParser +from application.parser.file.tabular_parser import PandasCSVParser,ExcelParser from application.parser.schema.base import Document DEFAULT_FILE_EXTRACTOR: Dict[str, BaseParser] = { ".pdf": PDFParser(), ".docx": DocxParser(), ".csv": PandasCSVParser(), + ".xlsx":ExcelParser(), ".epub": EpubParser(), ".md": MarkdownParser(), ".rst": RstParser(), diff --git a/application/parser/file/tabular_parser.py b/application/parser/file/tabular_parser.py index 81355ae0..b2dbd193 100644 --- a/application/parser/file/tabular_parser.py +++ b/application/parser/file/tabular_parser.py @@ -113,3 +113,68 @@ class PandasCSVParser(BaseParser): return (self._row_joiner).join(text_list) else: return text_list + + +class ExcelParser(BaseParser): + r"""Excel (.xlsx) parser. + + Parses Excel files using Pandas `read_excel` function. + If special parameters are required, use the `pandas_config` dict. + + Args: + concat_rows (bool): whether to concatenate all rows into one document. + If set to False, a Document will be created for each row. + True by default. + + col_joiner (str): Separator to use for joining cols per row. + Set to ", " by default. + + row_joiner (str): Separator to use for joining each row. + Only used when `concat_rows=True`. + Set to "\n" by default. + + pandas_config (dict): Options for the `pandas.read_excel` function call. + Refer to https://pandas.pydata.org/docs/reference/api/pandas.read_excel.html + for more information. 
+ Set to empty dict by default, this means pandas will try to figure + out the table structure on its own. + + """ + + def __init__( + self, + *args: Any, + concat_rows: bool = True, + col_joiner: str = ", ", + row_joiner: str = "\n", + pandas_config: dict = {}, + **kwargs: Any + ) -> None: + """Init params.""" + super().__init__(*args, **kwargs) + self._concat_rows = concat_rows + self._col_joiner = col_joiner + self._row_joiner = row_joiner + self._pandas_config = pandas_config + + def _init_parser(self) -> Dict: + """Init parser.""" + return {} + + def parse_file(self, file: Path, errors: str = "ignore") -> Union[str, List[str]]: + """Parse file.""" + try: + import pandas as pd + except ImportError: + raise ValueError("pandas module is required to read Excel files.") + + df = pd.read_excel(file, **self._pandas_config) + + text_list = df.apply( + lambda row: (self._col_joiner).join(row.astype(str).tolist()), axis=1 + ).tolist() + + if self._concat_rows: + return (self._row_joiner).join(text_list) + else: + return text_list \ No newline at end of file diff --git a/application/parser/open_ai_func.py b/application/parser/open_ai_func.py index c58e8059..3109f583 100755 --- a/application/parser/open_ai_func.py +++ b/application/parser/open_ai_func.py @@ -1,9 +1,11 @@ import os -from application.vectorstore.vector_creator import VectorCreator -from application.core.settings import settings from retry import retry +from application.core.settings import settings + +from application.vectorstore.vector_creator import VectorCreator + # from langchain_community.embeddings import HuggingFaceEmbeddings # from langchain_community.embeddings import HuggingFaceInstructEmbeddings @@ -11,12 +13,14 @@ from retry import retry @retry(tries=10, delay=60) -def store_add_texts_with_retry(store, i): +def store_add_texts_with_retry(store, i, id): + # add source_id to the metadata + i.metadata["source_id"] = str(id) store.add_texts([i.page_content], metadatas=[i.metadata]) # 
store_pine.add_texts([i.page_content], metadatas=[i.metadata]) -def call_openai_api(docs, folder_name, task_status): +def call_openai_api(docs, folder_name, id, task_status): # Function to create a vector store from the documents and save it to disk if not os.path.exists(f"{folder_name}"): @@ -32,15 +36,16 @@ def call_openai_api(docs, folder_name, task_status): store = VectorCreator.create_vectorstore( settings.VECTOR_STORE, docs_init=docs_init, - path=f"{folder_name}", + source_id=f"{folder_name}", embeddings_key=os.getenv("EMBEDDINGS_KEY"), ) else: store = VectorCreator.create_vectorstore( settings.VECTOR_STORE, - path=f"{folder_name}", + source_id=str(id), embeddings_key=os.getenv("EMBEDDINGS_KEY"), ) + store.delete_index() # Uncomment for MPNet embeddings # model_name = "sentence-transformers/all-mpnet-base-v2" # hf = HuggingFaceEmbeddings(model_name=model_name) @@ -57,7 +62,7 @@ def call_openai_api(docs, folder_name, task_status): task_status.update_state( state="PROGRESS", meta={"current": int((c1 / s1) * 100)} ) - store_add_texts_with_retry(store, i) + store_add_texts_with_retry(store, i, id) except Exception as e: print(e) print("Error on ", i) @@ -68,5 +73,3 @@ def call_openai_api(docs, folder_name, task_status): c1 += 1 if settings.VECTOR_STORE == "faiss": store.save_local(f"{folder_name}") - - diff --git a/application/parser/remote/github_loader.py b/application/parser/remote/github_loader.py index e69de29b..49f0ae9c 100644 --- a/application/parser/remote/github_loader.py +++ b/application/parser/remote/github_loader.py @@ -0,0 +1,53 @@ +import base64 +import requests +from typing import List +from application.parser.remote.base import BaseRemote +from langchain_core.documents import Document + +class GitHubLoader(BaseRemote): + def __init__(self): + self.access_token = None + self.headers = { + "Authorization": f"token {self.access_token}" + } if self.access_token else {} + return + + def fetch_file_content(self, repo_url: str, file_path: str) -> str: + 
url = f"https://api.github.com/repos/{repo_url}/contents/{file_path}" + response = requests.get(url, headers=self.headers) + + if response.status_code == 200: + content = response.json() + if content.get("encoding") == "base64": + try: + decoded_content = base64.b64decode(content["content"]).decode("utf-8") + return f"Filename: {file_path}\n\n{decoded_content}" + except Exception as e: + print(f"Error decoding content for {file_path}: {e}") + raise + else: + return f"Filename: {file_path}\n\n{content['content']}" + else: + response.raise_for_status() + + def fetch_repo_files(self, repo_url: str, path: str = "") -> List[str]: + url = f"https://api.github.com/repos/{repo_url}/contents/{path}" + response = requests.get(url, headers={**self.headers, "Accept": "application/vnd.github.v3.raw"}) + contents = response.json() + files = [] + for item in contents: + if item["type"] == "file": + files.append(item["path"]) + elif item["type"] == "dir": + files.extend(self.fetch_repo_files(repo_url, item["path"])) + return files + + def load_data(self, repo_url: str) -> List[Document]: + repo_name = repo_url.split("github.com/")[-1] + files = self.fetch_repo_files(repo_name) + documents = [] + for file_path in files: + content = self.fetch_file_content(repo_name, file_path) + documents.append(Document(page_content=content, metadata={"title": file_path, + "source": f"https://github.com/{repo_name}/blob/main/{file_path}"})) + return documents diff --git a/application/parser/remote/remote_creator.py b/application/parser/remote/remote_creator.py index d2a58f8d..026abd76 100644 --- a/application/parser/remote/remote_creator.py +++ b/application/parser/remote/remote_creator.py @@ -2,6 +2,7 @@ from application.parser.remote.sitemap_loader import SitemapLoader from application.parser.remote.crawler_loader import CrawlerLoader from application.parser.remote.web_loader import WebLoader from application.parser.remote.reddit_loader import RedditPostsLoaderRemote +from 
application.parser.remote.github_loader import GitHubLoader class RemoteCreator: @@ -10,6 +11,7 @@ class RemoteCreator: "sitemap": SitemapLoader, "crawler": CrawlerLoader, "reddit": RedditPostsLoaderRemote, + "github": GitHubLoader, } @classmethod diff --git a/application/requirements.txt b/application/requirements.txt index f1b9e3c8..6ea1d1ba 100644 --- a/application/requirements.txt +++ b/application/requirements.txt @@ -1,37 +1,87 @@ -anthropic==0.34.0 +anthropic==0.34.2 boto3==1.34.153 beautifulsoup4==4.12.3 celery==5.3.6 -dataclasses_json==0.6.7 +dataclasses-json==0.6.7 docx2txt==0.8 -duckduckgo-search==6.2.6 -EbookLib==0.18 -elasticsearch==8.14.0 +duckduckgo-search==6.3.0 +ebooklib==0.18 +elastic-transport==8.15.0 +elasticsearch==8.15.1 escodegen==1.0.11 esprima==4.0.1 -Flask==3.0.1 +esutils==1.0.1 +Flask==3.0.3 faiss-cpu==1.8.0.post1 +flask-restx==1.3.0 gunicorn==23.0.0 -html2text==2020.1.16 +html2text==2024.2.26 javalang==0.13.0 -langchain==0.2.16 -langchain-community==0.2.16 -langchain-core==0.2.38 -langchain-openai==0.1.23 -lancedb==0.13.0 -openapi3_parser==1.1.16 -pandas==2.2.2 -pydantic_settings==2.4.0 +jinja2==3.1.4 +jiter==0.5.0 +jmespath==1.0.1 +joblib==1.4.2 +jsonpatch==1.33 +jsonpointer==3.0.0 +jsonschema==4.23.0 +jsonschema-spec==0.2.4 +jsonschema-specifications==2023.7.1 +kombu==5.4.2 +langchain==0.3.0 +langchain-community==0.3.0 +langchain-core==0.3.2 +langchain-openai==0.2.0 +langchain-text-splitters==0.3.0 +langsmith==0.1.125 +lazy-object-proxy==1.10.0 +lxml==5.3.0 +markupsafe==2.1.5 +marshmallow==3.22.0 +mpmath==1.3.0 +multidict==6.1.0 +mypy-extensions==1.0.0 +networkx==3.3 +numpy==1.26.4 +openai==1.46.1 +openapi-schema-validator==0.6.2 +openapi-spec-validator==0.6.0 +openapi3-parser==1.1.18 +orjson==3.10.7 +packaging==24.1 +pandas==2.2.3 +openpyxl==3.1.5 +pathable==0.4.3 +pillow==10.4.0 +portalocker==2.10.1 +prance==23.6.21.0 +primp==0.6.3 +prompt-toolkit==3.0.47 +protobuf==5.28.2 +py==1.11.0 +pydantic==2.9.2 +pydantic-core==2.23.4 
+pydantic-settings==2.4.0 pymongo==4.8.0 -PyPDF2==3.0.1 +pypdf2==3.0.1 +python-dateutil==2.9.0.post0 python-dotenv==1.0.1 qdrant-client==1.11.0 redis==5.0.1 -Requests==2.32.0 +referencing==0.30.2 +regex==2024.9.11 +requests==2.32.3 retry==0.9.2 -sentence-transformers +sentence-transformers==3.0.1 tiktoken==0.7.0 -torch -tqdm==4.66.3 -transformers==4.44.0 -Werkzeug==3.0.3 +tokenizers==0.19.1 +torch==2.4.1 +tqdm==4.66.5 +transformers==4.44.2 +typing-extensions==4.12.2 +typing-inspect==0.9.0 +tzdata==2024.2 +urllib3==2.2.3 +vine==5.1.0 +wcwidth==0.2.13 +werkzeug==3.0.4 +yarl==1.11.1 diff --git a/application/retriever/base.py b/application/retriever/base.py index 4a37e810..fd99dbdd 100644 --- a/application/retriever/base.py +++ b/application/retriever/base.py @@ -12,3 +12,7 @@ class BaseRetriever(ABC): @abstractmethod def search(self, *args, **kwargs): pass + + @abstractmethod + def get_params(self): + pass diff --git a/application/retriever/brave_search.py b/application/retriever/brave_search.py index 5d1e1566..29666a57 100644 --- a/application/retriever/brave_search.py +++ b/application/retriever/brave_search.py @@ -101,3 +101,15 @@ class BraveRetSearch(BaseRetriever): def search(self): return self._get_data() + + def get_params(self): + return { + "question": self.question, + "source": self.source, + "chat_history": self.chat_history, + "prompt": self.prompt, + "chunks": self.chunks, + "token_limit": self.token_limit, + "gpt_model": self.gpt_model, + "user_api_key": self.user_api_key + } diff --git a/application/retriever/classic_rag.py b/application/retriever/classic_rag.py index aef6e503..b87b5852 100644 --- a/application/retriever/classic_rag.py +++ b/application/retriever/classic_rag.py @@ -1,4 +1,3 @@ -import os from application.retriever.base import BaseRetriever from application.core.settings import settings from application.vectorstore.vector_creator import VectorCreator @@ -21,7 +20,7 @@ class ClassicRAG(BaseRetriever): user_api_key=None, ): self.question = 
question - self.vectorstore = self._get_vectorstore(source=source) + self.vectorstore = source['active_docs'] if 'active_docs' in source else None self.chat_history = chat_history self.prompt = prompt self.chunks = chunks @@ -38,21 +37,6 @@ class ClassicRAG(BaseRetriever): ) self.user_api_key = user_api_key - def _get_vectorstore(self, source): - if "active_docs" in source: - if source["active_docs"].split("/")[0] == "default": - vectorstore = "" - elif source["active_docs"].split("/")[0] == "local": - vectorstore = "indexes/" + source["active_docs"] - else: - vectorstore = "vectors/" + source["active_docs"] - if source["active_docs"] == "default": - vectorstore = "" - else: - vectorstore = "" - vectorstore = os.path.join("application", vectorstore) - return vectorstore - def _get_data(self): if self.chunks == 0: docs = [] @@ -61,13 +45,12 @@ class ClassicRAG(BaseRetriever): settings.VECTOR_STORE, self.vectorstore, settings.EMBEDDINGS_KEY ) docs_temp = docsearch.search(self.question, k=self.chunks) + print(docs_temp) docs = [ { - "title": ( - i.metadata["title"].split("/")[-1] - if i.metadata - else i.page_content - ), + "title": i.metadata.get( + "title", i.metadata.get("post_title", i.page_content) + ).split("/")[-1], "text": i.page_content, "source": ( i.metadata.get("source") @@ -121,3 +104,15 @@ class ClassicRAG(BaseRetriever): def search(self): return self._get_data() + + def get_params(self): + return { + "question": self.question, + "source": self.vectorstore, + "chat_history": self.chat_history, + "prompt": self.prompt, + "chunks": self.chunks, + "token_limit": self.token_limit, + "gpt_model": self.gpt_model, + "user_api_key": self.user_api_key + } diff --git a/application/retriever/duckduck_search.py b/application/retriever/duckduck_search.py index 6d2965f5..d746ecaa 100644 --- a/application/retriever/duckduck_search.py +++ b/application/retriever/duckduck_search.py @@ -118,3 +118,15 @@ class DuckDuckSearch(BaseRetriever): def search(self): return 
self._get_data() + + def get_params(self): + return { + "question": self.question, + "source": self.source, + "chat_history": self.chat_history, + "prompt": self.prompt, + "chunks": self.chunks, + "token_limit": self.token_limit, + "gpt_model": self.gpt_model, + "user_api_key": self.user_api_key + } diff --git a/application/retriever/retriever_creator.py b/application/retriever/retriever_creator.py index ad071401..07be373d 100644 --- a/application/retriever/retriever_creator.py +++ b/application/retriever/retriever_creator.py @@ -5,15 +5,16 @@ from application.retriever.brave_search import BraveRetSearch class RetrieverCreator: - retievers = { + retrievers = { 'classic': ClassicRAG, 'duckduck_search': DuckDuckSearch, - 'brave_search': BraveRetSearch + 'brave_search': BraveRetSearch, + 'default': ClassicRAG } @classmethod def create_retriever(cls, type, *args, **kwargs): - retiever_class = cls.retievers.get(type.lower()) + retiever_class = cls.retrievers.get(type.lower()) if not retiever_class: raise ValueError(f"No retievers class found for type {type}") return retiever_class(*args, **kwargs) \ No newline at end of file diff --git a/application/utils.py b/application/utils.py index 70a00ce0..1fc9e329 100644 --- a/application/utils.py +++ b/application/utils.py @@ -1,22 +1,48 @@ import tiktoken +import hashlib +from flask import jsonify, make_response + _encoding = None + def get_encoding(): global _encoding if _encoding is None: _encoding = tiktoken.get_encoding("cl100k_base") return _encoding + def num_tokens_from_string(string: str) -> int: encoding = get_encoding() num_tokens = len(encoding.encode(string)) return num_tokens + def count_tokens_docs(docs): docs_content = "" for doc in docs: docs_content += doc.page_content tokens = num_tokens_from_string(docs_content) - return tokens \ No newline at end of file + return tokens + + +def check_required_fields(data, required_fields): + missing_fields = [field for field in required_fields if field not in data] + if 
missing_fields: + return make_response( + jsonify( + { + "success": False, + "message": f"Missing fields: {', '.join(missing_fields)}", + } + ), + 400, + ) + return None + + +def get_hash(data): + return hashlib.md5(data.encode()).hexdigest() + diff --git a/application/vectorstore/elasticsearch.py b/application/vectorstore/elasticsearch.py index bb28d5ce..e393e4a5 100644 --- a/application/vectorstore/elasticsearch.py +++ b/application/vectorstore/elasticsearch.py @@ -9,9 +9,9 @@ import elasticsearch class ElasticsearchStore(BaseVectorStore): _es_connection = None # Class attribute to hold the Elasticsearch connection - def __init__(self, path, embeddings_key, index_name=settings.ELASTIC_INDEX): + def __init__(self, source_id, embeddings_key, index_name=settings.ELASTIC_INDEX): super().__init__() - self.path = path.replace("application/indexes/", "").rstrip("/") + self.source_id = source_id.replace("application/indexes/", "").rstrip("/") self.embeddings_key = embeddings_key self.index_name = index_name @@ -81,7 +81,7 @@ class ElasticsearchStore(BaseVectorStore): embeddings = self._get_embeddings(settings.EMBEDDINGS_NAME, self.embeddings_key) vector = embeddings.embed_query(question) knn = { - "filter": [{"match": {"metadata.store.keyword": self.path}}], + "filter": [{"match": {"metadata.source_id.keyword": self.source_id}}], "field": "vector", "k": k, "num_candidates": 100, @@ -100,7 +100,7 @@ class ElasticsearchStore(BaseVectorStore): } } ], - "filter": [{"match": {"metadata.store.keyword": self.path}}], + "filter": [{"match": {"metadata.source_id.keyword": self.source_id}}], } }, "rank": {"rrf": {}}, @@ -209,5 +209,4 @@ class ElasticsearchStore(BaseVectorStore): def delete_index(self): self._es_connection.delete_by_query(index=self.index_name, query={"match": { - "metadata.store.keyword": self.path}},) - + "metadata.source_id.keyword": self.source_id}},) diff --git a/application/vectorstore/faiss.py b/application/vectorstore/faiss.py index 46f6e8cb..afa55db9 
100644 --- a/application/vectorstore/faiss.py +++ b/application/vectorstore/faiss.py @@ -1,22 +1,29 @@ from langchain_community.vectorstores import FAISS from application.vectorstore.base import BaseVectorStore from application.core.settings import settings +import os + +def get_vectorstore(path: str) -> str: + if path: + vectorstore = os.path.join("application", "indexes", path) + else: + vectorstore = os.path.join("application") + return vectorstore class FaissStore(BaseVectorStore): - - def __init__(self, path, embeddings_key, docs_init=None): + def __init__(self, source_id: str, embeddings_key: str, docs_init=None): super().__init__() - self.path = path + self.path = get_vectorstore(source_id) embeddings = self._get_embeddings(settings.EMBEDDINGS_NAME, embeddings_key) - if docs_init: - self.docsearch = FAISS.from_documents( - docs_init, embeddings - ) - else: - self.docsearch = FAISS.load_local( - self.path, embeddings, - allow_dangerous_deserialization=True - ) + + try: + if docs_init: + self.docsearch = FAISS.from_documents(docs_init, embeddings) + else: + self.docsearch = FAISS.load_local(self.path, embeddings, allow_dangerous_deserialization=True) + except Exception: + raise + self.assert_embedding_dimensions(embeddings) def search(self, *args, **kwargs): @@ -32,16 +39,12 @@ class FaissStore(BaseVectorStore): return self.docsearch.delete(*args, **kwargs) def assert_embedding_dimensions(self, embeddings): - """ - Check that the word embedding dimension of the docsearch index matches - the dimension of the word embeddings used - """ + """Check that the word embedding dimension of the docsearch index matches the dimension of the word embeddings used.""" if settings.EMBEDDINGS_NAME == "huggingface_sentence-transformers/all-mpnet-base-v2": - try: - word_embedding_dimension = embeddings.dimension - except AttributeError as e: - raise AttributeError("'dimension' attribute not found in embeddings instance. 
Make sure the embeddings object is properly initialized.") from e + word_embedding_dimension = getattr(embeddings, 'dimension', None) + if word_embedding_dimension is None: + raise AttributeError("'dimension' attribute not found in embeddings instance.") + docsearch_index_dimension = self.docsearch.index.d if word_embedding_dimension != docsearch_index_dimension: - raise ValueError(f"Embedding dimension mismatch: embeddings.dimension ({word_embedding_dimension}) " + - f"!= docsearch index dimension ({docsearch_index_dimension})") \ No newline at end of file + raise ValueError(f"Embedding dimension mismatch: embeddings.dimension ({word_embedding_dimension}) != docsearch index dimension ({docsearch_index_dimension})") diff --git a/application/vectorstore/mongodb.py b/application/vectorstore/mongodb.py index 337fc41f..c577a5d5 100644 --- a/application/vectorstore/mongodb.py +++ b/application/vectorstore/mongodb.py @@ -1,11 +1,12 @@ -from application.vectorstore.base import BaseVectorStore from application.core.settings import settings +from application.vectorstore.base import BaseVectorStore from application.vectorstore.document_class import Document + class MongoDBVectorStore(BaseVectorStore): def __init__( self, - path: str = "", + source_id: str = "", embeddings_key: str = "embeddings", collection: str = "documents", index_name: str = "vector_search_index", @@ -18,7 +19,7 @@ class MongoDBVectorStore(BaseVectorStore): self._embedding_key = embedding_key self._embeddings_key = embeddings_key self._mongo_uri = settings.MONGO_URI - self._path = path.replace("application/indexes/", "").rstrip("/") + self._source_id = source_id.replace("application/indexes/", "").rstrip("/") self._embedding = self._get_embeddings(settings.EMBEDDINGS_NAME, embeddings_key) try: @@ -33,27 +34,24 @@ class MongoDBVectorStore(BaseVectorStore): self._database = self._client[database] self._collection = self._database[collection] - def search(self, question, k=2, *args, **kwargs): query_vector = 
self._embedding.embed_query(question) pipeline = [ { "$vectorSearch": { - "queryVector": query_vector, + "queryVector": query_vector, "path": self._embedding_key, - "limit": k, - "numCandidates": k * 10, + "limit": k, + "numCandidates": k * 10, "index": self._index_name, - "filter": { - "store": {"$eq": self._path} - } + "filter": {"source_id": {"$eq": self._source_id}}, } } ] cursor = self._collection.aggregate(pipeline) - + results = [] for doc in cursor: text = doc[self._text_key] @@ -63,30 +61,32 @@ class MongoDBVectorStore(BaseVectorStore): metadata = doc results.append(Document(text, metadata)) return results - + def _insert_texts(self, texts, metadatas): if not texts: return [] embeddings = self._embedding.embed_documents(texts) + to_insert = [ {self._text_key: t, self._embedding_key: embedding, **m} for t, m, embedding in zip(texts, metadatas, embeddings) ] - # insert the documents in MongoDB Atlas + insert_result = self._collection.insert_many(to_insert) return insert_result.inserted_ids - - def add_texts(self, + + def add_texts( + self, texts, - metadatas = None, - ids = None, - refresh_indices = True, - create_index_if_not_exists = True, - bulk_kwargs = None, - **kwargs,): + metadatas=None, + ids=None, + refresh_indices=True, + create_index_if_not_exists=True, + bulk_kwargs=None, + **kwargs, + ): - - #dims = self._embedding.client[1].word_embedding_dimension + # dims = self._embedding.client[1].word_embedding_dimension # # check if index exists # if create_index_if_not_exists: # # check if index exists @@ -121,6 +121,6 @@ class MongoDBVectorStore(BaseVectorStore): if texts_batch: result_ids.extend(self._insert_texts(texts_batch, metadatas_batch)) return result_ids - + def delete_index(self, *args, **kwargs): - self._collection.delete_many({"store": self._path}) \ No newline at end of file + self._collection.delete_many({"source_id": self._source_id}) diff --git a/application/vectorstore/qdrant.py b/application/vectorstore/qdrant.py index 
482d06a1..3f94505f 100644 --- a/application/vectorstore/qdrant.py +++ b/application/vectorstore/qdrant.py @@ -5,12 +5,12 @@ from qdrant_client import models class QdrantStore(BaseVectorStore): - def __init__(self, path: str = "", embeddings_key: str = "embeddings"): + def __init__(self, source_id: str = "", embeddings_key: str = "embeddings"): self._filter = models.Filter( must=[ models.FieldCondition( - key="metadata.store", - match=models.MatchValue(value=path.replace("application/indexes/", "").rstrip("/")), + key="metadata.source_id", + match=models.MatchValue(value=source_id.replace("application/indexes/", "").rstrip("/")), ) ] ) diff --git a/application/worker.py b/application/worker.py index c315f916..f8f38afa 100755 --- a/application/worker.py +++ b/application/worker.py @@ -1,37 +1,44 @@ +import logging import os import shutil import string import zipfile +from collections import Counter from urllib.parse import urljoin -import logging import requests +from bson.objectid import ObjectId +from pymongo import MongoClient from application.core.settings import settings from application.parser.file.bulk import SimpleDirectoryReader -from application.parser.remote.remote_creator import RemoteCreator from application.parser.open_ai_func import call_openai_api +from application.parser.remote.remote_creator import RemoteCreator from application.parser.schema.base import Document from application.parser.token_func import group_split from application.utils import count_tokens_docs +mongo = MongoClient(settings.MONGO_URI) +db = mongo["docsgpt"] +sources_collection = db["sources"] + +# Constants +MIN_TOKENS = 150 +MAX_TOKENS = 1250 +RECURSION_DEPTH = 2 # Define a function to extract metadata from a given filename. def metadata_from_filename(title): - store = "/".join(title.split("/")[1:3]) - return {"title": title, "store": store} - + return {"title": title} # Define a function to generate a random string of a given length. 
def generate_random_string(length): return "".join([string.ascii_letters[i % 52] for i in range(length)]) - current_dir = os.path.dirname( os.path.dirname(os.path.dirname(os.path.abspath(__file__))) ) - def extract_zip_recursive(zip_path, extract_to, current_depth=0, max_depth=5): """ Recursively extract zip files with a limit on recursion depth. @@ -46,9 +53,13 @@ def extract_zip_recursive(zip_path, extract_to, current_depth=0, max_depth=5): logging.warning(f"Reached maximum recursion depth of {max_depth}") return - with zipfile.ZipFile(zip_path, "r") as zip_ref: - zip_ref.extractall(extract_to) - os.remove(zip_path) # Remove the zip file after extracting + try: + with zipfile.ZipFile(zip_path, "r") as zip_ref: + zip_ref.extractall(extract_to) + os.remove(zip_path) # Remove the zip file after extracting + except Exception as e: + logging.error(f"Error extracting zip file {zip_path}: {e}") + return # Check for nested zip files and extract them for root, dirs, files in os.walk(extract_to): @@ -58,9 +69,43 @@ def extract_zip_recursive(zip_path, extract_to, current_depth=0, max_depth=5): file_path = os.path.join(root, file) extract_zip_recursive(file_path, root, current_depth + 1, max_depth) +def download_file(url, params, dest_path): + try: + response = requests.get(url, params=params) + response.raise_for_status() + with open(dest_path, "wb") as f: + f.write(response.content) + except requests.RequestException as e: + logging.error(f"Error downloading file: {e}") + raise + +def upload_index(full_path, file_data): + try: + if settings.VECTOR_STORE == "faiss": + files = { + "file_faiss": open(full_path + "/index.faiss", "rb"), + "file_pkl": open(full_path + "/index.pkl", "rb"), + } + response = requests.post( + urljoin(settings.API_URL, "/api/upload_index"), files=files, data=file_data + ) + else: + response = requests.post( + urljoin(settings.API_URL, "/api/upload_index"), data=file_data + ) + response.raise_for_status() + except requests.RequestException as e: + 
logging.error(f"Error uploading index: {e}") + raise + finally: + if settings.VECTOR_STORE == "faiss": + for file in files.values(): + file.close() # Define the main function for ingesting and processing documents. -def ingest_worker(self, directory, formats, name_job, filename, user): +def ingest_worker( + self, directory, formats, name_job, filename, user, retriever="classic" +): """ Ingest and process documents. @@ -71,43 +116,30 @@ def ingest_worker(self, directory, formats, name_job, filename, user): name_job (str): Name of the job for this ingestion task. filename (str): Name of the file to be ingested. user (str): Identifier for the user initiating the ingestion. + retriever (str): Type of retriever to use for processing the documents. Returns: dict: Information about the completed ingestion task, including input parameters and a "limited" flag. """ - # directory = 'inputs' or 'temp' - # formats = [".rst", ".md"] input_files = None recursive = True limit = None exclude = True - # name_job = 'job1' - # filename = 'install.rst' - # user = 'local' sample = False token_check = True - min_tokens = 150 - max_tokens = 1250 - recursion_depth = 2 full_path = os.path.join(directory, user, name_job) logging.info(f"Ingest file: {full_path}", extra={"user": user, "job": name_job}) - # check if API_URL env variable is set file_data = {"name": name_job, "file": filename, "user": user} - response = requests.get( - urljoin(settings.API_URL, "/api/download"), params=file_data - ) - file = response.content if not os.path.exists(full_path): os.makedirs(full_path) - with open(os.path.join(full_path, filename), "wb") as f: - f.write(file) + download_file(urljoin(settings.API_URL, "/api/download"), file_data, os.path.join(full_path, filename)) # check if file is .zip and extract it if filename.endswith(".zip"): extract_zip_recursive( - os.path.join(full_path, filename), full_path, 0, recursion_depth + os.path.join(full_path, filename), full_path, 0, RECURSION_DEPTH ) 
self.update_state(state="PROGRESS", meta={"current": 1}) @@ -123,14 +155,15 @@ def ingest_worker(self, directory, formats, name_job, filename, user): ).load_data() raw_docs = group_split( documents=raw_docs, - min_tokens=min_tokens, - max_tokens=max_tokens, + min_tokens=MIN_TOKENS, + max_tokens=MAX_TOKENS, token_check=token_check, ) docs = [Document.to_langchain_format(raw_doc) for raw_doc in raw_docs] + id = ObjectId() - call_openai_api(docs, full_path, self) + call_openai_api(docs, full_path, id, self) tokens = count_tokens_docs(docs) self.update_state(state="PROGRESS", meta={"current": 100}) @@ -138,24 +171,13 @@ def ingest_worker(self, directory, formats, name_job, filename, user): for i in range(min(5, len(raw_docs))): logging.info(f"Sample document {i}: {raw_docs[i]}") - # get files from outputs/inputs/index.faiss and outputs/inputs/index.pkl - # and send them to the server (provide user and name in form) - file_data = {"name": name_job, "user": user, "tokens":tokens} - if settings.VECTOR_STORE == "faiss": - files = { - "file_faiss": open(full_path + "/index.faiss", "rb"), - "file_pkl": open(full_path + "/index.pkl", "rb"), - } - response = requests.post( - urljoin(settings.API_URL, "/api/upload_index"), files=files, data=file_data - ) - response = requests.get( - urljoin(settings.API_URL, "/api/delete_old?path=" + full_path) - ) - else: - response = requests.post( - urljoin(settings.API_URL, "/api/upload_index"), data=file_data - ) + file_data.update({ + "tokens": tokens, + "retriever": retriever, + "id": str(id), + "type": "local", + }) + upload_index(full_path, file_data) # delete local shutil.rmtree(full_path) @@ -169,47 +191,114 @@ def ingest_worker(self, directory, formats, name_job, filename, user): "limited": False, } - -def remote_worker(self, source_data, name_job, user, loader, directory="temp"): +def remote_worker( + self, + source_data, + name_job, + user, + loader, + directory="temp", + retriever="classic", + sync_frequency="never", + 
operation_mode="upload", + doc_id=None, +): token_check = True - min_tokens = 150 - max_tokens = 1250 - full_path = directory + "/" + user + "/" + name_job + full_path = os.path.join(directory, user, name_job) if not os.path.exists(full_path): os.makedirs(full_path) self.update_state(state="PROGRESS", meta={"current": 1}) - logging.info(f"Remote job: {full_path}", extra={"user": user, "job": name_job, source_data: source_data}) + logging.info( + f"Remote job: {full_path}", + extra={"user": user, "job": name_job, "source_data": source_data}, + ) remote_loader = RemoteCreator.create_loader(loader) raw_docs = remote_loader.load_data(source_data) docs = group_split( documents=raw_docs, - min_tokens=min_tokens, - max_tokens=max_tokens, + min_tokens=MIN_TOKENS, + max_tokens=MAX_TOKENS, token_check=token_check, ) - # docs = [Document.to_langchain_format(raw_doc) for raw_doc in raw_docs] - call_openai_api(docs, full_path, self) tokens = count_tokens_docs(docs) + if operation_mode == "upload": + id = ObjectId() + call_openai_api(docs, full_path, id, self) + elif operation_mode == "sync": + if not doc_id or not ObjectId.is_valid(doc_id): + raise ValueError("doc_id must be provided for sync operation.") + id = ObjectId(doc_id) + call_openai_api(docs, full_path, id, self) self.update_state(state="PROGRESS", meta={"current": 100}) - # Proceed with uploading and cleaning as in the original function - file_data = {"name": name_job, "user": user, "tokens":tokens} - if settings.VECTOR_STORE == "faiss": - files = { - "file_faiss": open(full_path + "/index.faiss", "rb"), - "file_pkl": open(full_path + "/index.pkl", "rb"), - } - - requests.post( - urljoin(settings.API_URL, "/api/upload_index"), files=files, data=file_data - ) - requests.get(urljoin(settings.API_URL, "/api/delete_old?path=" + full_path)) - else: - requests.post(urljoin(settings.API_URL, "/api/upload_index"), data=file_data) + file_data = { + "name": name_job, + "user": user, + "tokens": tokens, + "retriever": 
retriever, + "id": str(id), + "type": loader, + "remote_data": source_data, + "sync_frequency": sync_frequency, + } + upload_index(full_path, file_data) shutil.rmtree(full_path) - return {"urls": source_data, "name_job": name_job, "user": user, "limited": False} \ No newline at end of file + return {"urls": source_data, "name_job": name_job, "user": user, "limited": False} + +def sync( + self, + source_data, + name_job, + user, + loader, + sync_frequency, + retriever, + doc_id=None, + directory="temp", +): + try: + remote_worker( + self, + source_data, + name_job, + user, + loader, + directory, + retriever, + sync_frequency, + "sync", + doc_id, + ) + except Exception as e: + logging.error(f"Error during sync: {e}") + return {"status": "error", "error": str(e)} + return {"status": "success"} + +def sync_worker(self, frequency): + sync_counts = Counter() + sources = sources_collection.find() + for doc in sources: + if doc.get("sync_frequency") == frequency: + name = doc.get("name") + user = doc.get("user") + source_type = doc.get("type") + source_data = doc.get("remote_data") + retriever = doc.get("retriever") + doc_id = str(doc.get("_id")) + resp = sync( + self, source_data, name, user, source_type, frequency, retriever, doc_id + ) + sync_counts["total_sync_count"] += 1 + sync_counts[ + "sync_success" if resp["status"] == "success" else "sync_failure" + ] += 1 + + return { + key: sync_counts[key] + for key in ["total_sync_count", "sync_success", "sync_failure"] + } diff --git a/docker-compose-local.yaml b/docker-compose-local.yaml index 74bf0101..d9fd248b 100644 --- a/docker-compose-local.yaml +++ b/docker-compose-local.yaml @@ -1,6 +1,8 @@ services: frontend: build: ./frontend + volumes: + - ./frontend/src:/app/src environment: - VITE_API_HOST=http://localhost:7091 - VITE_API_STREAMING=$VITE_API_STREAMING diff --git a/docker-compose.yaml b/docker-compose.yaml index 05c8c059..d3f3421a 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,6 +1,8 @@ 
services: frontend: build: ./frontend + volumes: + - ./frontend/src:/app/src environment: - VITE_API_HOST=http://localhost:7091 - VITE_API_STREAMING=$VITE_API_STREAMING @@ -18,6 +20,7 @@ services: - CELERY_BROKER_URL=redis://redis:6379/0 - CELERY_RESULT_BACKEND=redis://redis:6379/1 - MONGO_URI=mongodb://mongo:27017/docsgpt + - CACHE_REDIS_URL=redis://redis:6379/2 ports: - "7091:7091" volumes: @@ -30,7 +33,7 @@ services: worker: build: ./application - command: celery -A application.app.celery worker -l INFO + command: celery -A application.app.celery worker -l INFO -B environment: - API_KEY=$API_KEY - EMBEDDINGS_KEY=$API_KEY @@ -39,6 +42,7 @@ services: - CELERY_RESULT_BACKEND=redis://redis:6379/1 - MONGO_URI=mongodb://mongo:27017/docsgpt - API_URL=http://backend:7091 + - CACHE_REDIS_URL=redis://redis:6379/2 depends_on: - redis - mongo diff --git a/docs/README.md b/docs/README.md index 4b90b598..12ebbf08 100644 --- a/docs/README.md +++ b/docs/README.md @@ -46,6 +46,6 @@ yarn install yarn dev ``` -- Now, you should be able to view the docs on your local environment by visiting `http://localhost:5000`. You can explore the different markdown files and make changes as you see fit. +- Now, you should be able to view the docs on your local environment by visiting `http://localhost:3000`. You can explore the different markdown files and make changes as you see fit. - **Footnotes:** This guide assumes you have Node.js and npm installed. The guide involves running a local server using yarn, and viewing the documentation offline. If you encounter any issues, it may be worth verifying your Node.js and npm installations and whether you have installed yarn correctly. 
diff --git a/docs/package-lock.json b/docs/package-lock.json index 3b7679e2..99836cc6 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -8,7 +8,7 @@ "dependencies": { "@vercel/analytics": "^1.1.1", "docsgpt": "^0.4.1", - "next": "^14.1.1", + "next": "^14.2.12", "nextra": "^2.13.2", "nextra-theme-docs": "^2.13.2", "react": "^18.2.0", @@ -936,14 +936,14 @@ } }, "node_modules/@next/env": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.1.tgz", - "integrity": "sha512-7CnQyD5G8shHxQIIg3c7/pSeYFeMhsNbpU/bmvH7ZnDql7mNRgg8O2JZrhrc/soFnfBnKP4/xXNiiSIPn2w8gA==" + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.12.tgz", + "integrity": "sha512-3fP29GIetdwVIfIRyLKM7KrvJaqepv+6pVodEbx0P5CaMLYBtx+7eEg8JYO5L9sveJO87z9eCReceZLi0hxO1Q==" }, "node_modules/@next/swc-darwin-arm64": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.1.tgz", - "integrity": "sha512-yDjSFKQKTIjyT7cFv+DqQfW5jsD+tVxXTckSe1KIouKk75t1qZmj/mV3wzdmFb0XHVGtyRjDMulfVG8uCKemOQ==", + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.12.tgz", + "integrity": "sha512-crHJ9UoinXeFbHYNok6VZqjKnd8rTd7K3Z2zpyzF1ch7vVNKmhjv/V7EHxep3ILoN8JB9AdRn/EtVVyG9AkCXw==", "cpu": [ "arm64" ], @@ -956,9 +956,9 @@ } }, "node_modules/@next/swc-darwin-x64": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.1.tgz", - "integrity": "sha512-KCQmBL0CmFmN8D64FHIZVD9I4ugQsDBBEJKiblXGgwn7wBCSe8N4Dx47sdzl4JAg39IkSN5NNrr8AniXLMb3aw==", + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.12.tgz", + "integrity": "sha512-JbEaGbWq18BuNBO+lCtKfxl563Uw9oy2TodnN2ioX00u7V1uzrsSUcg3Ep9ce+P0Z9es+JmsvL2/rLphz+Frcw==", "cpu": [ "x64" ], @@ -971,9 +971,9 @@ } }, "node_modules/@next/swc-linux-arm64-gnu": { - "version": "14.1.1", - 
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.1.tgz", - "integrity": "sha512-YDQfbWyW0JMKhJf/T4eyFr4b3tceTorQ5w2n7I0mNVTFOvu6CGEzfwT3RSAQGTi/FFMTFcuspPec/7dFHuP7Eg==", + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.12.tgz", + "integrity": "sha512-qBy7OiXOqZrdp88QEl2H4fWalMGnSCrr1agT/AVDndlyw2YJQA89f3ttR/AkEIP9EkBXXeGl6cC72/EZT5r6rw==", "cpu": [ "arm64" ], @@ -986,9 +986,9 @@ } }, "node_modules/@next/swc-linux-arm64-musl": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.1.tgz", - "integrity": "sha512-fiuN/OG6sNGRN/bRFxRvV5LyzLB8gaL8cbDH5o3mEiVwfcMzyE5T//ilMmaTrnA8HLMS6hoz4cHOu6Qcp9vxgQ==", + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.12.tgz", + "integrity": "sha512-EfD9L7o9biaQxjwP1uWXnk3vYZi64NVcKUN83hpVkKocB7ogJfyH2r7o1pPnMtir6gHZiGCeHKagJ0yrNSLNHw==", "cpu": [ "arm64" ], @@ -1001,9 +1001,9 @@ } }, "node_modules/@next/swc-linux-x64-gnu": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.1.tgz", - "integrity": "sha512-rv6AAdEXoezjbdfp3ouMuVqeLjE1Bin0AuE6qxE6V9g3Giz5/R3xpocHoAi7CufRR+lnkuUjRBn05SYJ83oKNQ==", + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.12.tgz", + "integrity": "sha512-iQ+n2pxklJew9IpE47hE/VgjmljlHqtcD5UhZVeHICTPbLyrgPehaKf2wLRNjYH75udroBNCgrSSVSVpAbNoYw==", "cpu": [ "x64" ], @@ -1016,9 +1016,9 @@ } }, "node_modules/@next/swc-linux-x64-musl": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.1.tgz", - "integrity": "sha512-YAZLGsaNeChSrpz/G7MxO3TIBLaMN8QWMr3X8bt6rCvKovwU7GqQlDu99WdvF33kI8ZahvcdbFsy4jAFzFX7og==", + "version": "14.2.12", + "resolved": 
"https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.12.tgz", + "integrity": "sha512-rFkUkNwcQ0ODn7cxvcVdpHlcOpYxMeyMfkJuzaT74xjAa5v4fxP4xDk5OoYmPi8QNLDs3UgZPMSBmpBuv9zKWA==", "cpu": [ "x64" ], @@ -1031,9 +1031,9 @@ } }, "node_modules/@next/swc-win32-arm64-msvc": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.1.tgz", - "integrity": "sha512-1L4mUYPBMvVDMZg1inUYyPvFSduot0g73hgfD9CODgbr4xiTYe0VOMTZzaRqYJYBA9mana0x4eaAaypmWo1r5A==", + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.12.tgz", + "integrity": "sha512-PQFYUvwtHs/u0K85SG4sAdDXYIPXpETf9mcEjWc0R4JmjgMKSDwIU/qfZdavtP6MPNiMjuKGXHCtyhR/M5zo8g==", "cpu": [ "arm64" ], @@ -1046,9 +1046,9 @@ } }, "node_modules/@next/swc-win32-ia32-msvc": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.1.tgz", - "integrity": "sha512-jvIE9tsuj9vpbbXlR5YxrghRfMuG0Qm/nZ/1KDHc+y6FpnZ/apsgh+G6t15vefU0zp3WSpTMIdXRUsNl/7RSuw==", + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.12.tgz", + "integrity": "sha512-FAj2hMlcbeCV546eU2tEv41dcJb4NeqFlSXU/xL/0ehXywHnNpaYajOUvn3P8wru5WyQe6cTZ8fvckj/2XN4Vw==", "cpu": [ "ia32" ], @@ -1061,9 +1061,9 @@ } }, "node_modules/@next/swc-win32-x64-msvc": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.1.tgz", - "integrity": "sha512-S6K6EHDU5+1KrBDLko7/c1MNy/Ya73pIAmvKeFwsF4RmBFJSO7/7YeD4FnZ4iBdzE69PpQ4sOMU9ORKeNuxe8A==", + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.12.tgz", + "integrity": "sha512-yu8QvV53sBzoIVRHsxCHqeuS8jYq6Lrmdh0briivuh+Brsp6xjg80MAozUsBTAV9KNmY08KlX0KYTWz1lbPzEg==", "cpu": [ "x64" ], @@ -1175,6 +1175,58 @@ "node": ">=8" } }, + 
"node_modules/@parcel/core": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@parcel/core/-/core-2.12.0.tgz", + "integrity": "sha512-s+6pwEj+GfKf7vqGUzN9iSEPueUssCCQrCBUlcAfKrJe0a22hTUCjewpB0I7lNrCIULt8dkndD+sMdOrXsRl6Q==", + "peer": true, + "dependencies": { + "@mischnic/json-sourcemap": "^0.1.0", + "@parcel/cache": "2.12.0", + "@parcel/diagnostic": "2.12.0", + "@parcel/events": "2.12.0", + "@parcel/fs": "2.12.0", + "@parcel/graph": "3.2.0", + "@parcel/logger": "2.12.0", + "@parcel/package-manager": "2.12.0", + "@parcel/plugin": "2.12.0", + "@parcel/profiler": "2.12.0", + "@parcel/rust": "2.12.0", + "@parcel/source-map": "^2.1.1", + "@parcel/types": "2.12.0", + "@parcel/utils": "2.12.0", + "@parcel/workers": "2.12.0", + "abortcontroller-polyfill": "^1.1.9", + "base-x": "^3.0.8", + "browserslist": "^4.6.6", + "clone": "^2.1.1", + "dotenv": "^7.0.0", + "dotenv-expand": "^5.1.0", + "json5": "^2.2.0", + "msgpackr": "^1.9.9", + "nullthrows": "^1.1.1", + "semver": "^7.5.2" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/core/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "peer": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/@parcel/diagnostic": { "version": "2.12.0", "resolved": "https://registry.npmjs.org/@parcel/diagnostic/-/diagnostic-2.12.0.tgz", @@ -1225,6 +1277,22 @@ "@parcel/core": "^2.12.0" } }, + "node_modules/@parcel/graph": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@parcel/graph/-/graph-3.2.0.tgz", + "integrity": "sha512-xlrmCPqy58D4Fg5umV7bpwDx5Vyt7MlnQPxW68vae5+BA4GSWetfZt+Cs5dtotMG2oCHzZxhIPt7YZ7NRyQzLA==", + "peer": true, + "dependencies": { + 
"nullthrows": "^1.1.1" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, "node_modules/@parcel/logger": { "version": "2.12.0", "resolved": "https://registry.npmjs.org/@parcel/logger/-/logger-2.12.0.tgz", @@ -2476,10 +2544,11 @@ "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==" }, "node_modules/@swc/helpers": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.2.tgz", - "integrity": "sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==", + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.5.tgz", + "integrity": "sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==", "dependencies": { + "@swc/counter": "^0.1.3", "tslib": "^2.4.0" } }, @@ -2648,6 +2717,12 @@ "server-only": "^0.0.1" } }, + "node_modules/abortcontroller-polyfill": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/abortcontroller-polyfill/-/abortcontroller-polyfill-1.7.5.tgz", + "integrity": "sha512-JMJ5soJWP18htbbxJjG7bG6yuI6pRhgJ0scHHTfkUjf6wjP912xZWvM+A4sJK3gqd9E8fcPbDnOefbA9Th/FIQ==", + "peer": true + }, "node_modules/acorn": { "version": "8.11.3", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", @@ -2727,6 +2802,15 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/base-x": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/base-x/-/base-x-3.0.10.tgz", + "integrity": "sha512-7d0s06rR9rYaIWHkpfLIFICM/tkSVdoPC9qYAQRpxn9DdKNWNsKC0uk++akckyLq16Tx2WIinnZ6WRriAt6njQ==", + "peer": true, + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -2941,6 +3025,15 @@ "node": ">=4" } }, + "node_modules/clone": { + 
"version": "2.1.2", + "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", + "integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==", + "peer": true, + "engines": { + "node": ">=0.8" + } + }, "node_modules/clsx": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz", @@ -3693,6 +3786,21 @@ "url": "https://github.com/fb55/domutils?sponsor=1" } }, + "node_modules/dotenv": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-7.0.0.tgz", + "integrity": "sha512-M3NhsLbV1i6HuGzBUH8vXrtxOk+tWmzWKDMbAVSUp3Zsjm7ywFeuwrUXhmhQyRK1q5B5GGy7hcXPbj3bnfZg2g==", + "peer": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/dotenv-expand": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz", + "integrity": "sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==", + "peer": true + }, "node_modules/electron-to-chromium": { "version": "1.4.693", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.693.tgz", @@ -6241,12 +6349,12 @@ } }, "node_modules/next": { - "version": "14.1.1", - "resolved": "https://registry.npmjs.org/next/-/next-14.1.1.tgz", - "integrity": "sha512-McrGJqlGSHeaz2yTRPkEucxQKe5Zq7uPwyeHNmJaZNY4wx9E9QdxmTp310agFRoMuIYgQrCrT3petg13fSVOww==", + "version": "14.2.12", + "resolved": "https://registry.npmjs.org/next/-/next-14.2.12.tgz", + "integrity": "sha512-cDOtUSIeoOvt1skKNihdExWMTybx3exnvbFbb9ecZDIxlvIbREQzt9A5Km3Zn3PfU+IFjyYGsHS+lN9VInAGKA==", "dependencies": { - "@next/env": "14.1.1", - "@swc/helpers": "0.5.2", + "@next/env": "14.2.12", + "@swc/helpers": "0.5.5", "busboy": "1.6.0", "caniuse-lite": "^1.0.30001579", "graceful-fs": "^4.2.11", @@ -6260,18 +6368,19 @@ "node": ">=18.17.0" }, "optionalDependencies": { - "@next/swc-darwin-arm64": "14.1.1", - "@next/swc-darwin-x64": "14.1.1", - 
"@next/swc-linux-arm64-gnu": "14.1.1", - "@next/swc-linux-arm64-musl": "14.1.1", - "@next/swc-linux-x64-gnu": "14.1.1", - "@next/swc-linux-x64-musl": "14.1.1", - "@next/swc-win32-arm64-msvc": "14.1.1", - "@next/swc-win32-ia32-msvc": "14.1.1", - "@next/swc-win32-x64-msvc": "14.1.1" + "@next/swc-darwin-arm64": "14.2.12", + "@next/swc-darwin-x64": "14.2.12", + "@next/swc-linux-arm64-gnu": "14.2.12", + "@next/swc-linux-arm64-musl": "14.2.12", + "@next/swc-linux-x64-gnu": "14.2.12", + "@next/swc-linux-x64-musl": "14.2.12", + "@next/swc-win32-arm64-msvc": "14.2.12", + "@next/swc-win32-ia32-msvc": "14.2.12", + "@next/swc-win32-x64-msvc": "14.2.12" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", + "@playwright/test": "^1.41.2", "react": "^18.2.0", "react-dom": "^18.2.0", "sass": "^1.3.0" @@ -6280,6 +6389,9 @@ "@opentelemetry/api": { "optional": true }, + "@playwright/test": { + "optional": true + }, "sass": { "optional": true } @@ -9575,6 +9687,26 @@ "node": ">=6" } }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "peer": true + }, "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ -9919,6 +10051,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/typescript": { + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.2.tgz", + "integrity": "sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==", + "peer": true, + "bin": { + "tsc": "bin/tsc", + 
"tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, "node_modules/unified": { "version": "10.1.2", "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", diff --git a/docs/package.json b/docs/package.json index e0180069..9acf9b2a 100644 --- a/docs/package.json +++ b/docs/package.json @@ -8,7 +8,7 @@ "dependencies": { "@vercel/analytics": "^1.1.1", "docsgpt": "^0.4.1", - "next": "^14.1.1", + "next": "^14.2.12", "nextra": "^2.13.2", "nextra-theme-docs": "^2.13.2", "react": "^18.2.0", diff --git a/docs/pages/Deploying/Quickstart.md b/docs/pages/Deploying/Quickstart.md index 4ab4828e..a2bdc706 100644 --- a/docs/pages/Deploying/Quickstart.md +++ b/docs/pages/Deploying/Quickstart.md @@ -67,46 +67,3 @@ To run the setup on Windows, you have two options: using the Windows Subsystem f These steps should help you set up and run the project on Windows using either WSL or Git Bash/Command Prompt. **Important:** Ensure that Docker is installed and properly configured on your Windows system for these steps to work. - - -For WINDOWS: - -To run the given setup on Windows, you can use the Windows Subsystem for Linux (WSL) or a Git Bash terminal to execute similar commands. Here are the steps adapted for Windows: - -Option 1: Using Windows Subsystem for Linux (WSL): - -1. Install WSL if you haven't already. You can follow the official Microsoft documentation for installation: (https://learn.microsoft.com/en-us/windows/wsl/install). -2. After setting up WSL, open the WSL terminal. -3. Clone the repository and create the `.env` file: - ```bash - git clone https://github.com/arc53/DocsGPT.git - cd DocsGPT - echo "API_KEY=Yourkey" > .env - echo "VITE_API_STREAMING=true" >> .env - ``` -4. Run the following command to start the setup with Docker Compose: - ```bash - ./run-with-docker-compose.sh - ``` -5. Open your web browser and navigate to http://localhost:5173/. -6. To stop the setup, just press **Ctrl + C** in the WSL terminal. 
- -Option 2: Using Git Bash or Command Prompt (CMD): - -1. Install Git for Windows if you haven't already. You can download it from the official website: (https://gitforwindows.org/). -2. Open Git Bash or Command Prompt. -3. Clone the repository and create the `.env` file: - ```bash - git clone https://github.com/arc53/DocsGPT.git - cd DocsGPT - echo "API_KEY=Yourkey" > .env - echo "VITE_API_STREAMING=true" >> .env - ``` -4. Run the following command to start the setup with Docker Compose: - ```bash - ./run-with-docker-compose.sh - ``` -5. Open your web browser and navigate to http://localhost:5173/. -6. To stop the setup, just press **Ctrl + C** in the Git Bash or Command Prompt terminal. - -These steps should help you set up and run the project on Windows using either WSL or Git Bash/Command Prompt. Make sure you have Docker installed and properly configured on your Windows system for this to work. diff --git a/docs/pages/Guides/How-to-train-on-other-documentation.mdx b/docs/pages/Guides/How-to-train-on-other-documentation.mdx index e5429a04..f0149618 100644 --- a/docs/pages/Guides/How-to-train-on-other-documentation.mdx +++ b/docs/pages/Guides/How-to-train-on-other-documentation.mdx @@ -28,15 +28,15 @@ Navigate to the sidebar where you will find `Source Docs` option,here you will f ### Step 2 -Click on the `Upload icon` just beside the source docs options,now borwse and upload the document which you want to train on or select the `remote` option if you have to insert the link of the documentation. +Click on the `Upload icon` just beside the source docs options,now browse and upload the document which you want to train on or select the `remote` option if you have to insert the link of the documentation. ### Step 3 -Now you will be able to see the name of the file uploaded under the Uploaded Files ,now click on `Train`,once you click on train it might take some time to train on the document. 
You will be able to see the `Training progress` and once the training is completed you can click the `finish` button and there you go your docuemnt is uploaded. +Now you will be able to see the name of the file uploaded under the Uploaded Files ,now click on `Train`,once you click on train it might take some time to train on the document. You will be able to see the `Training progress` and once the training is completed you can click the `finish` button and there you go your document is uploaded. ### Step 4 -Go to `New chat` and from the side bar select the document you uploaded under the `Source Docs` and go ahead with your chat, now you can ask qestions regarding the document you uploaded and you will get the effective answer based on it. +Go to `New chat` and from the side bar select the document you uploaded under the `Source Docs` and go ahead with your chat, now you can ask questions regarding the document you uploaded and you will get the effective answer based on it. diff --git a/docs/pages/Guides/How-to-use-different-LLM.mdx b/docs/pages/Guides/How-to-use-different-LLM.mdx index 7df77742..c867fdcc 100644 --- a/docs/pages/Guides/How-to-use-different-LLM.mdx +++ b/docs/pages/Guides/How-to-use-different-LLM.mdx @@ -33,7 +33,7 @@ For open source you have to edit .env file with LLM_NAME with their desired LLM All the supported LLM providers are here application/llm and you can check what env variable are needed for each List of latest supported LLMs are https://github.com/arc53/DocsGPT/blob/main/application/llm/llm_creator.py ### Step 3 -Visit application/llm and select the file of your selected llm and there you will find the speicifc requirements needed to be filled in order to use it,i.e API key of that llm. +Visit application/llm and select the file of your selected llm and there you will find the specific requirements needed to be filled in order to use it,i.e API key of that llm. 
### For OpenAI-Compatible Endpoints: diff --git a/extensions/react-widget/package-lock.json b/extensions/react-widget/package-lock.json index 610909de..1e5fb367 100644 --- a/extensions/react-widget/package-lock.json +++ b/extensions/react-widget/package-lock.json @@ -1885,6 +1885,17 @@ "node": ">=6.0.0" } }, + "node_modules/@jridgewell/source-map": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz", + "integrity": "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25" + } + }, "node_modules/@jridgewell/sourcemap-codec": { "version": "1.4.15", "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", @@ -2258,7 +2269,6 @@ "version": "2.12.0", "resolved": "https://registry.npmjs.org/@parcel/core/-/core-2.12.0.tgz", "integrity": "sha512-s+6pwEj+GfKf7vqGUzN9iSEPueUssCCQrCBUlcAfKrJe0a22hTUCjewpB0I7lNrCIULt8dkndD+sMdOrXsRl6Q==", - "dev": true, "dependencies": { "@mischnic/json-sourcemap": "^0.1.0", "@parcel/cache": "2.12.0", @@ -2298,7 +2308,6 @@ "version": "7.6.2", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", - "dev": true, "bin": { "semver": "bin/semver.js" }, @@ -2360,7 +2369,6 @@ "version": "3.2.0", "resolved": "https://registry.npmjs.org/@parcel/graph/-/graph-3.2.0.tgz", "integrity": "sha512-xlrmCPqy58D4Fg5umV7bpwDx5Vyt7MlnQPxW68vae5+BA4GSWetfZt+Cs5dtotMG2oCHzZxhIPt7YZ7NRyQzLA==", - "dev": true, "dependencies": { "nullthrows": "^1.1.1" }, @@ -4560,7 +4568,7 @@ "version": "0.5.11", "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.11.tgz", "integrity": 
"sha512-YNlnKRWF2sVojTpIyzwou9XoTNbzbzONwRhOoniEioF1AtaitTvVZblaQRrAzChWQ1bLYyYSWzM18y4WwgzJ+A==", - "dev": true, + "devOptional": true, "dependencies": { "tslib": "^2.4.0" } @@ -4590,6 +4598,13 @@ "@types/trusted-types": "*" } }, + "node_modules/@types/estree": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "dev": true, + "peer": true + }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -4621,6 +4636,16 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/node": { + "version": "22.5.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.5.tgz", + "integrity": "sha512-Xjs4y5UPO/CLdzpgR6GirZJx36yScjh73+2NlLlkFRSoQN8B0DpfXPdZGnvVmLRLOsqDpOfTNv7D9trgGhmOIA==", + "dev": true, + "peer": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, "node_modules/@types/parse-json": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz", @@ -4662,11 +4687,208 @@ "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", "dev": true }, + "node_modules/@webassemblyjs/ast": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz", + "integrity": "sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/helper-numbers": "1.11.6", + "@webassemblyjs/helper-wasm-bytecode": "1.11.6" + } + }, + "node_modules/@webassemblyjs/floating-point-hex-parser": { + "version": "1.11.6", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz", + "integrity": 
"sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/helper-api-error": { + "version": "1.11.6", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz", + "integrity": "sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/helper-buffer": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz", + "integrity": "sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/helper-numbers": { + "version": "1.11.6", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz", + "integrity": "sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/floating-point-hex-parser": "1.11.6", + "@webassemblyjs/helper-api-error": "1.11.6", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/helper-wasm-bytecode": { + "version": "1.11.6", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz", + "integrity": "sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/helper-wasm-section": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz", + "integrity": "sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.12.1", + 
"@webassemblyjs/helper-buffer": "1.12.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.6", + "@webassemblyjs/wasm-gen": "1.12.1" + } + }, + "node_modules/@webassemblyjs/ieee754": { + "version": "1.11.6", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz", + "integrity": "sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==", + "dev": true, + "peer": true, + "dependencies": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "node_modules/@webassemblyjs/leb128": { + "version": "1.11.6", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz", + "integrity": "sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==", + "dev": true, + "peer": true, + "dependencies": { + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/utf8": { + "version": "1.11.6", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz", + "integrity": "sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/wasm-edit": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz", + "integrity": "sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-buffer": "1.12.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.6", + "@webassemblyjs/helper-wasm-section": "1.12.1", + "@webassemblyjs/wasm-gen": "1.12.1", + "@webassemblyjs/wasm-opt": "1.12.1", + "@webassemblyjs/wasm-parser": "1.12.1", + "@webassemblyjs/wast-printer": "1.12.1" + } + }, + "node_modules/@webassemblyjs/wasm-gen": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz", + "integrity": 
"sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.6", + "@webassemblyjs/ieee754": "1.11.6", + "@webassemblyjs/leb128": "1.11.6", + "@webassemblyjs/utf8": "1.11.6" + } + }, + "node_modules/@webassemblyjs/wasm-opt": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz", + "integrity": "sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-buffer": "1.12.1", + "@webassemblyjs/wasm-gen": "1.12.1", + "@webassemblyjs/wasm-parser": "1.12.1" + } + }, + "node_modules/@webassemblyjs/wasm-parser": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz", + "integrity": "sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/helper-api-error": "1.11.6", + "@webassemblyjs/helper-wasm-bytecode": "1.11.6", + "@webassemblyjs/ieee754": "1.11.6", + "@webassemblyjs/leb128": "1.11.6", + "@webassemblyjs/utf8": "1.11.6" + } + }, + "node_modules/@webassemblyjs/wast-printer": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz", + "integrity": "sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.12.1", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": 
"sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", + "dev": true, + "peer": true + }, + "node_modules/@xtuc/long": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", + "dev": true, + "peer": true + }, "node_modules/abortcontroller-polyfill": { "version": "1.7.5", "resolved": "https://registry.npmjs.org/abortcontroller-polyfill/-/abortcontroller-polyfill-1.7.5.tgz", - "integrity": "sha512-JMJ5soJWP18htbbxJjG7bG6yuI6pRhgJ0scHHTfkUjf6wjP912xZWvM+A4sJK3gqd9E8fcPbDnOefbA9Th/FIQ==", - "dev": true + "integrity": "sha512-JMJ5soJWP18htbbxJjG7bG6yuI6pRhgJ0scHHTfkUjf6wjP912xZWvM+A4sJK3gqd9E8fcPbDnOefbA9Th/FIQ==" + }, + "node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "dev": true, + "peer": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-attributes": { + "version": "1.9.5", + "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", + "dev": true, + "peer": true, + "peerDependencies": { + "acorn": "^8" + } }, "node_modules/ajv": { "version": "6.12.6", @@ -4771,7 +4993,6 @@ "version": "3.0.9", "resolved": "https://registry.npmjs.org/base-x/-/base-x-3.0.9.tgz", "integrity": "sha512-H7JU6iBHTal1gp56aKoaa//YUxEaAOUiydvrV/pILqIHXTtqxSkATOnDA2u+jZ/61sD+L/412+7kzXRtWukhpQ==", - "dev": true, "dependencies": { "safe-buffer": "^5.0.1" } @@ -4832,6 +5053,13 @@ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, + "node_modules/buffer-from": { + "version": "1.1.2", + 
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "peer": true + }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -4922,7 +5150,6 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", "integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==", - "dev": true, "engines": { "node": ">=0.8" } @@ -5132,7 +5359,6 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-7.0.0.tgz", "integrity": "sha512-M3NhsLbV1i6HuGzBUH8vXrtxOk+tWmzWKDMbAVSUp3Zsjm7ywFeuwrUXhmhQyRK1q5B5GGy7hcXPbj3bnfZg2g==", - "dev": true, "engines": { "node": ">=6" } @@ -5140,8 +5366,7 @@ "node_modules/dotenv-expand": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz", - "integrity": "sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==", - "dev": true + "integrity": "sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==" }, "node_modules/electron-to-chromium": { "version": "1.4.788", @@ -5157,6 +5382,20 @@ "node": ">= 4" } }, + "node_modules/enhanced-resolve": { + "version": "5.17.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz", + "integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==", + "dev": true, + "peer": true, + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/entities": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", @@ -5185,6 +5424,13 @@ "is-arrayish": "^0.2.1" } }, + 
"node_modules/es-module-lexer": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.5.4.tgz", + "integrity": "sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw==", + "dev": true, + "peer": true + }, "node_modules/escalade": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", @@ -5201,6 +5447,53 @@ "node": ">=0.8.0" } }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "peer": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "peer": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "peer": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "peer": true, + "engines": { + "node": ">=4.0" + } + }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", 
@@ -5210,6 +5503,16 @@ "node": ">=0.10.0" } }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.8.x" + } + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -5300,6 +5603,13 @@ "node": ">=6" } }, + "node_modules/glob-to-regexp": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", + "dev": true, + "peer": true + }, "node_modules/globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -5308,6 +5618,13 @@ "node": ">=4" } }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "peer": true + }, "node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -5513,6 +5830,47 @@ "node": ">=0.12.0" } }, + "node_modules/jest-worker": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", + "dev": true, + "peer": true, + "dependencies": { + "@types/node": "*", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/jest-worker/node_modules/has-flag": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "peer": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -5813,6 +6171,16 @@ "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-6.1.0.tgz", "integrity": "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==" }, + "node_modules/loader-runner": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", + "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6.11.5" + } + }, "node_modules/loader-utils": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", @@ -5902,10 +6270,17 @@ "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", "license": "MIT" }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "peer": true + }, 
"node_modules/micromatch": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", - "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" @@ -5914,6 +6289,29 @@ "node": ">=8.6" } }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "peer": true, + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -5976,6 +6374,13 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "peer": true + }, "node_modules/node-addon-api": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.0.tgz", @@ -8750,6 +9155,16 @@ "node": ">=6" } }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "peer": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, "node_modules/react": { "version": "18.3.1", "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", @@ -8888,7 +9303,6 @@ "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, "funding": [ { "type": "github", @@ -8938,6 +9352,16 @@ "semver": "bin/semver.js" } }, + "node_modules/serialize-javascript": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", + "dev": true, + "peer": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, "node_modules/shallowequal": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz", @@ -8959,6 +9383,17 @@ "node": ">=0.10.0" } }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "peer": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, "node_modules/stable": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", @@ -9025,6 +9460,16 @@ "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==" }, + "node_modules/tapable": { + 
"version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, "node_modules/term-size": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/term-size/-/term-size-2.2.1.tgz", @@ -9037,6 +9482,86 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/terser": { + "version": "5.33.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.33.0.tgz", + "integrity": "sha512-JuPVaB7s1gdFKPKTelwUyRq5Sid2A3Gko2S0PncwdBq7kN9Ti9HPWDQ06MPsEDGsZeVESjKEnyGy68quBk1w6g==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/source-map": "^0.3.3", + "acorn": "^8.8.2", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "bin": { + "terser": "bin/terser" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/terser-webpack-plugin": { + "version": "5.3.10", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz", + "integrity": "sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.20", + "jest-worker": "^27.4.5", + "schema-utils": "^3.1.1", + "serialize-javascript": "^6.0.1", + "terser": "^5.26.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "uglify-js": { + "optional": true + } + } + }, + "node_modules/terser-webpack-plugin/node_modules/schema-utils": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", + "integrity": 
"sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", + "dev": true, + "peer": true, + "dependencies": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/terser/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true, + "peer": true + }, "node_modules/timsort": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/timsort/-/timsort-0.3.0.tgz", @@ -9083,7 +9608,6 @@ "version": "5.4.5", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", - "dev": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -9098,6 +9622,13 @@ "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", "license": "MIT" }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true, + "peer": true + }, "node_modules/unicode-canonical-property-names-ecmascript": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", @@ -9184,11 +9715,101 @@ "node": ">= 4" } }, + "node_modules/watchpack": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz", + "integrity": 
"sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==", + "dev": true, + "peer": true, + "dependencies": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + }, + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/weak-lru-cache": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/weak-lru-cache/-/weak-lru-cache-1.2.2.tgz", "integrity": "sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==" }, + "node_modules/webpack": { + "version": "5.94.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz", + "integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==", + "dev": true, + "peer": true, + "dependencies": { + "@types/estree": "^1.0.5", + "@webassemblyjs/ast": "^1.12.1", + "@webassemblyjs/wasm-edit": "^1.12.1", + "@webassemblyjs/wasm-parser": "^1.12.1", + "acorn": "^8.7.1", + "acorn-import-attributes": "^1.9.5", + "browserslist": "^4.21.10", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^5.17.1", + "es-module-lexer": "^1.2.1", + "eslint-scope": "5.1.1", + "events": "^3.2.0", + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.2.11", + "json-parse-even-better-errors": "^2.3.1", + "loader-runner": "^4.2.0", + "mime-types": "^2.1.27", + "neo-async": "^2.6.2", + "schema-utils": "^3.2.0", + "tapable": "^2.1.1", + "terser-webpack-plugin": "^5.3.10", + "watchpack": "^2.4.1", + "webpack-sources": "^3.2.3" + }, + "bin": { + "webpack": "bin/webpack.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependenciesMeta": { + "webpack-cli": { + "optional": true + } + } + }, + "node_modules/webpack-sources": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": 
"sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "dev": true, + "peer": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webpack/node_modules/schema-utils": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", + "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", + "dev": true, + "peer": true, + "dependencies": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, "node_modules/yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", diff --git a/extensions/react-widget/package.json b/extensions/react-widget/package.json index 813478e2..d449d0a3 100644 --- a/extensions/react-widget/package.json +++ b/extensions/react-widget/package.json @@ -1,5 +1,5 @@ { - "name": "docsgpt-react", + "name": "docsgpt", "version": "0.4.2", "private": false, "description": "DocsGPT 🦖 is an innovative open-source tool designed to simplify the retrieval of information from project documentation using advanced GPT models 🤖.", @@ -11,6 +11,18 @@ "dist", "package.json" ], + "targets": { + "modern": { + "engines": { + "browsers": "Chrome 80" + } + }, + "legacy": { + "engines": { + "browsers": "> 0.5%, last 2 versions, not dead" + } + } + }, "@parcel/resolver-default": { "packageExports": true }, diff --git a/extensions/react-widget/src/assets/dislike.svg b/extensions/react-widget/src/assets/dislike.svg new file mode 100644 index 00000000..ec1d24c2 --- /dev/null +++ b/extensions/react-widget/src/assets/dislike.svg @@ -0,0 +1,4 @@ + + + + diff --git a/extensions/react-widget/src/assets/like.svg b/extensions/react-widget/src/assets/like.svg new file mode 100644 index 00000000..c49604ed --- 
/dev/null +++ b/extensions/react-widget/src/assets/like.svg @@ -0,0 +1,4 @@ + + + + diff --git a/extensions/react-widget/src/components/DocsGPTWidget.tsx b/extensions/react-widget/src/components/DocsGPTWidget.tsx index bc6adb6e..83defbcf 100644 --- a/extensions/react-widget/src/components/DocsGPTWidget.tsx +++ b/extensions/react-widget/src/components/DocsGPTWidget.tsx @@ -1,11 +1,13 @@ "use client"; -import React from 'react' +import React, { useRef } from 'react' import DOMPurify from 'dompurify'; import styled, { keyframes, createGlobalStyle } from 'styled-components'; import { PaperPlaneIcon, RocketIcon, ExclamationTriangleIcon, Cross2Icon } from '@radix-ui/react-icons'; -import { MESSAGE_TYPE, Query, Status, WidgetProps } from '../types/index'; -import { fetchAnswerStreaming } from '../requests/streamingApi'; +import { FEEDBACK, MESSAGE_TYPE, Query, Status, WidgetProps } from '../types/index'; +import { fetchAnswerStreaming, sendFeedback } from '../requests/streamingApi'; import { ThemeProvider } from 'styled-components'; +import Like from "../assets/like.svg" +import Dislike from "../assets/dislike.svg" import MarkdownIt from 'markdown-it'; const themes = { dark: { @@ -63,6 +65,10 @@ const GlobalStyles = createGlobalStyle` background-color: #646464; color: #fff !important; } +.response code { + white-space: pre-wrap !important; + line-break: loose !important; +} `; const Overlay = styled.div` position: fixed; @@ -195,12 +201,24 @@ const Conversation = styled.div<{ size: string }>` width:${props => props.size === 'large' ? '90vw' : props.size === 'medium' ? '60vw' : '400px'} !important; } `; - +const Feedback = styled.div` + background-color: transparent; + font-weight: normal; + gap: 12px; + display: flex; + padding: 6px; + clear: both; +`; const MessageBubble = styled.div<{ type: MESSAGE_TYPE }>` - display: flex; + display: block; font-size: 16px; - justify-content: ${props => props.type === 'QUESTION' ? 
'flex-end' : 'flex-start'}; - margin: 0.5rem; + position: relative; + width: 100%;; + float: right; + margin: 0rem; + &:hover ${Feedback} * { + visibility: visible !important; + } `; const Message = styled.div<{ type: MESSAGE_TYPE }>` background: ${props => props.type === 'QUESTION' ? @@ -208,6 +226,7 @@ const Message = styled.div<{ type: MESSAGE_TYPE }>` props.theme.secondary.bg}; color: ${props => props.type === 'ANSWER' ? props.theme.primary.text : '#fff'}; border: none; + float: ${props => props.type === 'QUESTION' ? 'right' : 'left'}; max-width: ${props => props.type === 'ANSWER' ? '100%' : '80'}; overflow: auto; margin: 4px; @@ -315,6 +334,7 @@ const HeroDescription = styled.p` font-size: 14px; line-height: 1.5; `; + const Hero = ({ title, description, theme }: { title: string, description: string, theme: string }) => { return ( <> @@ -345,7 +365,8 @@ export const DocsGPTWidget = ({ size = 'small', theme = 'dark', buttonIcon = 'https://d3dg1063dc54p9.cloudfront.net/widget/message.svg', - buttonBg = 'linear-gradient(to bottom right, #5AF0EC, #E80D9D)' + buttonBg = 'linear-gradient(to bottom right, #5AF0EC, #E80D9D)', + collectFeedback = true }: WidgetProps) => { const [prompt, setPrompt] = React.useState(''); const [status, setStatus] = React.useState('idle'); @@ -353,6 +374,7 @@ export const DocsGPTWidget = ({ const [conversationId, setConversationId] = React.useState(null) const [open, setOpen] = React.useState(false) const [eventInterrupt, setEventInterrupt] = React.useState(false); //click or scroll by user while autoScrolling + const isBubbleHovered = useRef(false) const endMessageRef = React.useRef(null); const md = new MarkdownIt(); @@ -376,6 +398,36 @@ export const DocsGPTWidget = ({ !eventInterrupt && scrollToBottom(endMessageRef.current); }, [queries.length, queries[queries.length - 1]?.response]); + async function handleFeedback(feedback: FEEDBACK, index: number) { + let query = queries[index] + if (!query.response) + return; + if (query.feedback != 
feedback) { + sendFeedback({ + question: query.prompt, + answer: query.response, + feedback: feedback, + apikey: apiKey + }, apiHost) + .then(res => { + if (res.status == 200) { + query.feedback = feedback; + setQueries((prev: Query[]) => { + return prev.map((q, i) => (i === index ? query : q)); + }); + } + }) + .catch(err => console.log("Connection failed",err)) + } + else { + delete query.feedback; + setQueries((prev: Query[]) => { + return prev.map((q, i) => (i === index ? query : q)); + }); + + } + } + async function stream(question: string) { setStatus('loading') try { @@ -473,7 +525,7 @@ export const DocsGPTWidget = ({ } { - query.response ? + query.response ? { isBubbleHovered.current = true }} type='ANSWER'> + + {collectFeedback && + + handleFeedback("LIKE", index)} /> + handleFeedback("DISLIKE", index)} /> + } :
{ @@ -518,7 +588,7 @@ export const DocsGPTWidget = ({ type='text' placeholder="What do you want to do?" /> + disabled={prompt.trim().length == 0 || status !== 'idle'}> diff --git a/extensions/react-widget/src/requests/streamingApi.ts b/extensions/react-widget/src/requests/streamingApi.ts index b594915f..9cb9fddc 100644 --- a/extensions/react-widget/src/requests/streamingApi.ts +++ b/extensions/react-widget/src/requests/streamingApi.ts @@ -1,3 +1,4 @@ +import { FEEDBACK } from "@/types"; interface HistoryItem { prompt: string; response?: string; @@ -11,6 +12,12 @@ interface FetchAnswerStreamingProps { apiHost?: string; onEvent?: (event: MessageEvent) => void; } +interface FeedbackPayload { + question: string; + answer: string; + apikey: string; + feedback: FEEDBACK; +} export function fetchAnswerStreaming({ question = '', apiKey = '', @@ -20,12 +27,12 @@ export function fetchAnswerStreaming({ onEvent = () => { console.log("Event triggered, but no handler provided."); } }: FetchAnswerStreamingProps): Promise { return new Promise((resolve, reject) => { - const body= { + const body = { question: question, history: JSON.stringify(history), conversation_id: conversationId, model: 'default', - api_key:apiKey + api_key: apiKey }; fetch(apiHost + '/stream', { method: 'POST', @@ -80,4 +87,20 @@ export function fetchAnswerStreaming({ reject(error); }); }); -} \ No newline at end of file +} + + +export const sendFeedback = (payload: FeedbackPayload,apiHost:string): Promise => { + return fetch(`${apiHost}/api/feedback`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + question: payload.question, + answer: payload.answer, + feedback: payload.feedback, + api_key:payload.apikey + }), + }); +}; \ No newline at end of file diff --git a/extensions/react-widget/src/types/index.ts b/extensions/react-widget/src/types/index.ts index cb46f06b..a55b6342 100644 --- a/extensions/react-widget/src/types/index.ts +++ 
b/extensions/react-widget/src/types/index.ts @@ -23,4 +23,5 @@ export interface WidgetProps { theme?:THEME, buttonIcon?:string; buttonBg?:string; + collectFeedback?:boolean } \ No newline at end of file diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 0f7675fa..4087e4f5 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -8,50 +8,65 @@ "name": "frontend", "version": "0.0.0", "dependencies": { - "@reduxjs/toolkit": "^1.9.2", - "@vercel/analytics": "^0.1.10", - "i18next": "^23.14.0", + "@reduxjs/toolkit": "^2.2.7", + "chart.js": "^4.4.4", + "i18next": "^23.15.1", "i18next-browser-languagedetector": "^8.0.0", "prop-types": "^15.8.1", "react": "^18.2.0", + "react-chartjs-2": "^5.2.0", "react-copy-to-clipboard": "^5.1.0", "react-dom": "^18.3.1", "react-dropzone": "^14.2.3", - "react-i18next": "^15.0.1", + "react-i18next": "^15.0.2", "react-markdown": "^9.0.1", "react-redux": "^8.0.5", "react-router-dom": "^6.8.1", "react-syntax-highlighter": "^15.5.0", - "remark-gfm": "^4.0.0" + "rehype-katex": "^7.0.1", + "remark-gfm": "^4.0.0", + "remark-math": "^6.0.0" }, "devDependencies": { "@types/react": "^18.0.27", "@types/react-dom": "^18.3.0", - "@types/react-syntax-highlighter": "^15.5.6", + "@types/react-syntax-highlighter": "^15.5.13", "@typescript-eslint/eslint-plugin": "^5.51.0", "@typescript-eslint/parser": "^5.62.0", "@vitejs/plugin-react": "^4.3.1", "autoprefixer": "^10.4.13", - "eslint": "^8.33.0", - "eslint-config-prettier": "^8.6.0", + "eslint": "^8.57.1", + "eslint-config-prettier": "^9.1.0", "eslint-config-standard-with-typescript": "^34.0.0", - "eslint-plugin-import": "^2.27.5", + "eslint-plugin-import": "^2.30.0", "eslint-plugin-n": "^15.7.0", "eslint-plugin-prettier": "^5.2.1", "eslint-plugin-promise": "^6.6.0", "eslint-plugin-react": "^7.35.0", - "eslint-plugin-unused-imports": "^2.0.0", + "eslint-plugin-unused-imports": "^4.1.4", "husky": "^8.0.0", - "lint-staged": "^15.2.8", + "lint-staged": "^15.2.10", 
"postcss": "^8.4.41", "prettier": "^3.3.3", - "prettier-plugin-tailwindcss": "^0.2.2", - "tailwindcss": "^3.2.4", - "typescript": "^4.9.5", - "vite": "^5.3.5", + "prettier-plugin-tailwindcss": "^0.6.8", + "tailwindcss": "^3.4.11", + "typescript": "^5.6.2", + "vite": "^5.4.6", "vite-plugin-svgr": "^4.2.0" } }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@ampproject/remapping": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", @@ -740,15 +755,39 @@ "node": ">=12" } }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.1.tgz", + "integrity": "sha512-m4DVN9ZqskZoLU5GlWZadwDnYo3vAEydiUayB9widCl9ffWx2IvPnp6n3on5rJmziJSw9Bv+Z3ChDVdMwXCY8Q==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, "node_modules/@eslint/eslintrc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.4.1.tgz", - "integrity": "sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==", + 
"version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", - "espree": "^9.4.0", + "espree": "^9.6.0", "globals": "^13.19.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", @@ -764,9 +803,9 @@ } }, "node_modules/@eslint/eslintrc/node_modules/globals": { - "version": "13.20.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", - "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -778,14 +817,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.8", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", - "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", "dev": true, "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", 
- "debug": "^4.1.1", + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", "minimatch": "^3.0.5" }, "engines": { @@ -806,11 +855,108 @@ } }, "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", "dev": true }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": 
">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/@jridgewell/gen-mapping": { "version": "0.1.1", "resolved": 
"https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", @@ -858,6 +1004,11 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@kurkle/color": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.2.tgz", + "integrity": "sha512-fuscdXJ9G1qb7W8VdHi+IwRqij3lBkosAm4ydQtEmbY58OzHXqQhvlxqEkoz0yssNVn38bcpRWgA9PP+OGoisw==" + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -893,6 +1044,16 @@ "node": ">= 8" } }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14" + } + }, "node_modules/@pkgr/core": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.1.tgz", @@ -906,18 +1067,18 @@ } }, "node_modules/@reduxjs/toolkit": { - "version": "1.9.2", - "resolved": "https://registry.npmjs.org/@reduxjs/toolkit/-/toolkit-1.9.2.tgz", - "integrity": "sha512-5ZAZ7hwAKWSii5T6NTPmgIBUqyVdlDs+6JjThz6J6dmHLDm6zCzv2OjHIFAi3Vvs1qjmXU0bm6eBojukYXjVMQ==", + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/@reduxjs/toolkit/-/toolkit-2.2.7.tgz", + "integrity": "sha512-faI3cZbSdFb8yv9dhDTmGwclW0vk0z5o1cia+kf7gCbaCwHI5e+7tP57mJUv22pNcNbeA62GSrPpfrUfdXcQ6g==", "dependencies": { - "immer": "^9.0.16", - "redux": "^4.2.0", - "redux-thunk": "^2.4.2", - "reselect": "^4.1.7" + "immer": "^10.0.3", + "redux": "^5.0.1", + "redux-thunk": "^3.1.0", + "reselect": "^5.1.0" }, "peerDependencies": { "react": "^16.9.0 || ^17.0.0 || ^18", - "react-redux": "^7.2.1 || ^8.0.2" + "react-redux": "^7.2.1 || ^8.1.3 || ^9.0.0" }, "peerDependenciesMeta": { "react": { @@ -959,9 +1120,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - 
"version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.20.0.tgz", - "integrity": "sha512-TSpWzflCc4VGAUJZlPpgAJE1+V60MePDQnBd7PPkpuEmOy8i87aL6tinFGKBFKuEDikYpig72QzdT3QPYIi+oA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz", + "integrity": "sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==", "cpu": [ "arm" ], @@ -972,9 +1133,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.20.0.tgz", - "integrity": "sha512-u00Ro/nok7oGzVuh/FMYfNoGqxU5CPWz1mxV85S2w9LxHR8OoMQBuSk+3BKVIDYgkpeOET5yXkx90OYFc+ytpQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz", + "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==", "cpu": [ "arm64" ], @@ -985,9 +1146,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.20.0.tgz", - "integrity": "sha512-uFVfvzvsdGtlSLuL0ZlvPJvl6ZmrH4CBwLGEFPe7hUmf7htGAN+aXo43R/V6LATyxlKVC/m6UsLb7jbG+LG39Q==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz", + "integrity": "sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==", "cpu": [ "arm64" ], @@ -998,9 +1159,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.20.0.tgz", - "integrity": "sha512-xbrMDdlev53vNXexEa6l0LffojxhqDTBeL+VUxuuIXys4x6xyvbKq5XqTXBCEUA8ty8iEJblHvFaWRJTk/icAQ==", + "version": 
"4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz", + "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==", "cpu": [ "x64" ], @@ -1011,9 +1172,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.20.0.tgz", - "integrity": "sha512-jMYvxZwGmoHFBTbr12Xc6wOdc2xA5tF5F2q6t7Rcfab68TT0n+r7dgawD4qhPEvasDsVpQi+MgDzj2faOLsZjA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz", + "integrity": "sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==", "cpu": [ "arm" ], @@ -1024,9 +1185,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.20.0.tgz", - "integrity": "sha512-1asSTl4HKuIHIB1GcdFHNNZhxAYEdqML/MW4QmPS4G0ivbEcBr1JKlFLKsIRqjSwOBkdItn3/ZDlyvZ/N6KPlw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz", + "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==", "cpu": [ "arm" ], @@ -1037,9 +1198,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.20.0.tgz", - "integrity": "sha512-COBb8Bkx56KldOYJfMf6wKeYJrtJ9vEgBRAOkfw6Ens0tnmzPqvlpjZiLgkhg6cA3DGzCmLmmd319pmHvKWWlQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz", + "integrity": 
"sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==", "cpu": [ "arm64" ], @@ -1050,9 +1211,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.20.0.tgz", - "integrity": "sha512-+it+mBSyMslVQa8wSPvBx53fYuZK/oLTu5RJoXogjk6x7Q7sz1GNRsXWjn6SwyJm8E/oMjNVwPhmNdIjwP135Q==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz", + "integrity": "sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==", "cpu": [ "arm64" ], @@ -1063,9 +1224,9 @@ ] }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.20.0.tgz", - "integrity": "sha512-yAMvqhPfGKsAxHN8I4+jE0CpLWD8cv4z7CK7BMmhjDuz606Q2tFKkWRY8bHR9JQXYcoLfopo5TTqzxgPUjUMfw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz", + "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==", "cpu": [ "ppc64" ], @@ -1076,9 +1237,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.20.0.tgz", - "integrity": "sha512-qmuxFpfmi/2SUkAw95TtNq/w/I7Gpjurx609OOOV7U4vhvUhBcftcmXwl3rqAek+ADBwSjIC4IVNLiszoj3dPA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz", + "integrity": "sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==", "cpu": [ "riscv64" ], @@ -1089,9 +1250,9 @@ ] }, 
"node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.20.0.tgz", - "integrity": "sha512-I0BtGXddHSHjV1mqTNkgUZLnS3WtsqebAXv11D5BZE/gfw5KoyXSAXVqyJximQXNvNzUo4GKlCK/dIwXlz+jlg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz", + "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==", "cpu": [ "s390x" ], @@ -1102,9 +1263,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.20.0.tgz", - "integrity": "sha512-y+eoL2I3iphUg9tN9GB6ku1FA8kOfmF4oUEWhztDJ4KXJy1agk/9+pejOuZkNFhRwHAOxMsBPLbXPd6mJiCwew==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz", + "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==", "cpu": [ "x64" ], @@ -1115,9 +1276,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.20.0.tgz", - "integrity": "sha512-hM3nhW40kBNYUkZb/r9k2FKK+/MnKglX7UYd4ZUy5DJs8/sMsIbqWK2piZtVGE3kcXVNj3B2IrUYROJMMCikNg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz", + "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==", "cpu": [ "x64" ], @@ -1128,9 +1289,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.20.0.tgz", - "integrity": 
"sha512-psegMvP+Ik/Bg7QRJbv8w8PAytPA7Uo8fpFjXyCRHWm6Nt42L+JtoqH8eDQ5hRP7/XW2UiIriy1Z46jf0Oa1kA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz", + "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==", "cpu": [ "arm64" ], @@ -1141,9 +1302,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.20.0.tgz", - "integrity": "sha512-GabekH3w4lgAJpVxkk7hUzUf2hICSQO0a/BLFA11/RMxQT92MabKAqyubzDZmMOC/hcJNlc+rrypzNzYl4Dx7A==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz", + "integrity": "sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==", "cpu": [ "ia32" ], @@ -1154,9 +1315,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.20.0.tgz", - "integrity": "sha512-aJ1EJSuTdGnM6qbVC4B5DSmozPTqIag9fSzXRNNo+humQLG89XpPgdt16Ia56ORD7s+H8Pmyx44uczDQ0yDzpg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz", + "integrity": "sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==", "cpu": [ "x64" ], @@ -1166,6 +1327,12 @@ "win32" ] }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true + }, "node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "8.0.0", "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz", @@ -1470,6 +1637,12 @@ "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, + "node_modules/@types/katex": { + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.16.7.tgz", + "integrity": "sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ==", + "license": "MIT" + }, "node_modules/@types/mdast": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", @@ -1508,9 +1681,9 @@ } }, "node_modules/@types/react-syntax-highlighter": { - "version": "15.5.6", - "resolved": "https://registry.npmjs.org/@types/react-syntax-highlighter/-/react-syntax-highlighter-15.5.6.tgz", - "integrity": "sha512-i7wFuLbIAFlabTeD2I1cLjEOrG/xdMa/rpx2zwzAoGHuXJDhSqp9BSfDlMHSh9JSuNfxHk9eEmMX6D55GiyjGg==", + "version": "15.5.13", + "resolved": "https://registry.npmjs.org/@types/react-syntax-highlighter/-/react-syntax-highlighter-15.5.13.tgz", + "integrity": "sha512-uLGJ87j6Sz8UaBAooU0T6lWJ0dBmjZgN1PZTrj05TNql2/XpC6+4HhMT5syIdFUUt+FASfCeLLv4kBygNU+8qA==", "dev": true, "dependencies": { "@types/react": "*" @@ -1915,14 +2088,6 @@ "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==" }, - "node_modules/@vercel/analytics": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/@vercel/analytics/-/analytics-0.1.10.tgz", - "integrity": "sha512-jjJ8GzcPnQp0cMxpfYoUycMRBtDiaIeyVjZPiEPe99Dj1PdjMzAFYEASiV/hpNsXHkpcNYCveDFh6jnmh0YSDQ==", - "peerDependencies": { - "react": "^16.8||^17||^18" - } - }, "node_modules/@vitejs/plugin-react": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.3.1.tgz", @@ -1943,9 
+2108,9 @@ } }, "node_modules/acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -1963,26 +2128,6 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/acorn-node": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/acorn-node/-/acorn-node-1.8.2.tgz", - "integrity": "sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==", - "dev": true, - "dependencies": { - "acorn": "^7.0.0", - "acorn-walk": "^7.0.0", - "xtend": "^4.0.2" - } - }, - "node_modules/acorn-walk": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", - "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -2035,6 +2180,12 @@ "node": ">=4" } }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true + }, "node_modules/anymatch": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", @@ -2125,15 +2276,35 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/array.prototype.findlastindex": { + "version": "1.2.5", + "resolved": 
"https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz", + "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/array.prototype.flat": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", - "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz", + "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", "es-shim-unscopables": "^1.0.0" }, "engines": { @@ -2503,6 +2674,18 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/chart.js": { + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.4.tgz", + "integrity": "sha512-emICKGBABnxhMjUjlYRR12PmOXhJ2eJjEHL2/dZlWjxRAZT1D8xplLFq5M0tMQK8ja+wBS/tuVEJB5C6r7VxJA==", + "license": "MIT", + "dependencies": { + "@kurkle/color": "^0.3.0" + }, + "engines": { + "pnpm": ">=8" + } + }, "node_modules/chokidar": { "version": "3.5.3", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", @@ -2814,15 +2997,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/defined": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/defined/-/defined-1.0.1.tgz", - "integrity": "sha512-hsBd2qSVCRE+5PmNdHt1uzyrFu5d3RwmFDKzyNZMFq/EwDNJF7Ee5+D5oEKF0hU6LhtoUF1macFvOe4AskQC1Q==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -2831,23 +3005,6 @@ "node": ">=6" } }, - "node_modules/detective": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/detective/-/detective-5.2.1.tgz", - "integrity": "sha512-v9XE1zRnz1wRtgurGu0Bs8uHKFSTdteYZNbIPFVhUZ39L/S79ppMpdmVOZAnoz1jfEFodc48n6MX483Xo3t1yw==", - "dev": true, - "dependencies": { - "acorn-node": "^1.8.2", - "defined": "^1.0.0", - "minimist": "^1.2.6" - }, - "bin": { - "detective": "bin/detective.js" - }, - "engines": { - "node": ">=0.8.0" - } - }, "node_modules/devlop": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", @@ -2912,6 +3069,12 @@ "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", "dev": true }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, "node_modules/electron-to-chromium": { "version": "1.5.11", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.11.tgz", @@ -2928,7 +3091,6 @@ "version": "4.5.0", "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "dev": true, "engines": { "node": ">=0.12" }, @@ -3172,49 +3334,48 @@ } }, "node_modules/eslint": { - "version": "8.33.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.33.0.tgz", - "integrity": 
"sha512-WjOpFQgKK8VrCnAtl8We0SUOy/oVZ5NHykyMiagV1M9r8IFpIJX7DduK6n1mpfhlG7T1NLWm2SuD8QB7KFySaA==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", "dev": true, "dependencies": { - "@eslint/eslintrc": "^1.4.1", - "@humanwhocodes/config-array": "^0.11.8", + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.10.0", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.1.1", - "eslint-utils": "^3.0.0", - "eslint-visitor-keys": "^3.3.0", - "espree": "^9.4.0", - "esquery": "^1.4.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "globals": "^13.19.0", - "grapheme-splitter": "^1.0.4", + "graphemer": "^1.4.0", "ignore": "^5.2.0", - "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", - "js-sdsl": "^4.1.4", "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "regexpp": "^3.2.0", + "optionator": "^0.9.3", "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", "text-table": "^0.2.0" }, "bin": { @@ -3228,9 +3389,9 @@ } }, "node_modules/eslint-config-prettier": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.6.0.tgz", 
- "integrity": "sha512-bAF0eLpLVqP5oEVUFKpMA+NnRFICwn9X8B5jrR9FcqnYBuPbqWEjTEspPWMj5ye6czoSLDweCzSo3Ko7gGrZaA==", + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", "dev": true, "bin": { "eslint-config-prettier": "bin/cli.js" @@ -3284,14 +3445,14 @@ } }, "node_modules/eslint-import-resolver-node": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz", - "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==", + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", "dev": true, "dependencies": { "debug": "^3.2.7", - "is-core-module": "^2.11.0", - "resolve": "^1.22.1" + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" } }, "node_modules/eslint-import-resolver-node/node_modules/debug": { @@ -3304,9 +3465,9 @@ } }, "node_modules/eslint-module-utils": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz", - "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==", + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.11.0.tgz", + "integrity": "sha512-gbBE5Hitek/oG6MUVj6sFuzEjA/ClzNflVrLovHi/JgLdC7fiN5gLAY1WIPW1a0V5I999MnsrvVrCOGmmVqDBQ==", "dev": true, "dependencies": { "debug": "^3.2.7" @@ -3373,26 +3534,29 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.27.5", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz", - "integrity": 
"sha512-LmEt3GVofgiGuiE+ORpnvP+kAm3h6MLZJ4Q5HCyHADofsb4VzXFsRiWj3c0OFiV+3DWFh0qg3v9gcPlfc3zRow==", + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.30.0.tgz", + "integrity": "sha512-/mHNE9jINJfiD2EKkg1BKyPyUk4zdnT54YgbOgfjSakWT5oyX/qQLVNTkehyfpcMxZXMy1zyonZ2v7hZTX43Yw==", "dev": true, "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flat": "^1.3.1", - "array.prototype.flatmap": "^1.3.1", + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.8", + "array.prototype.findlastindex": "^1.2.5", + "array.prototype.flat": "^1.3.2", + "array.prototype.flatmap": "^1.3.2", "debug": "^3.2.7", "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.7", - "eslint-module-utils": "^2.7.4", - "has": "^1.0.3", - "is-core-module": "^2.11.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.9.0", + "hasown": "^2.0.2", + "is-core-module": "^2.15.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.values": "^1.1.6", - "resolve": "^1.22.1", - "semver": "^6.3.0", - "tsconfig-paths": "^3.14.1" + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.0", + "semver": "^6.3.1", + "tsconfig-paths": "^3.15.0" }, "engines": { "node": ">=4" @@ -3596,19 +3760,13 @@ } }, "node_modules/eslint-plugin-unused-imports": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-2.0.0.tgz", - "integrity": "sha512-3APeS/tQlTrFa167ThtP0Zm0vctjr4M44HMpeg1P4bK6wItarumq0Ma82xorMKdFsWpphQBlRPzw/pxiVELX1A==", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-4.1.4.tgz", + "integrity": "sha512-YptD6IzQjDardkl0POxnnRBhU1OEePMV0nd6siHaRBbd+lyh6NAhFEobiznKU7kTsSsDeSD62Pe7kAM1b7dAZQ==", "dev": true, - "dependencies": { - "eslint-rule-composer": "^0.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, 
"peerDependencies": { - "@typescript-eslint/eslint-plugin": "^5.0.0", - "eslint": "^8.0.0" + "@typescript-eslint/eslint-plugin": "^8.0.0-0 || ^7.0.0 || ^6.0.0 || ^5.0.0", + "eslint": "^9.0.0 || ^8.0.0" }, "peerDependenciesMeta": { "@typescript-eslint/eslint-plugin": { @@ -3616,15 +3774,6 @@ } } }, - "node_modules/eslint-rule-composer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/eslint-rule-composer/-/eslint-rule-composer-0.3.0.tgz", - "integrity": "sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==", - "dev": true, - "engines": { - "node": ">=4.0.0" - } - }, "node_modules/eslint-scope": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", @@ -3666,12 +3815,15 @@ } }, "node_modules/eslint-visitor-keys": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", - "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/eslint/node_modules/ansi-styles": { @@ -3730,9 +3882,9 @@ } }, "node_modules/eslint/node_modules/eslint-scope": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", - "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", 
"dev": true, "dependencies": { "esrecurse": "^4.3.0", @@ -3740,6 +3892,9 @@ }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/eslint/node_modules/estraverse": { @@ -3788,14 +3943,14 @@ } }, "node_modules/espree": { - "version": "9.4.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz", - "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==", + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", "dev": true, "dependencies": { - "acorn": "^8.8.0", + "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.3.0" + "eslint-visitor-keys": "^3.4.1" }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -3804,22 +3959,10 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/espree/node_modules/acorn": { - "version": "8.8.2", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz", - "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", "dev": true, "dependencies": { "estraverse": "^5.1.0" @@ -3938,9 +4081,9 @@ "dev": true }, "node_modules/fast-glob": { - "version": "3.2.12", - "resolved": 
"https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", "dev": true, "dependencies": { "@nodelib/fs.stat": "^2.0.2", @@ -4082,6 +4225,22 @@ "is-callable": "^1.1.3" } }, + "node_modules/foreground-child": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", + "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/format": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", @@ -4322,17 +4481,11 @@ "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", "dev": true }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true }, "node_modules/has-bigints": { "version": "1.0.2", @@ -4415,6 +4568,193 @@ "node": ">= 0.4" } }, + "node_modules/hast-util-from-dom": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/hast-util-from-dom/-/hast-util-from-dom-5.0.0.tgz", + "integrity": "sha512-d6235voAp/XR3Hh5uy7aGLbM3S4KamdW0WEgOaU1YoewnuYw4HXb5eRtv9g65m/RFGEfUY1Mw4UqCc5Y8L4Stg==", + "license": "ISC", + "dependencies": { + "@types/hast": "^3.0.0", + "hastscript": "^8.0.0", + "web-namespaces": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-dom/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-from-dom/node_modules/hast-util-parse-selector": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-dom/node_modules/hastscript": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-8.0.0.tgz", + "integrity": "sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-html": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/hast-util-from-html/-/hast-util-from-html-2.0.3.tgz", + "integrity": "sha512-CUSRHXyKjzHov8yKsQjGOElXy/3EKpyX56ELnkHH34vDVw1N1XSQ1ZcAvTyAPtGqLTuKP/uxM+aLkSPqF/EtMw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "devlop": "^1.1.0", + "hast-util-from-parse5": "^8.0.0", + "parse5": "^7.0.0", + "vfile": "^6.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-html-isomorphic": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hast-util-from-html-isomorphic/-/hast-util-from-html-isomorphic-2.0.0.tgz", + "integrity": "sha512-zJfpXq44yff2hmE0XmwEOzdWin5xwH+QIhMLOScpX91e/NSGPsAzNCvLQDIEPyO2TXi+lBmU6hjLIhV8MwP2kw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "hast-util-from-dom": "^5.0.0", + "hast-util-from-html": "^2.0.0", + "unist-util-remove-position": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-html-isomorphic/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-from-html/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-from-parse5": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.1.tgz", + "integrity": 
"sha512-Er/Iixbc7IEa7r/XLtuG52zoqn/b3Xng/w6aZQ0xGVxzhw5xUFxcRqdPzP6yFi/4HBYRaifaI5fQ1RH8n0ZeOQ==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "hastscript": "^8.0.0", + "property-information": "^6.0.0", + "vfile": "^6.0.0", + "vfile-location": "^5.0.0", + "web-namespaces": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-from-parse5/node_modules/hast-util-parse-selector": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/hastscript": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-8.0.0.tgz", + "integrity": "sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-is-element": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-3.0.0.tgz", + "integrity": "sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-is-element/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, "node_modules/hast-util-parse-selector": { "version": "2.2.5", "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz", @@ -4458,6 +4798,31 @@ "@types/unist": "*" } }, + "node_modules/hast-util-to-text": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/hast-util-to-text/-/hast-util-to-text-4.0.2.tgz", + "integrity": "sha512-KK6y/BN8lbaq654j7JgBydev7wuNMcID54lkRav1P0CaE1e47P72AWWPiGKXTJU271ooYzcvTAn/Zt0REnvc7A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "hast-util-is-element": "^3.0.0", + "unist-util-find-after": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-text/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, "node_modules/hast-util-whitespace": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", @@ -4583,9 +4948,9 @@ } }, 
"node_modules/i18next": { - "version": "23.14.0", - "resolved": "https://registry.npmjs.org/i18next/-/i18next-23.14.0.tgz", - "integrity": "sha512-Y5GL4OdA8IU2geRrt2+Uc1iIhsjICdHZzT9tNwQ3TVqdNzgxHToGCKf/TPRP80vTCAP6svg2WbbJL+Gx5MFQVA==", + "version": "23.15.1", + "resolved": "https://registry.npmjs.org/i18next/-/i18next-23.15.1.tgz", + "integrity": "sha512-wB4abZ3uK7EWodYisHl/asf8UYEhrI/vj/8aoSsrj/ZDxj4/UXPOa1KvFt1Fq5hkUHquNqwFlDprmjZ8iySgYA==", "funding": [ { "type": "individual", @@ -4623,9 +4988,9 @@ } }, "node_modules/immer": { - "version": "9.0.19", - "resolved": "https://registry.npmjs.org/immer/-/immer-9.0.19.tgz", - "integrity": "sha512-eY+Y0qcsB4TZKwgQzLaE/lqYMlKhv5J9dyd2RhhtGhNo2njPXDqU9XPfcNfa3MIDsdtZt5KlkIsirlo4dHsWdQ==", + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/immer/-/immer-10.1.1.tgz", + "integrity": "sha512-s2MPrmjovJcoMaHtx6K11Ra7oD05NT97w1IC5zpMkT6Atjr7H8LjaDd81iIxUYpMKSRRNMJE703M1Fhr/TctHw==", "funding": { "type": "opencollective", "url": "https://opencollective.com/immer" @@ -4803,9 +5168,9 @@ } }, "node_modules/is-core-module": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.0.tgz", - "integrity": "sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", "dev": true, "dependencies": { "hasown": "^2.0.2" @@ -5158,14 +5523,28 @@ "set-function-name": "^2.0.1" } }, - "node_modules/js-sdsl": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz", - "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==", + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": 
"https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", "dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/js-sdsl" + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jiti": { + "version": "1.21.6", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.6.tgz", + "integrity": "sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w==", + "dev": true, + "bin": { + "jiti": "bin/jiti.js" } }, "node_modules/js-tokens": { @@ -5240,6 +5619,31 @@ "node": ">=4.0" } }, + "node_modules/katex": { + "version": "0.16.11", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.11.tgz", + "integrity": "sha512-RQrI8rlHY92OLf3rho/Ts8i/XvjgguEjOkO1BEXcU3N8BqPpSzBNwV/G0Ukr+P/l3ivvJUE/Fa/CwbS6HesGNQ==", + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "license": "MIT", + "dependencies": { + "commander": "^8.3.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, "node_modules/levn": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", @@ -5254,9 +5658,9 @@ } }, "node_modules/lilconfig": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.6.tgz", - "integrity": "sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg==", + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", "dev": true, "engines": { "node": ">=10" @@ -5269,9 +5673,9 @@ "dev": true }, "node_modules/lint-staged": { - "version": "15.2.8", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-15.2.8.tgz", - "integrity": "sha512-PUWFf2zQzsd9EFU+kM1d7UP+AZDbKFKuj+9JNVTBkhUFhbg4MAt6WfyMMwBfM4lYqd4D2Jwac5iuTu9rVj4zCQ==", + "version": "15.2.10", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-15.2.10.tgz", + "integrity": "sha512-5dY5t743e1byO19P9I4b3x8HJwalIznL5E1FWYnU6OWw33KxNBSLAc6Cy7F2PsFEO8FKnLwjwm5hx7aMF0jzZg==", "dev": true, "dependencies": { "chalk": "~5.3.0", @@ -5280,7 +5684,7 @@ "execa": "~8.0.1", "lilconfig": "~3.1.2", "listr2": "~8.2.4", - "micromatch": "~4.0.7", + "micromatch": "~4.0.8", "pidtree": "~0.6.0", "string-argv": "~0.3.2", "yaml": "~2.5.0" @@ -5319,18 +5723,6 @@ "url": "https://github.com/sponsors/antonk52" } }, - "node_modules/lint-staged/node_modules/yaml": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.0.tgz", - "integrity": "sha512-2wWLbGbYDiSqqIKoPjar3MPgB94ErzCtrNE1FdqGuaO0pi2JGjmE8aW8TDZwzU7vuxcGRdL/4gPQwQ7hD5AMSw==", - "dev": true, - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14" - } - }, "node_modules/listr2": { "version": "8.2.4", "resolved": "https://registry.npmjs.org/listr2/-/listr2-8.2.4.tgz", @@ -5668,6 +6060,34 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/mdast-util-math": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-math/-/mdast-util-math-3.0.0.tgz", + "integrity": "sha512-Tl9GBNeG/AhJnQM221bJR2HPvLOSnLE/T9cJI9tlc6zwQk2nPk/4f0cHkOdEixQPC/j8UtKDdITswvLAy1OZ1w==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "longest-streak": "^3.0.0", + 
"mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.1.0", + "unist-util-remove-position": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-math/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, "node_modules/mdast-util-mdx-expression": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.0.tgz", @@ -6100,6 +6520,25 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/micromark-extension-math": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-math/-/micromark-extension-math-3.1.0.tgz", + "integrity": "sha512-lvEqd+fHjATVs+2v/8kg9i5Q0AP2k85H0WUOwpIVvUML8BapsMvh1XAogmQjOCsLpoKRCVQqEkQBB3NhVBcsOg==", + "license": "MIT", + "dependencies": { + "@types/katex": "^0.16.0", + "devlop": "^1.0.0", + "katex": "^0.16.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/micromark-factory-destination": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.0.tgz", @@ -6504,19 +6943,39 @@ } }, "node_modules/minimist": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", - "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==", + "version": "1.2.8", + "resolved": 
"https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, "node_modules/nanoid": { "version": "3.3.7", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", @@ -6702,6 +7161,20 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/object.groupby": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/object.values": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", @@ -6744,9 +7217,9 @@ } }, "node_modules/optionator": { - "version": "0.9.1", - "resolved": 
"https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, "dependencies": { "deep-is": "^0.1.3", @@ -6754,7 +7227,7 @@ "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", - "word-wrap": "^1.2.3" + "word-wrap": "^1.2.5" }, "engines": { "node": ">= 0.8.0" @@ -6790,6 +7263,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/package-json-from-dist": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz", + "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==", + "dev": true + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -6846,6 +7325,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "license": "MIT", + "dependencies": { + "entities": "^4.4.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -6879,6 +7370,28 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", 
+ "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true + }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -6889,9 +7402,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true }, "node_modules/picomatch": { @@ -6927,6 +7440,15 @@ "node": ">=0.10.0" } }, + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, "node_modules/possible-typed-array-names": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", @@ -6937,9 +7459,9 @@ } }, "node_modules/postcss": { - "version": "8.4.41", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.41.tgz", - "integrity": 
"sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==", + "version": "8.4.47", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", + "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", "dev": true, "funding": [ { @@ -6957,17 +7479,17 @@ ], "dependencies": { "nanoid": "^3.3.7", - "picocolors": "^1.0.1", - "source-map-js": "^1.2.0" + "picocolors": "^1.1.0", + "source-map-js": "^1.2.1" }, "engines": { "node": "^10 || ^12 || >=14" } }, "node_modules/postcss-import": { - "version": "14.1.0", - "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-14.1.0.tgz", - "integrity": "sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw==", + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", "dev": true, "dependencies": { "postcss-value-parser": "^4.0.0", @@ -6975,16 +7497,16 @@ "resolve": "^1.1.7" }, "engines": { - "node": ">=10.0.0" + "node": ">=14.0.0" }, "peerDependencies": { "postcss": "^8.0.0" } }, "node_modules/postcss-js": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.0.tgz", - "integrity": "sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz", + "integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==", "dev": true, "dependencies": { "camelcase-css": "^2.0.1" @@ -6997,24 +7519,30 @@ "url": "https://opencollective.com/postcss/" }, "peerDependencies": { - "postcss": "^8.3.3" + "postcss": "^8.4.21" } }, "node_modules/postcss-load-config": { - "version": "3.1.4", - "resolved": 
"https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.4.tgz", - "integrity": "sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.2.tgz", + "integrity": "sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==", "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], "dependencies": { - "lilconfig": "^2.0.5", - "yaml": "^1.10.2" + "lilconfig": "^3.0.0", + "yaml": "^2.3.4" }, "engines": { - "node": ">= 10" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" + "node": ">= 14" }, "peerDependencies": { "postcss": ">=8.0.9", @@ -7029,29 +7557,47 @@ } } }, - "node_modules/postcss-nested": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.0.0.tgz", - "integrity": "sha512-0DkamqrPcmkBDsLn+vQDIrtkSbNkv5AD/M322ySo9kqFkCIYklym2xEmWkwo+Y3/qZo34tzEPNUw4y7yMCdv5w==", + "node_modules/postcss-load-config/node_modules/lilconfig": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.2.tgz", + "integrity": "sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==", "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + 
"url": "https://github.com/sponsors/ai" + } + ], "dependencies": { - "postcss-selector-parser": "^6.0.10" + "postcss-selector-parser": "^6.1.1" }, "engines": { "node": ">=12.0" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, "peerDependencies": { "postcss": "^8.2.14" } }, "node_modules/postcss-selector-parser": { - "version": "6.0.11", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.11.tgz", - "integrity": "sha512-zbARubNdogI9j7WY4nQJBiNqQf3sLS3wCP4WfOidu+p28LofJqDH1tcXypGrcmMHhDk2t9wGhCsYe/+szLTy1g==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", "dev": true, "dependencies": { "cssesc": "^3.0.0", @@ -7104,32 +7650,34 @@ } }, "node_modules/prettier-plugin-tailwindcss": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/prettier-plugin-tailwindcss/-/prettier-plugin-tailwindcss-0.2.2.tgz", - "integrity": "sha512-5RjUbWRe305pUpc48MosoIp6uxZvZxrM6GyOgsbGLTce+ehePKNm7ziW2dLG2air9aXbGuXlHVSQQw4Lbosq3w==", + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/prettier-plugin-tailwindcss/-/prettier-plugin-tailwindcss-0.6.8.tgz", + "integrity": "sha512-dGu3kdm7SXPkiW4nzeWKCl3uoImdd5CTZEJGxyypEPL37Wj0HT2pLqjrvSei1nTeuQfO4PUfjeW5cTUNRLZ4sA==", "dev": true, "engines": { - "node": ">=12.17.0" + "node": ">=14.21.3" }, "peerDependencies": { - "@prettier/plugin-php": "*", + "@ianvs/prettier-plugin-sort-imports": "*", "@prettier/plugin-pug": "*", "@shopify/prettier-plugin-liquid": "*", - "@shufo/prettier-plugin-blade": "*", "@trivago/prettier-plugin-sort-imports": "*", - "prettier": ">=2.2.0", + "@zackad/prettier-plugin-twig-melody": "*", + "prettier": "^3.0", "prettier-plugin-astro": "*", "prettier-plugin-css-order": "*", "prettier-plugin-import-sort": "*", 
"prettier-plugin-jsdoc": "*", + "prettier-plugin-marko": "*", + "prettier-plugin-multiline-arrays": "*", "prettier-plugin-organize-attributes": "*", "prettier-plugin-organize-imports": "*", + "prettier-plugin-sort-imports": "*", "prettier-plugin-style-order": "*", - "prettier-plugin-svelte": "*", - "prettier-plugin-twig-melody": "*" + "prettier-plugin-svelte": "*" }, "peerDependenciesMeta": { - "@prettier/plugin-php": { + "@ianvs/prettier-plugin-sort-imports": { "optional": true }, "@prettier/plugin-pug": { @@ -7138,10 +7686,10 @@ "@shopify/prettier-plugin-liquid": { "optional": true }, - "@shufo/prettier-plugin-blade": { + "@trivago/prettier-plugin-sort-imports": { "optional": true }, - "@trivago/prettier-plugin-sort-imports": { + "@zackad/prettier-plugin-twig-melody": { "optional": true }, "prettier-plugin-astro": { @@ -7156,20 +7704,26 @@ "prettier-plugin-jsdoc": { "optional": true }, + "prettier-plugin-marko": { + "optional": true + }, + "prettier-plugin-multiline-arrays": { + "optional": true + }, "prettier-plugin-organize-attributes": { "optional": true }, "prettier-plugin-organize-imports": { "optional": true }, + "prettier-plugin-sort-imports": { + "optional": true + }, "prettier-plugin-style-order": { "optional": true }, "prettier-plugin-svelte": { "optional": true - }, - "prettier-plugin-twig-melody": { - "optional": true } } }, @@ -7202,9 +7756,9 @@ } }, "node_modules/punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": true, "engines": { "node": ">=6" @@ -7230,18 +7784,6 @@ } ] }, - "node_modules/quick-lru": { - "version": "5.1.1", - "resolved": 
"https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/react": { "version": "18.3.1", "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", @@ -7253,6 +7795,15 @@ "node": ">=0.10.0" } }, + "node_modules/react-chartjs-2": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/react-chartjs-2/-/react-chartjs-2-5.2.0.tgz", + "integrity": "sha512-98iN5aguJyVSxp5U3CblRLH67J8gkfyGNbiK3c+l1QI/G4irHMPQw44aEPmjVag+YKTyQ260NcF82GTQ3bdscA==", + "peerDependencies": { + "chart.js": "^4.1.1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, "node_modules/react-copy-to-clipboard": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/react-copy-to-clipboard/-/react-copy-to-clipboard-5.1.0.tgz", @@ -7294,11 +7845,11 @@ } }, "node_modules/react-i18next": { - "version": "15.0.1", - "resolved": "https://registry.npmjs.org/react-i18next/-/react-i18next-15.0.1.tgz", - "integrity": "sha512-NwxLqNM6CLbeGA9xPsjits0EnXdKgCRSS6cgkgOdNcPXqL+1fYNl8fBg1wmnnHvFy812Bt4IWTPE9zjoPmFj3w==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/react-i18next/-/react-i18next-15.0.2.tgz", + "integrity": "sha512-z0W3/RES9Idv3MmJUcf0mDNeeMOUXe+xoL0kPfQPbDoZHmni/XsIoq5zgT2MCFUiau283GuBUK578uD/mkAbLQ==", "dependencies": { - "@babel/runtime": "^7.24.8", + "@babel/runtime": "^7.25.0", "html-parse-stringify": "^3.0.1" }, "peerDependencies": { @@ -7353,9 +7904,9 @@ } }, "node_modules/react-redux": { - "version": "8.0.5", - "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-8.0.5.tgz", - "integrity": "sha512-Q2f6fCKxPFpkXt1qNRZdEDLlScsDWyrgSj0mliK59qU6W5gvBiKkdMEG2lJzhd1rCctf0hb6EtePPLZ2e0m1uw==", + "version": "8.1.3", + "resolved": 
"https://registry.npmjs.org/react-redux/-/react-redux-8.1.3.tgz", + "integrity": "sha512-n0ZrutD7DaX/j9VscF+uTALI3oUPa/pO4Z3soOBIjuRn/FzVu6aehhysxZCLi6y7duMf52WNZGMl7CtuK5EnRw==", "dependencies": { "@babel/runtime": "^7.12.1", "@types/hoist-non-react-statics": "^3.3.1", @@ -7370,7 +7921,7 @@ "react": "^16.8 || ^17.0 || ^18.0", "react-dom": "^16.8 || ^17.0 || ^18.0", "react-native": ">=0.59", - "redux": "^4" + "redux": "^4 || ^5.0.0-beta.0" }, "peerDependenciesMeta": { "@types/react": { @@ -7471,19 +8022,16 @@ } }, "node_modules/redux": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/redux/-/redux-4.2.1.tgz", - "integrity": "sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w==", - "dependencies": { - "@babel/runtime": "^7.9.2" - } + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/redux/-/redux-5.0.1.tgz", + "integrity": "sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w==" }, "node_modules/redux-thunk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/redux-thunk/-/redux-thunk-2.4.2.tgz", - "integrity": "sha512-+P3TjtnP0k/FEjcBL5FZpoovtvrTNT/UXd4/sluaSyrURlSlhLSzEdfsTBW7WsKB6yPvgd7q/iZPICFjW4o57Q==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/redux-thunk/-/redux-thunk-3.1.0.tgz", + "integrity": "sha512-NW2r5T6ksUKXCabzhL9z+h206HQw/NJkcLm1GPImRQ8IzfXwRGqjVhKJGauHirT0DAuyy6hjdnMZaRoAcy0Klw==", "peerDependencies": { - "redux": "^4" + "redux": "^5.0.0" } }, "node_modules/reflect.getprototypeof": { @@ -7565,6 +8113,34 @@ "url": "https://github.com/sponsors/mysticatea" } }, + "node_modules/rehype-katex": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/rehype-katex/-/rehype-katex-7.0.1.tgz", + "integrity": "sha512-OiM2wrZ/wuhKkigASodFoo8wimG3H12LWQaH8qSPVJn9apWKFSH3YOCtbKpBorTVw/eI7cuT21XBbvwEswbIOA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/katex": "^0.16.0", + 
"hast-util-from-html-isomorphic": "^2.0.0", + "hast-util-to-text": "^4.0.0", + "katex": "^0.16.0", + "unist-util-visit-parents": "^6.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-katex/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, "node_modules/remark-gfm": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.0.tgz", @@ -7582,6 +8158,22 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/remark-math": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/remark-math/-/remark-math-6.0.0.tgz", + "integrity": "sha512-MMqgnP74Igy+S3WwnhQ7kqGlEerTETXMvJhrUzDikVZ2/uogJCb+WHUg97hK9/jcfc0dkD73s3LN8zU49cTEtA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-math": "^3.0.0", + "micromark-extension-math": "^3.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/remark-parse": { "version": "11.0.0", "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", @@ -7636,17 +8228,17 @@ } }, "node_modules/reselect": { - "version": "4.1.7", - "resolved": "https://registry.npmjs.org/reselect/-/reselect-4.1.7.tgz", - "integrity": "sha512-Zu1xbUt3/OPwsXL46hvOOoQrap2azE7ZQbokq61BQfiXvhewsKDwhMeZjTX9sX0nvw1t/U5Audyn1I9P/m9z0A==" + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/reselect/-/reselect-5.1.1.tgz", + "integrity": "sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w==" }, "node_modules/resolve": { - "version": "1.22.1", - "resolved": 
"https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", - "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", "dev": true, "dependencies": { - "is-core-module": "^2.9.0", + "is-core-module": "^2.13.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, @@ -7729,9 +8321,9 @@ } }, "node_modules/rollup": { - "version": "4.20.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.20.0.tgz", - "integrity": "sha512-6rbWBChcnSGzIlXeIdNIZTopKYad8ZG8ajhl78lGRLsI2rX8IkaotQhVas2Ma+GPxJav19wrSzvRvuiv0YKzWw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz", + "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==", "dev": true, "dependencies": { "@types/estree": "1.0.5" @@ -7744,22 +8336,22 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.20.0", - "@rollup/rollup-android-arm64": "4.20.0", - "@rollup/rollup-darwin-arm64": "4.20.0", - "@rollup/rollup-darwin-x64": "4.20.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.20.0", - "@rollup/rollup-linux-arm-musleabihf": "4.20.0", - "@rollup/rollup-linux-arm64-gnu": "4.20.0", - "@rollup/rollup-linux-arm64-musl": "4.20.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.20.0", - "@rollup/rollup-linux-riscv64-gnu": "4.20.0", - "@rollup/rollup-linux-s390x-gnu": "4.20.0", - "@rollup/rollup-linux-x64-gnu": "4.20.0", - "@rollup/rollup-linux-x64-musl": "4.20.0", - "@rollup/rollup-win32-arm64-msvc": "4.20.0", - "@rollup/rollup-win32-ia32-msvc": "4.20.0", - "@rollup/rollup-win32-x64-msvc": "4.20.0", + "@rollup/rollup-android-arm-eabi": "4.22.4", + "@rollup/rollup-android-arm64": "4.22.4", + 
"@rollup/rollup-darwin-arm64": "4.22.4", + "@rollup/rollup-darwin-x64": "4.22.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.22.4", + "@rollup/rollup-linux-arm-musleabihf": "4.22.4", + "@rollup/rollup-linux-arm64-gnu": "4.22.4", + "@rollup/rollup-linux-arm64-musl": "4.22.4", + "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4", + "@rollup/rollup-linux-riscv64-gnu": "4.22.4", + "@rollup/rollup-linux-s390x-gnu": "4.22.4", + "@rollup/rollup-linux-x64-gnu": "4.22.4", + "@rollup/rollup-linux-x64-musl": "4.22.4", + "@rollup/rollup-win32-arm64-msvc": "4.22.4", + "@rollup/rollup-win32-ia32-msvc": "4.22.4", + "@rollup/rollup-win32-x64-msvc": "4.22.4", "fsevents": "~2.3.2" } }, @@ -7975,9 +8567,9 @@ "dev": true }, "node_modules/source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -8018,6 +8610,36 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/string-width/node_modules/ansi-regex": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", @@ -8164,6 +8786,19 @@ "node": ">=8" } }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", @@ -8205,6 +8840,95 @@ "inline-style-parser": "0.2.3" } }, + "node_modules/sucrase": { + "version": "3.35.0", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.0.tgz", + "integrity": "sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "glob": "^10.3.10", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/sucrase/node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": 
"https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/sucrase/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/sucrase/node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/sucrase/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sucrase/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + 
"node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -8258,44 +8982,40 @@ "dev": true }, "node_modules/tailwindcss": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.2.4.tgz", - "integrity": "sha512-AhwtHCKMtR71JgeYDaswmZXhPcW9iuI9Sp2LvZPo9upDZ7231ZJ7eA9RaURbhpXGVlrjX4cFNlB4ieTetEb7hQ==", + "version": "3.4.11", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.11.tgz", + "integrity": "sha512-qhEuBcLemjSJk5ajccN9xJFtM/h0AVCPaA6C92jNP+M2J8kX+eMJHI7R2HFKUvvAsMpcfLILMCFYSeDwpMmlUg==", "dev": true, "dependencies": { + "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", "chokidar": "^3.5.3", - "color-name": "^1.1.4", - "detective": "^5.2.1", "didyoumean": "^1.2.2", "dlv": "^1.1.3", - "fast-glob": "^3.2.12", + "fast-glob": "^3.3.0", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", - "lilconfig": "^2.0.6", + "jiti": "^1.21.0", + "lilconfig": "^2.1.0", "micromatch": "^4.0.5", "normalize-path": "^3.0.0", "object-hash": "^3.0.0", "picocolors": "^1.0.0", - "postcss": "^8.4.18", - "postcss-import": "^14.1.0", - "postcss-js": "^4.0.0", - "postcss-load-config": "^3.1.4", - "postcss-nested": "6.0.0", - "postcss-selector-parser": "^6.0.10", - "postcss-value-parser": "^4.2.0", - "quick-lru": "^5.1.1", - "resolve": "^1.22.1" + "postcss": "^8.4.23", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.1", + "postcss-nested": "^6.0.1", + "postcss-selector-parser": "^6.0.11", + "resolve": "^1.22.2", + "sucrase": "^3.32.0" }, "bin": { "tailwind": "lib/cli.js", "tailwindcss": "lib/cli.js" }, "engines": { - "node": ">=12.13.0" - }, - "peerDependencies": { - "postcss": "^8.0.9" + "node": ">=14.0.0" } }, "node_modules/text-table": { @@ -8304,6 +9024,27 @@ "integrity": 
"sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", "dev": true }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, "node_modules/to-fast-properties": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", @@ -8348,14 +9089,20 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true + }, "node_modules/tsconfig-paths": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", - "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", "dev": true, "dependencies": { "@types/json5": "^0.0.29", - "json5": "^1.0.1", + "json5": "^1.0.2", "minimist": "^1.2.6", "strip-bom": "^3.0.0" } @@ -8491,16 +9238,16 @@ } }, "node_modules/typescript": { - "version": 
"4.9.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", - "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.2.tgz", + "integrity": "sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==", "dev": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" }, "engines": { - "node": ">=4.2.0" + "node": ">=14.17" } }, "node_modules/unbox-primitive": { @@ -8536,6 +9283,20 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/unist-util-find-after": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-find-after/-/unist-util-find-after-5.0.0.tgz", + "integrity": "sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/unist-util-is": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", @@ -8679,6 +9440,20 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/vfile-location": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.3.tgz", + "integrity": "sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/vfile-message": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", @@ -8693,14 +9468,14 @@ } }, "node_modules/vite": { - "version": "5.3.5", - 
"resolved": "https://registry.npmjs.org/vite/-/vite-5.3.5.tgz", - "integrity": "sha512-MdjglKR6AQXQb9JGiS7Rc2wC6uMjcm7Go/NHNO63EwiJXfuk9PgqiP/n5IDJCziMkfw9n4Ubp7lttNwz+8ZVKA==", + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.6.tgz", + "integrity": "sha512-IeL5f8OO5nylsgzd9tq4qD2QqI0k2CQLGrWD0rCN0EQJZpBK5vJAx0I+GDkMOXxQX/OfFHMuLIx6ddAxGX/k+Q==", "dev": true, "dependencies": { "esbuild": "^0.21.3", - "postcss": "^8.4.39", - "rollup": "^4.13.0" + "postcss": "^8.4.43", + "rollup": "^4.20.0" }, "bin": { "vite": "bin/vite.js" @@ -8719,6 +9494,7 @@ "less": "*", "lightningcss": "^1.21.0", "sass": "*", + "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.4.0" @@ -8736,6 +9512,9 @@ "sass": { "optional": true }, + "sass-embedded": { + "optional": true + }, "stylus": { "optional": true }, @@ -8770,6 +9549,16 @@ "node": ">=0.10.0" } }, + "node_modules/web-namespaces": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", + "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -8865,9 +9654,9 @@ } }, "node_modules/word-wrap": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", - "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -8890,6 +9679,80 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/wrap-ansi-cjs": 
{ + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/wrap-ansi/node_modules/ansi-regex": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", @@ -8950,12 +9813,15 @@ "dev": true }, "node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.1.tgz", + "integrity": "sha512-bLQOjaX/ADgQ20isPJRvF0iRUHIxVhYvr53Of7wGcWlO2jvtUlH5m87DsmulFVxRpNLOnI4tB6p/oh8D7kpn9Q==", "dev": true, + "bin": { + "yaml": "bin.mjs" + }, "engines": { - "node": ">= 6" + "node": ">= 14" } }, "node_modules/yocto-queue": { diff --git a/frontend/package.json b/frontend/package.json index e45fbd36..83d531d6 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -19,47 +19,50 @@ ] }, "dependencies": { - "@reduxjs/toolkit": "^1.9.2", - "@vercel/analytics": "^0.1.10", - "i18next": "^23.14.0", + "@reduxjs/toolkit": "^2.2.7", + "chart.js": "^4.4.4", + "i18next": "^23.15.1", "i18next-browser-languagedetector": "^8.0.0", "prop-types": "^15.8.1", "react": "^18.2.0", + "react-chartjs-2": "^5.2.0", "react-copy-to-clipboard": "^5.1.0", "react-dom": "^18.3.1", "react-dropzone": "^14.2.3", - "react-i18next": "^15.0.1", + "react-i18next": "^15.0.2", "react-markdown": "^9.0.1", "react-redux": "^8.0.5", "react-router-dom": "^6.8.1", "react-syntax-highlighter": "^15.5.0", - "remark-gfm": "^4.0.0" + "rehype-katex": 
"^7.0.1", + "remark-gfm": "^4.0.0", + "remark-math": "^6.0.0" }, "devDependencies": { "@types/react": "^18.0.27", "@types/react-dom": "^18.3.0", - "@types/react-syntax-highlighter": "^15.5.6", + "@types/react-syntax-highlighter": "^15.5.13", "@typescript-eslint/eslint-plugin": "^5.51.0", "@typescript-eslint/parser": "^5.62.0", "@vitejs/plugin-react": "^4.3.1", "autoprefixer": "^10.4.13", - "eslint": "^8.33.0", - "eslint-config-prettier": "^8.6.0", + "eslint": "^8.57.1", + "eslint-config-prettier": "^9.1.0", "eslint-config-standard-with-typescript": "^34.0.0", - "eslint-plugin-import": "^2.27.5", + "eslint-plugin-import": "^2.30.0", "eslint-plugin-n": "^15.7.0", "eslint-plugin-prettier": "^5.2.1", "eslint-plugin-promise": "^6.6.0", "eslint-plugin-react": "^7.35.0", - "eslint-plugin-unused-imports": "^2.0.0", + "eslint-plugin-unused-imports": "^4.1.4", "husky": "^8.0.0", - "lint-staged": "^15.2.8", + "lint-staged": "^15.2.10", "postcss": "^8.4.41", "prettier": "^3.3.3", - "prettier-plugin-tailwindcss": "^0.2.2", - "tailwindcss": "^3.2.4", - "typescript": "^4.9.5", - "vite": "^5.3.5", + "prettier-plugin-tailwindcss": "^0.6.8", + "tailwindcss": "^3.4.11", + "typescript": "^5.6.2", + "vite": "^5.4.6", "vite-plugin-svgr": "^4.2.0" } } diff --git a/frontend/public/fonts/IBMPlexMono-Medium.ttf b/frontend/public/fonts/IBMPlexMono-Medium.ttf new file mode 100644 index 00000000..39f178db Binary files /dev/null and b/frontend/public/fonts/IBMPlexMono-Medium.ttf differ diff --git a/frontend/signal-desktop-keyring.gpg b/frontend/signal-desktop-keyring.gpg new file mode 100644 index 00000000..b5e68a04 Binary files /dev/null and b/frontend/signal-desktop-keyring.gpg differ diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 63f4ac62..ba0a4bd7 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -3,7 +3,6 @@ import Navigation from './Navigation'; import Conversation from './conversation/Conversation'; import About from './About'; import PageNotFound from 
'./PageNotFound'; -import { inject } from '@vercel/analytics'; import { useMediaQuery } from './hooks'; import { useState } from 'react'; import Setting from './settings'; @@ -11,16 +10,16 @@ import './locale/i18n'; import { Outlet } from 'react-router-dom'; import { SharedConversation } from './conversation/SharedConversation'; import { useDarkTheme } from './hooks'; -inject(); function MainLayout() { const { isMobile } = useMediaQuery(); const [navOpen, setNavOpen] = useState(!isMobile); + return ( -
+
; + } return ( - <> +
}> } /> @@ -45,6 +47,6 @@ export default function App() { } /> } /> - +
); } diff --git a/frontend/src/Hero.tsx b/frontend/src/Hero.tsx index 8a3d17c8..644848dc 100644 --- a/frontend/src/Hero.tsx +++ b/frontend/src/Hero.tsx @@ -19,7 +19,7 @@ export default function Hero({ }>; return (
@@ -37,7 +37,7 @@ export default function Hero({ + + {conversationId && }
); diff --git a/frontend/src/api/endpoints.ts b/frontend/src/api/endpoints.ts index af2fb920..84674049 100644 --- a/frontend/src/api/endpoints.ts +++ b/frontend/src/api/endpoints.ts @@ -10,8 +10,13 @@ const endpoints = { DELETE_PROMPT: '/api/delete_prompt', UPDATE_PROMPT: '/api/update_prompt', SINGLE_PROMPT: (id: string) => `/api/get_single_prompt?id=${id}`, - DELETE_PATH: (docPath: string) => `/api/delete_old?path=${docPath}`, + DELETE_PATH: (docPath: string) => `/api/delete_old?source_id=${docPath}`, TASK_STATUS: (task_id: string) => `/api/task_status?task_id=${task_id}`, + MESSAGE_ANALYTICS: '/api/get_message_analytics', + TOKEN_ANALYTICS: '/api/get_token_analytics', + FEEDBACK_ANALYTICS: '/api/get_feedback_analytics', + LOGS: `/api/get_user_logs`, + MANAGE_SYNC: '/api/manage_sync', }, CONVERSATION: { ANSWER: '/api/answer', diff --git a/frontend/src/api/services/userService.ts b/frontend/src/api/services/userService.ts index 193fe6ad..c5bbba7d 100644 --- a/frontend/src/api/services/userService.ts +++ b/frontend/src/api/services/userService.ts @@ -23,6 +23,16 @@ const userService = { apiClient.get(endpoints.USER.DELETE_PATH(docPath)), getTaskStatus: (task_id: string): Promise => apiClient.get(endpoints.USER.TASK_STATUS(task_id)), + getMessageAnalytics: (data: any): Promise => + apiClient.post(endpoints.USER.MESSAGE_ANALYTICS, data), + getTokenAnalytics: (data: any): Promise => + apiClient.post(endpoints.USER.TOKEN_ANALYTICS, data), + getFeedbackAnalytics: (data: any): Promise => + apiClient.post(endpoints.USER.FEEDBACK_ANALYTICS, data), + getLogs: (data: any): Promise => + apiClient.post(endpoints.USER.LOGS, data), + manageSync: (data: any): Promise => + apiClient.post(endpoints.USER.MANAGE_SYNC, data), }; export default userService; diff --git a/frontend/src/assets/chevron-right.svg b/frontend/src/assets/chevron-right.svg new file mode 100644 index 00000000..1463b6f7 --- /dev/null +++ b/frontend/src/assets/chevron-right.svg @@ -0,0 +1,3 @@ + + + diff --git 
a/frontend/src/assets/documentation-dark.svg b/frontend/src/assets/documentation-dark.svg index 78440206..5cbde1b1 100644 --- a/frontend/src/assets/documentation-dark.svg +++ b/frontend/src/assets/documentation-dark.svg @@ -1,3 +1,4 @@ - - + + + diff --git a/frontend/src/assets/documentation.svg b/frontend/src/assets/documentation.svg index f9f7c596..955d392f 100644 --- a/frontend/src/assets/documentation.svg +++ b/frontend/src/assets/documentation.svg @@ -1,3 +1,4 @@ - - + + + diff --git a/frontend/src/assets/envelope-dark.svg b/frontend/src/assets/envelope-dark.svg new file mode 100644 index 00000000..a61bec4f --- /dev/null +++ b/frontend/src/assets/envelope-dark.svg @@ -0,0 +1,3 @@ + + + diff --git a/frontend/src/assets/envelope.svg b/frontend/src/assets/envelope.svg new file mode 100644 index 00000000..a4c25032 --- /dev/null +++ b/frontend/src/assets/envelope.svg @@ -0,0 +1,3 @@ + + + diff --git a/frontend/src/assets/file_upload.svg b/frontend/src/assets/file_upload.svg new file mode 100644 index 00000000..f48d8d81 --- /dev/null +++ b/frontend/src/assets/file_upload.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/src/assets/sync.svg b/frontend/src/assets/sync.svg new file mode 100644 index 00000000..003dec43 --- /dev/null +++ b/frontend/src/assets/sync.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/assets/website_collect.svg b/frontend/src/assets/website_collect.svg new file mode 100644 index 00000000..b7aa60cf --- /dev/null +++ b/frontend/src/assets/website_collect.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/frontend/src/components/CopyButton.tsx b/frontend/src/components/CopyButton.tsx index e28fdcaf..e13f9133 100644 --- a/frontend/src/components/CopyButton.tsx +++ b/frontend/src/components/CopyButton.tsx @@ -1,16 +1,24 @@ -import { useState } from 'react'; -import Copy from './../assets/copy.svg?react'; -import CheckMark from './../assets/checkmark.svg?react'; import copy 
from 'copy-to-clipboard'; +import { useState } from 'react'; -export default function CoppyButton({ text }: { text: string }) { +import CheckMark from '../assets/checkmark.svg?react'; +import Copy from '../assets/copy.svg?react'; + +export default function CoppyButton({ + text, + colorLight, + colorDark, +}: { + text: string; + colorLight?: string; + colorDark?: string; +}) { const [copied, setCopied] = useState(false); const [isCopyHovered, setIsCopyHovered] = useState(false); const handleCopyClick = (text: string) => { copy(text); setCopied(true); - // Reset copied to false after a few seconds setTimeout(() => { setCopied(false); }, 3000); @@ -20,8 +28,8 @@ export default function CoppyButton({ text }: { text: string }) {
{copied ? ( diff --git a/frontend/src/components/Dropdown.tsx b/frontend/src/components/Dropdown.tsx index adf17889..07f33650 100644 --- a/frontend/src/components/Dropdown.tsx +++ b/frontend/src/components/Dropdown.tsx @@ -16,6 +16,7 @@ function Dropdown({ showDelete, onDelete, placeholder, + contentSize = 'text-base', }: { options: | string[] @@ -26,6 +27,7 @@ function Dropdown({ | string | { label: string; value: string } | { value: number; description: string } + | { name: string; id: string; type: string } | null; onSelect: | ((value: string) => void) @@ -41,6 +43,7 @@ function Dropdown({ showDelete?: boolean; onDelete?: (value: string) => void; placeholder?: string; + contentSize?: string; }) { const dropdownRef = React.useRef(null); const [isOpen, setIsOpen] = React.useState(false); @@ -79,14 +82,14 @@ function Dropdown({ }`} > {typeof selectedValue === 'string' ? ( - + {selectedValue} ) : ( {selectedValue && 'label' in selectedValue ? selectedValue.label @@ -123,7 +126,7 @@ function Dropdown({ onSelect(option); setIsOpen(false); }} - className="ml-5 flex-1 overflow-hidden overflow-ellipsis whitespace-nowrap py-3 dark:text-light-gray" + className={`ml-5 flex-1 overflow-hidden overflow-ellipsis whitespace-nowrap py-3 dark:text-light-gray ${contentSize}`} > {typeof option === 'string' ? 
option diff --git a/frontend/src/components/DropdownMenu.tsx b/frontend/src/components/DropdownMenu.tsx new file mode 100644 index 00000000..787d3b84 --- /dev/null +++ b/frontend/src/components/DropdownMenu.tsx @@ -0,0 +1,88 @@ +import React from 'react'; + +type DropdownMenuProps = { + name: string; + options: { label: string; value: string }[]; + onSelect: (value: string) => void; + defaultValue?: string; + icon?: string; +}; + +export default function DropdownMenu({ + name, + options, + onSelect, + defaultValue = 'none', + icon, +}: DropdownMenuProps) { + const dropdownRef = React.useRef(null); + const [isOpen, setIsOpen] = React.useState(false); + const [selectedOption, setSelectedOption] = React.useState( + options.find((option) => option.value === defaultValue) || options[0], + ); + + const handleToggle = () => { + setIsOpen(!isOpen); + }; + const handleClickOutside = (event: MouseEvent) => { + if ( + dropdownRef.current && + !dropdownRef.current.contains(event.target as Node) + ) { + setIsOpen(false); + } + }; + const handleClickOption = (optionId: number) => { + setIsOpen(false); + setSelectedOption(options[optionId]); + onSelect(options[optionId].value); + }; + + React.useEffect(() => { + document.addEventListener('mousedown', handleClickOutside); + return () => { + document.removeEventListener('mousedown', handleClickOutside); + }; + }, []); + return ( +
+ +
+
+ {options.map((option, idx) => ( + + ))} +
+
+
+ ); +} diff --git a/frontend/src/components/Help.tsx b/frontend/src/components/Help.tsx new file mode 100644 index 00000000..0112a805 --- /dev/null +++ b/frontend/src/components/Help.tsx @@ -0,0 +1,80 @@ +import { useState, useRef, useEffect } from 'react'; +import Info from '../assets/info.svg'; +import PageIcon from '../assets/documentation.svg'; +import EmailIcon from '../assets/envelope.svg'; +import { useTranslation } from 'react-i18next'; +const Help = () => { + const [isOpen, setIsOpen] = useState(false); + const dropdownRef = useRef(null); + const buttonRef = useRef(null); + const { t } = useTranslation(); + + const toggleDropdown = () => { + setIsOpen((prev) => !prev); + }; + + const handleClickOutside = (event: MouseEvent) => { + if ( + dropdownRef.current && + !dropdownRef.current.contains(event.target as Node) && + buttonRef.current && + !buttonRef.current.contains(event.target as Node) + ) { + setIsOpen(false); + } + }; + + useEffect(() => { + document.addEventListener('mousedown', handleClickOutside); + return () => { + document.removeEventListener('mousedown', handleClickOutside); + }; + }, []); + + return ( +
+ + {isOpen && ( + + )} +
+ ); +}; + +export default Help; diff --git a/frontend/src/components/Input.tsx b/frontend/src/components/Input.tsx index 56ca1d52..17e60190 100644 --- a/frontend/src/components/Input.tsx +++ b/frontend/src/components/Input.tsx @@ -10,6 +10,7 @@ const Input = ({ maxLength, className, colorVariant = 'silver', + borderVariant = 'thick', children, onChange, onPaste, @@ -20,10 +21,13 @@ const Input = ({ jet: 'border-jet', gray: 'border-gray-5000 dark:text-silver', }; - + const borderStyles = { + thin: 'border', + thick: 'border-2', + }; return ( ) => ( diff --git a/frontend/src/components/SettingsBar.tsx b/frontend/src/components/SettingsBar.tsx new file mode 100644 index 00000000..f617c6e8 --- /dev/null +++ b/frontend/src/components/SettingsBar.tsx @@ -0,0 +1,96 @@ +import React, { useCallback, useRef, useState } from 'react'; +import ArrowLeft from '../assets/arrow-left.svg'; +import ArrowRight from '../assets/arrow-right.svg'; +import { useTranslation } from 'react-i18next'; + +type HiddenGradientType = 'left' | 'right' | undefined; + +const useTabs = () => { + const { t } = useTranslation(); + const tabs = [ + t('settings.general.label'), + t('settings.documents.label'), + t('settings.apiKeys.label'), + t('settings.analytics.label'), + t('settings.logs.label'), + ]; + return tabs; +}; + +interface SettingsBarProps { + setActiveTab: React.Dispatch>; + activeTab: string; +} + +const SettingsBar = ({ setActiveTab, activeTab }: SettingsBarProps) => { + const [hiddenGradient, setHiddenGradient] = + useState('left'); + const containerRef = useRef(null); + const tabs = useTabs(); + const scrollTabs = useCallback( + (direction: number) => { + if (containerRef.current) { + const container = containerRef.current; + container.scrollLeft += direction * 100; // Adjust the scroll amount as needed + if (container.scrollLeft === 0) { + setHiddenGradient('left'); + } else if ( + container.scrollLeft + container.offsetWidth === + container.scrollWidth + ) { + 
setHiddenGradient('right'); + } else { + setHiddenGradient(undefined); + } + } + }, + [containerRef.current], + ); + return ( +
+
+
+ +
+ +
+
+ {tabs.map((tab, index) => ( + + ))} +
+
+ +
+
+ ); +}; + +export default SettingsBar; diff --git a/frontend/src/components/ShareButton.tsx b/frontend/src/components/ShareButton.tsx new file mode 100644 index 00000000..8d4a93de --- /dev/null +++ b/frontend/src/components/ShareButton.tsx @@ -0,0 +1,36 @@ +import { useState } from 'react'; +import ShareIcon from '../assets/share.svg'; +import { ShareConversationModal } from '../modals/ShareConversationModal'; + +type ShareButtonProps = { + conversationId: string; +}; + +export default function ShareButton({ conversationId }: ShareButtonProps) { + const [isShareModalOpen, setShareModalState] = useState(false); + return ( + <> + + {isShareModalOpen && ( + { + setShareModalState(false); + }} + conversationId={conversationId} + /> + )} + + ); +} diff --git a/frontend/src/components/SkeletonLoader.tsx b/frontend/src/components/SkeletonLoader.tsx new file mode 100644 index 00000000..e9a136e4 --- /dev/null +++ b/frontend/src/components/SkeletonLoader.tsx @@ -0,0 +1,138 @@ +import React, { useState, useEffect } from 'react'; + +interface SkeletonLoaderProps { + count?: number; + component?: 'default' | 'analysis' | 'chatbot' | 'logs'; +} + +const SkeletonLoader: React.FC = ({ + count = 1, + component = 'default', +}) => { + const [skeletonCount, setSkeletonCount] = useState(count); + + useEffect(() => { + const handleResize = () => { + const windowWidth = window.innerWidth; + + if (windowWidth > 1024) { + setSkeletonCount(1); + } else if (windowWidth > 768) { + setSkeletonCount(count); + } else { + setSkeletonCount(Math.min(count, 2)); + } + }; + + handleResize(); + window.addEventListener('resize', handleResize); + + return () => { + window.removeEventListener('resize', handleResize); + }; + }, [count]); + + return ( +
+ {component === 'default' ? ( + [...Array(skeletonCount)].map((_, idx) => ( +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ )) + ) : component === 'analysis' ? ( + [...Array(skeletonCount)].map((_, idx) => ( +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ )) + ) : component === 'chatbot' ? ( +
+
+
+
+
+
+
+
+ + {[...Array(skeletonCount * 6)].map((_, idx) => ( +
+
+
+
+
+
+ ))} +
+ ) : ( + [...Array(skeletonCount)].map((_, idx) => ( +
+
+
+
+
+
+
+
+
+ )) + )} +
+ ); +}; + +export default SkeletonLoader; diff --git a/frontend/src/components/SourceDropdown.tsx b/frontend/src/components/SourceDropdown.tsx index ce130b4d..f92173a0 100644 --- a/frontend/src/components/SourceDropdown.tsx +++ b/frontend/src/components/SourceDropdown.tsx @@ -1,7 +1,7 @@ import React from 'react'; import Trash from '../assets/trash.svg'; import Arrow2 from '../assets/dropdown-arrow.svg'; -import { Doc } from '../preferences/preferenceApi'; +import { Doc } from '../models/misc'; import { useDispatch } from 'react-redux'; import { useTranslation } from 'react-i18next'; type Props = { @@ -11,6 +11,7 @@ type Props = { isDocsListOpen: boolean; setIsDocsListOpen: React.Dispatch>; handleDeleteClick: any; + handlePostDocumentSelect: any; }; function SourceDropdown({ @@ -20,6 +21,7 @@ function SourceDropdown({ setIsDocsListOpen, isDocsListOpen, handleDeleteClick, + handlePostDocumentSelect, // Callback function fired after a document is selected }: Props) { const dispatch = useDispatch(); const { t } = useTranslation(); @@ -63,9 +65,6 @@ function SourceDropdown({

{selectedDocs?.name || 'None'}

-

- {selectedDocs?.version} -

{ dispatch(setSelectedDocs(option)); setIsDocsListOpen(false); + handlePostDocumentSelect(option); }} > - + { + handlePostDocumentSelect(null); + }} + > {t('none')}
diff --git a/frontend/src/components/types/index.ts b/frontend/src/components/types/index.ts index 0d6172ab..7af1c545 100644 --- a/frontend/src/components/types/index.ts +++ b/frontend/src/components/types/index.ts @@ -2,6 +2,7 @@ export type InputProps = { type: 'text' | 'number'; value: string | string[] | number; colorVariant?: 'silver' | 'jet' | 'gray'; + borderVariant?: 'thin' | 'thick'; isAutoFocused?: boolean; id?: string; maxLength?: number; diff --git a/frontend/src/conversation/Conversation.tsx b/frontend/src/conversation/Conversation.tsx index 01b8a2de..ae4ed7a9 100644 --- a/frontend/src/conversation/Conversation.tsx +++ b/frontend/src/conversation/Conversation.tsx @@ -5,13 +5,11 @@ import { useDispatch, useSelector } from 'react-redux'; import ArrowDown from '../assets/arrow-down.svg'; import Send from '../assets/send.svg'; import SendDark from '../assets/send_dark.svg'; -import ShareIcon from '../assets/share.svg'; import SpinnerDark from '../assets/spinner-dark.svg'; import Spinner from '../assets/spinner.svg'; import RetryIcon from '../components/RetryIcon'; import Hero from '../Hero'; -import { useDarkTheme } from '../hooks'; -import { ShareConversationModal } from '../modals/ShareConversationModal'; +import { useDarkTheme, useMediaQuery } from '../hooks'; import { selectConversationId } from '../preferences/preferenceSlice'; import { AppDispatch } from '../store'; import ConversationBubble from './ConversationBubble'; @@ -24,6 +22,7 @@ import { selectStatus, updateQuery, } from './conversationSlice'; +import ShareButton from '../components/ShareButton'; export default function Conversation() { const queries = useSelector(selectQueries); @@ -37,8 +36,8 @@ export default function Conversation() { const fetchStream = useRef(null); const [eventInterrupt, setEventInterrupt] = useState(false); const [lastQueryReturnedErr, setLastQueryReturnedErr] = useState(false); - const [isShareModalOpen, setShareModalState] = useState(false); const { t } = 
useTranslation(); + const { isMobile } = useMediaQuery(); const handleUserInterruption = () => { if (!eventInterrupt && status === 'loading') setEventInterrupt(true); @@ -54,10 +53,6 @@ export default function Conversation() { } }, []); - useEffect(() => { - fetchStream.current && fetchStream.current.abort(); - }, [conversationId]); - useEffect(() => { if (queries.length) { queries[queries.length - 1].error && setLastQueryReturnedErr(true); @@ -143,7 +138,7 @@ export default function Conversation() { } else if (query.error) { const retryBtn = ( ); responseView = ( @@ -182,46 +179,33 @@ export default function Conversation() { )}px`; } }; - + const checkScroll = () => { + const el = conversationRef.current; + if (!el) return; + const isBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 10; + setHasScrolledToLast(isBottom); + }; useEffect(() => { handleInput(); window.addEventListener('resize', handleInput); + conversationRef.current?.addEventListener('scroll', checkScroll); return () => { window.removeEventListener('resize', handleInput); + conversationRef.current?.removeEventListener('scroll', checkScroll); }; }, []); return ( -
+
{conversationId && ( - <> - - {isShareModalOpen && ( - { - setShareModalState(false); - }} - conversationId={conversationId} - /> - )} - +
+ +
)}
{queries.length > 0 && !hasScrolledToLast && (