Mirror of https://github.com/arc53/DocsGPT.git (synced 2026-01-20 14:00:55 +00:00)
proxy for api-tool
- Only for api_tool for now; if this solution works well, implementation for the other tools is required
- Need to check API key creation with the current proxies
- Show a connection string example at creation
- Locale needs updates for other languages
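For reference, a connection string in the form the requests library accepts typically looks like http://user:password@proxy.example.com:8080 (hypothetical values); the handler below applies the same string to both http and https traffic.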
@@ -11,4 +11,7 @@ class AgentCreator:
        agent_class = cls.agents.get(type.lower())
        if not agent_class:
            raise ValueError(f"No agent class found for type {type}")
        config = kwargs.pop('config', None)
        if isinstance(config, dict) and 'proxy_id' in config and 'proxy_id' not in kwargs:
            kwargs['proxy_id'] = config['proxy_id']
        return agent_class(*args, **kwargs)

@@ -17,6 +17,7 @@ class BaseAgent:
        api_key,
        user_api_key=None,
        decoded_token=None,
        proxy_id=None,
    ):
        self.endpoint = endpoint
        self.llm = LLMCreator.create_llm(
@@ -30,6 +31,7 @@ class BaseAgent:
        self.tools = []
        self.tool_config = {}
        self.tool_calls = []
        self.proxy_id = proxy_id

    def gen(self, *args, **kwargs) -> Generator[Dict, None, None]:
        raise NotImplementedError('Method "gen" must be implemented in the child class')
@@ -41,6 +43,11 @@ class BaseAgent:
        user_tools = user_tools_collection.find({"user": user, "status": True})
        user_tools = list(user_tools)
        tools_by_id = {str(tool["_id"]): tool for tool in user_tools}
        if hasattr(self, 'proxy_id') and self.proxy_id:
            for tool_id, tool in tools_by_id.items():
                if 'config' not in tool:
                    tool['config'] = {}
                tool['config']['proxy_id'] = self.proxy_id
        return tools_by_id

    def _build_tool_parameters(self, action):
@@ -126,6 +133,7 @@ class BaseAgent:
                "method": tool_data["config"]["actions"][action_name]["method"],
                "headers": headers,
                "query_params": query_params,
                "proxy_id": self.proxy_id,
            }
            if tool_data["name"] == "api_tool"
            else tool_data["config"]

@@ -18,9 +18,10 @@ class ClassicAgent(BaseAgent):
        prompt="",
        chat_history=None,
        decoded_token=None,
        proxy_id=None,
    ):
        super().__init__(
-           endpoint, llm_name, gpt_model, api_key, user_api_key, decoded_token
+           endpoint, llm_name, gpt_model, api_key, user_api_key, decoded_token, proxy_id
        )
        self.user = decoded_token.get("sub")
        self.prompt = prompt
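Taken together, these hunks thread proxy_id end to end: AgentCreator lifts it out of the agent config, BaseAgent stores it and stamps it onto each user tool's config in _get_user_tools, and _build_tool_parameters forwards it in the api_tool call parameters.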
@@ -23,15 +23,43 @@ class APITool(Tool):
        )

    def _make_api_call(self, url, method, headers, query_params, body):
        sanitized_headers = {}
        for key, value in headers.items():
            if isinstance(value, str):
                sanitized_value = value.encode('latin-1', errors='ignore').decode('latin-1')
                sanitized_headers[key] = sanitized_value
            else:
                sanitized_headers[key] = value

        if query_params:
            url = f"{url}?{requests.compat.urlencode(query_params)}"
        if isinstance(body, dict):
            body = json.dumps(body)
        response = None
        try:
            print(f"Making API call: {method} {url} with body: {body}")
            if body == "{}":
                body = None
            response = requests.request(method, url, headers=headers, data=body)

        proxy_id = self.config.get("proxy_id", None)
        request_kwargs = {
            'method': method,
            'url': url,
            'headers': sanitized_headers,
            'data': body
        }
        try:
            if proxy_id:
                from application.agents.tools.proxy_handler import apply_proxy_to_request
                response = apply_proxy_to_request(
                    requests.request,
                    proxy_id=proxy_id,
                    **request_kwargs
                )
            else:
                response = requests.request(**request_kwargs)
        except ImportError:
            response = requests.request(**request_kwargs)
        response.raise_for_status()
        content_type = response.headers.get(
            "Content-Type", "application/json"
application/agents/tools/proxy_handler.py (new file, 63 lines)
@@ -0,0 +1,63 @@
import logging
import requests
from typing import Dict, Optional
from bson.objectid import ObjectId

from application.core.mongo_db import MongoDB

logger = logging.getLogger(__name__)

# Get MongoDB connection
mongo = MongoDB.get_client()
db = mongo["docsgpt"]
proxies_collection = db["proxies"]


def get_proxy_config(proxy_id: str) -> Optional[Dict[str, str]]:
    """
    Retrieve proxy configuration from the database.

    Args:
        proxy_id: The ID of the proxy configuration

    Returns:
        A dictionary with proxy configuration or None if not found
    """
    if not proxy_id or proxy_id == "none":
        return None

    try:
        if ObjectId.is_valid(proxy_id):
            proxy_config = proxies_collection.find_one({"_id": ObjectId(proxy_id)})
            if proxy_config and "connection" in proxy_config:
                connection_str = proxy_config["connection"].strip()
                if connection_str:
                    # Format proxy for requests library
                    return {
                        "http": connection_str,
                        "https": connection_str
                    }
        return None
    except Exception as e:
        logger.error(f"Error retrieving proxy configuration: {e}")
        return None


def apply_proxy_to_request(request_func, proxy_id=None, **kwargs):
    """
    Apply proxy configuration to a requests function if available.
    This is a minimal wrapper that doesn't change the function signature.

    Args:
        request_func: The requests function to call (e.g., requests.get, requests.post)
        proxy_id: Optional proxy ID to use
        **kwargs: Arguments to pass to the request function

    Returns:
        The response from the request
    """
    if proxy_id:
        proxy_config = get_proxy_config(proxy_id)
        if proxy_config:
            kwargs['proxies'] = proxy_config
            logger.info(f"Using proxy for request")

    return request_func(**kwargs)
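For orientation, a minimal sketch of calling this wrapper directly, mirroring the call site in APITool above (the proxy id and URL are hypothetical):

# Hypothetical usage sketch; assumes a proxy document with this id exists in MongoDB.
import requests
from application.agents.tools.proxy_handler import apply_proxy_to_request

response = apply_proxy_to_request(
    requests.request,
    proxy_id="507f1f77bcf86cd799439011",  # hypothetical ObjectId string
    method="GET",
    url="https://api.example.com/data",   # hypothetical target
    headers={"Accept": "application/json"},
)
response.raise_for_status()
# With an invalid or "none" proxy_id, get_proxy_config returns None and the
# request falls through as a direct (unproxied) call.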
@@ -335,6 +335,9 @@ class Stream(Resource):
            "prompt_id": fields.String(
                required=False, default="default", description="Prompt ID"
            ),
            "proxy_id": fields.String(
                required=False, description="Proxy ID to use for API calls"
            ),
            "chunks": fields.Integer(
                required=False, default=2, description="Number of chunks"
            ),
@@ -376,6 +379,7 @@ class Stream(Resource):
        )
        conversation_id = data.get("conversation_id")
        prompt_id = data.get("prompt_id", "default")
        proxy_id = data.get("proxy_id", None)

        index = data.get("index", None)
        chunks = int(data.get("chunks", 2))
@@ -386,6 +390,7 @@ class Stream(Resource):
            data_key = get_data_from_api_key(data["api_key"])
            chunks = int(data_key.get("chunks", 2))
            prompt_id = data_key.get("prompt_id", "default")
            proxy_id = data_key.get("proxy_id", None)
            source = {"active_docs": data_key.get("source")}
            retriever_name = data_key.get("retriever", retriever_name)
            user_api_key = data["api_key"]
@@ -422,6 +427,7 @@ class Stream(Resource):
            api_key=settings.API_KEY,
            user_api_key=user_api_key,
            prompt=prompt,
            proxy_id=proxy_id,
            chat_history=history,
            decoded_token=decoded_token,
        )
@@ -496,6 +502,9 @@ class Answer(Resource):
            "prompt_id": fields.String(
                required=False, default="default", description="Prompt ID"
            ),
            "proxy_id": fields.String(
                required=False, description="Proxy ID to use for API calls"
            ),
            "chunks": fields.Integer(
                required=False, default=2, description="Number of chunks"
            ),
@@ -527,6 +536,7 @@ class Answer(Resource):
        )
        conversation_id = data.get("conversation_id")
        prompt_id = data.get("prompt_id", "default")
        proxy_id = data.get("proxy_id", None)
        chunks = int(data.get("chunks", 2))
        token_limit = data.get("token_limit", settings.DEFAULT_MAX_HISTORY)
        retriever_name = data.get("retriever", "classic")
@@ -535,6 +545,7 @@ class Answer(Resource):
            data_key = get_data_from_api_key(data["api_key"])
            chunks = int(data_key.get("chunks", 2))
            prompt_id = data_key.get("prompt_id", "default")
            proxy_id = data_key.get("proxy_id", None)
            source = {"active_docs": data_key.get("source")}
            retriever_name = data_key.get("retriever", retriever_name)
            user_api_key = data["api_key"]
@@ -569,6 +580,7 @@ class Answer(Resource):
            api_key=settings.API_KEY,
            user_api_key=user_api_key,
            prompt=prompt,
            proxy_id=proxy_id,
            chat_history=history,
            decoded_token=decoded_token,
        )
@@ -27,6 +27,7 @@ db = mongo["docsgpt"]
conversations_collection = db["conversations"]
sources_collection = db["sources"]
prompts_collection = db["prompts"]
proxies_collection = db["proxies"]
feedback_collection = db["feedback"]
api_key_collection = db["api_keys"]
token_usage_collection = db["token_usage"]
@@ -919,6 +920,183 @@ class UpdatePrompt(Resource):

        return make_response(jsonify({"success": True}), 200)


@user_ns.route("/api/get_proxies")
class GetProxies(Resource):
    @api.doc(description="Get all proxies for the user")
    def get(self):
        decoded_token = request.decoded_token
        if not decoded_token:
            return make_response(jsonify({"success": False}), 401)
        user = decoded_token.get("sub")
        try:
            proxies = proxies_collection.find({"user": user})
            list_proxies = [
                {"id": "none", "name": "None", "type": "public"},
            ]

            for proxy in proxies:
                list_proxies.append(
                    {
                        "id": str(proxy["_id"]),
                        "name": proxy["name"],
                        "type": "private",
                    }
                )
        except Exception as err:
            current_app.logger.error(f"Error retrieving proxies: {err}")
            return make_response(jsonify({"success": False}), 400)

        return make_response(jsonify(list_proxies), 200)


@user_ns.route("/api/get_single_proxy")
class GetSingleProxy(Resource):
    @api.doc(params={"id": "ID of the proxy"}, description="Get a single proxy by ID")
    def get(self):
        decoded_token = request.decoded_token
        if not decoded_token:
            return make_response(jsonify({"success": False}), 401)
        user = decoded_token.get("sub")
        proxy_id = request.args.get("id")
        if not proxy_id:
            return make_response(
                jsonify({"success": False, "message": "ID is required"}), 400
            )

        try:
            if proxy_id == "none":
                return make_response(jsonify({"connection": ""}), 200)

            proxy = proxies_collection.find_one(
                {"_id": ObjectId(proxy_id), "user": user}
            )
            if not proxy:
                return make_response(jsonify({"status": "not found"}), 404)
        except Exception as err:
            current_app.logger.error(f"Error retrieving proxy: {err}")
            return make_response(jsonify({"success": False}), 400)

        return make_response(jsonify({"connection": proxy["connection"]}), 200)


@user_ns.route("/api/create_proxy")
class CreateProxy(Resource):
    create_proxy_model = api.model(
        "CreateProxyModel",
        {
            "connection": fields.String(
                required=True, description="Connection string of the proxy"
            ),
            "name": fields.String(required=True, description="Name of the proxy"),
        },
    )

    @api.expect(create_proxy_model)
    @api.doc(description="Create a new proxy")
    def post(self):
        decoded_token = request.decoded_token
        if not decoded_token:
            return make_response(jsonify({"success": False}), 401)
        data = request.get_json()
        required_fields = ["connection", "name"]
        missing_fields = check_required_fields(data, required_fields)
        if missing_fields:
            return missing_fields

        user = decoded_token.get("sub")
        try:
            resp = proxies_collection.insert_one(
                {
                    "name": data["name"],
                    "connection": data["connection"],
                    "user": user,
                }
            )
            new_id = str(resp.inserted_id)
        except Exception as err:
            current_app.logger.error(f"Error creating proxy: {err}")
            return make_response(jsonify({"success": False}), 400)

        return make_response(jsonify({"id": new_id}), 200)


@user_ns.route("/api/delete_proxy")
class DeleteProxy(Resource):
    delete_proxy_model = api.model(
        "DeleteProxyModel",
        {"id": fields.String(required=True, description="Proxy ID to delete")},
    )

    @api.expect(delete_proxy_model)
    @api.doc(description="Delete a proxy by ID")
    def post(self):
        decoded_token = request.decoded_token
        if not decoded_token:
            return make_response(jsonify({"success": False}), 401)
        user = decoded_token.get("sub")
        data = request.get_json()
        required_fields = ["id"]
        missing_fields = check_required_fields(data, required_fields)
        if missing_fields:
            return missing_fields

        try:
            # Don't allow deleting the 'none' proxy
            if data["id"] == "none":
                return make_response(jsonify({"success": False, "message": "Cannot delete default proxy"}), 400)

            result = proxies_collection.delete_one({"_id": ObjectId(data["id"]), "user": user})
            if result.deleted_count == 0:
                return make_response(jsonify({"success": False, "message": "Proxy not found"}), 404)
        except Exception as err:
            current_app.logger.error(f"Error deleting proxy: {err}")
            return make_response(jsonify({"success": False}), 400)

        return make_response(jsonify({"success": True}), 200)


@user_ns.route("/api/update_proxy")
class UpdateProxy(Resource):
    update_proxy_model = api.model(
        "UpdateProxyModel",
        {
            "id": fields.String(required=True, description="Proxy ID to update"),
            "name": fields.String(required=True, description="New name of the proxy"),
            "connection": fields.String(
                required=True, description="New connection string of the proxy"
            ),
        },
    )

    @api.expect(update_proxy_model)
    @api.doc(description="Update an existing proxy")
    def post(self):
        decoded_token = request.decoded_token
        if not decoded_token:
            return make_response(jsonify({"success": False}), 401)
        user = decoded_token.get("sub")
        data = request.get_json()
        required_fields = ["id", "name", "connection"]
        missing_fields = check_required_fields(data, required_fields)
        if missing_fields:
            return missing_fields

        try:
            # Don't allow updating the 'none' proxy
            if data["id"] == "none":
                return make_response(jsonify({"success": False, "message": "Cannot update default proxy"}), 400)

            result = proxies_collection.update_one(
                {"_id": ObjectId(data["id"]), "user": user},
                {"$set": {"name": data["name"], "connection": data["connection"]}},
            )
            if result.modified_count == 0:
                return make_response(jsonify({"success": False, "message": "Proxy not found"}), 404)
        except Exception as err:
            current_app.logger.error(f"Error updating proxy: {err}")
            return make_response(jsonify({"success": False}), 400)

        return make_response(jsonify({"success": True}), 200)


@user_ns.route("/api/get_api_keys")
class GetApiKeys(Resource):
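A hypothetical client-side call against the new endpoint (host, port, and token are illustrative; the handlers above require only a valid decoded_token and the two JSON fields):

# Hypothetical sketch of exercising /api/create_proxy from Python.
import requests

resp = requests.post(
    "http://localhost:7091/api/create_proxy",  # assumed backend address
    json={
        "name": "office-proxy",                                   # hypothetical
        "connection": "http://user:pass@proxy.example.com:8080",  # hypothetical
    },
    headers={"Authorization": "Bearer <JWT>"},  # assumed auth scheme
)
print(resp.json())  # expected shape: {"id": "<new proxy ObjectId>"}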
@@ -14,6 +14,7 @@ esutils==1.0.1
Flask==3.1.0
faiss-cpu==1.9.0.post1
flask-restx==1.3.0
+gevent==24.11.1
google-genai==1.3.0
google-generativeai==0.8.3
gTTS==2.5.4
@@ -35,7 +36,7 @@ langchain-community==0.3.19
langchain-core==0.3.45
langchain-openai==0.3.8
langchain-text-splitters==0.3.6
-langsmith==0.3.19
+langsmith==0.3.15
lazy-object-proxy==1.10.0
lxml==5.3.1
markupsafe==3.0.2
@@ -65,7 +66,7 @@ py==1.11.0
pydantic==2.10.6
pydantic-core==2.27.2
pydantic-settings==2.7.1
-pymongo==4.11.3
+pymongo==4.10.1
pypdf==5.2.0
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
@@ -35,7 +35,7 @@ services:

  worker:
    build: ../application
-   command: celery -A application.app.celery worker -l INFO -B
+   command: celery -A application.app.celery worker -l INFO --pool=gevent -B
    environment:
      - API_KEY=$API_KEY
      - EMBEDDINGS_KEY=$API_KEY
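Presumably, the switch to the gevent worker pool pairs with the gevent==24.11.1 dependency added to requirements.txt above; a cooperative pool keeps slow proxied HTTP calls from tying up the Celery worker.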
docsgpt_scanner.py (new file, 636 lines)
@@ -0,0 +1,636 @@
import os
import argparse
from pathlib import Path
import datetime
import re
import json


class DocsGPTDocumentationGenerator:
    def __init__(self, root_dir, config=None):
        """
        Initialize the documentation generator with customized settings for DocsGPT.

        Args:
            root_dir (str): The path to the root directory of the project.
            config (dict, optional): Configuration overrides.
        """
        self.root_dir = os.path.abspath(root_dir)

        # Default configuration optimized for DocsGPT
        self.config = {
            # Directories to exclude completely
            'excluded_dirs': [
                '__pycache__', 'venv', '.venv', 'node_modules', '.git', '.idea', '.vscode',
                'dist', 'build', 'model', 'temp', 'indexes', 'model', 'postgres_data',
                'logs', 'out', 'vectors'
            ],

            # File patterns to exclude
            'excluded_patterns': [
                '*.pyc', '*.bin', '*.faiss', '*.pkl', '*.so', '*.o',
                '*.jpg', '*.jpeg', '*.png', '*.gif', '*.webp', '*.ico', '*.lock',
                '*.pdf'  # Exclude PDFs as they're just data in your project
            ],

            # Files that should always be included despite other exclusions
            'always_include': [
                'README.md', 'LICENSE', 'CONTRIBUTING.md', 'requirements.txt',
                'package.json', 'Dockerfile', 'docker-compose*.yaml', 'docker-compose*.yml'
            ],

            # Core code directories to focus on
            'core_dirs': [
                'application', 'frontend/src', 'extensions', 'docs'
            ],

            # File types to include content in documentation
            'content_file_types': [
                '.py', '.js', '.jsx', '.ts', '.tsx', '.md', '.txt',
                '.yaml', '.yml', '.json', '.dockerfile'
            ],

            # Max file size to include content (100KB)
            'max_content_size': 100 * 1024,

            # Max number of files to include full content for each directory
            'max_files_per_dir': 5,

            # Max characters to show in file previews
            'preview_length': 500
        }

        # Override config with provided values
        if config:
            self.config.update(config)

    def should_exclude(self, path):
        """
        Determine if a path should be excluded from the documentation.

        Args:
            path (str): The path to check.

        Returns:
            bool: True if the path should be excluded, False otherwise.
        """
        name = os.path.basename(path)

        # Always include certain files
        for pattern in self.config['always_include']:
            if self._match_pattern(name, pattern):
                return False

        # Check if it's in excluded directories
        parts = Path(path).relative_to(self.root_dir).parts
        for part in parts:
            for excluded_dir in self.config['excluded_dirs']:
                if self._match_pattern(part, excluded_dir):
                    return True

        # Check excluded patterns
        for pattern in self.config['excluded_patterns']:
            if self._match_pattern(name, pattern):
                return True

        # Exclude hidden files
        if name.startswith('.') and name not in ['.env.example']:
            return True

        return False
    def _match_pattern(self, name, pattern):
        """
        Check if a name matches a pattern with simple wildcard support.

        Args:
            name (str): The name to check.
            pattern (str): The pattern to match against.

        Returns:
            bool: True if the name matches the pattern, False otherwise.
        """
        if pattern.startswith('*.'):
            # Extension pattern
            return name.endswith(pattern[1:])
        elif '*' in pattern:
            # Convert to regex pattern
            regex_pattern = pattern.replace('.', r'\.').replace('*', '.*')
            return bool(re.match(f"^{regex_pattern}$", name))
        else:
            # Exact match
            return name == pattern

    def scan_directory(self):
        """
        Scan the directory and build a structure representation.

        Returns:
            dict: A dictionary representation of the project structure
        """
        structure = {}

        for root, dirs, files in os.walk(self.root_dir):
            # Skip excluded directories
            dirs[:] = [d for d in dirs if not self.should_exclude(os.path.join(root, d))]

            # Get the relative path from the root directory
            rel_path = os.path.relpath(root, self.root_dir)
            if rel_path == '.':
                rel_path = ''

            # Filter files based on excluded patterns
            filtered_files = [file for file in files if not self.should_exclude(os.path.join(root, file))]

            # Add directory and its files to the structure
            if rel_path:
                current_level = structure
                for part in rel_path.split(os.path.sep):
                    if part not in current_level:
                        current_level[part] = {}
                    current_level = current_level[part]
                current_level['__files__'] = filtered_files
            else:
                structure['__files__'] = filtered_files

        return structure

    def print_structure(self, structure=None, indent=0, is_last=True, prefix="", file=None):
        """
        Print the directory structure in a tree-like format.

        Args:
            structure (dict): Dictionary representing the directory structure.
            indent (int): Current indentation level.
            is_last (bool): Whether this is the last item in its parent.
            prefix (str): Prefix for the current line.
            file: File object to write to.
        """
        if structure is None:
            # First call, print the root directory name
            structure = self.scan_directory()
            root_name = os.path.basename(self.root_dir) + "/"
            line = root_name
            if file:
                file.write(f"{line}\n")
            print(line)

        # Print files
        if '__files__' in structure:
            files = structure.pop('__files__')
            for i, file_name in enumerate(sorted(files)):
                is_last_file = (i == len(files) - 1) and len(structure) == 0
                connector = "└── " if is_last_file else "├── "
                line = f"{prefix}{connector}{file_name}"
                if file:
                    file.write(f"{line}\n")
                print(line)

        # Process directories
        items = list(sorted(structure.items()))
        for i, (dir_name, contents) in enumerate(items):
            is_last_dir = i == len(items) - 1
            connector = "└── " if is_last_dir else "├── "
            line = f"{prefix}{connector}{dir_name}/"
            if file:
                file.write(f"{line}\n")
            print(line)

            new_prefix = prefix + ("    " if is_last_dir else "│   ")
            self.print_structure(contents, indent + 1, is_last_dir, new_prefix, file)
    def _get_file_language(self, file_path):
        """
        Determine the language of a file for code block formatting.

        Args:
            file_path (str): Path to the file.

        Returns:
            str: Language identifier for markdown code block.
        """
        ext = os.path.splitext(file_path)[1].lower()
        name = os.path.basename(file_path)

        # Map file extensions to language identifiers
        ext_to_lang = {
            '.py': 'python',
            '.js': 'javascript',
            '.jsx': 'jsx',
            '.ts': 'typescript',
            '.tsx': 'tsx',
            '.html': 'html',
            '.css': 'css',
            '.scss': 'scss',
            '.md': 'markdown',
            '.json': 'json',
            '.yaml': 'yaml',
            '.yml': 'yaml',
            '.sh': 'bash'
        }

        # Special files
        if name in ['Dockerfile']:
            return 'dockerfile'
        elif name in ['docker-compose.yml', 'docker-compose.yaml']:
            return 'yaml'
        elif name in ['Makefile']:
            return 'makefile'
        elif name in ['.gitignore', 'requirements.txt', '.env.example']:
            return ''  # Plain text

        return ext_to_lang.get(ext, '')

    def should_include_content(self, file_path):
        """
        Check if a file's content should be included in the documentation.

        Args:
            file_path (str): The path to the file.

        Returns:
            bool: True if content should be included, False otherwise.
        """
        # Check file size
        if os.path.getsize(file_path) > self.config['max_content_size']:
            return False

        # Check file extension
        ext = os.path.splitext(file_path)[1].lower()
        if ext not in self.config['content_file_types']:
            return False

        # Check if file is in a core directory
        rel_path = os.path.relpath(file_path, self.root_dir)
        for core_dir in self.config['core_dirs']:
            if rel_path.startswith(core_dir):
                return True

        # Include any README or key configuration files
        name = os.path.basename(file_path)
        if any(self._match_pattern(name, pattern) for pattern in self.config['always_include']):
            return True

        return False

    def count_files_by_type(self):
        """
        Count the number of files by type in the project.

        Returns:
            dict: A dictionary mapping file extensions to counts.
        """
        ext_counts = {}

        for root, _, files in os.walk(self.root_dir):
            if self.should_exclude(root):
                continue

            for file in files:
                file_path = os.path.join(root, file)
                if self.should_exclude(file_path):
                    continue

                ext = os.path.splitext(file)[1].lower()
                if not ext:
                    ext = '(no extension)'

                ext_counts[ext] = ext_counts.get(ext, 0) + 1

        return ext_counts
    def generate_code_snippets(self, structure=None, path="", snippets=None):
        """
        Generate representative code snippets from the project.

        Args:
            structure (dict): Project structure dictionary.
            path (str): Current path in the structure.
            snippets (dict): Dictionary to store snippets by directory.

        Returns:
            dict: Dictionary mapping directories to lists of file snippets.
        """
        if snippets is None:
            snippets = {}
            structure = self.scan_directory()

        # Process files in the current directory
        if '__files__' in structure:
            files = structure.pop('__files__')
            dir_snippets = []

            # Sort files to prioritize key files
            sorted_files = sorted(files, key=lambda f: f.startswith(('README', 'main', 'app')) and not f.startswith('.'), reverse=True)

            for file in sorted_files[:self.config['max_files_per_dir']]:
                file_path = os.path.join(self.root_dir, path, file)

                if self.should_include_content(file_path):
                    try:
                        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
                            content = f.read(self.config['preview_length'])
                            too_long = len(content) >= self.config['preview_length']

                            dir_snippets.append({
                                'name': file,
                                'path': os.path.join(path, file),
                                'language': self._get_file_language(file_path),
                                'content': content + ('...' if too_long else ''),
                                'full_path': file_path
                            })
                    except Exception as e:
                        # Skip files that can't be read
                        pass

            if dir_snippets:
                snippets[path or '.'] = dir_snippets

        # Process subdirectories
        for dir_name, contents in structure.items():
            self.generate_code_snippets(contents, os.path.join(path, dir_name), snippets)

        return snippets

    def find_important_files(self):
        """
        Find and return a list of important files in the project.

        Returns:
            list: List of important file paths.
        """
        important_files = []

        # Files to look for in any directory
        common_important_files = [
            'README.md', 'Dockerfile', 'docker-compose.yml', 'docker-compose.yaml',
            'requirements.txt', 'setup.py', 'package.json', 'app.py', 'main.py',
            'settings.py', 'config.py', 'wsgi.py', '.env.example'
        ]

        for root, _, files in os.walk(self.root_dir):
            if self.should_exclude(root):
                continue

            for file in files:
                if file in common_important_files:
                    important_files.append(os.path.join(root, file))

        return important_files
    def generate_markdown(self, output_file):
        """
        Generate a comprehensive markdown document for the DocsGPT project.

        Args:
            output_file (str): Path to the output markdown file.
        """
        structure = self.scan_directory()
        ext_counts = self.count_files_by_type()
        important_files = self.find_important_files()
        snippets = self.generate_code_snippets()

        with open(output_file, 'w', encoding='utf-8') as md_file:
            # Title and metadata
            md_file.write(f"# DocsGPT Project Documentation\n\n")
            md_file.write(f"Generated: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M')}\n\n")

            # Project Overview
            md_file.write("## 1. Project Overview\n\n")

            # Try to include README content
            readme_path = os.path.join(self.root_dir, "README.md")
            if os.path.exists(readme_path):
                try:
                    with open(readme_path, 'r', encoding='utf-8', errors='replace') as readme:
                        content = readme.read()
                        md_file.write("### From README.md\n\n")
                        md_file.write(f"{content}\n\n")
                except Exception:
                    md_file.write("*Error reading README.md*\n\n")

            # Project stats
            md_file.write("### Project Statistics\n\n")

            # Count directories and files
            total_dirs = 0
            total_files = 0
            for root, dirs, files in os.walk(self.root_dir):
                if not self.should_exclude(root):
                    total_dirs += sum(1 for d in dirs if not self.should_exclude(os.path.join(root, d)))
                    total_files += sum(1 for f in files if not self.should_exclude(os.path.join(root, f)))

            md_file.write(f"- **Total Directories:** {total_dirs}\n")
            md_file.write(f"- **Total Files:** {total_files}\n\n")

            md_file.write("#### File Types\n\n")
            for ext, count in sorted(ext_counts.items(), key=lambda x: x[1], reverse=True)[:15]:
                md_file.write(f"- **{ext}:** {count} files\n")
            md_file.write("\n")

            # Directory Structure
            md_file.write("## 2. Directory Structure\n\n")
            md_file.write("```\n")
            self.print_structure(file=md_file)
            md_file.write("```\n\n")

            # Key Components
            md_file.write("## 3. Key Components\n\n")

            # Application component
            md_file.write("### 3.1. Application Core\n\n")
            if 'application' in snippets:
                md_file.write("The application core contains the main backend logic for DocsGPT.\n\n")
                for snippet in snippets['application'][:3]:
                    md_file.write(f"#### {snippet['path']}\n\n")
                    md_file.write(f"```{snippet['language']}\n{snippet['content']}\n```\n\n")

            # Frontend component
            md_file.write("### 3.2. Frontend\n\n")
            frontend_snippets = [s for path, files in snippets.items()
                                 for s in files if path.startswith('frontend/src')]
            if frontend_snippets:
                md_file.write("The frontend is built with React and provides the user interface.\n\n")
                for snippet in frontend_snippets[:3]:
                    md_file.write(f"#### {snippet['path']}\n\n")
                    md_file.write(f"```{snippet['language']}\n{snippet['content']}\n```\n\n")

            # Extensions
            md_file.write("### 3.3. Extensions\n\n")
            extension_snippets = [s for path, files in snippets.items()
                                  for s in files if path.startswith('extensions')]
            if extension_snippets:
                md_file.write("DocsGPT includes various extensions for different platforms.\n\n")
                for snippet in extension_snippets[:3]:
                    md_file.write(f"#### {snippet['path']}\n\n")
                    md_file.write(f"```{snippet['language']}\n{snippet['content']}\n```\n\n")

            # Configuration Files
            md_file.write("## 4. Configuration Files\n\n")

            # Docker files
            md_file.write("### 4.1. Docker Configuration\n\n")
            docker_files = [f for f in important_files if os.path.basename(f) in
                            ['Dockerfile', 'docker-compose.yml', 'docker-compose.yaml']]

            for file_path in docker_files:
                try:
                    with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
                        content = f.read()
                        rel_path = os.path.relpath(file_path, self.root_dir)
                        md_file.write(f"#### {rel_path}\n\n")

                        lang = 'dockerfile' if os.path.basename(file_path) == 'Dockerfile' else 'yaml'
                        md_file.write(f"```{lang}\n{content}\n```\n\n")
                except Exception as e:
                    md_file.write(f"*Error reading {os.path.relpath(file_path, self.root_dir)}: {e}*\n\n")

            # Requirements and package files
            md_file.write("### 4.2. Dependencies\n\n")
            dep_files = [f for f in important_files if os.path.basename(f) in
                         ['requirements.txt', 'package.json']]

            for file_path in dep_files:
                try:
                    with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
                        content = f.read()
                        rel_path = os.path.relpath(file_path, self.root_dir)
                        md_file.write(f"#### {rel_path}\n\n")

                        lang = 'json' if file_path.endswith('.json') else ''
                        md_file.write(f"```{lang}\n{content}\n```\n\n")
                except Exception as e:
                    md_file.write(f"*Error reading {os.path.relpath(file_path, self.root_dir)}: {e}*\n\n")

            # Environment files
            env_files = [f for f in important_files if os.path.basename(f) == '.env.example']
            if env_files:
                md_file.write("### 4.3. Environment Configuration\n\n")

                for file_path in env_files:
                    try:
                        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
                            content = f.read()
                            rel_path = os.path.relpath(file_path, self.root_dir)
                            md_file.write(f"#### {rel_path}\n\n")
                            md_file.write(f"```\n{content}\n```\n\n")
                    except Exception as e:
                        md_file.write(f"*Error reading {os.path.relpath(file_path, self.root_dir)}: {e}*\n\n")

            # API Documentation (if we can find routes)
            md_file.write("## 5. API Documentation\n\n")
            api_files = []
            for root, _, files in os.walk(os.path.join(self.root_dir, 'application/api')):
                if self.should_exclude(root):
                    continue

                for file in files:
                    if file == 'routes.py':
                        api_files.append(os.path.join(root, file))

            if api_files:
                md_file.write("### API Routes\n\n")
                for file_path in api_files[:5]:  # Limit to 5 route files
                    try:
                        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
                            content = f.read()
                            rel_path = os.path.relpath(file_path, self.root_dir)
                            md_file.write(f"#### {rel_path}\n\n")
                            md_file.write(f"```python\n{content}\n```\n\n")
                    except Exception as e:
                        md_file.write(f"*Error reading {os.path.relpath(file_path, self.root_dir)}: {e}*\n\n")

            # Conclusion
            md_file.write("## 6. Additional Information\n\n")
            md_file.write("This documentation provides an overview of the DocsGPT project structure and key components. "
                          "For more detailed information, please refer to the official documentation and code comments.\n\n")

            md_file.write("### License\n\n")
            license_path = os.path.join(self.root_dir, "LICENSE")
            if os.path.exists(license_path):
                try:
                    with open(license_path, 'r', encoding='utf-8', errors='replace') as f:
                        content = f.read(500)  # Just read the beginning of the license
                        md_file.write(f"```\n{content}...\n```\n\n")
                except Exception:
                    md_file.write("*Error reading LICENSE file*\n\n")

            # Generation metadata
            md_file.write("---\n\n")
            md_file.write(f"*Documentation generated on {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}*\n")
            md_file.write(f"*Generator: DocsGPT Project Documentation Generator*\n")
def main():
    parser = argparse.ArgumentParser(description='DocsGPT Project Documentation Generator')

    parser.add_argument('--root', '-r', type=str, default='.',
                        help='Root directory of the project (default: current directory)')

    parser.add_argument('--output', '-o', type=str,
                        help='Output markdown file (default: project_name_docs.md in the root directory)')

    parser.add_argument('--exclude-dirs', '-e', type=str, nargs='+',
                        help='Additional directories to exclude')

    parser.add_argument('--exclude-files', '-ef', type=str, nargs='+',
                        help='Additional file patterns to exclude')

    parser.add_argument('--include-files', '-if', type=str, nargs='+',
                        help='Files to always include despite exclusions')

    parser.add_argument('--core-dirs', '-c', type=str, nargs='+',
                        help='Core directories to focus on for code snippets')

    parser.add_argument('--config-file', '-cf', type=str,
                        help='Path to JSON configuration file')

    parser.add_argument('--tree-only', action='store_true',
                        help='Only print the directory tree structure, do not generate documentation')

    args = parser.parse_args()

    # Get absolute path of the root directory
    root_dir = os.path.abspath(args.root)

    # Load configuration from file if provided
    config = None
    if args.config_file:
        try:
            with open(args.config_file, 'r') as f:
                config = json.load(f)
        except Exception as e:
            print(f"Error loading configuration file: {e}")
            return
    else:
        # Build configuration from command line arguments
        config = {}
        if args.exclude_dirs:
            config['excluded_dirs'] = args.exclude_dirs
        if args.exclude_files:
            config['excluded_patterns'] = args.exclude_files
        if args.include_files:
            config['always_include'] = args.include_files
        if args.core_dirs:
            config['core_dirs'] = args.core_dirs

    # Create the generator
    generator = DocsGPTDocumentationGenerator(root_dir=root_dir, config=config)

    if args.tree_only:
        # Just print the tree structure
        print(f"Directory structure for: {root_dir}\n")
        generator.print_structure()
    else:
        # Generate full documentation
        output_file = args.output
        if not output_file:
            project_name = os.path.basename(root_dir)
            output_file = os.path.join(root_dir, f"{project_name}_documentation.md")

        print(f"Generating documentation for {root_dir}...")
        generator.generate_markdown(output_file)
        print(f"Documentation saved to: {output_file}")


if __name__ == "__main__":
    main()
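A quick sketch of driving the generator from Python instead of the CLI (output path hypothetical):

# Hypothetical usage of the scanner's Python API.
from docsgpt_scanner import DocsGPTDocumentationGenerator

gen = DocsGPTDocumentationGenerator(root_dir=".", config={"max_files_per_dir": 3})
gen.print_structure()                     # tree view only, like --tree-only
gen.generate_markdown("DocsGPT_docs.md")  # full markdown report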
@@ -13,6 +13,11 @@ const endpoints = {
    DELETE_PROMPT: '/api/delete_prompt',
    UPDATE_PROMPT: '/api/update_prompt',
    SINGLE_PROMPT: (id: string) => `/api/get_single_prompt?id=${id}`,
    PROXIES: '/api/get_proxies',
    CREATE_PROXY: '/api/create_proxy',
    DELETE_PROXY: '/api/delete_proxy',
    UPDATE_PROXY: '/api/update_proxy',
    SINGLE_PROXY: (id: string) => `/api/get_single_proxy?id=${id}`,
    DELETE_PATH: (docPath: string) => `/api/delete_old?source_id=${docPath}`,
    TASK_STATUS: (task_id: string) => `/api/task_status?task_id=${task_id}`,
    MESSAGE_ANALYTICS: '/api/get_message_analytics',
@@ -27,6 +27,16 @@ const userService = {
  apiClient.post(endpoints.USER.UPDATE_PROMPT, data, token),
  getSinglePrompt: (id: string, token: string | null): Promise<any> =>
    apiClient.get(endpoints.USER.SINGLE_PROMPT(id), token),
  getProxies: (token: string | null): Promise<any> =>
    apiClient.get(endpoints.USER.PROXIES, token),
  createProxy: (data: any, token: string | null): Promise<any> =>
    apiClient.post(endpoints.USER.CREATE_PROXY, data, token),
  deleteProxy: (data: any, token: string | null): Promise<any> =>
    apiClient.post(endpoints.USER.DELETE_PROXY, data, token),
  updateProxy: (data: any, token: string | null): Promise<any> =>
    apiClient.post(endpoints.USER.UPDATE_PROXY, data, token),
  getSingleProxy: (id: string, token: string | null): Promise<any> =>
    apiClient.get(endpoints.USER.SINGLE_PROXY(id), token),
  deletePath: (docPath: string, token: string | null): Promise<any> =>
    apiClient.get(endpoints.USER.DELETE_PATH(docPath), token),
  getTaskStatus: (task_id: string, token: string | null): Promise<any> =>
@@ -11,6 +11,7 @@ export function handleFetchAnswer(
  history: Array<any> = [],
  conversationId: string | null,
  promptId: string | null,
  proxyId: string | null,
  chunks: string,
  token_limit: number,
): Promise<
@@ -44,6 +45,7 @@ export function handleFetchAnswer(
      history: JSON.stringify(history),
      conversation_id: conversationId,
      prompt_id: promptId,
      proxy_id: proxyId,
      chunks: chunks,
      token_limit: token_limit,
      isNoneDoc: selectedDocs === null,
@@ -82,6 +84,7 @@ export function handleFetchAnswerSteaming(
  history: Array<any> = [],
  conversationId: string | null,
  promptId: string | null,
  proxyId: string | null,
  chunks: string,
  token_limit: number,
  onEvent: (event: MessageEvent) => void,
@@ -99,6 +102,7 @@ export function handleFetchAnswerSteaming(
      history: JSON.stringify(history),
      conversation_id: conversationId,
      prompt_id: promptId,
      proxy_id: proxyId,
      chunks: chunks,
      token_limit: token_limit,
      isNoneDoc: selectedDocs === null,
@@ -43,6 +43,7 @@ export interface RetrievalPayload {
  history: string;
  conversation_id: string | null;
  prompt_id?: string | null;
  proxy_id?: string | null;
  chunks: string;
  token_limit: number;
  isNoneDoc: boolean;
@@ -47,6 +47,7 @@ export const fetchAnswer = createAsyncThunk<
        state.conversation.queries,
        state.conversation.conversationId,
        state.preference.prompt.id,
        state.preference.proxy?.id ?? null,
        state.preference.chunks,
        state.preference.token_limit,
        (event) => {
@@ -120,6 +121,7 @@ export const fetchAnswer = createAsyncThunk<
        state.conversation.queries,
        state.conversation.conversationId,
        state.preference.prompt.id,
        state.preference.proxy?.id ?? null,
        state.preference.chunks,
        state.preference.token_limit,
      );
@@ -41,6 +41,7 @@
    "selectLanguage": "Select Language",
    "chunks": "Chunks processed per query",
    "prompt": "Active Prompt",
    "proxy": "Active Proxy",
    "deleteAllLabel": "Delete All Conversations",
    "deleteAllBtn": "Delete All",
    "addNew": "Add New",
@@ -205,6 +206,14 @@
      "promptText": "Prompt Text",
      "save": "Save",
      "nameExists": "Name already exists"
    },
    "proxies": {
      "addProxy": "Add Proxy",
      "addDescription": "Add your custom proxy to query tools and save it to DocsGPT",
      "editProxy": "Edit Proxy",
      "proxyName": "Proxy Name",
      "proxyProtocol": "Proxy Protocol",
      "connectionString": "Connection String"
    }
  },
  "sharedConv": {
@@ -1,5 +1,4 @@
import React, { useState } from 'react';
-import { useDispatch } from 'react-redux';

import Input from '../components/Input';
import { ActiveState } from '../models/misc';
@@ -19,7 +18,13 @@ export default function JWTModal({
  if (modalState !== 'ACTIVE') return null;

  return (
-    <WrapperModal className="p-4" isPerformingTask={true} close={() => {}}>
+    <WrapperModal
+      className="p-4"
+      isPerformingTask={true}
+      close={() => {
+        /* Modal close handler */
+      }}
+    >
      <div className="mb-6">
        <span className="text-lg text-jet dark:text-bright-gray">
          Add JWT Token
frontend/src/preferences/ProxiesModal.tsx (new file, 279 lines)
@@ -0,0 +1,279 @@
import React from 'react';
import { useTranslation } from 'react-i18next';

import Input from '../components/Input';
import WrapperModal from '../modals/WrapperModal';
import { ActiveState } from '../models/misc';

function AddProxy({
  setModalState,
  handleAddProxy,
  newProxyName,
  setNewProxyName,
  newProxyConnection,
  setNewProxyConnection,
  disableSave,
}: {
  setModalState: (state: ActiveState) => void;
  handleAddProxy?: () => void;
  newProxyName: string;
  setNewProxyName: (name: string) => void;
  newProxyConnection: string;
  setNewProxyConnection: (content: string) => void;
  disableSave: boolean;
}) {
  const { t } = useTranslation();

  return (
    <div>
      <p className="mb-1 text-xl text-jet dark:text-bright-gray">
        {t('modals.proxies.addProxy')}
      </p>
      <p className="mb-7 text-xs text-[#747474] dark:text-[#7F7F82]">
        {t('modals.proxies.addDescription')}
      </p>
      <div>
        <Input
          placeholder={t('modals.proxies.proxyName')}
          type="text"
          className="mb-4"
          value={newProxyName}
          onChange={(e) => setNewProxyName(e.target.value)}
          labelBgClassName="bg-white dark:bg-[#26272E]"
          borderVariant="thin"
        />
        <Input
          placeholder={t('modals.proxies.proxyProtocol')}
          type="text"
          className="mb-4 opacity-70 cursor-not-allowed"
          value="HTTP/S"
          onChange={() => {
            /* Protocol field is read-only */
          }}
          labelBgClassName="bg-white dark:bg-[#26272E]"
          borderVariant="thin"
          disabled={true}
        />
        <Input
          placeholder={t('modals.proxies.connectionString')}
          type="text"
          className="mb-4"
          value={newProxyConnection}
          onChange={(e) => setNewProxyConnection(e.target.value)}
          labelBgClassName="bg-white dark:bg-[#26272E]"
          borderVariant="thin"
        />
      </div>
      <div className="mt-6 flex flex-row-reverse">
        <button
          onClick={handleAddProxy}
          className="rounded-3xl bg-purple-30 px-5 py-2 text-sm text-white transition-all hover:bg-violets-are-blue disabled:hover:bg-purple-30"
          disabled={disableSave}
          title={
            disableSave && newProxyName ? t('modals.prompts.nameExists') : ''
          }
        >
          {t('modals.prompts.save')}
        </button>
      </div>
    </div>
  );
}
function EditProxy({
  setModalState,
  handleEditProxy,
  editProxyName,
  setEditProxyName,
  editProxyConnection,
  setEditProxyConnection,
  currentProxyEdit,
  disableSave,
}: {
  setModalState: (state: ActiveState) => void;
  handleEditProxy?: (id: string, type: string) => void;
  editProxyName: string;
  setEditProxyName: (name: string) => void;
  editProxyConnection: string;
  setEditProxyConnection: (content: string) => void;
  currentProxyEdit: { name: string; id: string; type: string };
  disableSave: boolean;
}) {
  const { t } = useTranslation();

  return (
    <div>
      <div className="">
        <p className="mb-1 text-xl text-jet dark:text-bright-gray">
          {t('modals.proxies.editProxy')}
        </p>
        <p className="mb-7 text-xs text-[#747474] dark:text-[#7F7F82]">
          {t('modals.proxies.addDescription')}
        </p>
        <div>
          <Input
            placeholder={t('modals.proxies.proxyName')}
            type="text"
            className="mb-4"
            value={editProxyName}
            onChange={(e) => setEditProxyName(e.target.value)}
            labelBgClassName="bg-white dark:bg-charleston-green-2"
            borderVariant="thin"
          />
          <Input
            placeholder={t('modals.proxies.proxyProtocol')}
            type="text"
            className="mb-4 opacity-70 cursor-not-allowed"
            value="HTTP/S"
            onChange={() => {
              /* Protocol field is read-only */
            }}
            labelBgClassName="bg-white dark:bg-charleston-green-2"
            borderVariant="thin"
            disabled={true}
          />
          <Input
            placeholder={t('modals.proxies.connectionString')}
            type="text"
            className="mb-4"
            value={editProxyConnection}
            onChange={(e) => setEditProxyConnection(e.target.value)}
            labelBgClassName="bg-white dark:bg-charleston-green-2"
            borderVariant="thin"
          />
        </div>
        <div className="mt-6 flex flex-row-reverse gap-4">
          <button
            className={`rounded-3xl bg-purple-30 disabled:hover:bg-purple-30 hover:bg-violets-are-blue px-5 py-2 text-sm text-white transition-all ${
              currentProxyEdit.type === 'public'
                ? 'cursor-not-allowed opacity-50'
                : ''
            }`}
            onClick={() => {
              handleEditProxy &&
                handleEditProxy(currentProxyEdit.id, currentProxyEdit.type);
            }}
            disabled={currentProxyEdit.type === 'public' || disableSave}
            title={
              disableSave && editProxyName ? t('modals.prompts.nameExists') : ''
            }
          >
            {t('modals.prompts.save')}
          </button>
        </div>
      </div>
    </div>
  );
}
export default function ProxiesModal({
  existingProxies,
  modalState,
  setModalState,
  type,
  newProxyName,
  setNewProxyName,
  newProxyConnection,
  setNewProxyConnection,
  editProxyName,
  setEditProxyName,
  editProxyConnection,
  setEditProxyConnection,
  currentProxyEdit,
  handleAddProxy,
  handleEditProxy,
}: {
  existingProxies: { name: string; id: string; type: string }[];
  modalState: ActiveState;
  setModalState: (state: ActiveState) => void;
  type: 'ADD' | 'EDIT';
  newProxyName: string;
  setNewProxyName: (name: string) => void;
  newProxyConnection: string;
  setNewProxyConnection: (content: string) => void;
  editProxyName: string;
  setEditProxyName: (name: string) => void;
  editProxyConnection: string;
  setEditProxyConnection: (content: string) => void;
  currentProxyEdit: { id: string; name: string; type: string };
  handleAddProxy?: () => void;
  handleEditProxy?: (id: string, type: string) => void;
}) {
  const [disableSave, setDisableSave] = React.useState(true);
  const { t } = useTranslation();

  React.useEffect(() => {
    // Check if fields are filled to enable/disable save button
    if (type === 'ADD') {
      const nameExists = existingProxies.some(
        (proxy) => proxy.name.toLowerCase() === newProxyName.toLowerCase(),
      );
      setDisableSave(
        newProxyName === '' || newProxyConnection === '' || nameExists,
      );
    } else {
      const nameExists = existingProxies.some(
        (proxy) =>
          proxy.name.toLowerCase() === editProxyName.toLowerCase() &&
          proxy.id !== currentProxyEdit.id,
      );
      setDisableSave(
        editProxyName === '' || editProxyConnection === '' || nameExists,
      );
    }
  }, [
    newProxyName,
    newProxyConnection,
    editProxyName,
    editProxyConnection,
    type,
    existingProxies,
    currentProxyEdit,
  ]);

  let view;

  if (type === 'ADD') {
    view = (
      <AddProxy
        setModalState={setModalState}
        handleAddProxy={handleAddProxy}
        newProxyName={newProxyName}
        setNewProxyName={setNewProxyName}
        newProxyConnection={newProxyConnection}
        setNewProxyConnection={setNewProxyConnection}
        disableSave={disableSave}
      />
    );
  } else if (type === 'EDIT') {
    view = (
      <EditProxy
        setModalState={setModalState}
        handleEditProxy={handleEditProxy}
        editProxyName={editProxyName}
        setEditProxyName={setEditProxyName}
        editProxyConnection={editProxyConnection}
        setEditProxyConnection={setEditProxyConnection}
        currentProxyEdit={currentProxyEdit}
        disableSave={disableSave}
      />
    );
  } else {
    view = <></>;
  }

  return modalState === 'ACTIVE' ? (
    <WrapperModal
      close={() => {
        setModalState('INACTIVE');
        if (type === 'ADD') {
          setNewProxyName('');
          setNewProxyConnection('');
        }
      }}
      className="sm:w-[512px] mt-24"
    >
      {view}
    </WrapperModal>
  ) : null;
}
@@ -11,6 +11,7 @@ import { ActiveState, Doc } from '../models/misc';
export interface Preference {
  apiKey: string;
  prompt: { name: string; id: string; type: string };
  proxy: { name: string; id: string; type: string } | null;
  chunks: string;
  token_limit: number;
  selectedDocs: Doc | null;
@@ -27,6 +28,7 @@ export interface Preference {
const initialState: Preference = {
  apiKey: 'xxx',
  prompt: { name: 'default', id: 'default', type: 'public' },
  proxy: null,
  chunks: '2',
  token_limit: 2000,
  selectedDocs: {
@@ -73,6 +75,9 @@ export const prefSlice = createSlice({
    setPrompt: (state, action) => {
      state.prompt = action.payload;
    },
    setProxy: (state, action) => {
      state.proxy = action.payload;
    },
    setChunks: (state, action) => {
      state.chunks = action.payload;
    },
@@ -92,6 +97,7 @@ export const {
  setConversations,
  setToken,
  setPrompt,
  setProxy,
  setChunks,
  setTokenLimit,
  setModalStateDeleteConv,
@@ -126,6 +132,16 @@ prefListenerMiddleware.startListening({
  },
});

prefListenerMiddleware.startListening({
  matcher: isAnyOf(setProxy),
  effect: (action, listenerApi) => {
    localStorage.setItem(
      'DocsGPTProxy',
      JSON.stringify((listenerApi.getState() as RootState).preference.proxy),
    );
  },
});

prefListenerMiddleware.startListening({
  matcher: isAnyOf(setChunks),
  effect: (action, listenerApi) => {
@@ -165,6 +181,7 @@ export const selectConversationId = (state: RootState) =>
  state.conversation.conversationId;
export const selectToken = (state: RootState) => state.preference.token;
export const selectPrompt = (state: RootState) => state.preference.prompt;
export const selectProxy = (state: RootState) => state.preference.proxy;
export const selectChunks = (state: RootState) => state.preference.chunks;
export const selectTokenLimit = (state: RootState) =>
  state.preference.token_limit;
@@ -8,14 +8,17 @@ import { useDarkTheme } from '../hooks';
import {
  selectChunks,
  selectPrompt,
  selectProxy,
  selectToken,
  selectTokenLimit,
  setChunks,
  setModalStateDeleteConv,
  setPrompt,
  setProxy,
  setTokenLimit,
} from '../preferences/preferenceSlice';
import Prompts from './Prompts';
import Proxies from './Proxies';

export default function General() {
  const {
@@ -48,6 +51,9 @@ export default function General() {
  const [prompts, setPrompts] = React.useState<
    { name: string; id: string; type: string }[]
  >([]);
  const [proxies, setProxies] = React.useState<
    { name: string; id: string; type: string }[]
  >([]);
  const selectedChunks = useSelector(selectChunks);
  const selectedTokenLimit = useSelector(selectTokenLimit);
  const [isDarkTheme, toggleTheme] = useDarkTheme();
@@ -62,6 +68,44 @@ export default function General() {
      : languageOptions[0],
  );
  const selectedPrompt = useSelector(selectPrompt);
  const selectedProxy = useSelector(selectProxy);

  React.useEffect(() => {
    // Set the default proxy state first (only if no stored preference exists)
    const storedProxy = localStorage.getItem('DocsGPTProxy');
    if (!storedProxy) {
      const noneProxy = { name: 'None', id: 'none', type: 'public' };
      dispatch(setProxy(noneProxy));
    } else {
      try {
        const parsedProxy = JSON.parse(storedProxy);
        dispatch(setProxy(parsedProxy));
      } catch (e) {
        console.error('Error parsing stored proxy', e);
        // Fall back to None if parsing fails
        dispatch(setProxy({ name: 'None', id: 'none', type: 'public' }));
      }
    }
    // Fetch the available proxies
    const handleFetchProxies = async () => {
      try {
        const response = await userService.getProxies(token);
        if (!response.ok) {
          console.warn('Proxies API not implemented yet or failed to fetch');
          return;
        }
        const proxiesData = await response.json();
        if (proxiesData && Array.isArray(proxiesData)) {
          // Filter out 'none' since the component adds it separately
          const filteredProxies = proxiesData.filter((p) => p.id !== 'none');
          setProxies(filteredProxies);
        }
      } catch (error) {
        console.error('Error fetching proxies:', error);
      }
    };
    handleFetchProxies();
  }, [token, dispatch]);

  React.useEffect(() => {
    const handleFetchPrompts = async () => {
@@ -77,12 +121,13 @@ export default function General() {
      }
    };
    handleFetchPrompts();
  }, []);
  }, [token]);

  React.useEffect(() => {
    localStorage.setItem('docsgpt-locale', selectedLanguage?.value as string);
    changeLanguage(selectedLanguage?.value);
  }, [selectedLanguage, changeLanguage]);

  return (
    <div className="mt-12 flex flex-col gap-4">
      {' '}
@@ -171,6 +216,16 @@ export default function General() {
          setPrompts={setPrompts}
        />
      </div>
      <div className="flex flex-col gap-4">
        <Proxies
          proxies={proxies}
          selectedProxy={selectedProxy}
          onSelectProxy={(name, id, type) =>
            dispatch(setProxy({ name: name, id: id, type: type }))
          }
          setProxies={setProxies}
        />
      </div>
      <hr className="border-t w-[calc(min(665px,100%))] my-4 border-silver dark:border-silver/40" />
      <div className="flex flex-col gap-2">
        <button
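A note on the fetch effect above: it only checks response.ok and Array.isArray, so the backend contract it implies is a bare JSON array of { name, id, type } objects. A typed consumer sketched under that assumption (the fetchProxyList helper and the inferred payload type are illustrative; only the userService.getProxies call site comes from this diff):

// Assumed response shape for userService.getProxies(token), inferred from
// the filter on p.id and the component state type; illustrative only.
import userService from '../api/services/userService';

type ProxySummary = { name: string; id: string; type: string };

async function fetchProxyList(token: string): Promise<ProxySummary[]> {
  const response = await userService.getProxies(token);
  if (!response.ok) return []; // the proxies API may not be implemented yet
  const data: unknown = await response.json();
  return Array.isArray(data)
    ? (data as ProxySummary[]).filter((p) => p.id !== 'none')
    : [];
}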
frontend/src/settings/Proxies.tsx (new file, 316 lines)
@@ -0,0 +1,316 @@
import React from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';

import userService from '../api/services/userService';
import Dropdown from '../components/Dropdown';
import { ActiveState } from '../models/misc';
import ProxiesModal from '../preferences/ProxiesModal';
import { selectToken } from '../preferences/preferenceSlice';

export interface ProxyProps {
  proxies: { name: string; id: string; type: string }[];
  selectedProxy: {
    name: string;
    id: string;
    type: string;
  } | null;
  onSelectProxy: (name: string, id: string, type: string) => void;
  setProxies: React.Dispatch<
    React.SetStateAction<{ name: string; id: string; type: string }[]>
  >;
}

export default function Proxies({
  proxies,
  selectedProxy,
  onSelectProxy,
  setProxies,
}: ProxyProps) {
  const handleSelectProxy = ({
    name,
    id,
    type,
  }: {
    name: string;
    id: string;
    type: string;
  }) => {
    setEditProxyName(name);
    onSelectProxy(name, id, type);
  };
  const token = useSelector(selectToken);
  const [newProxyName, setNewProxyName] = React.useState('');
  const [newProxyConnection, setNewProxyConnection] = React.useState('');
  const [editProxyName, setEditProxyName] = React.useState('');
  const [editProxyConnection, setEditProxyConnection] = React.useState('');
  const [currentProxyEdit, setCurrentProxyEdit] = React.useState({
    id: '',
    name: '',
    type: '',
  });
  const [modalType, setModalType] = React.useState<'ADD' | 'EDIT'>('ADD');
  const [modalState, setModalState] = React.useState<ActiveState>('INACTIVE');
  const { t } = useTranslation();

  const handleAddProxy = async () => {
    try {
      const response = await userService.createProxy(
        {
          name: newProxyName,
          connection: newProxyConnection,
        },
        token,
      );
      if (!response.ok) {
        throw new Error('Failed to add proxy');
      }
      const newProxy = await response.json();
      const newProxyObject = {
        name: newProxyName,
        id: newProxy.id,
        type: 'private',
      };
      console.log(
        'Before selecting new proxy:',
        newProxyName,
        newProxy.id,
        'private',
      );
      if (setProxies) {
        const updatedProxies = [...proxies, newProxyObject];
        setProxies(updatedProxies);
        console.log('Updated proxies list:', updatedProxies);
      }
      setModalState('INACTIVE');
      onSelectProxy(newProxyName, newProxy.id, 'private');
      setNewProxyName('');
      setNewProxyConnection('');
    } catch (error) {
      console.error(error);
      // Fall back to just adding to the local state if the API doesn't exist yet
      const newId = `proxy_${Date.now()}`;
      if (setProxies) {
        // Store the connection string in localStorage for the local fallback
        localStorage.setItem(`proxy_connection_${newId}`, newProxyConnection);
        setProxies([
          ...proxies,
          { name: newProxyName, id: newId, type: 'private' },
        ]);
      }
      setModalState('INACTIVE');
      onSelectProxy(newProxyName, newId, 'private');
      setNewProxyName('');
      setNewProxyConnection('');
    }
  };

  const handleDeleteProxy = (id: string) => {
    // We don't delete the "none" proxy
    if (id === 'none') return;
    userService
      .deleteProxy({ id }, token)
      .then((response) => {
        if (response.ok) {
          // Remove from local state after successful deletion
          setProxies(proxies.filter((proxy) => proxy.id !== id));
          // Also remove any locally stored connection string
          localStorage.removeItem(`proxy_connection_${id}`);
          // If we deleted the currently selected proxy, switch to "None"
          if (selectedProxy && selectedProxy.id === id) {
            onSelectProxy('None', 'none', 'public');
          }
        } else {
          console.warn('Failed to delete proxy');
        }
      })
      .catch((error) => {
        console.error(error);
      });
  };

  const handleFetchProxyConnection = async (id: string) => {
    try {
      // We don't need to fetch a connection for the "none" proxy
      if (id === 'none') {
        setEditProxyConnection('');
        return;
      }
      // Check whether this is a locally stored proxy (the API fallback)
      const localConnection = localStorage.getItem(`proxy_connection_${id}`);
      if (localConnection) {
        setEditProxyConnection(localConnection);
        return;
      }
      // Otherwise proceed with the API call
      const response = await userService.getSingleProxy(id, token);
      if (!response.ok) {
        throw new Error('Failed to fetch proxy connection');
      }
      const proxyData = await response.json();
      setEditProxyConnection(proxyData.connection);
    } catch (error) {
      console.error(error);
      // Set an empty string instead of a placeholder
      setEditProxyConnection('');
    }
  };

  const handleSaveChanges = (id: string, type: string) => {
    userService
      .updateProxy(
        {
          id: id,
          name: editProxyName,
          connection: editProxyConnection,
        },
        token,
      )
      .then((response) => {
        if (!response.ok) {
          // If the API doesn't exist yet, just handle it locally:
          // store the connection string in localStorage
          console.warn('API not implemented yet');
          localStorage.setItem(`proxy_connection_${id}`, editProxyConnection);
        }
        if (setProxies) {
          const existingProxyIndex = proxies.findIndex(
            (proxy) => proxy.id === id,
          );
          if (existingProxyIndex === -1) {
            setProxies([
              ...proxies,
              { name: editProxyName, id: id, type: type },
            ]);
          } else {
            const updatedProxies = [...proxies];
            updatedProxies[existingProxyIndex] = {
              name: editProxyName,
              id: id,
              type: type,
            };
            setProxies(updatedProxies);
          }
        }
        setModalState('INACTIVE');
        onSelectProxy(editProxyName, id, type);
      })
      .catch((error) => {
        console.error(error);
        // Handle it locally if the API call fails:
        // store the connection string in localStorage
        localStorage.setItem(`proxy_connection_${id}`, editProxyConnection);
        if (setProxies) {
          const existingProxyIndex = proxies.findIndex(
            (proxy) => proxy.id === id,
          );
          if (existingProxyIndex !== -1) {
            const updatedProxies = [...proxies];
            updatedProxies[existingProxyIndex] = {
              name: editProxyName,
              id: id,
              type: type,
            };
            setProxies(updatedProxies);
          }
        }
        setModalState('INACTIVE');
        onSelectProxy(editProxyName, id, type);
      });
  };

  // Split proxies into 'None' and custom proxies
  const customProxies = proxies.filter(
    (p) => p.id !== 'none' && p.name !== 'None',
  );

  // Create the options array with None first
  const noneProxy = { name: 'None', id: 'none', type: 'public' };
  const allProxies = [noneProxy, ...customProxies];

  // Ensure a valid selectedProxy, or default to None
  const finalSelectedProxy =
    selectedProxy && selectedProxy.id !== 'from-url'
      ? selectedProxy
      : noneProxy;

  // Check whether the current proxy is the None proxy
  const isNoneSelected =
    !finalSelectedProxy ||
    finalSelectedProxy.id === 'none' ||
    finalSelectedProxy.name === 'None';

  return (
    <>
      <div>
        <div className="flex flex-col gap-4">
          <p className="font-medium dark:text-bright-gray">
            {t('settings.general.proxy')}
          </p>
          <div className="flex flex-row justify-start items-baseline gap-6">
            <Dropdown
              options={allProxies}
              selectedValue={finalSelectedProxy.name}
              placeholder="None"
              onSelect={handleSelectProxy}
              size="w-56"
              rounded="3xl"
              border="border"
              showEdit={!isNoneSelected}
              showDelete={!isNoneSelected}
              onEdit={({
                id,
                name,
                type,
              }: {
                id: string;
                name: string;
                type: string;
              }) => {
                setModalType('EDIT');
                setEditProxyName(name);
                handleFetchProxyConnection(id);
                setCurrentProxyEdit({ id: id, name: name, type: type });
                setModalState('ACTIVE');
              }}
              onDelete={(id: string) => {
                handleDeleteProxy(id);
              }}
            />

            <button
              className="rounded-3xl w-20 h-10 text-sm border border-solid border-violets-are-blue text-violets-are-blue transition-colors hover:text-white hover:bg-violets-are-blue"
              onClick={() => {
                setModalType('ADD');
                setNewProxyName('');
                setNewProxyConnection('');
                setModalState('ACTIVE');
              }}
            >
              {t('settings.general.add')}
            </button>
          </div>
        </div>
      </div>
      {modalState === 'ACTIVE' && (
        <ProxiesModal
          existingProxies={proxies}
          type={modalType}
          modalState={modalState}
          setModalState={setModalState}
          newProxyName={newProxyName}
          setNewProxyName={setNewProxyName}
          newProxyConnection={newProxyConnection}
          setNewProxyConnection={setNewProxyConnection}
          editProxyName={editProxyName}
          setEditProxyName={setEditProxyName}
          editProxyConnection={editProxyConnection}
          setEditProxyConnection={setEditProxyConnection}
          currentProxyEdit={currentProxyEdit}
          handleAddProxy={handleAddProxy}
          handleEditProxy={handleSaveChanges}
        />
      )}
    </>
  );
}
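The component above assumes five proxy methods on userService that this diff does not show. A plausible sketch of that service surface follows — the signatures mirror the call sites in Proxies.tsx and General.tsx, but the endpoint paths and the baseURL source are guesses, not committed routes, and need to match the backend once it lands (see the commit note about checking API key creation with the current proxies):

// Illustrative sketch only: signatures taken from the call sites above;
// the URL paths and env var below are assumptions, not part of this commit.
const baseURL = import.meta.env.VITE_API_HOST || '';

function request(path: string, token: string, options: RequestInit = {}) {
  return fetch(`${baseURL}${path}`, {
    ...options,
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`,
      ...(options.headers || {}),
    },
  });
}

const proxyService = {
  getProxies: (token: string) => request('/api/get_proxies', token),
  getSingleProxy: (id: string, token: string) =>
    request(`/api/get_proxy?id=${id}`, token),
  createProxy: (data: { name: string; connection: string }, token: string) =>
    request('/api/create_proxy', token, {
      method: 'POST',
      body: JSON.stringify(data),
    }),
  updateProxy: (
    data: { id: string; name: string; connection: string },
    token: string,
  ) =>
    request('/api/update_proxy', token, {
      method: 'POST',
      body: JSON.stringify(data),
    }),
  deleteProxy: (data: { id: string }, token: string) =>
    request('/api/delete_proxy', token, {
      method: 'POST',
      body: JSON.stringify(data),
    }),
};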