From 5b6cfa6ecc7c69f0a2370e059076c2ced672192f Mon Sep 17 00:00:00 2001 From: Siddhant Rai <47355538+siiddhantt@users.noreply.github.com> Date: Tue, 23 Dec 2025 19:07:44 +0530 Subject: [PATCH] feat: enhance API tool with body serialization and content type handling (#2192) * feat: enhance API tool with body serialization and content type handling * feat: enhance ToolConfig with import functionality and user action management - Added ImportSpecModal to allow importing actions into the tool configuration. - Implemented search functionality for user actions with expandable action details. - Introduced method colors for better visual distinction of HTTP methods. - Updated APIActionType and ParameterGroupType to include optional 'required' field. - Refactored action rendering to improve usability and maintainability. * feat: add base URL input to ImportSpecModal for action URL customization * feat: update TestBaseAgentTools to include 'required' field for parameters * feat: standardize API call timeout to DEFAULT_TIMEOUT constant * feat: add import specification functionality and related translations for multiple languages --------- Co-authored-by: Alex --- application/agents/base.py | 30 +- .../agents/tools/api_body_serializer.py | 323 +++++ application/agents/tools/api_tool.py | 242 +++- application/agents/tools/spec_parser.py | 342 +++++ application/api/user/tools/routes.py | 55 + frontend/src/api/endpoints.ts | 1 + frontend/src/api/services/userService.ts | 5 + frontend/src/locale/de.json | 21 + frontend/src/locale/en.json | 21 + frontend/src/locale/es.json | 21 + frontend/src/locale/jp.json | 21 + frontend/src/locale/ru.json | 21 + frontend/src/locale/zh-TW.json | 21 + frontend/src/locale/zh.json | 21 + frontend/src/modals/ImportSpecModal.tsx | 321 +++++ frontend/src/settings/ToolConfig.tsx | 1164 ++++++++++++----- frontend/src/settings/types/index.ts | 17 +- tests/agents/test_base_agent.py | 8 +- 18 files changed, 2308 insertions(+), 347 deletions(-) create mode 
100644 application/agents/tools/api_body_serializer.py create mode 100644 application/agents/tools/spec_parser.py create mode 100644 frontend/src/modals/ImportSpecModal.tsx diff --git a/application/agents/base.py b/application/agents/base.py index b2d79c03..44df7ee4 100644 --- a/application/agents/base.py +++ b/application/agents/base.py @@ -120,10 +120,10 @@ class BaseAgent(ABC): params["properties"][k] = { key: value for key, value in v.items() - if key != "filled_by_llm" and key != "value" + if key not in ("filled_by_llm", "value", "required") } - - params["required"].append(k) + if v.get("required", False): + params["required"].append(k) return params def _prepare_tools(self, tools_dict): @@ -219,7 +219,11 @@ class BaseAgent(ABC): for param_type, target_dict in param_types.items(): if param_type in action_data and action_data[param_type].get("properties"): for param, details in action_data[param_type]["properties"].items(): - if param not in call_args and "value" in details: + if ( + param not in call_args + and "value" in details + and details["value"] + ): target_dict[param] = details["value"] for param, value in call_args.items(): for param_type, target_dict in param_types.items(): @@ -232,12 +236,20 @@ class BaseAgent(ABC): # Prepare tool_config and add tool_id for memory tools if tool_data["name"] == "api_tool": + action_config = tool_data["config"]["actions"][action_name] tool_config = { - "url": tool_data["config"]["actions"][action_name]["url"], - "method": tool_data["config"]["actions"][action_name]["method"], + "url": action_config["url"], + "method": action_config["method"], "headers": headers, "query_params": query_params, } + if "body_content_type" in action_config: + tool_config["body_content_type"] = action_config.get( + "body_content_type", "application/json" + ) + tool_config["body_encoding_rules"] = action_config.get( + "body_encoding_rules", {} + ) else: tool_config = tool_data["config"].copy() if tool_data["config"] else {} # Add tool_id 
from MongoDB _id for tools that need instance isolation (like memory tool) @@ -247,15 +259,15 @@ class BaseAgent(ABC): tool = tm.load_tool( tool_data["name"], tool_config=tool_config, - user_id=self.user, # Pass user ID for MCP tools credential decryption + user_id=self.user, ) if tool_data["name"] == "api_tool": - print( + logger.debug( f"Executing api: {action_name} with query_params: {query_params}, headers: {headers}, body: {body}" ) result = tool.execute_action(action_name, **body) else: - print(f"Executing tool: {action_name} with args: {call_args}") + logger.debug(f"Executing tool: {action_name} with args: {call_args}") result = tool.execute_action(action_name, **parameters) tool_call_data["result"] = ( f"{str(result)[:50]}..." if len(str(result)) > 50 else result diff --git a/application/agents/tools/api_body_serializer.py b/application/agents/tools/api_body_serializer.py new file mode 100644 index 00000000..d23d1fcf --- /dev/null +++ b/application/agents/tools/api_body_serializer.py @@ -0,0 +1,323 @@ +import base64 +import json +import logging +from enum import Enum +from typing import Any, Dict, Optional, Union +from urllib.parse import quote, urlencode + +logger = logging.getLogger(__name__) + + +class ContentType(str, Enum): + """Supported content types for request bodies.""" + + JSON = "application/json" + FORM_URLENCODED = "application/x-www-form-urlencoded" + MULTIPART_FORM_DATA = "multipart/form-data" + TEXT_PLAIN = "text/plain" + XML = "application/xml" + OCTET_STREAM = "application/octet-stream" + + +class RequestBodySerializer: + """Serializes request bodies according to content-type and OpenAPI 3.1 spec.""" + + @staticmethod + def serialize( + body_data: Dict[str, Any], + content_type: str = ContentType.JSON, + encoding_rules: Optional[Dict[str, Dict[str, Any]]] = None, + ) -> tuple[Union[str, bytes], Dict[str, str]]: + """ + Serialize body data to appropriate format. 
+ + Args: + body_data: Dictionary of body parameters + content_type: Content-Type header value + encoding_rules: OpenAPI Encoding Object rules per field + + Returns: + Tuple of (serialized_body, updated_headers_dict) + + Raises: + ValueError: If serialization fails + """ + if not body_data: + return None, {} + + try: + content_type_lower = content_type.lower().split(";")[0].strip() + + if content_type_lower == ContentType.JSON: + return RequestBodySerializer._serialize_json(body_data) + + elif content_type_lower == ContentType.FORM_URLENCODED: + return RequestBodySerializer._serialize_form_urlencoded( + body_data, encoding_rules + ) + + elif content_type_lower == ContentType.MULTIPART_FORM_DATA: + return RequestBodySerializer._serialize_multipart_form_data( + body_data, encoding_rules + ) + + elif content_type_lower == ContentType.TEXT_PLAIN: + return RequestBodySerializer._serialize_text_plain(body_data) + + elif content_type_lower == ContentType.XML: + return RequestBodySerializer._serialize_xml(body_data) + + elif content_type_lower == ContentType.OCTET_STREAM: + return RequestBodySerializer._serialize_octet_stream(body_data) + + else: + logger.warning( + f"Unknown content type: {content_type}, treating as JSON" + ) + return RequestBodySerializer._serialize_json(body_data) + + except Exception as e: + logger.error(f"Error serializing body: {str(e)}", exc_info=True) + raise ValueError(f"Failed to serialize request body: {str(e)}") + + @staticmethod + def _serialize_json(body_data: Dict[str, Any]) -> tuple[str, Dict[str, str]]: + """Serialize body as JSON per OpenAPI spec.""" + try: + serialized = json.dumps( + body_data, separators=(",", ":"), ensure_ascii=False + ) + headers = {"Content-Type": ContentType.JSON.value} + return serialized, headers + except (TypeError, ValueError) as e: + raise ValueError(f"Failed to serialize JSON body: {str(e)}") + + @staticmethod + def _serialize_form_urlencoded( + body_data: Dict[str, Any], + encoding_rules: Optional[Dict[str, 
Dict[str, Any]]] = None, + ) -> tuple[str, Dict[str, str]]: + """Serialize body as application/x-www-form-urlencoded per RFC1866/RFC3986.""" + encoding_rules = encoding_rules or {} + params = [] + + for key, value in body_data.items(): + if value is None: + continue + + rule = encoding_rules.get(key, {}) + style = rule.get("style", "form") + explode = rule.get("explode", style == "form") + content_type = rule.get("contentType", "text/plain") + + serialized_value = RequestBodySerializer._serialize_form_value( + value, style, explode, content_type, key + ) + + if isinstance(serialized_value, list): + for sv in serialized_value: + params.append((key, sv)) + else: + params.append((key, serialized_value)) + + # Use standard urlencode (replaces space with +) + serialized = urlencode(params, safe="") + headers = {"Content-Type": ContentType.FORM_URLENCODED.value} + return serialized, headers + + @staticmethod + def _serialize_form_value( + value: Any, style: str, explode: bool, content_type: str, key: str + ) -> Union[str, list]: + """Serialize individual form value with encoding rules.""" + if isinstance(value, dict): + if content_type == "application/json": + return json.dumps(value, separators=(",", ":")) + elif content_type == "application/xml": + return RequestBodySerializer._dict_to_xml(value) + else: + if style == "deepObject" and explode: + return [ + f"{RequestBodySerializer._percent_encode(str(v))}" + for v in value.values() + ] + elif explode: + return [ + f"{RequestBodySerializer._percent_encode(str(v))}" + for v in value.values() + ] + else: + pairs = [f"{k},{v}" for k, v in value.items()] + return RequestBodySerializer._percent_encode(",".join(pairs)) + + elif isinstance(value, (list, tuple)): + if explode: + return [ + RequestBodySerializer._percent_encode(str(item)) for item in value + ] + else: + return RequestBodySerializer._percent_encode( + ",".join(str(v) for v in value) + ) + + else: + return RequestBodySerializer._percent_encode(str(value)) + + 
@staticmethod + def _serialize_multipart_form_data( + body_data: Dict[str, Any], + encoding_rules: Optional[Dict[str, Dict[str, Any]]] = None, + ) -> tuple[bytes, Dict[str, str]]: + """ + Serialize body as multipart/form-data per RFC7578. + + Supports file uploads and encoding rules. + """ + import secrets + + encoding_rules = encoding_rules or {} + boundary = f"----DocsGPT{secrets.token_hex(16)}" + parts = [] + + for key, value in body_data.items(): + if value is None: + continue + + rule = encoding_rules.get(key, {}) + content_type = rule.get("contentType", "text/plain") + headers_rule = rule.get("headers", {}) + + part = RequestBodySerializer._create_multipart_part( + key, value, content_type, headers_rule + ) + parts.append(part) + + body_bytes = f"--{boundary}\r\n".encode("utf-8") + body_bytes += f"--{boundary}\r\n".join(parts).encode("utf-8") + body_bytes += f"\r\n--{boundary}--\r\n".encode("utf-8") + + headers = { + "Content-Type": f"multipart/form-data; boundary={boundary}", + } + return body_bytes, headers + + @staticmethod + def _create_multipart_part( + name: str, value: Any, content_type: str, headers_rule: Dict[str, Any] + ) -> str: + """Create a single multipart/form-data part.""" + headers = [ + f'Content-Disposition: form-data; name="{RequestBodySerializer._percent_encode(name)}"' + ] + + if isinstance(value, bytes): + if content_type == "application/octet-stream": + value_encoded = base64.b64encode(value).decode("utf-8") + else: + value_encoded = value.decode("utf-8", errors="replace") + headers.append(f"Content-Type: {content_type}") + headers.append("Content-Transfer-Encoding: base64") + elif isinstance(value, dict): + if content_type == "application/json": + value_encoded = json.dumps(value, separators=(",", ":")) + elif content_type == "application/xml": + value_encoded = RequestBodySerializer._dict_to_xml(value) + else: + value_encoded = str(value) + headers.append(f"Content-Type: {content_type}") + elif isinstance(value, str) and content_type 
!= "text/plain": + try: + if content_type == "application/json": + json.loads(value) + value_encoded = value + elif content_type == "application/xml": + value_encoded = value + else: + value_encoded = str(value) + except json.JSONDecodeError: + value_encoded = str(value) + headers.append(f"Content-Type: {content_type}") + else: + value_encoded = str(value) + if content_type != "text/plain": + headers.append(f"Content-Type: {content_type}") + + part = "\r\n".join(headers) + "\r\n\r\n" + value_encoded + "\r\n" + return part + + @staticmethod + def _serialize_text_plain(body_data: Dict[str, Any]) -> tuple[str, Dict[str, str]]: + """Serialize body as plain text.""" + if len(body_data) == 1: + value = list(body_data.values())[0] + return str(value), {"Content-Type": ContentType.TEXT_PLAIN.value} + else: + text = "\n".join(f"{k}: {v}" for k, v in body_data.items()) + return text, {"Content-Type": ContentType.TEXT_PLAIN.value} + + @staticmethod + def _serialize_xml(body_data: Dict[str, Any]) -> tuple[str, Dict[str, str]]: + """Serialize body as XML.""" + xml_str = RequestBodySerializer._dict_to_xml(body_data) + return xml_str, {"Content-Type": ContentType.XML.value} + + @staticmethod + def _serialize_octet_stream( + body_data: Dict[str, Any], + ) -> tuple[bytes, Dict[str, str]]: + """Serialize body as binary octet stream.""" + if isinstance(body_data, bytes): + return body_data, {"Content-Type": ContentType.OCTET_STREAM.value} + elif isinstance(body_data, str): + return body_data.encode("utf-8"), { + "Content-Type": ContentType.OCTET_STREAM.value + } + else: + serialized = json.dumps(body_data) + return serialized.encode("utf-8"), { + "Content-Type": ContentType.OCTET_STREAM.value + } + + @staticmethod + def _percent_encode(value: str, safe_chars: str = "") -> str: + """ + Percent-encode per RFC3986. 
+ + Args: + value: String to encode + safe_chars: Additional characters to not encode + """ + return quote(value, safe=safe_chars) + + @staticmethod + def _dict_to_xml(data: Dict[str, Any], root_name: str = "root") -> str: + """ + Convert dict to simple XML format. + """ + + def build_xml(obj: Any, name: str) -> str: + if isinstance(obj, dict): + inner = "".join(build_xml(v, k) for k, v in obj.items()) + return f"<{name}>{inner}</{name}>" + elif isinstance(obj, (list, tuple)): + items = "".join( + build_xml(item, f"{name[:-1] if name.endswith('s') else name}") + for item in obj + ) + return items + else: + return f"<{name}>{RequestBodySerializer._escape_xml(str(obj))}</{name}>" + + root = build_xml(data, root_name) + return f'<?xml version="1.0" encoding="UTF-8"?>{root}' + + @staticmethod + def _escape_xml(value: str) -> str: + """Escape XML special characters.""" + return ( + value.replace("&", "&amp;") + .replace("<", "&lt;") + .replace(">", "&gt;") + .replace('"', "&quot;") + .replace("'", "&#39;") + ) diff --git a/application/agents/tools/api_tool.py b/application/agents/tools/api_tool.py index 063313c4..6bd2eb8d 100644 --- a/application/agents/tools/api_tool.py +++ b/application/agents/tools/api_tool.py @@ -1,72 +1,256 @@ import json +import logging +import re +from typing import Any, Dict, Optional +from urllib.parse import urlencode import requests + +from application.agents.tools.api_body_serializer import ( + ContentType, + RequestBodySerializer, +) from application.agents.tools.base import Tool +logger = logging.getLogger(__name__) + +DEFAULT_TIMEOUT = 90 # seconds + class APITool(Tool): """ API Tool - A flexible tool for performing various API actions (e.g., sending messages, retrieving data) via custom user-specified APIs + A flexible tool for performing various API actions (e.g., sending messages, retrieving data) via custom user-specified APIs.
""" def __init__(self, config): self.config = config self.url = config.get("url", "") self.method = config.get("method", "GET") - self.headers = config.get("headers", {"Content-Type": "application/json"}) + self.headers = config.get("headers", {}) self.query_params = config.get("query_params", {}) + self.body_content_type = config.get("body_content_type", ContentType.JSON) + self.body_encoding_rules = config.get("body_encoding_rules", {}) def execute_action(self, action_name, **kwargs): + """Execute an API action with the given arguments.""" return self._make_api_call( - self.url, self.method, self.headers, self.query_params, kwargs + self.url, + self.method, + self.headers, + self.query_params, + kwargs, + self.body_content_type, + self.body_encoding_rules, ) - def _make_api_call(self, url, method, headers, query_params, body): - if query_params: - url = f"{url}?{requests.compat.urlencode(query_params)}" - # if isinstance(body, dict): - # body = json.dumps(body) + def _make_api_call( + self, + url: str, + method: str, + headers: Dict[str, str], + query_params: Dict[str, Any], + body: Dict[str, Any], + content_type: str = ContentType.JSON, + encoding_rules: Optional[Dict[str, Dict[str, Any]]] = None, + ) -> Dict[str, Any]: + """ + Make an API call with proper body serialization and error handling. 
+ + Args: + url: API endpoint URL + method: HTTP method (GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS) + headers: Request headers dict + query_params: URL query parameters + body: Request body as dict + content_type: Content-Type for serialization + encoding_rules: OpenAPI encoding rules + + Returns: + Dict with status_code, data, and message + """ + request_url = url + request_headers = headers.copy() if headers else {} + response = None + try: - print(f"Making API call: {method} {url} with body: {body}") - if body == "{}": - body = None - response = requests.request(method, url, headers=headers, data=body) - response.raise_for_status() - content_type = response.headers.get( - "Content-Type", "application/json" - ).lower() - if "application/json" in content_type: + path_params_used = set() + if query_params: + for match in re.finditer(r"\{([^}]+)\}", request_url): + param_name = match.group(1) + if param_name in query_params: + request_url = request_url.replace( + f"{{{param_name}}}", str(query_params[param_name]) + ) + path_params_used.add(param_name) + remaining_params = { + k: v for k, v in query_params.items() if k not in path_params_used + } + if remaining_params: + query_string = urlencode(remaining_params) + separator = "&" if "?" in request_url else "?" + request_url = f"{request_url}{separator}{query_string}" + # Serialize body based on content type + + if body and body != {}: try: - data = response.json() - except json.JSONDecodeError as e: - print(f"Error decoding JSON: {e}. Raw response: {response.text}") + serialized_body, body_headers = RequestBodySerializer.serialize( + body, content_type, encoding_rules + ) + request_headers.update(body_headers) + except ValueError as e: + logger.error(f"Body serialization failed: {str(e)}") return { - "status_code": response.status_code, - "message": f"API call returned invalid JSON. 
Error: {e}", - "data": response.text, + "status_code": None, + "message": f"Body serialization error: {str(e)}", + "data": None, } - elif "text/" in content_type or "application/xml" in content_type: - data = response.text - elif not response.content: - data = None else: - print(f"Unsupported content type: {content_type}") - data = response.content + serialized_body = None + if "Content-Type" not in request_headers and method not in [ + "GET", + "HEAD", + "DELETE", + ]: + request_headers["Content-Type"] = ContentType.JSON + logger.debug( + f"API Call: {method} {request_url} | Content-Type: {request_headers.get('Content-Type', 'N/A')}" + ) + + if method.upper() == "GET": + response = requests.get( + request_url, headers=request_headers, timeout=DEFAULT_TIMEOUT + ) + elif method.upper() == "POST": + response = requests.post( + request_url, + data=serialized_body, + headers=request_headers, + timeout=DEFAULT_TIMEOUT, + ) + elif method.upper() == "PUT": + response = requests.put( + request_url, + data=serialized_body, + headers=request_headers, + timeout=DEFAULT_TIMEOUT, + ) + elif method.upper() == "DELETE": + response = requests.delete( + request_url, headers=request_headers, timeout=DEFAULT_TIMEOUT + ) + elif method.upper() == "PATCH": + response = requests.patch( + request_url, + data=serialized_body, + headers=request_headers, + timeout=DEFAULT_TIMEOUT, + ) + elif method.upper() == "HEAD": + response = requests.head( + request_url, headers=request_headers, timeout=DEFAULT_TIMEOUT + ) + elif method.upper() == "OPTIONS": + response = requests.options( + request_url, headers=request_headers, timeout=DEFAULT_TIMEOUT + ) + else: + return { + "status_code": None, + "message": f"Unsupported HTTP method: {method}", + "data": None, + } + response.raise_for_status() + + data = self._parse_response(response) return { "status_code": response.status_code, "data": data, "message": "API call successful.", } + except requests.exceptions.Timeout: + logger.error(f"Request timeout 
for {request_url}") + return { + "status_code": None, + "message": f"Request timeout ({DEFAULT_TIMEOUT}s exceeded)", + "data": None, + } + except requests.exceptions.ConnectionError as e: + logger.error(f"Connection error: {str(e)}") + return { + "status_code": None, + "message": f"Connection error: {str(e)}", + "data": None, + } + except requests.exceptions.HTTPError as e: + logger.error(f"HTTP error {response.status_code}: {str(e)}") + try: + error_data = response.json() + except (json.JSONDecodeError, ValueError): + error_data = response.text + return { + "status_code": response.status_code, + "message": f"HTTP Error {response.status_code}", + "data": error_data, + } except requests.exceptions.RequestException as e: + logger.error(f"Request failed: {str(e)}") return { "status_code": response.status_code if response else None, "message": f"API call failed: {str(e)}", + "data": None, + } + except Exception as e: + logger.error(f"Unexpected error in API call: {str(e)}", exc_info=True) + return { + "status_code": None, + "message": f"Unexpected error: {str(e)}", + "data": None, } + def _parse_response(self, response: requests.Response) -> Any: + """ + Parse response based on Content-Type header. + + Supports: JSON, XML, plain text, binary data. 
+ """ + content_type = response.headers.get("Content-Type", "").lower() + + if not response.content: + return None + # JSON response + + if "application/json" in content_type: + try: + return response.json() + except json.JSONDecodeError as e: + logger.warning(f"Failed to parse JSON response: {str(e)}") + return response.text + # XML response + + elif "application/xml" in content_type or "text/xml" in content_type: + return response.text + # Plain text response + + elif "text/plain" in content_type or "text/html" in content_type: + return response.text + # Binary/unknown response + + else: + # Try to decode as text first, fall back to base64 + + try: + return response.text + except (UnicodeDecodeError, AttributeError): + import base64 + + return base64.b64encode(response.content).decode("utf-8") + def get_actions_metadata(self): + """Return metadata for available actions (none for API Tool - actions are user-defined).""" return [] def get_config_requirements(self): + """Return configuration requirements for the tool.""" return {} diff --git a/application/agents/tools/spec_parser.py b/application/agents/tools/spec_parser.py new file mode 100644 index 00000000..336f00f8 --- /dev/null +++ b/application/agents/tools/spec_parser.py @@ -0,0 +1,342 @@ +""" +API Specification Parser + +Parses OpenAPI 3.x and Swagger 2.0 specifications and converts them +to API Tool action definitions for use in DocsGPT. +""" + +import json +import logging +import re +from typing import Any, Dict, List, Optional, Tuple + +import yaml + +logger = logging.getLogger(__name__) + +SUPPORTED_METHODS = frozenset( + {"get", "post", "put", "delete", "patch", "head", "options"} +) + + +def parse_spec(spec_content: str) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: + """ + Parse an API specification and convert operations to action definitions. + + Supports OpenAPI 3.x and Swagger 2.0 formats in JSON or YAML. 
+ + Args: + spec_content: Raw specification content as string + + Returns: + Tuple of (metadata dict, list of action dicts) + + Raises: + ValueError: If the spec is invalid or uses an unsupported format + """ + spec = _load_spec(spec_content) + _validate_spec(spec) + + is_swagger = "swagger" in spec + metadata = _extract_metadata(spec, is_swagger) + actions = _extract_actions(spec, is_swagger) + + return metadata, actions + + +def _load_spec(content: str) -> Dict[str, Any]: + """Parse spec content from JSON or YAML string.""" + content = content.strip() + if not content: + raise ValueError("Empty specification content") + try: + if content.startswith("{"): + return json.loads(content) + return yaml.safe_load(content) + except json.JSONDecodeError as e: + raise ValueError(f"Invalid JSON format: {e.msg}") + except yaml.YAMLError as e: + raise ValueError(f"Invalid YAML format: {e}") + + +def _validate_spec(spec: Dict[str, Any]) -> None: + """Validate spec version and required fields.""" + if not isinstance(spec, dict): + raise ValueError("Specification must be a valid object") + openapi_version = spec.get("openapi", "") + swagger_version = spec.get("swagger", "") + + if not (openapi_version.startswith("3.") or swagger_version == "2.0"): + raise ValueError( + "Unsupported specification version. 
Expected OpenAPI 3.x or Swagger 2.0" + ) + if "paths" not in spec or not spec["paths"]: + raise ValueError("No API paths defined in the specification") + + +def _extract_metadata(spec: Dict[str, Any], is_swagger: bool) -> Dict[str, Any]: + """Extract API metadata from specification.""" + info = spec.get("info", {}) + base_url = _get_base_url(spec, is_swagger) + + return { + "title": info.get("title", "Untitled API"), + "description": (info.get("description", "") or "")[:500], + "version": info.get("version", ""), + "base_url": base_url, + } + + +def _get_base_url(spec: Dict[str, Any], is_swagger: bool) -> str: + """Extract base URL from spec (handles both OpenAPI 3.x and Swagger 2.0).""" + if is_swagger: + schemes = spec.get("schemes", ["https"]) + host = spec.get("host", "") + base_path = spec.get("basePath", "") + if host: + scheme = schemes[0] if schemes else "https" + return f"{scheme}://{host}{base_path}".rstrip("/") + return "" + servers = spec.get("servers", []) + if servers and isinstance(servers, list) and servers[0].get("url"): + return servers[0]["url"].rstrip("/") + return "" + + +def _extract_actions(spec: Dict[str, Any], is_swagger: bool) -> List[Dict[str, Any]]: + """Extract all API operations as action definitions.""" + actions = [] + paths = spec.get("paths", {}) + base_url = _get_base_url(spec, is_swagger) + + components = spec.get("components", {}) + definitions = spec.get("definitions", {}) + + for path, path_item in paths.items(): + if not isinstance(path_item, dict): + continue + path_params = path_item.get("parameters", []) + + for method in SUPPORTED_METHODS: + operation = path_item.get(method) + if not isinstance(operation, dict): + continue + try: + action = _build_action( + path=path, + method=method, + operation=operation, + path_params=path_params, + base_url=base_url, + components=components, + definitions=definitions, + is_swagger=is_swagger, + ) + actions.append(action) + except Exception as e: + logger.warning( + f"Failed to parse 
operation {method.upper()} {path}: {e}" + ) + continue + return actions + + +def _build_action( + path: str, + method: str, + operation: Dict[str, Any], + path_params: List[Dict], + base_url: str, + components: Dict[str, Any], + definitions: Dict[str, Any], + is_swagger: bool, +) -> Dict[str, Any]: + """Build a single action from an API operation.""" + action_name = _generate_action_name(operation, method, path) + full_url = f"{base_url}{path}" if base_url else path + + all_params = path_params + operation.get("parameters", []) + query_params, headers = _categorize_parameters(all_params, components, definitions) + + body, body_content_type = _extract_request_body( + operation, components, definitions, is_swagger + ) + + description = operation.get("summary", "") or operation.get("description", "") + + return { + "name": action_name, + "url": full_url, + "method": method.upper(), + "description": (description or "")[:500], + "query_params": {"type": "object", "properties": query_params}, + "headers": {"type": "object", "properties": headers}, + "body": {"type": "object", "properties": body}, + "body_content_type": body_content_type, + "active": True, + } + + +def _generate_action_name(operation: Dict[str, Any], method: str, path: str) -> str: + """Generate a valid action name from operationId or method+path.""" + if operation.get("operationId"): + name = operation["operationId"] + else: + path_slug = re.sub(r"[{}]", "", path) + path_slug = re.sub(r"[^a-zA-Z0-9]", "_", path_slug) + path_slug = re.sub(r"_+", "_", path_slug).strip("_") + name = f"{method}_{path_slug}" + name = re.sub(r"[^a-zA-Z0-9_-]", "_", name) + return name[:64] + + +def _categorize_parameters( + parameters: List[Dict], + components: Dict[str, Any], + definitions: Dict[str, Any], +) -> Tuple[Dict, Dict]: + """Categorize parameters into query params and headers.""" + query_params = {} + headers = {} + + for param in parameters: + resolved = _resolve_ref(param, components, definitions) + if not 
resolved or "name" not in resolved: + continue + location = resolved.get("in", "query") + prop = _param_to_property(resolved) + + if location in ("query", "path"): + query_params[resolved["name"]] = prop + elif location == "header": + headers[resolved["name"]] = prop + return query_params, headers + + +def _param_to_property(param: Dict) -> Dict[str, Any]: + """Convert an API parameter to an action property definition.""" + schema = param.get("schema", {}) + param_type = schema.get("type", param.get("type", "string")) + + mapped_type = "integer" if param_type in ("integer", "number") else "string" + + return { + "type": mapped_type, + "description": (param.get("description", "") or "")[:200], + "value": "", + "filled_by_llm": param.get("required", False), + "required": param.get("required", False), + } + + +def _extract_request_body( + operation: Dict[str, Any], + components: Dict[str, Any], + definitions: Dict[str, Any], + is_swagger: bool, +) -> Tuple[Dict, str]: + """Extract request body schema and content type.""" + content_types = [ + "application/json", + "application/x-www-form-urlencoded", + "multipart/form-data", + "text/plain", + "application/xml", + ] + + if is_swagger: + consumes = operation.get("consumes", []) + body_param = next( + (p for p in operation.get("parameters", []) if p.get("in") == "body"), None + ) + if not body_param: + return {}, "application/json" + selected_type = consumes[0] if consumes else "application/json" + schema = body_param.get("schema", {}) + else: + request_body = operation.get("requestBody", {}) + if not request_body: + return {}, "application/json" + request_body = _resolve_ref(request_body, components, definitions) + content = request_body.get("content", {}) + + selected_type = "application/json" + schema = {} + + for ct in content_types: + if ct in content: + selected_type = ct + schema = content[ct].get("schema", {}) + break + if not schema and content: + first_type = next(iter(content)) + selected_type = first_type + 
schema = content[first_type].get("schema", {}) + properties = _schema_to_properties(schema, components, definitions) + return properties, selected_type + + +def _schema_to_properties( + schema: Dict, + components: Dict[str, Any], + definitions: Dict[str, Any], + depth: int = 0, +) -> Dict[str, Any]: + """Convert schema to action body properties (limited depth to prevent recursion).""" + if depth > 3: + return {} + schema = _resolve_ref(schema, components, definitions) + if not schema or not isinstance(schema, dict): + return {} + properties = {} + schema_type = schema.get("type", "object") + + if schema_type == "object": + required_fields = set(schema.get("required", [])) + for prop_name, prop_schema in schema.get("properties", {}).items(): + resolved = _resolve_ref(prop_schema, components, definitions) + if not isinstance(resolved, dict): + continue + prop_type = resolved.get("type", "string") + mapped_type = "integer" if prop_type in ("integer", "number") else "string" + + properties[prop_name] = { + "type": mapped_type, + "description": (resolved.get("description", "") or "")[:200], + "value": "", + "filled_by_llm": prop_name in required_fields, + "required": prop_name in required_fields, + } + return properties + + +def _resolve_ref( + obj: Any, + components: Dict[str, Any], + definitions: Dict[str, Any], +) -> Optional[Dict]: + """Resolve $ref references in the specification.""" + if not isinstance(obj, dict): + return obj if isinstance(obj, dict) else None + if "$ref" not in obj: + return obj + ref_path = obj["$ref"] + + if ref_path.startswith("#/components/"): + parts = ref_path.replace("#/components/", "").split("/") + return _traverse_path(components, parts) + elif ref_path.startswith("#/definitions/"): + parts = ref_path.replace("#/definitions/", "").split("/") + return _traverse_path(definitions, parts) + logger.debug(f"Unsupported ref path: {ref_path}") + return None + + +def _traverse_path(obj: Dict, parts: List[str]) -> Optional[Dict]: + """Traverse a 
nested dictionary using path parts.""" + try: + for part in parts: + obj = obj[part] + return obj if isinstance(obj, dict) else None + except (KeyError, TypeError): + return None diff --git a/application/api/user/tools/routes.py b/application/api/user/tools/routes.py index 0d4bc6f8..1503ef7e 100644 --- a/application/api/user/tools/routes.py +++ b/application/api/user/tools/routes.py @@ -4,6 +4,7 @@ from bson.objectid import ObjectId from flask import current_app, jsonify, make_response, request from flask_restx import fields, Namespace, Resource +from application.agents.tools.spec_parser import parse_spec from application.agents.tools.tool_manager import ToolManager from application.api import api from application.api.user.base import user_tools_collection @@ -414,3 +415,57 @@ class DeleteTool(Resource): current_app.logger.error(f"Error deleting tool: {err}", exc_info=True) return {"success": False}, 400 return {"success": True}, 200 + + +@tools_ns.route("/parse_spec") +class ParseSpec(Resource): + @api.doc( + description="Parse an API specification (OpenAPI 3.x or Swagger 2.0) and return actions" + ) + def post(self): + decoded_token = request.decoded_token + if not decoded_token: + return make_response(jsonify({"success": False}), 401) + if "file" in request.files: + file = request.files["file"] + if not file.filename: + return make_response( + jsonify({"success": False, "message": "No file selected"}), 400 + ) + try: + spec_content = file.read().decode("utf-8") + except UnicodeDecodeError: + return make_response( + jsonify({"success": False, "message": "Invalid file encoding"}), 400 + ) + elif request.is_json: + data = request.get_json() + spec_content = data.get("spec_content", "") + else: + return make_response( + jsonify({"success": False, "message": "No spec provided"}), 400 + ) + if not spec_content or not spec_content.strip(): + return make_response( + jsonify({"success": False, "message": "Empty spec content"}), 400 + ) + try: + metadata, actions = 
parse_spec(spec_content) + return make_response( + jsonify( + { + "success": True, + "metadata": metadata, + "actions": actions, + } + ), + 200, + ) + except ValueError as e: + error_msg = str(e) + current_app.logger.error(f"Spec validation error: {error_msg}") + return make_response(jsonify({"success": False, "error": error_msg}), 400) + except Exception as err: + error_msg = str(err) + current_app.logger.error(f"Error parsing spec: {error_msg}", exc_info=True) + return make_response(jsonify({"success": False, "error": error_msg}), 500) diff --git a/frontend/src/api/endpoints.ts b/frontend/src/api/endpoints.ts index d3636dd2..6bd8a834 100644 --- a/frontend/src/api/endpoints.ts +++ b/frontend/src/api/endpoints.ts @@ -40,6 +40,7 @@ const endpoints = { UPDATE_TOOL_STATUS: '/api/update_tool_status', UPDATE_TOOL: '/api/update_tool', DELETE_TOOL: '/api/delete_tool', + PARSE_SPEC: '/api/parse_spec', SYNC_CONNECTOR: '/api/connectors/sync', GET_CHUNKS: ( docId: string, diff --git a/frontend/src/api/services/userService.ts b/frontend/src/api/services/userService.ts index ecd3df3d..1dcf9f4c 100644 --- a/frontend/src/api/services/userService.ts +++ b/frontend/src/api/services/userService.ts @@ -84,6 +84,11 @@ const userService = { apiClient.post(endpoints.USER.UPDATE_TOOL, data, token), deleteTool: (data: any, token: string | null): Promise => apiClient.post(endpoints.USER.DELETE_TOOL, data, token), + parseSpec: (file: File, token: string | null): Promise => { + const formData = new FormData(); + formData.append('file', file); + return apiClient.postFormData(endpoints.USER.PARSE_SPEC, formData, token); + }, getDocumentChunks: ( docId: string, page: number, diff --git a/frontend/src/locale/de.json b/frontend/src/locale/de.json index c4b58a3e..55e10877 100644 --- a/frontend/src/locale/de.json +++ b/frontend/src/locale/de.json @@ -162,12 +162,17 @@ "authentication": "Authentifizierung", "actions": "Aktionen", "addAction": "Aktion hinzufügen", + "importSpec": "Spezifikation 
importieren", + "searchActions": "Aktionen suchen...", + "noActionsMatch": "Keine Aktionen passen zu deiner Suche", + "actionAlreadyExists": "Eine Aktion mit diesem Namen existiert bereits", "noActionsFound": "Keine Aktionen gefunden", "url": "URL", "urlPlaceholder": "URL eingeben", "method": "Methode", "description": "Beschreibung", "descriptionPlaceholder": "Beschreibung eingeben", + "bodyContentType": "Body-Inhaltstyp", "headers": "Header", "queryParameters": "Abfrageparameter", "body": "Body", @@ -441,6 +446,22 @@ "generate": "Generieren", "test": "Testen", "learnMore": "Mehr erfahren" + }, + "importSpec": { + "title": "API-Spezifikation importieren", + "description": "Lade eine OpenAPI 3.x- oder Swagger 2.0-Spezifikationsdatei hoch, um automatisch Aktionen zu generieren.", + "dropzoneText": "Zum Hochladen klicken oder per Drag & Drop", + "supportedFormats": "JSON- oder YAML-Format", + "invalidFileType": "Ungültiger Dateityp. Bitte eine JSON- oder YAML-Datei hochladen.", + "parseError": "Spezifikation konnte nicht geparst werden. 
Bitte Dateiformat prüfen.", + "version": "Version", + "baseUrl": "Basis-URL", + "actionsFound": "{{count}} Aktionen gefunden", + "selectAll": "Alle auswählen", + "deselectAll": "Alle abwählen", + "cancel": "Abbrechen", + "parse": "Parsen", + "import": "Importieren ({{count}})" } }, "sharedConv": { diff --git a/frontend/src/locale/en.json b/frontend/src/locale/en.json index dbb8bdbe..94680a96 100644 --- a/frontend/src/locale/en.json +++ b/frontend/src/locale/en.json @@ -162,12 +162,17 @@ "authentication": "Authentication", "actions": "Actions", "addAction": "Add action", + "importSpec": "Import Spec", + "searchActions": "Search actions...", + "noActionsMatch": "No actions match your search", + "actionAlreadyExists": "An action with this name already exists", "noActionsFound": "No actions found", "url": "URL", "urlPlaceholder": "Enter URL", "method": "Method", "description": "Description", "descriptionPlaceholder": "Enter description", + "bodyContentType": "Body Content Type", "headers": "Headers", "queryParameters": "Query Parameters", "body": "Body", @@ -441,6 +446,22 @@ "generate": "Generate", "test": "Test", "learnMore": "Learn more" + }, + "importSpec": { + "title": "Import API Specification", + "description": "Upload an OpenAPI 3.x or Swagger 2.0 specification file to automatically generate actions.", + "dropzoneText": "Click to upload or drag and drop", + "supportedFormats": "JSON or YAML format", + "invalidFileType": "Invalid file type. Please upload a JSON or YAML file.", + "parseError": "Failed to parse the specification. 
Please check the file format.", + "version": "Version", + "baseUrl": "Base URL", + "actionsFound": "{{count}} actions found", + "selectAll": "Select all", + "deselectAll": "Deselect all", + "cancel": "Cancel", + "parse": "Parse", + "import": "Import ({{count}})" } }, "sharedConv": { diff --git a/frontend/src/locale/es.json b/frontend/src/locale/es.json index 49f09344..f388796e 100644 --- a/frontend/src/locale/es.json +++ b/frontend/src/locale/es.json @@ -162,12 +162,17 @@ "authentication": "Autenticación", "actions": "Acciones", "addAction": "Agregar acción", + "importSpec": "Importar especificación", + "searchActions": "Buscar acciones...", + "noActionsMatch": "No hay acciones que coincidan con tu búsqueda", + "actionAlreadyExists": "Ya existe una acción con este nombre", "noActionsFound": "No se encontraron acciones", "url": "URL", "urlPlaceholder": "Ingresa url", "method": "Método", "description": "Descripción", "descriptionPlaceholder": "Ingresa descripción", + "bodyContentType": "Tipo de contenido del cuerpo", "headers": "Encabezados", "queryParameters": "Parámetros de Consulta", "body": "Cuerpo", @@ -441,6 +446,22 @@ "generate": "Generate", "test": "Test", "learnMore": "Learn more" + }, + "importSpec": { + "title": "Importar especificación de API", + "description": "Sube un archivo de especificación OpenAPI 3.x o Swagger 2.0 para generar acciones automáticamente.", + "dropzoneText": "Haz clic para subir o arrastra y suelta", + "supportedFormats": "Formato JSON o YAML", + "invalidFileType": "Tipo de archivo no válido. Sube un archivo JSON o YAML.", + "parseError": "No se pudo analizar la especificación. 
Verifica el formato del archivo.", + "version": "Versión", + "baseUrl": "URL base", + "actionsFound": "{{count}} acciones encontradas", + "selectAll": "Seleccionar todo", + "deselectAll": "Deseleccionar todo", + "cancel": "Cancelar", + "parse": "Analizar", + "import": "Importar ({{count}})" } }, "sharedConv": { diff --git a/frontend/src/locale/jp.json b/frontend/src/locale/jp.json index b8458d2c..cc5a9de3 100644 --- a/frontend/src/locale/jp.json +++ b/frontend/src/locale/jp.json @@ -162,12 +162,17 @@ "authentication": "認証", "actions": "アクション", "addAction": "アクションを追加", + "importSpec": "仕様をインポート", + "searchActions": "アクションを検索...", + "noActionsMatch": "検索に一致するアクションがありません", + "actionAlreadyExists": "この名前のアクションは既に存在します", "noActionsFound": "アクションが見つかりません", "url": "URL", "urlPlaceholder": "URLを入力", "method": "メソッド", "description": "説明", "descriptionPlaceholder": "説明を入力", + "bodyContentType": "ボディのコンテンツタイプ", "headers": "ヘッダー", "queryParameters": "クエリパラメータ", "body": "ボディ", @@ -441,6 +446,22 @@ "generate": "Generate", "test": "Test", "learnMore": "Learn more" + }, + "importSpec": { + "title": "API仕様のインポート", + "description": "OpenAPI 3.x または Swagger 2.0 の仕様ファイルをアップロードして、アクションを自動生成します。", + "dropzoneText": "クリックしてアップロード、またはドラッグ&ドロップ", + "supportedFormats": "JSON または YAML 形式", + "invalidFileType": "無効なファイル形式です。JSON または YAML ファイルをアップロードしてください。", + "parseError": "仕様の解析に失敗しました。ファイル形式を確認してください。", + "version": "バージョン", + "baseUrl": "ベースURL", + "actionsFound": "{{count}} 件のアクションが見つかりました", + "selectAll": "すべて選択", + "deselectAll": "すべて解除", + "cancel": "キャンセル", + "parse": "解析", + "import": "インポート ({{count}})" } }, "sharedConv": { diff --git a/frontend/src/locale/ru.json b/frontend/src/locale/ru.json index 103f3627..d9146781 100644 --- a/frontend/src/locale/ru.json +++ b/frontend/src/locale/ru.json @@ -162,12 +162,17 @@ "authentication": "Аутентификация", "actions": "Действия", "addAction": "Добавить действие", + "importSpec": "Импорт спецификации", + "searchActions": "Поиск действий...", 
+ "noActionsMatch": "Нет действий, соответствующих вашему поиску", + "actionAlreadyExists": "Действие с таким именем уже существует", "noActionsFound": "Действия не найдены", "url": "URL", "urlPlaceholder": "Введите URL", "method": "Метод", "description": "Описание", "descriptionPlaceholder": "Введите описание", + "bodyContentType": "Тип содержимого тела", "headers": "Заголовки", "queryParameters": "Параметры запроса", "body": "Тело запроса", @@ -441,6 +446,22 @@ "generate": "Generate", "test": "Test", "learnMore": "Learn more" + }, + "importSpec": { + "title": "Импорт спецификации API", + "description": "Загрузите файл спецификации OpenAPI 3.x или Swagger 2.0 для автоматического создания действий.", + "dropzoneText": "Нажмите для загрузки или перетащите файл", + "supportedFormats": "Формат JSON или YAML", + "invalidFileType": "Неверный тип файла. Пожалуйста, загрузите файл JSON или YAML.", + "parseError": "Не удалось разобрать спецификацию. Проверьте формат файла.", + "version": "Версия", + "baseUrl": "Базовый URL", + "actionsFound": "{{count}} действий найдено", + "selectAll": "Выбрать все", + "deselectAll": "Снять выделение со всех", + "cancel": "Отмена", + "parse": "Разобрать", + "import": "Импорт ({{count}})" } }, "sharedConv": { diff --git a/frontend/src/locale/zh-TW.json b/frontend/src/locale/zh-TW.json index 47c0b236..8e8d9714 100644 --- a/frontend/src/locale/zh-TW.json +++ b/frontend/src/locale/zh-TW.json @@ -162,12 +162,17 @@ "authentication": "認證", "actions": "操作", "addAction": "新增操作", + "importSpec": "匯入規格", + "searchActions": "搜尋操作...", + "noActionsMatch": "沒有符合搜尋的操作", + "actionAlreadyExists": "已存在同名操作", "noActionsFound": "找不到操作", "url": "URL", "urlPlaceholder": "輸入url", "method": "方法", "description": "描述", "descriptionPlaceholder": "輸入描述", + "bodyContentType": "主體內容類型", "headers": "標頭", "queryParameters": "查詢參數", "body": "主體", @@ -441,6 +446,22 @@ "generate": "Generate", "test": "Test", "learnMore": "Learn more" + }, + "importSpec": { + "title": "匯入 
API 規格", + "description": "上傳 OpenAPI 3.x 或 Swagger 2.0 規格檔以自動產生操作。", + "dropzoneText": "點擊上傳或拖放", + "supportedFormats": "JSON 或 YAML 格式", + "invalidFileType": "無效的檔案類型。請上傳 JSON 或 YAML 檔案。", + "parseError": "解析規格失敗。請檢查檔案格式。", + "version": "版本", + "baseUrl": "基礎 URL", + "actionsFound": "找到 {{count}} 個操作", + "selectAll": "全選", + "deselectAll": "取消全選", + "cancel": "取消", + "parse": "解析", + "import": "匯入 ({{count}})" } }, "sharedConv": { diff --git a/frontend/src/locale/zh.json b/frontend/src/locale/zh.json index 12b867fa..f85cef5b 100644 --- a/frontend/src/locale/zh.json +++ b/frontend/src/locale/zh.json @@ -162,12 +162,17 @@ "authentication": "认证", "actions": "操作", "addAction": "添加操作", + "importSpec": "导入规范", + "searchActions": "搜索操作...", + "noActionsMatch": "没有与搜索匹配的操作", + "actionAlreadyExists": "已存在同名操作", "noActionsFound": "未找到操作", "url": "URL", "urlPlaceholder": "输入url", "method": "方法", "description": "描述", "descriptionPlaceholder": "输入描述", + "bodyContentType": "请求体内容类型", "headers": "请求头", "queryParameters": "查询参数", "body": "请求体", @@ -441,6 +446,22 @@ "generate": "Generate", "test": "Test", "learnMore": "Learn more" + }, + "importSpec": { + "title": "导入 API 规范", + "description": "上传 OpenAPI 3.x 或 Swagger 2.0 规范文件以自动生成操作。", + "dropzoneText": "点击上传或拖拽到此处", + "supportedFormats": "JSON 或 YAML 格式", + "invalidFileType": "文件类型无效。请上传 JSON 或 YAML 文件。", + "parseError": "解析规范失败。请检查文件格式。", + "version": "版本", + "baseUrl": "基础 URL", + "actionsFound": "找到 {{count}} 个操作", + "selectAll": "全选", + "deselectAll": "取消全选", + "cancel": "取消", + "parse": "解析", + "import": "导入 ({{count}})" } }, "sharedConv": { diff --git a/frontend/src/modals/ImportSpecModal.tsx b/frontend/src/modals/ImportSpecModal.tsx new file mode 100644 index 00000000..df68aea6 --- /dev/null +++ b/frontend/src/modals/ImportSpecModal.tsx @@ -0,0 +1,321 @@ +import { useRef, useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { useSelector } from 'react-redux'; + +import userService from 
'../api/services/userService'; +import Upload from '../assets/upload.svg'; +import Spinner from '../components/Spinner'; +import { ActiveState } from '../models/misc'; +import { selectToken } from '../preferences/preferenceSlice'; +import { APIActionType } from '../settings/types'; +import WrapperModal from './WrapperModal'; + +interface ImportSpecModalProps { + modalState: ActiveState; + setModalState: (state: ActiveState) => void; + onImport: (actions: APIActionType[]) => void; +} + +interface ParsedResult { + metadata: { + title: string; + description: string; + version: string; + base_url: string; + }; + actions: APIActionType[]; +} + +const METHOD_COLORS: Record = { + GET: 'bg-[#D1FAE5] text-[#065F46] dark:bg-[#064E3B]/60 dark:text-[#6EE7B7]', + POST: 'bg-[#DBEAFE] text-[#1E40AF] dark:bg-[#1E3A8A]/60 dark:text-[#93C5FD]', + PUT: 'bg-[#FEF3C7] text-[#92400E] dark:bg-[#78350F]/60 dark:text-[#FCD34D]', + DELETE: + 'bg-[#FEE2E2] text-[#991B1B] dark:bg-[#7F1D1D]/60 dark:text-[#FCA5A5]', + PATCH: 'bg-[#EDE9FE] text-[#5B21B6] dark:bg-[#4C1D95]/60 dark:text-[#C4B5FD]', + HEAD: 'bg-[#F3F4F6] text-[#374151] dark:bg-[#374151]/60 dark:text-[#D1D5DB]', + OPTIONS: + 'bg-[#F3F4F6] text-[#374151] dark:bg-[#374151]/60 dark:text-[#D1D5DB]', +}; + +export default function ImportSpecModal({ + modalState, + setModalState, + onImport, +}: ImportSpecModalProps) { + const { t } = useTranslation(); + const token = useSelector(selectToken); + const fileInputRef = useRef(null); + + const [file, setFile] = useState(null); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const [parsedResult, setParsedResult] = useState(null); + const [selectedActions, setSelectedActions] = useState>( + new Set(), + ); + const [baseUrl, setBaseUrl] = useState(''); + + const handleClose = () => { + setModalState('INACTIVE'); + setFile(null); + setLoading(false); + setError(null); + setParsedResult(null); + setSelectedActions(new Set()); + setBaseUrl(''); + }; + 
+ const handleFileChange = (e: React.ChangeEvent) => { + const selectedFile = e.target.files?.[0]; + if (!selectedFile) return; + + const validExtensions = ['.json', '.yaml', '.yml']; + const hasValidExtension = validExtensions.some((ext) => + selectedFile.name.toLowerCase().endsWith(ext), + ); + + if (!hasValidExtension) { + setError(t('modals.importSpec.invalidFileType')); + return; + } + + setFile(selectedFile); + setError(null); + setParsedResult(null); + }; + + const handleParse = async () => { + if (!file) return; + + setLoading(true); + setError(null); + + try { + const response = await userService.parseSpec(file, token); + if (!response.ok) { + const errorData = await response.json(); + setError( + errorData.error || + errorData.message || + t('modals.importSpec.parseError'), + ); + return; + } + + const result = await response.json(); + if (result.success) { + setParsedResult(result); + setBaseUrl(result.metadata.base_url || ''); + setSelectedActions( + new Set( + result.actions.map((_: APIActionType, i: number) => i), + ), + ); + } else { + setError( + result.error || result.message || t('modals.importSpec.parseError'), + ); + } + } catch { + setError(t('modals.importSpec.parseError')); + } finally { + setLoading(false); + } + }; + + const toggleAction = (index: number) => { + setSelectedActions((prev) => { + const next = new Set(prev); + if (next.has(index)) { + next.delete(index); + } else { + next.add(index); + } + return next; + }); + }; + + const toggleAll = () => { + if (!parsedResult) return; + if (selectedActions.size === parsedResult.actions.length) { + setSelectedActions(new Set()); + } else { + setSelectedActions(new Set(parsedResult.actions.map((_, i) => i))); + } + }; + + const handleImport = () => { + if (!parsedResult) return; + const actionsToImport = parsedResult.actions + .filter((_, i) => selectedActions.has(i)) + .map((action) => ({ + ...action, + url: action.url.replace(parsedResult.metadata.base_url, baseUrl.trim()), + })); + 
onImport(actionsToImport); + handleClose(); + }; + + if (modalState !== 'ACTIVE') return null; + + return ( + +
+

+ {t('modals.importSpec.title')} +

+ + {!parsedResult ? ( +
+

+ {t('modals.importSpec.description')} +

+ +
fileInputRef.current?.click()} + className="border-silver dark:border-silver/40 hover:border-purple-30 dark:hover:border-purple-30 flex cursor-pointer flex-col items-center justify-center rounded-xl border-2 border-dashed p-8 transition-colors" + > + Upload +

+ {file ? file.name : t('modals.importSpec.dropzoneText')} +

+

+ {t('modals.importSpec.supportedFormats')} +

+ +
+ + {error && ( +

{error}

+ )} +
+ ) : ( +
+
+

+ {parsedResult.metadata.title} +

+ {parsedResult.metadata.description && ( +

+ {parsedResult.metadata.description} +

+ )} +

+ {t('modals.importSpec.version')}:{' '} + {parsedResult.metadata.version} +

+
+ + setBaseUrl(e.target.value)} + className="border-silver dark:border-silver/40 text-jet dark:text-bright-gray w-full rounded-lg border bg-white px-3 py-2 text-sm outline-hidden dark:bg-[#2C2C2C]" + placeholder={ + parsedResult.metadata.base_url || 'https://api.example.com' + } + /> +
+
+ +
+

+ {t('modals.importSpec.actionsFound', { + count: parsedResult.actions.length, + })} +

+ +
+ +
+ {parsedResult.actions.map((action, index) => ( + + ))} +
+
+ )} + +
+ {!parsedResult ? ( + + ) : ( + + )} + +
+
+
+ ); +} diff --git a/frontend/src/settings/ToolConfig.tsx b/frontend/src/settings/ToolConfig.tsx index bca5c6ce..b6177483 100644 --- a/frontend/src/settings/ToolConfig.tsx +++ b/frontend/src/settings/ToolConfig.tsx @@ -1,24 +1,38 @@ import React from 'react'; +import { useTranslation } from 'react-i18next'; import { useSelector } from 'react-redux'; import userService from '../api/services/userService'; import ArrowLeft from '../assets/arrow-left.svg'; +import ChevronRight from '../assets/chevron-right.svg'; import CircleCheck from '../assets/circle-check.svg'; import CircleX from '../assets/circle-x.svg'; +import NoFilesDarkIcon from '../assets/no-files-dark.svg'; +import NoFilesIcon from '../assets/no-files.svg'; import Trash from '../assets/trash.svg'; import Dropdown from '../components/Dropdown'; import Input from '../components/Input'; import ToggleSwitch from '../components/ToggleSwitch'; +import { useDarkTheme } from '../hooks'; import AddActionModal from '../modals/AddActionModal'; import ConfirmationModal from '../modals/ConfirmationModal'; +import ImportSpecModal from '../modals/ImportSpecModal'; import { ActiveState } from '../models/misc'; import { selectToken } from '../preferences/preferenceSlice'; -import { APIActionType, APIToolType, UserToolType } from './types'; -import { useTranslation } from 'react-i18next'; import { areObjectsEqual } from '../utils/objectUtils'; -import { useDarkTheme } from '../hooks'; -import NoFilesIcon from '../assets/no-files.svg'; -import NoFilesDarkIcon from '../assets/no-files-dark.svg'; +import { APIActionType, APIToolType, UserToolType } from './types'; + +const METHOD_COLORS: Record = { + GET: 'bg-[#D1FAE5] text-[#065F46] dark:bg-[#064E3B]/60 dark:text-[#6EE7B7]', + POST: 'bg-[#DBEAFE] text-[#1E40AF] dark:bg-[#1E3A8A]/60 dark:text-[#93C5FD]', + PUT: 'bg-[#FEF3C7] text-[#92400E] dark:bg-[#78350F]/60 dark:text-[#FCD34D]', + DELETE: + 'bg-[#FEE2E2] text-[#991B1B] dark:bg-[#7F1D1D]/60 dark:text-[#FCA5A5]', + PATCH: 
'bg-[#EDE9FE] text-[#5B21B6] dark:bg-[#4C1D95]/60 dark:text-[#C4B5FD]', + HEAD: 'bg-[#F3F4F6] text-[#374151] dark:bg-[#374151]/60 dark:text-[#D1D5DB]', + OPTIONS: + 'bg-[#F3F4F6] text-[#374151] dark:bg-[#374151]/60 dark:text-[#D1D5DB]', +}; export default function ToolConfig({ tool, @@ -51,6 +65,8 @@ export default function ToolConfig({ ); const [actionModalState, setActionModalState] = React.useState('INACTIVE'); + const [importModalState, setImportModalState] = + React.useState('INACTIVE'); const [initialState, setInitialState] = React.useState({ customName: tool.customName || '', authKey: 'token' in tool.config ? tool.config.token : '', @@ -59,9 +75,38 @@ export default function ToolConfig({ }); const [hasUnsavedChanges, setHasUnsavedChanges] = React.useState(false); const [showUnsavedModal, setShowUnsavedModal] = React.useState(false); + const [userActionsSearch, setUserActionsSearch] = React.useState(''); + const [expandedUserActions, setExpandedUserActions] = React.useState< + Set + >(new Set()); const { t } = useTranslation(); const [isDarkTheme] = useDarkTheme(); + const toggleUserActionExpand = (index: number) => { + setExpandedUserActions((prev) => { + const newSet = new Set(prev); + if (newSet.has(index)) { + newSet.delete(index); + } else { + newSet.add(index); + } + return newSet; + }); + }; + + const filteredUserActions = React.useMemo(() => { + if (!('actions' in tool) || !tool.actions) return []; + const query = userActionsSearch.toLowerCase(); + return tool.actions + .map((action, index) => ({ action, originalIndex: index })) + .filter( + ({ action }) => + action.name.toLowerCase().includes(query) || + action.description?.toLowerCase().includes(query), + ) + .sort((a, b) => a.action.name.localeCompare(b.action.name)); + }, [tool, userActionsSearch]); + const handleBackClick = () => { if (hasUnsavedChanges) { setShowUnsavedModal(true); @@ -88,6 +133,8 @@ export default function ToolConfig({ 'actions' in tool ? 
tool.actions.map((action, index) => { if (index === actionIndex) { + const newFilledByLlm = + !action.parameters.properties[property].filled_by_llm; return { ...action, parameters: { @@ -96,8 +143,8 @@ export default function ToolConfig({ ...action.parameters.properties, [property]: { ...action.parameters.properties[property], - filled_by_llm: - !action.parameters.properties[property].filled_by_llm, + filled_by_llm: newFilledByLlm, + required: newFilledByLlm, }, }, }, @@ -164,6 +211,13 @@ export default function ToolConfig({ }; const handleAddNewAction = (actionName: string) => { + const toolCopy = tool as APIToolType; + + if (toolCopy.config.actions && toolCopy.config.actions[actionName]) { + alert(t('settings.tools.actionAlreadyExists')); + return; + } + const newAction: APIActionType = { name: actionName, method: 'GET', @@ -182,8 +236,10 @@ export default function ToolConfig({ type: 'object', }, active: true, + body_content_type: 'application/json', + body_encoding_rules: {}, }; - const toolCopy = tool as APIToolType; + setTool({ ...toolCopy, config: { @@ -192,6 +248,30 @@ export default function ToolConfig({ }, }); }; + + const handleImportActions = (actions: APIActionType[]) => { + const toolCopy = tool as APIToolType; + const existingActions = toolCopy.config.actions || {}; + const newActions: { [key: string]: APIActionType } = {}; + + actions.forEach((action) => { + let actionName = action.name; + let counter = 1; + while (existingActions[actionName] || newActions[actionName]) { + actionName = `${action.name}_${counter}`; + counter++; + } + newActions[actionName] = { ...action, name: actionName }; + }); + + setTool({ + ...toolCopy, + config: { + ...toolCopy.config, + actions: { ...existingActions, ...newActions }, + }, + }); + }; return (
@@ -271,16 +351,22 @@ export default function ToolConfig({

{t('settings.tools.actions')}

- {tool.name === 'api_tool' && - (!tool.config.actions || - Object.keys(tool.config.actions).length === 0) && ( + {tool.name === 'api_tool' && ( +
+ - )} +
+ )}
{tool.name === 'api_tool' ? ( <> @@ -301,180 +387,247 @@ export default function ToolConfig({ )} ) : ( -
+
{'actions' in tool && tool.actions && tool.actions.length > 0 ? ( - tool.actions.map((action, actionIndex) => ( -
-
-

- {action.name} -

- { - setTool({ - ...tool, - actions: tool.actions.map((act, index) => { - if (index === actionIndex) { - return { ...act, active: checked }; - } - return act; - }), - }); - }} - size="small" - id={`actionToggle-${actionIndex}`} + <> +
+ setUserActionsSearch(e.target.value)} + placeholder={t('settings.tools.searchActions')} + className="border-silver dark:border-silver/40 dark:bg-raisin-black w-full rounded-full border px-4 py-2 pl-10 text-sm outline-none focus:border-purple-500 dark:text-white dark:placeholder-gray-500" + /> + + -
-
- { - setTool({ - ...tool, - actions: tool.actions.map((act, index) => { - if (index === actionIndex) { - return { - ...act, - description: e.target.value, - }; - } - return act; - }), - }); - }} - borderVariant="thin" - /> -
-
- - - - - - - - - - - - {Object.entries(action.parameters?.properties).map( - (param, index) => { - const uniqueKey = `${actionIndex}-${param[0]}`; - return ( - - - - - - - - ); - }, - )} - -
{t('settings.tools.fieldName')}{t('settings.tools.fieldType')}{t('settings.tools.filledByLLM')}{t('settings.tools.fieldDescription')}{t('settings.tools.value')}
{param[0]}{param[1].type} - - - { - setTool({ - ...tool, - actions: tool.actions.map( - (act, index) => { - if (index === actionIndex) { - return { - ...act, - parameters: { - ...act.parameters, - properties: { - ...act.parameters - .properties, - [param[0]]: { - ...act.parameters - .properties[param[0]], - description: - e.target.value, - }, - }, - }, - }; - } - return act; - }, - ), - }); - }} - > - - { - setTool({ - ...tool, - actions: tool.actions.map( - (act, index) => { - if (index === actionIndex) { - return { - ...act, - parameters: { - ...act.parameters, - properties: { - ...act.parameters - .properties, - [param[0]]: { - ...act.parameters - .properties[param[0]], - value: e.target.value, - }, - }, - }, - }; - } - return act; - }, - ), - }); - }} - > -
-
+
- )) + + {filteredUserActions.length === 0 && userActionsSearch && ( +

+ {t('settings.tools.noActionsMatch')} +

+ )} + + {filteredUserActions.map(({ action, originalIndex }) => { + const isExpanded = expandedUserActions.has(originalIndex); + return ( +
+
toggleUserActionExpand(originalIndex)} + > +
+ expand +

+ {action.name} +

+ {action.description && ( +

+ {action.description} +

+ )} +
+
e.stopPropagation()} + > + { + setTool({ + ...tool, + actions: tool.actions.map((act, index) => { + if (index === originalIndex) { + return { ...act, active: checked }; + } + return act; + }), + }); + }} + size="small" + id={`actionToggle-${originalIndex}`} + /> +
+
+ {isExpanded && ( + <> +
+ { + setTool({ + ...tool, + actions: tool.actions.map((act, index) => { + if (index === originalIndex) { + return { + ...act, + description: e.target.value, + }; + } + return act; + }), + }); + }} + borderVariant="thin" + /> +
+
+ + + + + + + + + + + + {Object.entries( + action.parameters?.properties, + ).map((param, paramIndex) => { + const uniqueKey = `${originalIndex}-${param[0]}`; + return ( + + + + + + + + ); + })} + +
{t('settings.tools.fieldName')}{t('settings.tools.fieldType')}{t('settings.tools.filledByLLM')} + {t('settings.tools.fieldDescription')} + {t('settings.tools.value')}
{param[0]}{param[1].type} + + + { + setTool({ + ...tool, + actions: tool.actions.map( + (act, index) => { + if (index === originalIndex) { + return { + ...act, + parameters: { + ...act.parameters, + properties: { + ...act.parameters + .properties, + [param[0]]: { + ...act.parameters + .properties[ + param[0] + ], + description: + e.target.value, + }, + }, + }, + }; + } + return act; + }, + ), + }); + }} + > + + { + setTool({ + ...tool, + actions: tool.actions.map( + (act, index) => { + if (index === originalIndex) { + return { + ...act, + parameters: { + ...act.parameters, + properties: { + ...act.parameters + .properties, + [param[0]]: { + ...act.parameters + .properties[ + param[0] + ], + value: + e.target.value, + }, + }, + }, + }; + } + return act; + }, + ), + }); + }} + > +
+
+ + )} +
+ ); + })} + ) : (
+ {showUnsavedModal && ( ('INACTIVE'); + const [searchQuery, setSearchQuery] = React.useState(''); + const [expandedActions, setExpandedActions] = React.useState>( + new Set(), + ); + + const toggleActionExpand = (actionName: string) => { + setExpandedActions((prev) => { + const newSet = new Set(prev); + if (newSet.has(actionName)) { + newSet.delete(actionName); + } else { + newSet.add(actionName); + } + return newSet; + }); + }; + + const filteredActions = React.useMemo(() => { + if (!apiTool.config.actions) return []; + const entries = Object.entries(apiTool.config.actions); + const filtered = entries.filter(([actionName, action]) => { + const query = searchQuery.toLowerCase(); + return ( + actionName.toLowerCase().includes(query) || + action.name.toLowerCase().includes(query) || + action.description?.toLowerCase().includes(query) || + action.url?.toLowerCase().includes(query) + ); + }); + return filtered.sort((a, b) => a[0].localeCompare(b[0])); + }, [apiTool.config.actions, searchQuery]); const handleDeleteActionClick = (actionName: string) => { setActionToDelete(actionName); @@ -623,21 +812,78 @@ function APIToolConfig({ React.useEffect(() => { setTool(apiTool); }, [apiTool]); + + const getMethodColor = (method: string) => { + return METHOD_COLORS[method.toUpperCase()] || METHOD_COLORS.GET; + }; + return ( -
- {/* Actions list */} - {apiTool.config.actions && - Object.entries(apiTool.config.actions).map( - ([actionName, action], actionIndex) => ( +
+
+ setSearchQuery(e.target.value)} + placeholder={t('settings.tools.searchActions')} + className="border-silver dark:border-silver/40 dark:bg-raisin-black w-full rounded-full border px-4 py-2 pl-10 text-sm outline-none focus:border-purple-500 dark:text-white dark:placeholder-gray-500" + /> + + + +
+ + {filteredActions.length === 0 && searchQuery && ( +

+ {t('settings.tools.noActionsMatch')} +

+ )} + +
+ {filteredActions.map(([actionName, action], actionIndex) => { + const isExpanded = expandedActions.has(actionName); + return (
-
-

- {action.name} -

-
+
toggleActionExpand(actionName)} + > +
+ expand + + {action.method} + +

+ {action.name} +

+ {action.description && ( +

+ {action.description} +

+ )} +
+
e.stopPropagation()} + >
-
- { - setApiTool((prevApiTool) => { - const updatedActions = { - ...prevApiTool.config.actions, - }; - const updatedAction = { - ...updatedActions[actionName], - }; - updatedAction.url = e.target.value; - updatedActions[actionName] = updatedAction; - return { - ...prevApiTool, - config: { - ...prevApiTool.config, - actions: updatedActions, - }, - }; - }); - }} - borderVariant="thin" - placeholder={t('settings.tools.urlPlaceholder')} - /> -
-
-
- - {t('settings.tools.method')} - - { - setApiTool((prevApiTool) => { - const updatedActions = { - ...prevApiTool.config.actions, - }; - const updatedAction = { - ...updatedActions[actionName], - }; - updatedAction.method = value as - | 'GET' - | 'POST' - | 'PUT' - | 'DELETE'; - updatedActions[actionName] = updatedAction; - return { - ...prevApiTool, - config: { - ...prevApiTool.config, - actions: updatedActions, - }, - }; - }); - }} - size="w-56" - rounded="3xl" - border="border" - /> -
-
-
- { - setApiTool((prevApiTool) => { - const updatedActions = { - ...prevApiTool.config.actions, - }; - const updatedAction = { - ...updatedActions[actionName], - }; - updatedAction.description = e.target.value; - updatedActions[actionName] = updatedAction; - return { - ...prevApiTool, - config: { - ...prevApiTool.config, - actions: updatedActions, - }, - }; - }); - }} - borderVariant="thin" - placeholder={t('settings.tools.descriptionPlaceholder')} - /> -
-
- -
+ {isExpanded && ( + <> +
+ { + setApiTool((prevApiTool) => { + const updatedActions = { + ...prevApiTool.config.actions, + }; + const updatedAction = { + ...updatedActions[actionName], + }; + updatedAction.url = e.target.value; + updatedActions[actionName] = updatedAction; + return { + ...prevApiTool, + config: { + ...prevApiTool.config, + actions: updatedActions, + }, + }; + }); + }} + borderVariant="thin" + placeholder={t('settings.tools.urlPlaceholder')} + /> +
+
+
+ + {t('settings.tools.method')} + + { + setApiTool((prevApiTool) => { + const updatedActions = { + ...prevApiTool.config.actions, + }; + const updatedAction = { + ...updatedActions[actionName], + }; + updatedAction.method = value as + | 'GET' + | 'POST' + | 'PUT' + | 'DELETE' + | 'PATCH' + | 'HEAD' + | 'OPTIONS'; + updatedActions[actionName] = updatedAction; + return { + ...prevApiTool, + config: { + ...prevApiTool.config, + actions: updatedActions, + }, + }; + }); + }} + size="w-56" + rounded="3xl" + border="border" + /> +
+
+
+ { + setApiTool((prevApiTool) => { + const updatedActions = { + ...prevApiTool.config.actions, + }; + const updatedAction = { + ...updatedActions[actionName], + }; + updatedAction.description = e.target.value; + updatedActions[actionName] = updatedAction; + return { + ...prevApiTool, + config: { + ...prevApiTool.config, + actions: updatedActions, + }, + }; + }); + }} + borderVariant="thin" + placeholder={t('settings.tools.descriptionPlaceholder')} + /> +
+ {(action.method === 'POST' || + action.method === 'PUT' || + action.method === 'PATCH' || + action.method === 'HEAD' || + action.method === 'OPTIONS') && ( +
+
+ + {t('settings.tools.bodyContentType')} + + { + setApiTool((prevApiTool) => { + const updatedActions = { + ...prevApiTool.config.actions, + }; + const updatedAction = { + ...updatedActions[actionName], + }; + updatedAction.body_content_type = value as + | 'application/json' + | 'application/x-www-form-urlencoded' + | 'multipart/form-data' + | 'text/plain' + | 'application/xml' + | 'application/octet-stream'; + updatedActions[actionName] = updatedAction; + return { + ...prevApiTool, + config: { + ...prevApiTool.config, + actions: updatedActions, + }, + }; + }); + }} + size="w-56" + rounded="3xl" + border="border" + /> +
+

+ {action.body_content_type === 'multipart/form-data' && + 'For APIs requiring multipart format. File uploads not supported through LLM.'} + {action.body_content_type === + 'application/octet-stream' && + 'Raw binary data, base64-encoded for transmission.'} + {action.body_content_type === + 'application/x-www-form-urlencoded' && + 'Standard form submission format. Best for legacy APIs and login forms.'} + {action.body_content_type === 'application/xml' && + 'Structured XML format. Use for SOAP and enterprise APIs.'} + {action.body_content_type === 'text/plain' && + 'Raw text data. Each field on a new line.'} + {(!action.body_content_type || + action.body_content_type === 'application/json') && + 'Most common format. Use for modern REST APIs.'} +

+
+ )} +
+ +
+ + )}
- ), - )} + ); + })} +
{/* Confirmation Modal */} {deleteModalState === 'ACTIVE' && actionToDelete && ( @@ -793,6 +1126,9 @@ function APIActionTable({ const [action, setAction] = React.useState(apiAction); const [newPropertyKey, setNewPropertyKey] = React.useState(''); + const [newPropertyType, setNewPropertyType] = React.useState< + 'string' | 'integer' + >('string'); const [addingPropertySection, setAddingPropertySection] = React.useState< 'headers' | 'query_params' | 'body' | null >(null); @@ -808,12 +1144,17 @@ function APIActionTable({ value: string | number | boolean, ) => { setAction((prevAction) => { + const currentProperty = prevAction[section].properties[key]; + const updatedProperty: typeof currentProperty = { + ...currentProperty, + [field]: value, + ...(field === 'filled_by_llm' && typeof value === 'boolean' + ? { required: value } + : {}), + }; const updatedProperties = { ...prevAction[section].properties, - [key]: { - ...prevAction[section].properties[key], - [field]: value, - }, + [key]: updatedProperty, }; return { ...prevAction, @@ -831,10 +1172,12 @@ function APIActionTable({ setEditingPropertyKey({ section: null, oldKey: null }); setAddingPropertySection(section); setNewPropertyKey(''); + setNewPropertyType('string'); }; const handleAddPropertyCancel = () => { setAddingPropertySection(null); setNewPropertyKey(''); + setNewPropertyType('string'); }; const handleAddProperty = () => { if (addingPropertySection && newPropertyKey.trim() !== '') { @@ -842,10 +1185,11 @@ function APIActionTable({ const updatedProperties = { ...prevAction[addingPropertySection].properties, [newPropertyKey.trim()]: { - type: 'string', + type: newPropertyType, description: '', value: '', filled_by_llm: false, + required: false, }, }; return { @@ -857,6 +1201,7 @@ function APIActionTable({ }; }); setNewPropertyKey(''); + setNewPropertyType('string'); setAddingPropertySection(null); } }; @@ -872,6 +1217,7 @@ function APIActionTable({ const handleRenamePropertyCancel = () => { 
setEditingPropertyKey({ section: null, oldKey: null }); setNewPropertyKey(''); + setNewPropertyType('string'); }; const handleRenameProperty = () => { if ( @@ -901,6 +1247,7 @@ function APIActionTable({ }); setEditingPropertyKey({ section: null, oldKey: null }); setNewPropertyKey(''); + setNewPropertyType('string'); } }; @@ -921,6 +1268,29 @@ function APIActionTable({ }); }; + const handlePropertyTypeChange = ( + section: 'headers' | 'query_params' | 'body', + key: string, + newType: 'string' | 'integer', + ) => { + setAction((prevAction) => { + const updatedProperties = { + ...prevAction[section].properties, + [key]: { + ...prevAction[section].properties[key], + type: newType, + }, + }; + return { + ...prevAction, + [section]: { + ...prevAction[section], + properties: updatedProperties, + }, + }; + }); + }; + React.useEffect(() => { setAction(apiAction); }, [apiAction]); @@ -978,7 +1348,22 @@ function APIActionTable({ /> )} - {param.type} + + +
+ ) : ( + handleRenamePropertyStart('headers', key)} + readOnly + /> + )} + + + + handlePropertyChange( + 'headers', + key, + 'value', + e.target.value, + ) + } + placeholder="e.g., application/json" + className="border-silver dark:border-silver/40 w-full rounded-lg border bg-transparent px-2 py-1 text-sm outline-hidden" + /> + + + + handlePropertyChange( + 'headers', + key, + 'description', + e.target.value, + ) + } + /> + + + + + + ), + )} + {addingPropertySection === 'headers' ? ( + + + setNewPropertyKey(e.target.value)} + onKeyDown={(e) => { + if (e.key === 'Enter') { + handleAddProperty(); + } + }} + placeholder={t('settings.tools.propertyName')} + className="border-silver dark:border-silver/40 flex w-full min-w-[130.5px] items-start rounded-lg border bg-transparent px-2 py-1 text-sm outline-hidden" + /> + + + + + + + + ) : ( + + + + + + + )} + + ); + }; + return (
@@ -1115,17 +1671,11 @@ function APIActionTable({ {t('settings.tools.name')} - {t('settings.tools.type')} - - - {t('settings.tools.filledByLLM')} + {t('settings.tools.value')} {t('settings.tools.description')} - - {t('settings.tools.value')} - - {renderPropertiesTable('headers')} + {renderHeadersTable()}
diff --git a/frontend/src/settings/types/index.ts b/frontend/src/settings/types/index.ts index 5ce3733f..c202f443 100644 --- a/frontend/src/settings/types/index.ts +++ b/frontend/src/settings/types/index.ts @@ -33,6 +33,7 @@ export type ParameterGroupType = { description: string; value: string | number; filled_by_llm: boolean; + required?: boolean; }; }; }; @@ -57,6 +58,7 @@ export type UserToolType = { description: string; filled_by_llm: boolean; value: string; + required?: boolean; }; }; additionalProperties: boolean; @@ -71,11 +73,24 @@ export type APIActionType = { name: string; url: string; description: string; - method: 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH'; + method: 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH' | 'HEAD' | 'OPTIONS'; query_params: ParameterGroupType; headers: ParameterGroupType; body: ParameterGroupType; active: boolean; + body_content_type?: + | 'application/json' + | 'application/x-www-form-urlencoded' + | 'multipart/form-data' + | 'text/plain' + | 'application/xml' + | 'application/octet-stream'; + body_encoding_rules?: { + [key: string]: { + style?: 'form' | 'spaceDelimited' | 'pipeDelimited' | 'deepObject'; + explode?: boolean; + }; + }; }; export type APIToolType = { diff --git a/tests/agents/test_base_agent.py b/tests/agents/test_base_agent.py index 1bf5aec0..7510c563 100644 --- a/tests/agents/test_base_agent.py +++ b/tests/agents/test_base_agent.py @@ -229,8 +229,14 @@ class TestBaseAgentTools: "type": "string", "description": "Test param", "filled_by_llm": True, + "required": True, + }, + "param2": { + "type": "number", + "filled_by_llm": False, + "value": 42, + "required": False, }, - "param2": {"type": "number", "filled_by_llm": False, "value": 42}, } } }