diff --git a/.gitignore b/.gitignore index f5480882..8e3c2125 100644 --- a/.gitignore +++ b/.gitignore @@ -113,6 +113,7 @@ venv.bak/ # Spyder project settings .spyderproject .spyproject +.jwt_secret_key # Rope project settings .ropeproject diff --git a/application/api/answer/routes.py b/application/api/answer/routes.py index abc1f9ba..2d889b34 100644 --- a/application/api/answer/routes.py +++ b/application/api/answer/routes.py @@ -88,19 +88,28 @@ def run_async_chain(chain, question, chat_history): def get_agent_key(agent_id, user_id): if not agent_id: - return None + return None, False, None try: agent = agents_collection.find_one({"_id": ObjectId(agent_id)}) if agent is None: raise Exception("Agent not found", 404) - if agent.get("user") == user_id: + is_owner = agent.get("user") == user_id + + if is_owner: agents_collection.update_one( {"_id": ObjectId(agent_id)}, {"$set": {"lastUsedAt": datetime.datetime.now(datetime.timezone.utc)}}, ) - return str(agent["key"]) + return str(agent["key"]), False, None + + is_shared_with_user = agent.get( + "shared_publicly", False + ) or user_id in agent.get("shared_with", []) + + if is_shared_with_user: + return str(agent["key"]), True, agent.get("shared_token") raise Exception("Unauthorized access to the agent", 403) @@ -153,6 +162,8 @@ def save_conversation( index=None, api_key=None, agent_id=None, + is_shared_usage=False, + shared_token=None, ): current_time = datetime.datetime.now(datetime.timezone.utc) if conversation_id is not None and index is not None: @@ -228,6 +239,9 @@ def save_conversation( if api_key: if agent_id: conversation_data["agent_id"] = agent_id + if is_shared_usage: + conversation_data["is_shared_usage"] = is_shared_usage + conversation_data["shared_token"] = shared_token api_key_doc = agents_collection.find_one({"key": api_key}) if api_key_doc: conversation_data["api_key"] = api_key_doc["key"] @@ -261,6 +275,8 @@ def complete_stream( should_save_conversation=True, attachments=None, agent_id=None, + 
is_shared_usage=False, + shared_token=None, ): try: response_full, thought, source_log_docs, tool_calls = "", "", [], [] @@ -325,6 +341,8 @@ def complete_stream( index, api_key=user_api_key, agent_id=agent_id, + is_shared_usage=is_shared_usage, + shared_token=shared_token, ) else: conversation_id = None @@ -433,7 +451,9 @@ class Stream(Resource): retriever_name = data.get("retriever", "classic") agent_id = data.get("agent_id", None) agent_type = settings.AGENT_NAME - agent_key = get_agent_key(agent_id, request.decoded_token.get("sub")) + agent_key, is_shared_usage, shared_token = get_agent_key( + agent_id, request.decoded_token.get("sub") + ) if agent_key: data.update({"api_key": agent_key}) @@ -448,7 +468,10 @@ class Stream(Resource): retriever_name = data_key.get("retriever", retriever_name) user_api_key = data["api_key"] agent_type = data_key.get("agent_type", agent_type) - decoded_token = {"sub": data_key.get("user")} + if is_shared_usage: + decoded_token = request.decoded_token + else: + decoded_token = {"sub": data_key.get("user")} elif "active_docs" in data: source = {"active_docs": data["active_docs"]} @@ -514,6 +537,8 @@ class Stream(Resource): index=index, should_save_conversation=save_conv, agent_id=agent_id, + is_shared_usage=is_shared_usage, + shared_token=shared_token, ), mimetype="text/event-stream", ) @@ -881,6 +906,8 @@ def get_attachments_content(attachment_ids, user): if attachment_doc: attachments.append(attachment_doc) except Exception as e: - logger.error(f"Error retrieving attachment {attachment_id}: {e}", exc_info=True) + logger.error( + f"Error retrieving attachment {attachment_id}: {e}", exc_info=True + ) return attachments diff --git a/application/api/user/routes.py b/application/api/user/routes.py index 1ca8fc32..bdc01d87 100644 --- a/application/api/user/routes.py +++ b/application/api/user/routes.py @@ -29,6 +29,7 @@ from application.tts.google_tts import GoogleTTS from application.utils import check_required_fields, 
validate_function_name from application.vectorstore.vector_creator import VectorCreator from application.storage.storage_creator import StorageCreator + storage = StorageCreator.get_storage() mongo = MongoDB.get_client() @@ -43,6 +44,12 @@ shared_conversations_collections = db["shared_conversations"] user_logs_collection = db["user_logs"] user_tools_collection = db["user_tools"] +agents_collection.create_index( + [("shared_publicly", 1)], + name="shared_index", + background=True, +) + user = Blueprint("user", __name__) user_ns = Namespace("user", description="User related operations", path="/") api.add_namespace(user_ns) @@ -112,7 +119,9 @@ class DeleteConversation(Resource): {"_id": ObjectId(conversation_id), "user": decoded_token["sub"]} ) except Exception as err: - current_app.logger.error(f"Error deleting conversation: {err}", exc_info=True) + current_app.logger.error( + f"Error deleting conversation: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify({"success": True}), 200) @@ -130,7 +139,9 @@ class DeleteAllConversations(Resource): try: conversations_collection.delete_many({"user": user_id}) except Exception as err: - current_app.logger.error(f"Error deleting all conversations: {err}", exc_info=True) + current_app.logger.error( + f"Error deleting all conversations: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify({"success": True}), 200) @@ -164,11 +175,15 @@ class GetConversations(Resource): "id": str(conversation["_id"]), "name": conversation["name"], "agent_id": conversation.get("agent_id", None), + "is_shared_usage": conversation.get("is_shared_usage", False), + "shared_token": conversation.get("shared_token", None), } for conversation in conversations ] except Exception as err: - current_app.logger.error(f"Error retrieving conversations: {err}", exc_info=True) + current_app.logger.error( + f"Error retrieving conversations: {err}", exc_info=True
+ ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify(list_conversations), 200) @@ -196,12 +211,16 @@ class GetSingleConversation(Resource): if not conversation: return make_response(jsonify({"status": "not found"}), 404) except Exception as err: - current_app.logger.error(f"Error retrieving conversation: {err}", exc_info=True) + current_app.logger.error( + f"Error retrieving conversation: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) data = { "queries": conversation["queries"], "agent_id": conversation.get("agent_id"), + "is_shared_usage": conversation.get("is_shared_usage", False), + "shared_token": conversation.get("shared_token", None), } return make_response(jsonify(data), 200) @@ -238,7 +257,9 @@ class UpdateConversationName(Resource): {"$set": {"name": data["name"]}}, ) except Exception as err: - current_app.logger.error(f"Error updating conversation name: {err}", exc_info=True) + current_app.logger.error( + f"Error updating conversation name: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify({"success": True}), 200) @@ -379,7 +400,9 @@ class DeleteOldIndexes(Resource): except FileNotFoundError: pass except Exception as err: - current_app.logger.error(f"Error deleting old indexes: {err}", exc_info=True) + current_app.logger.error( + f"Error deleting old indexes: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) sources_collection.delete_one({"_id": ObjectId(source_id)}) @@ -446,17 +469,25 @@ class UploadFile(Resource): def create_zip_archive(temp_paths, job_name, storage): import tempfile - with tempfile.NamedTemporaryFile(delete=False, suffix=".zip") as temp_zip_file: + with tempfile.NamedTemporaryFile( + delete=False, suffix=".zip" + ) as temp_zip_file: zip_output_path = temp_zip_file.name with tempfile.TemporaryDirectory() as stage_dir: for path in temp_paths: try: file_data = 
storage.get_file(path) - with open(os.path.join(stage_dir, os.path.basename(path)), "wb") as f: + with open( + os.path.join(stage_dir, os.path.basename(path)), + "wb", + ) as f: f.write(file_data.read()) except Exception as e: - current_app.logger.error(f"Error processing file {path} for zipping: {e}", exc_info=True) + current_app.logger.error( + f"Error processing file {path} for zipping: {e}", + exc_info=True, + ) if os.path.exists(zip_output_path): os.remove(zip_output_path) raise @@ -467,7 +498,9 @@ class UploadFile(Resource): root_dir=stage_dir, ) except Exception as e: - current_app.logger.error(f"Error creating zip archive: {e}", exc_info=True) + current_app.logger.error( + f"Error creating zip archive: {e}", exc_info=True + ) if os.path.exists(zip_output_path): os.remove(zip_output_path) raise @@ -508,13 +541,16 @@ class UploadFile(Resource): try: storage.delete_file(temp_path) except Exception as e: - current_app.logger.error(f"Error deleting temporary file {temp_path}: {e}", exc_info=True) + current_app.logger.error( + f"Error deleting temporary file {temp_path}: {e}", + exc_info=True, + ) # Clean up the zip file if it was created if zip_temp_path and os.path.exists(zip_temp_path): os.remove(zip_temp_path) - else: # Keep this else block for single file upload + else: # Keep this else block for single file upload # For single file file = files[0] filename = secure_filename(file.filename) @@ -542,7 +578,7 @@ class UploadFile(Resource): ".jpeg", ], job_name, - filename, # Corrected variable for single-file case + filename, # Corrected variable for single-file case user, ) @@ -600,7 +636,9 @@ class UploadRemote(Resource): loader=data["source"], ) except Exception as err: - current_app.logger.error(f"Error uploading remote source: {err}", exc_info=True) + current_app.logger.error( + f"Error uploading remote source: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify({"success": True, "task_id": 
task.id}), 200) @@ -712,7 +750,9 @@ class PaginatedSources(Resource): return make_response(jsonify(response), 200) except Exception as err: - current_app.logger.error(f"Error retrieving paginated sources: {err}", exc_info=True) + current_app.logger.error( + f"Error retrieving paginated sources: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) @@ -1021,17 +1061,30 @@ class GetAgent(Resource): "id": str(agent["_id"]), "name": agent["name"], "description": agent.get("description", ""), - "source": (str(source_doc["_id"]) if isinstance(agent.get("source"), DBRef) and (source_doc := db.dereference(agent.get("source"))) else ""), + "source": ( + str(source_doc["_id"]) + if isinstance(agent.get("source"), DBRef) + and (source_doc := db.dereference(agent.get("source"))) + else "" + ), "chunks": agent["chunks"], "retriever": agent.get("retriever", ""), - "prompt_id": agent["prompt_id"], + "prompt_id": agent.get("prompt_id", ""), "tools": agent.get("tools", []), - "agent_type": agent["agent_type"], - "status": agent["status"], - "createdAt": agent["createdAt"], - "updatedAt": agent["updatedAt"], - "lastUsedAt": agent["lastUsedAt"], - "key": f"{agent['key'][:4]}...{agent['key'][-4:]}", + "agent_type": agent.get("agent_type", ""), + "status": agent.get("status", ""), + "created_at": agent.get("createdAt", ""), + "updated_at": agent.get("updatedAt", ""), + "last_used_at": agent.get("lastUsedAt", ""), + "key": ( + f"{agent['key'][:4]}...{agent['key'][-4:]}" + if "key" in agent + else "" + ), + "pinned": agent.get("pinned", False), + "shared": agent.get("shared_publicly", False), + "shared_metadata": agent.get("shared_metadata", {}), + "shared_token": agent.get("shared_token", ""), } except Exception as err: current_app.logger.error(f"Error retrieving agent: {err}", exc_info=True) @@ -1055,17 +1108,30 @@ class GetAgents(Resource): "id": str(agent["_id"]), "name": agent["name"], "description": agent.get("description", ""), - "source": 
(str(source_doc["_id"]) if isinstance(agent.get("source"), DBRef) and (source_doc := db.dereference(agent.get("source"))) else ""), + "source": ( + str(source_doc["_id"]) + if isinstance(agent.get("source"), DBRef) + and (source_doc := db.dereference(agent.get("source"))) + else "" + ), "chunks": agent["chunks"], "retriever": agent.get("retriever", ""), - "prompt_id": agent["prompt_id"], + "prompt_id": agent.get("prompt_id", ""), "tools": agent.get("tools", []), - "agent_type": agent["agent_type"], - "status": agent["status"], - "created_at": agent["createdAt"], - "updated_at": agent["updatedAt"], - "last_used_at": agent["lastUsedAt"], - "key": f"{agent['key'][:4]}...{agent['key'][-4:]}", + "agent_type": agent.get("agent_type", ""), + "status": agent.get("status", ""), + "created_at": agent.get("createdAt", ""), + "updated_at": agent.get("updatedAt", ""), + "last_used_at": agent.get("lastUsedAt", ""), + "key": ( + f"{agent['key'][:4]}...{agent['key'][-4:]}" + if "key" in agent + else "" + ), + "pinned": agent.get("pinned", False), + "shared": agent.get("shared_publicly", False), + "shared_metadata": agent.get("shared_metadata", {}), + "shared_token": agent.get("shared_token", ""), } for agent in agents if "source" in agent or "retriever" in agent @@ -1156,6 +1222,10 @@ class CreateAgent(Resource): "lastUsedAt": None, "key": key, } + if new_agent["chunks"] == "": + new_agent["chunks"] = "0" + if new_agent["source"] == "" and new_agent["retriever"] == "": + new_agent["retriever"] = "classic" resp = agents_collection.insert_one(new_agent) new_id = str(resp.inserted_id) @@ -1211,7 +1281,9 @@ class UpdateAgent(Resource): existing_agent = agents_collection.find_one({"_id": oid, "user": user}) except Exception as err: return make_response( - current_app.logger.error(f"Error finding agent {agent_id}: {err}", exc_info=True), + current_app.logger.error( + f"Error finding agent {agent_id}: {err}", exc_info=True + ), jsonify({"success": False, "message": "Database error 
finding agent"}), 500, ) @@ -1266,6 +1338,33 @@ class UpdateAgent(Resource): ) else: update_fields[field] = "" + elif field == "chunks": + chunks_value = data.get("chunks") + if chunks_value == "": + update_fields[field] = "0" + else: + try: + if int(chunks_value) < 0: + return make_response( + jsonify( + { + "success": False, + "message": "Chunks value must be a positive integer", + } + ), + 400, + ) + update_fields[field] = chunks_value + except ValueError: + return make_response( + jsonify( + { + "success": False, + "message": "Invalid chunks value provided", + } + ), + 400, + ) else: update_fields[field] = data[field] @@ -1334,7 +1433,9 @@ class UpdateAgent(Resource): ) except Exception as err: - current_app.logger.error(f"Error updating agent {agent_id}: {err}", exc_info=True) + current_app.logger.error( + f"Error updating agent {agent_id}: {err}", exc_info=True + ) return make_response( jsonify({"success": False, "message": "Database error during update"}), 500, @@ -1383,6 +1484,275 @@ class DeleteAgent(Resource): return make_response(jsonify({"id": deleted_id}), 200) +@user_ns.route("/api/pinned_agents") +class PinnedAgents(Resource): + @api.doc(description="Get pinned agents for the user") + def get(self): + decoded_token = request.decoded_token + if not decoded_token: + return make_response(jsonify({"success": False}), 401) + user = decoded_token.get("sub") + try: + pinned_agents = agents_collection.find({"user": user, "pinned": True}) + list_pinned_agents = [ + { + "id": str(agent["_id"]), + "name": agent.get("name", ""), + "description": agent.get("description", ""), + "source": ( + str(db.dereference(agent["source"])["_id"]) + if "source" in agent and isinstance(agent["source"], DBRef) + else "" + ), + "chunks": agent.get("chunks", ""), + "retriever": agent.get("retriever", ""), + "prompt_id": agent.get("prompt_id", ""), + "tools": agent.get("tools", []), + "agent_type": agent.get("agent_type", ""), + "status": agent.get("status", ""), + "created_at": 
agent.get("createdAt", ""), + "updated_at": agent.get("updatedAt", ""), + "last_used_at": agent.get("lastUsedAt", ""), + "key": ( + f"{agent['key'][:4]}...{agent['key'][-4:]}" + if "key" in agent + else "" + ), + "pinned": agent.get("pinned", False), + } + for agent in pinned_agents + if "source" in agent or "retriever" in agent + ] + except Exception as err: + current_app.logger.error(f"Error retrieving pinned agents: {err}") + return make_response(jsonify({"success": False}), 400) + return make_response(jsonify(list_pinned_agents), 200) + + +@user_ns.route("/api/pin_agent") +class PinAgent(Resource): + @api.doc(params={"id": "ID of the agent"}, description="Pin or unpin an agent") + def post(self): + decoded_token = request.decoded_token + if not decoded_token: + return make_response(jsonify({"success": False}), 401) + user = decoded_token.get("sub") + agent_id = request.args.get("id") + if not agent_id: + return make_response( + jsonify({"success": False, "message": "ID is required"}), 400 + ) + + try: + agent = agents_collection.find_one( + {"_id": ObjectId(agent_id), "user": user} + ) + if not agent: + return make_response( + jsonify({"success": False, "message": "Agent not found"}), 404 + ) + + pinned_status = not agent.get("pinned", False) + agents_collection.update_one( + {"_id": ObjectId(agent_id), "user": user}, + {"$set": {"pinned": pinned_status}}, + ) + except Exception as err: + current_app.logger.error(f"Error pinning/unpinning agent: {err}") + return make_response(jsonify({"success": False}), 400) + + return make_response(jsonify({"success": True}), 200) + + +@user_ns.route("/api/shared_agent") +class SharedAgent(Resource): + @api.doc( + params={ + "token": "Shared token of the agent", + }, + description="Get a shared agent by token or ID", + ) + def get(self): + shared_token = request.args.get("token") + + if not shared_token: + return make_response( + jsonify({"success": False, "message": "Token or ID is required"}), 400 + ) + + try: + query = {} 
+ query["shared_publicly"] = True + query["shared_token"] = shared_token + + shared_agent = agents_collection.find_one(query) + if not shared_agent: + return make_response( + jsonify({"success": False, "message": "Shared agent not found"}), + 404, + ) + + data = { + "id": str(shared_agent["_id"]), + "user": shared_agent.get("user", ""), + "name": shared_agent.get("name", ""), + "description": shared_agent.get("description", ""), + "tools": shared_agent.get("tools", []), + "agent_type": shared_agent.get("agent_type", ""), + "status": shared_agent.get("status", ""), + "created_at": shared_agent.get("createdAt", ""), + "updated_at": shared_agent.get("updatedAt", ""), + "shared": shared_agent.get("shared_publicly", False), + "shared_token": shared_agent.get("shared_token", ""), + "shared_metadata": shared_agent.get("shared_metadata", {}), + } + + if data["tools"]: + enriched_tools = [] + for tool in data["tools"]: + tool_data = user_tools_collection.find_one({"_id": ObjectId(tool)}) + if tool_data: + enriched_tools.append(tool_data.get("name", "")) + data["tools"] = enriched_tools + + except Exception as err: + current_app.logger.error(f"Error retrieving shared agent: {err}") + return make_response(jsonify({"success": False}), 400) + + return make_response(jsonify(data), 200) + + +@user_ns.route("/api/shared_agents") +class SharedAgents(Resource): + @api.doc(description="Get shared agents") + def get(self): + try: + decoded_token = request.decoded_token + if not decoded_token: + return make_response(jsonify({"success": False}), 401) + user = decoded_token.get("sub") + shared_agents = agents_collection.find( + {"shared_publicly": True, "user": {"$ne": user}} + ) + list_shared_agents = [ + { + "id": str(shared_agent["_id"]), + "name": shared_agent.get("name", ""), + "description": shared_agent.get("description", ""), + "tools": shared_agent.get("tools", []), + "agent_type": shared_agent.get("agent_type", ""), + "status": shared_agent.get("status", ""), + "created_at": 
shared_agent.get("createdAt", ""), + "updated_at": shared_agent.get("updatedAt", ""), + "shared": shared_agent.get("shared_publicly", False), + "shared_token": shared_agent.get("shared_token", ""), + "shared_metadata": shared_agent.get("shared_metadata", {}), + } + for shared_agent in shared_agents + ] + except Exception as err: + current_app.logger.error(f"Error retrieving shared agents: {err}") + return make_response(jsonify({"success": False}), 400) + return make_response(jsonify(list_shared_agents), 200) + + +@user_ns.route("/api/share_agent") +class ShareAgent(Resource): + @api.expect( + api.model( + "ShareAgentModel", + { + "id": fields.String(required=True, description="ID of the agent"), + "shared": fields.Boolean( + required=True, description="Share or unshare the agent" + ), + "username": fields.String( + required=False, description="Name of the user" + ), + }, + ) + ) + @api.doc(description="Share or unshare an agent") + def put(self): + decoded_token = request.decoded_token + if not decoded_token: + return make_response(jsonify({"success": False}), 401) + + user = decoded_token.get("sub") + + data = request.get_json() + if not data: + return make_response( + jsonify({"success": False, "message": "Missing JSON body"}), 400 + ) + + agent_id = data.get("id") + shared = data.get("shared") + username = data.get("username", "") + + if not agent_id: + return make_response( + jsonify({"success": False, "message": "ID is required"}), 400 + ) + + if shared is None: + return make_response( + jsonify( + { + "success": False, + "message": "Shared parameter is required and must be true or false", + } + ), + 400, + ) + + try: + try: + agent_oid = ObjectId(agent_id) + except Exception: + return make_response( + jsonify({"success": False, "message": "Invalid agent ID"}), 400 + ) + + agent = agents_collection.find_one({"_id": agent_oid, "user": user}) + if not agent: + return make_response( + jsonify({"success": False, "message": "Agent not found"}), 404 + ) + + if 
shared: + shared_metadata = { + "shared_by": username, + "shared_at": datetime.datetime.now(datetime.timezone.utc), + } + shared_token = secrets.token_urlsafe(32) + agents_collection.update_one( + {"_id": agent_oid, "user": user}, + { + "$set": { + "shared_publicly": shared, + "shared_metadata": shared_metadata, + "shared_token": shared_token, + } + }, + ) + else: + agents_collection.update_one( + {"_id": agent_oid, "user": user}, + {"$set": {"shared_publicly": shared, "shared_token": None}, + "$unset": {"shared_metadata": ""}}, + ) + + except Exception as err: + current_app.logger.error(f"Error sharing/unsharing agent: {err}") + return make_response(jsonify({"success": False, "error": str(err)}), 400) + + shared_token = shared_token if shared else None + return make_response( + jsonify({"success": True, "shared_token": shared_token}), 200 + ) + + + @user_ns.route("/api/agent_webhook") class AgentWebhook(Resource): @api.doc( @@ -1420,7 +1790,9 @@ class AgentWebhook(Resource): full_webhook_url = f"{base_url}/api/webhooks/agents/{webhook_token}" except Exception as err: - current_app.logger.error(f"Error generating webhook URL: {err}", exc_info=True) + current_app.logger.error( + f"Error generating webhook URL: {err}", exc_info=True + ) return make_response( jsonify({"success": False, "message": "Error generating webhook URL"}), 400, @@ -1709,7 +2081,9 @@ class ShareConversation(Resource): 201, ) except Exception as err: - current_app.logger.error(f"Error sharing conversation: {err}", exc_info=True) + current_app.logger.error( + f"Error sharing conversation: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) @@ -1765,7 +2139,9 @@ class GetPubliclySharedConversations(Resource): res["api_key"] = shared["api_key"] return make_response(jsonify(res), 200) except Exception as err: - current_app.logger.error(f"Error getting shared conversation: {err}",
exc_info=True + ) return make_response(jsonify({"success": False}), 400) @@ -1885,7 +2261,9 @@ class GetMessageAnalytics(Resource): daily_messages[entry["_id"]] = entry["count"] except Exception as err: - current_app.logger.error(f"Error getting message analytics: {err}", exc_info=True) + current_app.logger.error( + f"Error getting message analytics: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response( @@ -2044,7 +2422,9 @@ class GetTokenAnalytics(Resource): daily_token_usage[entry["_id"]["day"]] = entry["total_tokens"] except Exception as err: - current_app.logger.error(f"Error getting token analytics: {err}", exc_info=True) + current_app.logger.error( + f"Error getting token analytics: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response( @@ -2209,7 +2589,9 @@ class GetFeedbackAnalytics(Resource): } except Exception as err: - current_app.logger.error(f"Error getting feedback analytics: {err}", exc_info=True) + current_app.logger.error( + f"Error getting feedback analytics: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response( @@ -2345,7 +2727,9 @@ class ManageSync(Resource): update_data, ) except Exception as err: - current_app.logger.error(f"Error updating sync frequency: {err}", exc_info=True) + current_app.logger.error( + f"Error updating sync frequency: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify({"success": True}), 200) @@ -2406,7 +2790,9 @@ class AvailableTools(Resource): } ) except Exception as err: - current_app.logger.error(f"Error getting available tools: {err}", exc_info=True) + current_app.logger.error( + f"Error getting available tools: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify({"success": True, "data": tools_metadata}), 200) @@ -2610,7 +2996,9 @@ class 
UpdateToolConfig(Resource): {"$set": {"config": data["config"]}}, ) except Exception as err: - current_app.logger.error(f"Error updating tool config: {err}", exc_info=True) + current_app.logger.error( + f"Error updating tool config: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify({"success": True}), 200) @@ -2649,7 +3037,9 @@ class UpdateToolActions(Resource): {"$set": {"actions": data["actions"]}}, ) except Exception as err: - current_app.logger.error(f"Error updating tool actions: {err}", exc_info=True) + current_app.logger.error( + f"Error updating tool actions: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify({"success": True}), 200) @@ -2686,7 +3076,9 @@ class UpdateToolStatus(Resource): {"$set": {"status": data["status"]}}, ) except Exception as err: - current_app.logger.error(f"Error updating tool status: {err}", exc_info=True) + current_app.logger.error( + f"Error updating tool status: {err}", exc_info=True + ) return make_response(jsonify({"success": False}), 400) return make_response(jsonify({"success": True}), 200) @@ -2980,14 +3372,14 @@ class StoreAttachment(Resource): attachment_id = ObjectId() original_filename = secure_filename(os.path.basename(file.filename)) relative_path = f"{settings.UPLOAD_FOLDER}/{user}/attachments/{str(attachment_id)}/{original_filename}" - + metadata = storage.save_file(file, relative_path) - + file_info = { "filename": original_filename, "attachment_id": str(attachment_id), "path": relative_path, - "metadata": metadata + "metadata": metadata, } task = store_attachment.delay(file_info, user) diff --git a/application/requirements.txt b/application/requirements.txt index 56edab9f..2430a787 100644 --- a/application/requirements.txt +++ b/application/requirements.txt @@ -6,19 +6,15 @@ dataclasses-json==0.6.7 docx2txt==0.8 duckduckgo-search==7.5.2 ebooklib==0.18 -elastic-transport==8.17.0 
-elasticsearch==8.17.1 escodegen==1.0.11 esprima==4.0.1 esutils==1.0.1 -Flask==3.1.0 +Flask==3.1.1 faiss-cpu==1.9.0.post1 flask-restx==1.3.0 google-genai==1.3.0 -google-generativeai==0.8.5 gTTS==2.5.4 gunicorn==23.0.0 -html2text==2024.2.26 javalang==0.13.0 jinja2==3.1.6 jiter==0.8.2 @@ -26,39 +22,33 @@ jmespath==1.0.1 joblib==1.4.2 jsonpatch==1.33 jsonpointer==3.0.0 -jsonschema==4.23.0 -jsonschema-spec==0.2.4 -jsonschema-specifications==2023.7.1 kombu==5.4.2 langchain==0.3.20 langchain-community==0.3.19 -langchain-core==0.3.45 -langchain-openai==0.3.8 -langchain-text-splitters==0.3.6 -langsmith==0.3.19 +langchain-core==0.3.59 +langchain-openai==0.3.16 +langchain-text-splitters==0.3.8 +langsmith==0.3.42 lazy-object-proxy==1.10.0 lxml==5.3.1 markupsafe==3.0.2 marshmallow==3.26.1 mpmath==1.3.0 -multidict==6.3.2 +multidict==6.4.3 mypy-extensions==1.0.0 networkx==3.4.2 numpy==2.2.1 -openai==1.66.3 -openapi-schema-validator==0.6.3 -openapi-spec-validator==0.6.0 -openapi3-parser==1.1.19 +openai==1.78.1 +openapi3-parser==1.1.21 orjson==3.10.14 -packaging==24.1 +packaging==24.2 pandas==2.2.3 openpyxl==3.1.5 pathable==0.4.4 pillow==11.1.0 -portalocker==2.10.1 +portalocker==3.1.1 prance==23.6.21.0 -primp==0.14.0 -prompt-toolkit==3.0.50 +prompt-toolkit==3.0.51 protobuf==5.29.3 psycopg2-binary==2.9.10 py==1.11.0 @@ -71,18 +61,17 @@ python-dateutil==2.9.0.post0 python-dotenv==1.0.1 python-jose==3.4.0 python-pptx==1.0.2 -qdrant-client==1.13.2 redis==5.2.1 -referencing==0.30.2 +referencing==0.36.2 regex==2024.11.6 requests==2.32.3 retry==0.9.2 sentence-transformers==3.3.1 tiktoken==0.8.0 tokenizers==0.21.0 -torch==2.5.1 +torch==2.7.0 tqdm==4.67.1 -transformers==4.49.0 +transformers==4.51.3 typing-extensions==4.12.2 typing-inspect==0.9.0 tzdata==2024.2 @@ -90,7 +79,7 @@ urllib3==2.3.0 vine==5.1.0 wcwidth==0.2.13 werkzeug==3.1.3 -yarl==1.18.3 +yarl==1.20.0 markdownify==0.14.1 tldextract==5.1.3 websockets==14.1 diff --git a/application/vectorstore/elasticsearch.py 
b/application/vectorstore/elasticsearch.py index e393e4a5..dfa0314f 100644 --- a/application/vectorstore/elasticsearch.py +++ b/application/vectorstore/elasticsearch.py @@ -1,9 +1,6 @@ from application.vectorstore.base import BaseVectorStore from application.core.settings import settings from application.vectorstore.document_class import Document -import elasticsearch - - class ElasticsearchStore(BaseVectorStore): @@ -26,8 +23,7 @@ class ElasticsearchStore(BaseVectorStore): else: raise ValueError("Please provide either elasticsearch_url or cloud_id.") - - + import elasticsearch ElasticsearchStore._es_connection = elasticsearch.Elasticsearch(**connection_params) self.docsearch = ElasticsearchStore._es_connection @@ -155,8 +151,6 @@ class ElasticsearchStore(BaseVectorStore): **kwargs, ): - from elasticsearch.helpers import BulkIndexError, bulk - bulk_kwargs = bulk_kwargs or {} import uuid embeddings = [] @@ -189,6 +183,7 @@ class ElasticsearchStore(BaseVectorStore): if len(requests) > 0: + from elasticsearch.helpers import BulkIndexError, bulk try: success, failed = bulk( self._es_connection, diff --git a/application/vectorstore/qdrant.py b/application/vectorstore/qdrant.py index 3f94505f..61a9d63d 100644 --- a/application/vectorstore/qdrant.py +++ b/application/vectorstore/qdrant.py @@ -1,11 +1,12 @@ -from langchain_community.vectorstores.qdrant import Qdrant from application.vectorstore.base import BaseVectorStore from application.core.settings import settings -from qdrant_client import models class QdrantStore(BaseVectorStore): def __init__(self, source_id: str = "", embeddings_key: str = "embeddings"): + from qdrant_client import models + from langchain_community.vectorstores.qdrant import Qdrant + self._filter = models.Filter( must=[ models.FieldCondition( diff --git a/frontend/.env.development b/frontend/.env.development index 7a87f762..4083d677 100644 --- a/frontend/.env.development +++ b/frontend/.env.development @@ -1,3 +1,4 @@ # Please put appropriate value 
-VITE_API_HOST=http://0.0.0.0:7091 +VITE_BASE_URL=http://localhost:5173 +VITE_API_HOST=http://127.0.0.1:7091 VITE_API_STREAMING=true \ No newline at end of file diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 58e9c924..715db197 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -2014,7 +2014,7 @@ "version": "18.3.0", "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", - "dev": true, + "devOptional": true, "dependencies": { "@types/react": "*" } @@ -10619,7 +10619,7 @@ "version": "5.7.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz", "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==", - "dev": true, + "devOptional": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" diff --git a/frontend/src/Navigation.tsx b/frontend/src/Navigation.tsx index 02e9f19c..838d665a 100644 --- a/frontend/src/Navigation.tsx +++ b/frontend/src/Navigation.tsx @@ -13,12 +13,14 @@ import Expand from './assets/expand.svg'; import Github from './assets/github.svg'; import Hamburger from './assets/hamburger.svg'; import openNewChat from './assets/openNewChat.svg'; +import Pin from './assets/pin.svg'; import Robot from './assets/robot.svg'; import SettingGear from './assets/settingGear.svg'; import Spark from './assets/spark.svg'; import SpinnerDark from './assets/spinner-dark.svg'; import Spinner from './assets/spinner.svg'; import Twitter from './assets/TwitterX.svg'; +import UnPin from './assets/unpin.svg'; import Help from './components/Help'; import { handleAbort, @@ -35,16 +37,16 @@ import JWTModal from './modals/JWTModal'; import { ActiveState } from './models/misc'; import { getConversations } from './preferences/preferenceApi'; import { + selectAgents, selectConversationId, selectConversations, 
selectModalStateDeleteConv, selectSelectedAgent, selectToken, + setAgents, setConversations, setModalStateDeleteConv, setSelectedAgent, - setAgents, - selectAgents, } from './preferences/preferenceSlice'; import Upload from './upload/Upload'; @@ -80,24 +82,34 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) { async function fetchRecentAgents() { try { - let recentAgents: Agent[] = []; + const response = await userService.getPinnedAgents(token); + if (!response.ok) throw new Error('Failed to fetch pinned agents'); + const pinnedAgents: Agent[] = await response.json(); + if (pinnedAgents.length >= 3) { + setRecentAgents(pinnedAgents); + return; + } + let tempAgents: Agent[] = []; if (!agents) { const response = await userService.getAgents(token); if (!response.ok) throw new Error('Failed to fetch agents'); const data: Agent[] = await response.json(); dispatch(setAgents(data)); - recentAgents = data; - } else recentAgents = agents; - setRecentAgents( - recentAgents - .filter((agent: Agent) => agent.status === 'published') - .sort( - (a: Agent, b: Agent) => - new Date(b.last_used_at ?? 0).getTime() - - new Date(a.last_used_at ?? 0).getTime(), - ) - .slice(0, 3), - ); + tempAgents = data; + } else tempAgents = agents; + const additionalAgents = tempAgents + .filter( + (agent: Agent) => + agent.status === 'published' && + !pinnedAgents.some((pinned) => pinned.id === agent.id), + ) + .sort( + (a: Agent, b: Agent) => + new Date(b.last_used_at ?? 0).getTime() - + new Date(a.last_used_at ?? 
0).getTime(), + ) + .slice(0, 3 - pinnedAgents.length); + setRecentAgents([...pinnedAgents, ...additionalAgents]); } catch (error) { console.error('Failed to fetch recent agents: ', error); } @@ -116,7 +128,7 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) { } useEffect(() => { - if (token) fetchRecentAgents(); + fetchRecentAgents(); }, [agents, token, dispatch]); useEffect(() => { @@ -152,12 +164,23 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) { navigate('/'); }; + const handleTogglePin = (agent: Agent) => { + userService.togglePinAgent(agent.id ?? '', token).then((response) => { + if (response.ok) { + const updatedAgents = agents?.map((a) => + a.id === agent.id ? { ...a, pinned: !a.pinned } : a, + ); + dispatch(setAgents(updatedAgents)); + } + }); + }; + const handleConversationClick = (index: string) => { + dispatch(setSelectedAgent(null)); conversationService .getConversation(index, token) .then((response) => response.json()) .then((data) => { - navigate('/'); dispatch(setConversation(data.queries)); dispatch( updateConversationId({ @@ -165,14 +188,30 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) { }), ); if (data.agent_id) { - userService.getAgent(data.agent_id, token).then((response) => { - if (response.ok) { - response.json().then((agent: Agent) => { - dispatch(setSelectedAgent(agent)); + if (data.is_shared_usage) { + userService + .getSharedAgent(data.shared_token, token) + .then((response) => { + if (response.ok) { + response.json().then((agent: Agent) => { + navigate(`/agents/shared/${agent.shared_token}`); + }); + } }); - } - }); - } else dispatch(setSelectedAgent(null)); + } else { + userService.getAgent(data.agent_id, token).then((response) => { + if (response.ok) { + response.json().then((agent: Agent) => { + navigate('/'); + dispatch(setSelectedAgent(agent)); + }); + } + }); + } + } else { + navigate('/'); + dispatch(setSelectedAgent(null)); + } }); 
}; @@ -336,23 +375,41 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) { {recentAgents.map((agent, idx) => (
+ {agent.name} +
+- {agent.name} -
+ (Draft) +
+ )} ++ {agent.name} +
++ {agent.description} +
+{agent.name}
+{agent.name}
++ {agent.last_used_at + ? 'Last used at ' + + new Date(agent.last_used_at).toLocaleString() + : 'No usage history'} +
++ No agent found. Please ensure the agent is shared. +
++ {t('tagline')} +
++ {agent.description} +
++ by {agent.shared_metadata.shared_by} +
+ )} + {agent.shared_metadata?.shared_at && ( ++ Shared on{' '} + {new Date(agent.shared_metadata.shared_at).toLocaleString('en-US', { + month: 'long', + day: 'numeric', + year: 'numeric', + hour: '2-digit', + minute: '2-digit', + hour12: true, + })} +
+ )} ++ Connected Tools +
+Discover and create custom versions of DocsGPT that combine - instructions, extra knowledge, and any combination of skills. + instructions, extra knowledge, and any combination of skills
{/* Premade agents section */} {/*+ {sectionConfig[section].description} +
+You don’t have any created agents yet
+ )} +{sectionConfig[section].emptyStateDescription}
+ {sectionConfig[section].showNewAgentButton && ( -