mirror of https://github.com/coleam00/ai-agents-masterclass.git (synced 2026-01-19 21:40:32 +00:00)
{
  "nodes": [
    {
      "id": "toolAgent_0",
      "position": {
        "x": 656,
        "y": 156
      },
      "type": "customNode",
      "data": {
        "id": "toolAgent_0",
        "label": "Tool Agent",
        "version": 2,
        "name": "toolAgent",
        "type": "AgentExecutor",
        "baseClasses": [
          "AgentExecutor",
          "BaseChain",
          "Runnable"
        ],
        "category": "Agents",
        "description": "Agent that uses Function Calling to pick the tools and args to call",
        "inputParams": [
          {
            "label": "System Message",
            "name": "systemMessage",
            "type": "string",
            "default": "You are a helpful AI assistant.",
            "description": "If Chat Prompt Template is provided, this will be ignored",
            "rows": 4,
            "optional": true,
            "additionalParams": true,
            "id": "toolAgent_0-input-systemMessage-string"
          },
          {
            "label": "Max Iterations",
            "name": "maxIterations",
            "type": "number",
            "optional": true,
            "additionalParams": true,
            "id": "toolAgent_0-input-maxIterations-number"
          }
        ],
        "inputAnchors": [
          {
            "label": "Tools",
            "name": "tools",
            "type": "Tool",
            "list": true,
            "id": "toolAgent_0-input-tools-Tool"
          },
          {
            "label": "Memory",
            "name": "memory",
            "type": "BaseChatMemory",
            "id": "toolAgent_0-input-memory-BaseChatMemory"
          },
          {
            "label": "Tool Calling Chat Model",
            "name": "model",
            "type": "BaseChatModel",
            "description": "Only compatible with models that are capable of function calling: ChatOpenAI, ChatMistral, ChatAnthropic, ChatGoogleGenerativeAI, ChatVertexAI, GroqChat",
            "id": "toolAgent_0-input-model-BaseChatModel"
          },
          {
            "label": "Chat Prompt Template",
            "name": "chatPromptTemplate",
            "type": "ChatPromptTemplate",
            "description": "Override existing prompt with Chat Prompt Template. Human Message must include the {input} variable",
            "optional": true,
            "id": "toolAgent_0-input-chatPromptTemplate-ChatPromptTemplate"
          },
          {
            "label": "Input Moderation",
            "description": "Detect text that could generate harmful output and prevent it from being sent to the language model",
            "name": "inputModeration",
            "type": "Moderation",
            "optional": true,
            "list": true,
            "id": "toolAgent_0-input-inputModeration-Moderation"
          }
        ],
        "inputs": {
          "tools": [
            "{{braveSearchAPI_0.data.instance}}",
            "{{customTool_0.data.instance}}",
            "{{customTool_1.data.instance}}",
            "{{customTool_2.data.instance}}",
            "{{customTool_3.data.instance}}"
          ],
          "memory": "{{bufferMemory_0.data.instance}}",
          "model": "{{chatOllama_0.data.instance}}",
          "chatPromptTemplate": "",
          "systemMessage": "You are a helpful AI assistant.",
          "inputModeration": "",
          "maxIterations": ""
        },
        "outputAnchors": [
          {
            "id": "toolAgent_0-output-toolAgent-AgentExecutor|BaseChain|Runnable",
            "name": "toolAgent",
            "label": "AgentExecutor",
            "description": "Agent that uses Function Calling to pick the tools and args to call",
            "type": "AgentExecutor | BaseChain | Runnable"
          }
        ],
        "outputs": {},
        "selected": false
      },
      "width": 300,
      "height": 484,
      "selected": false,
      "positionAbsolute": {
        "x": 656,
        "y": 156
      }
    },
    {
      "id": "bufferMemory_0",
      "position": {
        "x": 232.6843615160352,
        "y": -162.57529096209916
      },
      "type": "customNode",
      "data": {
        "id": "bufferMemory_0",
        "label": "Buffer Memory",
        "version": 2,
        "name": "bufferMemory",
        "type": "BufferMemory",
        "baseClasses": [
          "BufferMemory",
          "BaseChatMemory",
          "BaseMemory"
        ],
        "category": "Memory",
        "description": "Retrieve chat messages stored in database",
        "inputParams": [
          {
            "label": "Session Id",
            "name": "sessionId",
            "type": "string",
            "description": "If not specified, a random id will be used. Learn <a target=\"_blank\" href=\"https://docs.flowiseai.com/memory#ui-and-embedded-chat\">more</a>",
            "default": "",
            "additionalParams": true,
            "optional": true,
            "id": "bufferMemory_0-input-sessionId-string"
          },
          {
            "label": "Memory Key",
            "name": "memoryKey",
            "type": "string",
            "default": "chat_history",
            "additionalParams": true,
            "id": "bufferMemory_0-input-memoryKey-string"
          }
        ],
        "inputAnchors": [],
        "inputs": {
          "sessionId": "",
          "memoryKey": "chat_history"
        },
        "outputAnchors": [
          {
            "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
            "name": "bufferMemory",
            "label": "BufferMemory",
            "description": "Retrieve chat messages stored in database",
            "type": "BufferMemory | BaseChatMemory | BaseMemory"
          }
        ],
        "outputs": {},
        "selected": false
      },
      "width": 300,
      "height": 251,
      "selected": false,
      "positionAbsolute": {
        "x": 232.6843615160352,
        "y": -162.57529096209916
      },
      "dragging": false
    },
    {
      "id": "chatOllama_0",
      "position": {
        "x": 203.62742857142882,
        "y": 133.58191020408157
      },
      "type": "customNode",
      "data": {
        "id": "chatOllama_0",
        "label": "ChatOllama",
        "version": 5,
        "name": "chatOllama",
        "type": "ChatOllama",
        "baseClasses": [
          "ChatOllama",
          "ChatOllama",
          "BaseChatModel",
          "BaseLanguageModel",
          "Runnable"
        ],
        "category": "Chat Models",
        "description": "Chat completion using open-source LLM on Ollama",
        "inputParams": [
          {
            "label": "Base URL",
            "name": "baseUrl",
            "type": "string",
            "default": "http://localhost:11434",
            "id": "chatOllama_0-input-baseUrl-string"
          },
          {
            "label": "Model Name",
            "name": "modelName",
            "type": "string",
            "placeholder": "llama2",
            "id": "chatOllama_0-input-modelName-string"
          },
          {
            "label": "Temperature",
            "name": "temperature",
            "type": "number",
            "description": "The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8). Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 0.1,
            "default": 0.9,
            "optional": true,
            "id": "chatOllama_0-input-temperature-number"
          },
          {
            "label": "Allow Image Uploads",
            "name": "allowImageUploads",
            "type": "boolean",
            "description": "Allow image input. Refer to the <a href=\"https://docs.flowiseai.com/using-flowise/uploads#image\" target=\"_blank\">docs</a> for more details.",
            "default": false,
            "optional": true,
            "id": "chatOllama_0-input-allowImageUploads-boolean"
          },
          {
            "label": "Streaming",
            "name": "streaming",
            "type": "boolean",
            "default": true,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-streaming-boolean"
          },
          {
            "label": "JSON Mode",
            "name": "jsonMode",
            "type": "boolean",
            "description": "Coerces model outputs to only return JSON. Specify in the system prompt to return JSON. Ex: Format all responses as JSON object",
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-jsonMode-boolean"
          },
          {
            "label": "Keep Alive",
            "name": "keepAlive",
            "type": "string",
            "description": "How long to keep connection alive. A duration string (such as \"10m\" or \"24h\")",
            "default": "5m",
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-keepAlive-string"
          },
          {
            "label": "Top P",
            "name": "topP",
            "type": "number",
            "description": "Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9). Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 0.1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-topP-number"
          },
          {
            "label": "Top K",
            "name": "topK",
            "type": "number",
            "description": "Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40). Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-topK-number"
          },
          {
            "label": "Mirostat",
            "name": "mirostat",
            "type": "number",
            "description": "Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0). Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-mirostat-number"
          },
          {
            "label": "Mirostat ETA",
            "name": "mirostatEta",
            "type": "number",
            "description": "Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 0.1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-mirostatEta-number"
          },
          {
            "label": "Mirostat TAU",
            "name": "mirostatTau",
            "type": "number",
            "description": "Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 0.1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-mirostatTau-number"
          },
          {
            "label": "Context Window Size",
            "name": "numCtx",
            "type": "number",
            "description": "Sets the size of the context window used to generate the next token. (Default: 2048) Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-numCtx-number"
          },
          {
            "label": "Number of GPU",
            "name": "numGpu",
            "type": "number",
            "description": "The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-numGpu-number"
          },
          {
            "label": "Number of Thread",
            "name": "numThread",
            "type": "number",
            "description": "Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-numThread-number"
          },
          {
            "label": "Repeat Last N",
            "name": "repeatLastN",
            "type": "number",
            "description": "Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx). Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-repeatLastN-number"
          },
          {
            "label": "Repeat Penalty",
            "name": "repeatPenalty",
            "type": "number",
            "description": "Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1). Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 0.1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-repeatPenalty-number"
          },
          {
            "label": "Stop Sequence",
            "name": "stop",
            "type": "string",
            "rows": 4,
            "placeholder": "AI assistant:",
            "description": "Sets the stop sequences to use. Use comma to separate different sequences. Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-stop-string"
          },
          {
            "label": "Tail Free Sampling",
            "name": "tfsZ",
            "type": "number",
            "description": "Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (Default: 1). Refer to <a target=\"_blank\" href=\"https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values\">docs</a> for more details",
            "step": 0.1,
            "optional": true,
            "additionalParams": true,
            "id": "chatOllama_0-input-tfsZ-number"
          }
        ],
        "inputAnchors": [
          {
            "label": "Cache",
            "name": "cache",
            "type": "BaseCache",
            "optional": true,
            "id": "chatOllama_0-input-cache-BaseCache"
          }
        ],
        "inputs": {
          "cache": "{{inMemoryCache_0.data.instance}}",
          "baseUrl": "http://host.docker.internal:11434",
          "modelName": "qwen2.5-coder:32b",
          "temperature": "0.5",
          "allowImageUploads": "",
          "streaming": true,
          "jsonMode": "",
          "keepAlive": "5m",
          "topP": "",
          "topK": "",
          "mirostat": "",
          "mirostatEta": "",
          "mirostatTau": "",
          "numCtx": "32768",
          "numGpu": "",
          "numThread": "",
          "repeatLastN": "",
          "repeatPenalty": "",
          "stop": "",
          "tfsZ": ""
        },
        "outputAnchors": [
          {
            "id": "chatOllama_0-output-chatOllama-ChatOllama|ChatOllama|BaseChatModel|BaseLanguageModel|Runnable",
            "name": "chatOllama",
            "label": "ChatOllama",
            "description": "Chat completion using open-source LLM on Ollama",
            "type": "ChatOllama | ChatOllama | BaseChatModel | BaseLanguageModel | Runnable"
          }
        ],
        "outputs": {},
        "selected": false
      },
      "width": 300,
      "height": 675,
      "selected": false,
      "positionAbsolute": {
        "x": 203.62742857142882,
        "y": 133.58191020408157
      },
      "dragging": false
    },
    {
      "id": "inMemoryCache_0",
      "position": {
        "x": -222.16839650145752,
        "y": 87.76136209912531
      },
      "type": "customNode",
      "data": {
        "id": "inMemoryCache_0",
        "label": "InMemory Cache",
        "version": 1,
        "name": "inMemoryCache",
        "type": "InMemoryCache",
        "baseClasses": [
          "InMemoryCache",
          "BaseCache"
        ],
        "category": "Cache",
        "description": "Cache LLM response in memory, will be cleared once app restarted",
        "inputParams": [],
        "inputAnchors": [],
        "inputs": {},
        "outputAnchors": [
          {
            "id": "inMemoryCache_0-output-inMemoryCache-InMemoryCache|BaseCache",
            "name": "inMemoryCache",
            "label": "InMemoryCache",
            "description": "Cache LLM response in memory, will be cleared once app restarted",
            "type": "InMemoryCache | BaseCache"
          }
        ],
        "outputs": {},
        "selected": false
      },
      "width": 300,
      "height": 143,
      "selected": false,
      "positionAbsolute": {
        "x": -222.16839650145752,
        "y": 87.76136209912531
      },
      "dragging": false
    },
    {
      "id": "braveSearchAPI_0",
      "position": {
        "x": 557.8984956268223,
        "y": -202.80796734693882
      },
      "type": "customNode",
      "data": {
        "id": "braveSearchAPI_0",
        "label": "BraveSearch API",
        "version": 1,
        "name": "braveSearchAPI",
        "type": "BraveSearchAPI",
        "baseClasses": [
          "BraveSearchAPI",
          "Tool",
          "StructuredTool",
          "Runnable"
        ],
        "category": "Tools",
        "description": "Wrapper around BraveSearch API - a real-time API to access Brave search results",
        "inputParams": [
          {
            "label": "Connect Credential",
            "name": "credential",
            "type": "credential",
            "credentialNames": [
              "braveSearchApi"
            ],
            "id": "braveSearchAPI_0-input-credential-credential"
          }
        ],
        "inputAnchors": [],
        "inputs": {},
        "outputAnchors": [
          {
            "id": "braveSearchAPI_0-output-braveSearchAPI-BraveSearchAPI|Tool|StructuredTool|Runnable",
            "name": "braveSearchAPI",
            "label": "BraveSearchAPI",
            "description": "Wrapper around BraveSearch API - a real-time API to access Brave search results",
            "type": "BraveSearchAPI | Tool | StructuredTool | Runnable"
          }
        ],
        "outputs": {},
        "selected": false
      },
      "width": 300,
      "height": 275,
      "selected": false,
      "positionAbsolute": {
        "x": 557.8984956268223,
        "y": -202.80796734693882
      },
      "dragging": false
    },
    {
      "id": "customTool_0",
      "position": {
        "x": 919.9367100136845,
        "y": -303.9223829475814
      },
      "type": "customNode",
      "data": {
        "id": "customTool_0",
        "label": "Custom Tool",
        "version": 2,
        "name": "customTool",
        "type": "CustomTool",
        "baseClasses": [
          "CustomTool",
          "Tool",
          "StructuredTool",
          "Runnable"
        ],
        "category": "Tools",
        "description": "Use custom tool you've created in Flowise within chatflow",
        "inputParams": [
          {
            "label": "Select Tool",
            "name": "selectedTool",
            "type": "asyncOptions",
            "loadMethod": "listTools",
            "id": "customTool_0-input-selectedTool-asyncOptions"
          },
          {
            "label": "Return Direct",
            "name": "returnDirect",
            "description": "Return the output of the tool directly to the user",
            "type": "boolean",
            "optional": true,
            "id": "customTool_0-input-returnDirect-boolean"
          }
        ],
        "inputAnchors": [],
        "inputs": {
          "selectedTool": "cbc24643-ad81-4769-911f-089c8e4c87ab",
          "returnDirect": ""
        },
        "outputAnchors": [
          {
            "id": "customTool_0-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
            "name": "customTool",
            "label": "CustomTool",
            "description": "Use custom tool you've created in Flowise within chatflow",
            "type": "CustomTool | Tool | StructuredTool | Runnable"
          }
        ],
        "outputs": {},
        "selected": false
      },
      "width": 300,
      "height": 371,
      "selected": false,
      "positionAbsolute": {
        "x": 919.9367100136845,
        "y": -303.9223829475814
      },
      "dragging": false
    },
    {
      "id": "customTool_1",
      "position": {
        "x": 1055.4606486069163,
        "y": 157.78311597410726
      },
      "type": "customNode",
      "data": {
        "id": "customTool_1",
        "label": "Custom Tool",
        "version": 2,
        "name": "customTool",
        "type": "CustomTool",
        "baseClasses": [
          "CustomTool",
          "Tool",
          "StructuredTool",
          "Runnable"
        ],
        "category": "Tools",
        "description": "Use custom tool you've created in Flowise within chatflow",
        "inputParams": [
          {
            "label": "Select Tool",
            "name": "selectedTool",
            "type": "asyncOptions",
            "loadMethod": "listTools",
            "id": "customTool_1-input-selectedTool-asyncOptions"
          },
          {
            "label": "Return Direct",
            "name": "returnDirect",
            "description": "Return the output of the tool directly to the user",
            "type": "boolean",
            "optional": true,
            "id": "customTool_1-input-returnDirect-boolean"
          }
        ],
        "inputAnchors": [],
        "inputs": {
          "selectedTool": "54bc0754-c127-416c-8cd0-db2902f2fce8",
          "returnDirect": ""
        },
        "outputAnchors": [
          {
            "id": "customTool_1-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
            "name": "customTool",
            "label": "CustomTool",
            "description": "Use custom tool you've created in Flowise within chatflow",
            "type": "CustomTool | Tool | StructuredTool | Runnable"
          }
        ],
        "outputs": {},
        "selected": false
      },
      "width": 300,
      "height": 371,
      "selected": false,
      "dragging": false,
      "positionAbsolute": {
        "x": 1055.4606486069163,
        "y": 157.78311597410726
      }
    },
    {
      "id": "customTool_2",
      "position": {
        "x": 1058.5329191281724,
        "y": 583.5403469198069
      },
      "type": "customNode",
      "data": {
        "id": "customTool_2",
        "label": "Custom Tool",
        "version": 2,
        "name": "customTool",
        "type": "CustomTool",
        "baseClasses": [
          "CustomTool",
          "Tool",
          "StructuredTool",
          "Runnable"
        ],
        "category": "Tools",
        "description": "Use custom tool you've created in Flowise within chatflow",
        "inputParams": [
          {
            "label": "Select Tool",
            "name": "selectedTool",
            "type": "asyncOptions",
            "loadMethod": "listTools",
            "id": "customTool_2-input-selectedTool-asyncOptions"
          },
          {
            "label": "Return Direct",
            "name": "returnDirect",
            "description": "Return the output of the tool directly to the user",
            "type": "boolean",
            "optional": true,
            "id": "customTool_2-input-returnDirect-boolean"
          }
        ],
        "inputAnchors": [],
        "inputs": {
          "selectedTool": "83ef936c-4579-48a3-b95f-fbefc6926b65",
          "returnDirect": ""
        },
        "outputAnchors": [
          {
            "id": "customTool_2-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
            "name": "customTool",
            "label": "CustomTool",
            "description": "Use custom tool you've created in Flowise within chatflow",
            "type": "CustomTool | Tool | StructuredTool | Runnable"
          }
        ],
        "outputs": {},
        "selected": false
      },
      "width": 300,
      "height": 371,
      "selected": false,
      "dragging": false,
      "positionAbsolute": {
        "x": 1058.5329191281724,
        "y": 583.5403469198069
      }
    },
    {
      "id": "customTool_3",
      "position": {
        "x": 650.5308240717925,
        "y": 747.2264330934981
      },
      "type": "customNode",
      "data": {
        "id": "customTool_3",
        "label": "Custom Tool",
        "version": 2,
        "name": "customTool",
        "type": "CustomTool",
        "baseClasses": [
          "CustomTool",
          "Tool",
          "StructuredTool",
          "Runnable"
        ],
        "category": "Tools",
        "description": "Use custom tool you've created in Flowise within chatflow",
        "inputParams": [
          {
            "label": "Select Tool",
            "name": "selectedTool",
            "type": "asyncOptions",
            "loadMethod": "listTools",
            "id": "customTool_3-input-selectedTool-asyncOptions"
          },
          {
            "label": "Return Direct",
            "name": "returnDirect",
            "description": "Return the output of the tool directly to the user",
            "type": "boolean",
            "optional": true,
            "id": "customTool_3-input-returnDirect-boolean"
          }
        ],
        "inputAnchors": [],
        "inputs": {
          "selectedTool": "59ace78e-575c-4f38-958d-a80e46be1e64",
          "returnDirect": ""
        },
        "outputAnchors": [
          {
            "id": "customTool_3-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
            "name": "customTool",
            "label": "CustomTool",
            "description": "Use custom tool you've created in Flowise within chatflow",
            "type": "CustomTool | Tool | StructuredTool | Runnable"
          }
        ],
        "outputs": {},
        "selected": false
      },
      "width": 300,
      "height": 371,
      "selected": false,
      "positionAbsolute": {
        "x": 650.5308240717925,
        "y": 747.2264330934981
      },
      "dragging": false
    }
  ],
"edges": [
|
|
{
|
|
"source": "bufferMemory_0",
|
|
"sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory",
|
|
"target": "toolAgent_0",
|
|
"targetHandle": "toolAgent_0-input-memory-BaseChatMemory",
|
|
"type": "buttonedge",
|
|
"id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-toolAgent_0-toolAgent_0-input-memory-BaseChatMemory"
|
|
},
|
|
{
|
|
"source": "inMemoryCache_0",
|
|
"sourceHandle": "inMemoryCache_0-output-inMemoryCache-InMemoryCache|BaseCache",
|
|
"target": "chatOllama_0",
|
|
"targetHandle": "chatOllama_0-input-cache-BaseCache",
|
|
"type": "buttonedge",
|
|
"id": "inMemoryCache_0-inMemoryCache_0-output-inMemoryCache-InMemoryCache|BaseCache-chatOllama_0-chatOllama_0-input-cache-BaseCache"
|
|
},
|
|
{
|
|
"source": "chatOllama_0",
|
|
"sourceHandle": "chatOllama_0-output-chatOllama-ChatOllama|ChatOllama|BaseChatModel|BaseLanguageModel|Runnable",
|
|
"target": "toolAgent_0",
|
|
"targetHandle": "toolAgent_0-input-model-BaseChatModel",
|
|
"type": "buttonedge",
|
|
"id": "chatOllama_0-chatOllama_0-output-chatOllama-ChatOllama|ChatOllama|BaseChatModel|BaseLanguageModel|Runnable-toolAgent_0-toolAgent_0-input-model-BaseChatModel"
|
|
},
|
|
{
|
|
"source": "braveSearchAPI_0",
|
|
"sourceHandle": "braveSearchAPI_0-output-braveSearchAPI-BraveSearchAPI|Tool|StructuredTool|Runnable",
|
|
"target": "toolAgent_0",
|
|
"targetHandle": "toolAgent_0-input-tools-Tool",
|
|
"type": "buttonedge",
|
|
"id": "braveSearchAPI_0-braveSearchAPI_0-output-braveSearchAPI-BraveSearchAPI|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool"
|
|
},
|
|
{
|
|
"source": "customTool_0",
|
|
"sourceHandle": "customTool_0-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
|
|
"target": "toolAgent_0",
|
|
"targetHandle": "toolAgent_0-input-tools-Tool",
|
|
"type": "buttonedge",
|
|
"id": "customTool_0-customTool_0-output-customTool-CustomTool|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool"
|
|
},
|
|
{
|
|
"source": "customTool_1",
|
|
"sourceHandle": "customTool_1-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
|
|
"target": "toolAgent_0",
|
|
"targetHandle": "toolAgent_0-input-tools-Tool",
|
|
"type": "buttonedge",
|
|
"id": "customTool_1-customTool_1-output-customTool-CustomTool|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool"
|
|
},
|
|
{
|
|
"source": "customTool_2",
|
|
"sourceHandle": "customTool_2-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
|
|
"target": "toolAgent_0",
|
|
"targetHandle": "toolAgent_0-input-tools-Tool",
|
|
"type": "buttonedge",
|
|
"id": "customTool_2-customTool_2-output-customTool-CustomTool|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool"
|
|
},
|
|
{
|
|
"source": "customTool_3",
|
|
"sourceHandle": "customTool_3-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
|
|
"target": "toolAgent_0",
|
|
"targetHandle": "toolAgent_0-input-tools-Tool",
|
|
"type": "buttonedge",
|
|
"id": "customTool_3-customTool_3-output-customTool-CustomTool|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool"
|
|
}
|
|
]
|
|
} |