mirror of
https://github.com/coleam00/ai-agents-masterclass.git
synced 2025-11-29 08:33:16 +00:00
OpenAI Swarm implementation of an AI agent swarm to manage a SQL database
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -11,3 +11,4 @@ credentials.json
|
|||||||
token.json
|
token.json
|
||||||
node_modules
|
node_modules
|
||||||
venv
|
venv
|
||||||
|
*.db
|
||||||
89
sql-ai-agent/ai-news-complete-mock-data.sql
Normal file
89
sql-ai-agent/ai-news-complete-mock-data.sql
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
-- Seed data for the AI news RSS schema (SQLite dialect).
-- INSERT OR IGNORE keeps this script idempotent: re-running it silently
-- skips rows that would violate a UNIQUE or PRIMARY KEY constraint.

-- rss_feeds: five sources, one French-language (note the doubled '' escape).
INSERT OR IGNORE INTO rss_feeds (name, url, description, site_link, language) VALUES
    ('AI Daily', 'https://ai-daily.com/feed', 'Daily AI news and updates', 'https://ai-daily.com', 'en'),
    ('ML Weekly', 'https://mlweekly.com/rss', 'Weekly roundup of machine learning news', 'https://mlweekly.com', 'en'),
    ('IA Nouvelles', 'https://ia-nouvelles.fr/flux', 'Actualités sur l''intelligence artificielle en français', 'https://ia-nouvelles.fr', 'fr'),
    ('Data Science Digest', 'https://datasciencedigest.com/feed', 'Comprehensive coverage of data science topics', 'https://datasciencedigest.com', 'en'),
    ('AI Ethics Blog', 'https://aiethicsblog.org/rss', 'Exploring ethical implications of AI', 'https://aiethicsblog.org', 'en');

-- categories: the topic taxonomy referenced by the junction and
-- preference tables below (ids 1..7 in insertion order).
INSERT OR IGNORE INTO categories (name, description) VALUES
    ('Machine Learning', 'News related to machine learning algorithms and techniques'),
    ('Natural Language Processing', 'Updates on NLP research and applications'),
    ('Computer Vision', 'Advancements in image and video processing using AI'),
    ('Ethics in AI', 'Discussions on ethical considerations in AI development and deployment'),
    ('Robotics', 'News about AI in robotics and automation'),
    ('AI in Healthcare', 'Applications of AI in medicine and healthcare'),
    ('Deep Learning', 'Focused on deep neural networks and related technologies');
|
||||||
|
|
||||||
|
-- rss_items: one article per feed. rss_feed_id values 1..5 rely on the
-- insertion order of the rss_feeds seed above — TODO confirm AUTOINCREMENT
-- starts at 1 on a fresh database (it does unless rows were ever deleted).
INSERT OR IGNORE INTO rss_items (rss_feed_id, title, link, description, content, published_date, author) VALUES
    (1, 'New breakthrough in reinforcement learning', 'https://ai-daily.com/articles/reinforcement-learning-breakthrough', 'Researchers achieve significant progress in RL algorithms', 'Full content of the article...', '2023-04-15 09:30:00', 'Jane Doe'),
    (2, 'GPT-4 shows impressive results in medical diagnosis', 'https://mlweekly.com/news/gpt4-medical-diagnosis', 'OpenAI''s latest language model demonstrates potential in healthcare', 'Detailed article content...', '2023-04-14 14:45:00', 'John Smith'),
    (3, 'L''IA générative révolutionne la création artistique', 'https://ia-nouvelles.fr/articles/ia-generative-art', 'Comment l''IA transforme le processus créatif des artistes', 'Contenu complet de l''article...', '2023-04-13 11:15:00', 'Marie Dupont'),
    (4, 'Advancements in Computer Vision for Autonomous Vehicles', 'https://datasciencedigest.com/articles/cv-autonomous-vehicles', 'Recent developments in CV improving self-driving car capabilities', 'Full article content...', '2023-04-16 10:00:00', 'Alex Johnson'),
    (5, 'The Ethics of AI in Hiring Processes', 'https://aiethicsblog.org/posts/ai-in-hiring', 'Examining the implications of using AI for job candidate selection', 'Detailed blog post content...', '2023-04-17 13:20:00', 'Samantha Lee');

-- rss_item_categories: tag each article with two categories.
INSERT OR IGNORE INTO rss_item_categories (rss_item_id, category_id) VALUES
    (1, 1), (1, 7), -- Reinforcement learning article tagged with Machine Learning and Deep Learning
    (2, 2), (2, 6), -- GPT-4 article tagged with NLP and AI in Healthcare
    (3, 1), (3, 4), -- Generative AI article tagged with Machine Learning and Ethics in AI
    (4, 3), (4, 5), -- Computer Vision article tagged with Computer Vision and Robotics
    (5, 4), (5, 6); -- AI Ethics article tagged with Ethics in AI and AI in Healthcare
|
||||||
|
|
||||||
|
-- users: five demo accounts. The password_hash values are obvious
-- placeholders, not real hashes — fine for mock data only.
INSERT OR IGNORE INTO users (username, email, password_hash, created_at, last_login) VALUES
    ('alice_ai', 'alice@example.com', 'hashed_password_1', '2023-01-01 10:00:00', '2023-04-15 14:30:00'),
    ('bob_ml', 'bob@example.com', 'hashed_password_2', '2023-02-15 11:30:00', '2023-04-14 09:15:00'),
    ('charlie_nlp', 'charlie@example.com', 'hashed_password_3', '2023-03-20 09:45:00', '2023-04-13 16:45:00'),
    ('dana_cv', 'dana@example.com', 'hashed_password_4', '2023-03-25 14:00:00', '2023-04-16 11:30:00'),
    ('evan_ethics', 'evan@example.com', 'hashed_password_5', '2023-04-01 08:30:00', '2023-04-17 10:45:00');

-- user_category_preferences: three topics of interest per user.
INSERT OR IGNORE INTO user_category_preferences (user_id, category_id) VALUES
    (1, 1), (1, 2), (1, 7), -- Alice is interested in Machine Learning, NLP, and Deep Learning
    (2, 1), (2, 3), (2, 5), -- Bob is interested in Machine Learning, Computer Vision, and Robotics
    (3, 2), (3, 4), (3, 6), -- Charlie is interested in NLP, Ethics in AI, and AI in Healthcare
    (4, 3), (4, 5), (4, 7), -- Dana is interested in Computer Vision, Robotics, and Deep Learning
    (5, 4), (5, 6), (5, 1); -- Evan is interested in Ethics in AI, AI in Healthcare, and Machine Learning

-- user_feed_preferences: three followed feeds per user.
INSERT OR IGNORE INTO user_feed_preferences (user_id, rss_feed_id) VALUES
    (1, 1), (1, 2), (1, 4), -- Alice follows AI Daily, ML Weekly, and Data Science Digest
    (2, 2), (2, 3), (2, 4), -- Bob follows ML Weekly, IA Nouvelles, and Data Science Digest
    (3, 1), (3, 3), (3, 5), -- Charlie follows AI Daily, IA Nouvelles, and AI Ethics Blog
    (4, 1), (4, 2), (4, 4), -- Dana follows AI Daily, ML Weekly, and Data Science Digest
    (5, 3), (5, 4), (5, 5); -- Evan follows IA Nouvelles, Data Science Digest, and AI Ethics Blog
|
||||||
|
|
||||||
|
-- article_interactions: per-user engagement events. interaction_type is a
-- free-form tag ('view', 'like', 'share') — values must match whatever the
-- application layer queries for.
INSERT OR IGNORE INTO article_interactions (user_id, rss_item_id, interaction_type, interaction_time) VALUES
    (1, 1, 'view', '2023-04-15 10:15:00'),
    (1, 1, 'like', '2023-04-15 10:20:00'),
    (1, 2, 'view', '2023-04-15 10:30:00'),
    (2, 2, 'view', '2023-04-14 15:00:00'),
    (2, 2, 'like', '2023-04-14 15:05:00'),
    (2, 2, 'share', '2023-04-14 15:10:00'),
    (3, 3, 'view', '2023-04-13 17:00:00'),
    (3, 3, 'like', '2023-04-13 17:10:00'),
    (4, 4, 'view', '2023-04-16 11:45:00'),
    (4, 4, 'share', '2023-04-16 11:50:00'),
    (5, 5, 'view', '2023-04-17 14:00:00'),
    (5, 5, 'like', '2023-04-17 14:15:00');

-- feed_views: one feed-level page view per user.
INSERT OR IGNORE INTO feed_views (user_id, rss_feed_id, viewed_at) VALUES
    (1, 1, '2023-04-15 10:00:00'),
    (1, 2, '2023-04-15 10:25:00'),
    (2, 2, '2023-04-14 14:55:00'),
    (3, 3, '2023-04-13 16:50:00'),
    (4, 4, '2023-04-16 11:40:00'),
    (5, 5, '2023-04-17 13:55:00');

-- user_sessions: one active session per user, each expiring 24h after
-- creation.
INSERT OR IGNORE INTO user_sessions (user_id, session_token, created_at, expires_at) VALUES
    (1, 'token_alice_1', '2023-04-15 14:30:00', '2023-04-16 14:30:00'),
    (2, 'token_bob_1', '2023-04-14 09:15:00', '2023-04-15 09:15:00'),
    (3, 'token_charlie_1', '2023-04-13 16:45:00', '2023-04-14 16:45:00'),
    (4, 'token_dana_1', '2023-04-16 11:30:00', '2023-04-17 11:30:00'),
    (5, 'token_evan_1', '2023-04-17 10:45:00', '2023-04-18 10:45:00');
|
||||||
97
sql-ai-agent/ai-news-complete-tables.sql
Normal file
97
sql-ai-agent/ai-news-complete-tables.sql
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
-- SQLite schema for the AI news RSS system (content tables).
-- NOTE(review): SQLite only enforces FOREIGN KEY clauses when the connection
-- runs PRAGMA foreign_keys = ON — confirm the loader enables it.
-- CREATE TABLE IF NOT EXISTS keeps the script safe to re-run.

-- Feeds the system ingests; url is the natural key.
CREATE TABLE IF NOT EXISTS rss_feeds (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name VARCHAR(255) NOT NULL,
    url VARCHAR(255) NOT NULL UNIQUE,
    description TEXT,
    site_link VARCHAR(255),
    language VARCHAR(50)
);

-- Topic taxonomy used to tag articles and record user interests.
CREATE TABLE IF NOT EXISTS categories (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name VARCHAR(100) NOT NULL UNIQUE,
    description TEXT
);

-- Individual articles pulled from a feed.
CREATE TABLE IF NOT EXISTS rss_items (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    rss_feed_id INTEGER NOT NULL,
    title VARCHAR(255) NOT NULL,
    link VARCHAR(255) NOT NULL,
    description TEXT,
    content TEXT,
    published_date DATETIME,
    author VARCHAR(255),
    FOREIGN KEY (rss_feed_id) REFERENCES rss_feeds(id)
);

-- Junction table for the many-to-many relationship between rss_items
-- and categories; the composite primary key prevents duplicate tags.
CREATE TABLE IF NOT EXISTS rss_item_categories (
    rss_item_id INTEGER NOT NULL,
    category_id INTEGER NOT NULL,
    PRIMARY KEY (rss_item_id, category_id),
    FOREIGN KEY (rss_item_id) REFERENCES rss_items(id),
    FOREIGN KEY (category_id) REFERENCES categories(id)
);
|
||||||
|
|
||||||
|
-- User accounts. username and email are both unique natural keys.
CREATE TABLE IF NOT EXISTS users (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    username VARCHAR(50) NOT NULL UNIQUE,
    email VARCHAR(100) NOT NULL UNIQUE,
    password_hash VARCHAR(255) NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    last_login DATETIME
);

-- Which topics each user wants to see; composite PK forbids duplicates.
CREATE TABLE IF NOT EXISTS user_category_preferences (
    user_id INTEGER NOT NULL,
    category_id INTEGER NOT NULL,
    PRIMARY KEY (user_id, category_id),
    FOREIGN KEY (user_id) REFERENCES users(id),
    FOREIGN KEY (category_id) REFERENCES categories(id)
);

-- Which feeds each user follows; composite PK forbids duplicates.
CREATE TABLE IF NOT EXISTS user_feed_preferences (
    user_id INTEGER NOT NULL,
    rss_feed_id INTEGER NOT NULL,
    PRIMARY KEY (user_id, rss_feed_id),
    FOREIGN KEY (user_id) REFERENCES users(id),
    FOREIGN KEY (rss_feed_id) REFERENCES rss_feeds(id)
);
|
||||||
|
|
||||||
|
-- Per-article engagement events (views, likes, shares, ...).
CREATE TABLE IF NOT EXISTS article_interactions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    rss_item_id INTEGER NOT NULL,
    interaction_type VARCHAR(20) NOT NULL, -- 'view', 'like', 'share', etc.
    interaction_time DATETIME DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (user_id) REFERENCES users(id),
    FOREIGN KEY (rss_item_id) REFERENCES rss_items(id)
);

-- Feed-level page views, one row per visit.
CREATE TABLE IF NOT EXISTS feed_views (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    rss_feed_id INTEGER NOT NULL,
    viewed_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (user_id) REFERENCES users(id),
    FOREIGN KEY (rss_feed_id) REFERENCES rss_feeds(id)
);

-- Login sessions. session_token is unique so a token lookup resolves to
-- at most one session; expires_at is mandatory (no immortal sessions).
CREATE TABLE IF NOT EXISTS user_sessions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    session_token VARCHAR(255) NOT NULL UNIQUE,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    expires_at DATETIME NOT NULL,
    FOREIGN KEY (user_id) REFERENCES users(id)
);
|
||||||
50
sql-ai-agent/load_sql_data.py
Normal file
50
sql-ai-agent/load_sql_data.py
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
"""
This script is used to create a SQLite database, add tables
to it, and insert mock data, all for the OpenAI Swarm demonstration
with the other Python script in this directory.

Simply run this script with the command:

python load_sql_data.py

And then you will have a database loaded and ready to use
with the agent swarm!
"""
# Fix: the docstring originally appeared AFTER the import, so it was a bare
# string expression rather than the module docstring (module.__doc__ was
# None). Placing it first makes it visible to help() and tooling.
# Also fixed the "SQLlite" typo.

import sqlite3
|
||||||
|
|
||||||
|
def execute_sql_script(cursor, script_file):
    """Read the SQL file at ``script_file`` and execute every statement in it.

    Args:
        cursor: an open ``sqlite3.Cursor``.
        script_file: path to a ``.sql`` file containing one or more statements.

    Fix: the original split the script text on ';' and executed the pieces
    one at a time, which breaks on any semicolon inside a string literal or
    a ``--`` comment. ``Cursor.executescript`` parses the script properly.
    """
    with open(script_file, 'r') as sql_file:
        sql_script = sql_file.read()

    # executescript runs the whole script; note it issues an implicit COMMIT
    # of any pending transaction before executing.
    cursor.executescript(sql_script)
|
||||||
|
|
||||||
|
def main():
    """Create the demo database, load schema and mock data, and print a
    sanity-check dump of the rss_feeds table."""
    # Connect to (creating if needed) the demo database file.
    conn = sqlite3.connect('rss-feed-database.db')
    try:
        cursor = conn.cursor()

        # Create the tables for the AI RSS Feed system.
        execute_sql_script(cursor, 'ai-news-complete-tables.sql')
        conn.commit()

        # Insert the mock data.
        execute_sql_script(cursor, 'ai-news-complete-mock-data.sql')
        conn.commit()

        # Query a table to make sure things are looking good.
        cursor.execute("SELECT * FROM rss_feeds")
        for feed in cursor.fetchall():
            print(feed)
    finally:
        # Fix: the original leaked the connection if either script raised;
        # close it unconditionally so the db file is never left locked.
        conn.close()


if __name__ == "__main__":
    main()
|
||||||
52
sql-ai-agent/requirements.txt
Normal file
52
sql-ai-agent/requirements.txt
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
aiohappyeyeballs==2.4.3
|
||||||
|
aiohttp==3.10.10
|
||||||
|
aiosignal==1.3.1
|
||||||
|
annotated-types==0.7.0
|
||||||
|
anyio==4.6.2.post1
|
||||||
|
attrs==24.2.0
|
||||||
|
certifi==2024.8.30
|
||||||
|
cfgv==3.4.0
|
||||||
|
charset-normalizer==3.4.0
|
||||||
|
click==8.1.7
|
||||||
|
colorama==0.4.6
|
||||||
|
distlib==0.3.9
|
||||||
|
distro==1.9.0
|
||||||
|
docstring_parser==0.16
|
||||||
|
filelock==3.16.1
|
||||||
|
frozenlist==1.4.1
|
||||||
|
h11==0.14.0
|
||||||
|
httpcore==1.0.6
|
||||||
|
httpx==0.27.2
|
||||||
|
identify==2.6.1
|
||||||
|
idna==3.10
|
||||||
|
iniconfig==2.0.0
|
||||||
|
instructor==1.5.2
|
||||||
|
jiter==0.5.0
|
||||||
|
markdown-it-py==3.0.0
|
||||||
|
mdurl==0.1.2
|
||||||
|
multidict==6.1.0
|
||||||
|
nodeenv==1.9.1
|
||||||
|
numpy==2.1.2
|
||||||
|
openai==1.51.2
|
||||||
|
packaging==24.1
|
||||||
|
platformdirs==4.3.6
|
||||||
|
pluggy==1.5.0
|
||||||
|
pre_commit==4.0.1
|
||||||
|
propcache==0.2.0
|
||||||
|
pydantic==2.9.2
|
||||||
|
pydantic_core==2.23.4
|
||||||
|
Pygments==2.18.0
|
||||||
|
pytest==8.3.3
|
||||||
|
PyYAML==6.0.2
|
||||||
|
requests==2.32.3
|
||||||
|
rich==13.9.2
|
||||||
|
shellingham==1.5.4
|
||||||
|
sniffio==1.3.1
|
||||||
|
swarm @ git+https://github.com/openai/swarm.git@9db581cecaacea0d46a933d6453c312b034dbf47
|
||||||
|
tenacity==8.5.0
|
||||||
|
tqdm==4.66.5
|
||||||
|
typer==0.12.5
|
||||||
|
typing_extensions==4.12.2
|
||||||
|
urllib3==2.2.3
|
||||||
|
virtualenv==20.26.6
|
||||||
|
yarl==1.15.2
|
||||||
5
sql-ai-agent/run.py
Normal file
5
sql-ai-agent/run.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from swarm.repl import run_demo_loop
from sql_agents import sql_router_agent

# Start Swarm's interactive demo REPL with the router as the entry-point
# agent; it hands conversations off to the specialist agents as needed.
if __name__ == "__main__":
    run_demo_loop(sql_router_agent)
|
||||||
101
sql-ai-agent/sql_agents.py
Normal file
101
sql-ai-agent/sql_agents.py
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
from swarm import Agent
import sqlite3

# Module-level connection shared by every agent tool call.
# NOTE(review): it is never closed and sqlite3 connections are not
# thread-safe by default — acceptable for this single-threaded demo.
conn = sqlite3.connect('rss-feed-database.db')
cursor = conn.cursor()

# Load the CREATE TABLE statements so they can be embedded verbatim in the
# specialist agents' system prompts below.
with open("ai-news-complete-tables.sql", "r") as table_schema_file:
    table_schemas = table_schema_file.read()
|
||||||
|
|
||||||
|
def run_sql_select_statement(sql_statement):
    """Executes a SQL SELECT statement and returns the results of running the SELECT. Make sure you have a full SQL SELECT query created before calling this function."""
    # This docstring doubles as the tool description Swarm shows the LLM,
    # so it is kept in model-facing language.
    print(f"Executing SQL statement: {sql_statement}")

    # Fix: the contract is read-only, but the original executed whatever SQL
    # the model produced — a hallucinated UPDATE/DELETE/DROP would mutate the
    # database. Reject anything that is not a SELECT (or WITH ... SELECT).
    stripped = sql_statement.strip()
    first_word = stripped.split(None, 1)[0].upper() if stripped else ""
    if first_word not in ("SELECT", "WITH"):
        return "Error: only SELECT statements are allowed."

    cursor.execute(sql_statement)
    records = cursor.fetchall()

    if not records:
        return "No results found."

    # Column names come from the cursor metadata of the executed query.
    column_names = [description[0] for description in cursor.description]

    # Each column's width = max(header width, widest value) for alignment.
    col_widths = [len(name) for name in column_names]
    for row in records:
        for i, value in enumerate(row):
            col_widths[i] = max(col_widths[i], len(str(value)))

    # Render a plain-text table: header line, dashed separator, data rows.
    result_str = ""
    header = " | ".join(name.ljust(width) for name, width in zip(column_names, col_widths))
    result_str += header + "\n"
    result_str += "-" * len(header) + "\n"

    for row in records:
        row_str = " | ".join(str(value).ljust(width) for value, width in zip(row, col_widths))
        result_str += row_str + "\n"

    return result_str
|
||||||
|
|
||||||
|
def get_sql_router_agent_instructions():
    # System prompt for the router agent: it only dispatches to the
    # specialist agents and never answers data questions itself.
    return """You are an orchestrator of different SQL data experts and it is your job to
    determine which of the agent is best suited to handle the user's request,
    and transfer the conversation to that agent."""
|
||||||
|
|
||||||
|
def get_sql_agent_instructions():
    # Shared base prompt for every specialist agent. The f-string embeds the
    # CREATE TABLE statements loaded at import time so the model writes
    # queries that match the real schema.
    return f"""You are a SQL expert who takes in a request from a user for information
    they want to retrieve from the DB, creates a SELECT statement to retrieve the
    necessary information, and then invoke the function to run the query and
    get the results back to then report to the user the information they wanted to know.

    Here are the table schemas for the DB you can query:

    {table_schemas}

    Write all of your SQL SELECT statements to work 100% with these schemas and nothing else.
    You are always willing to create and execute the SQL statements to answer the user's question.
    """
|
||||||
|
|
||||||
|
|
||||||
|
# --- Agent definitions -------------------------------------------------------
# One router plus three specialists. Every specialist shares the same SQL
# tool and layers a domain-specific instruction on top of the base prompt.

sql_router_agent = Agent(
    name="Router Agent",
    instructions=get_sql_router_agent_instructions(),
)

rss_feed_agent = Agent(
    name="RSS Feed Agent",
    instructions=get_sql_agent_instructions() + "\n\nHelp the user with data related to RSS feeds. Be super enthusiastic about how many great RSS feeds there are in every one of your responses.",
    functions=[run_sql_select_statement],
)

user_agent = Agent(
    name="User Agent",
    instructions=get_sql_agent_instructions() + "\n\nHelp the user with data related to users.",
    functions=[run_sql_select_statement],
)

analytics_agent = Agent(
    name="Analytics Agent",
    instructions=get_sql_agent_instructions() + "\n\nHelp the user gain insights from the data with analytics. Be super accurate in reporting numbers and citing sources.",
    functions=[run_sql_select_statement],
)
|
||||||
|
|
||||||
|
|
||||||
|
def transfer_back_to_router_agent():
    """Call this function if a user is asking about data that is not handled by the current agent."""
    return sql_router_agent


# Fix: the three transfer_to_* functions below had no docstrings. Swarm
# surfaces a tool function's docstring to the LLM as the tool description,
# so without one the router has only the function name to route on.
def transfer_to_rss_feeds_agent():
    """Call this function to hand the conversation to the RSS feed expert for questions about RSS feeds and their articles."""
    return rss_feed_agent


def transfer_to_user_agent():
    """Call this function to hand the conversation to the user-data expert for questions about user accounts and preferences."""
    return user_agent


def transfer_to_analytics_agent():
    """Call this function to hand the conversation to the analytics expert for aggregate insights and statistics over the data."""
    return analytics_agent
|
||||||
|
|
||||||
|
|
||||||
|
# Wire up the handoff graph: the router can reach every specialist, and each
# specialist can hand the conversation back to the router.
sql_router_agent.functions = [
    transfer_to_rss_feeds_agent,
    transfer_to_user_agent,
    transfer_to_analytics_agent,
]
for specialist_agent in (rss_feed_agent, user_agent, analytics_agent):
    specialist_agent.functions.append(transfer_back_to_router_agent)
|
||||||
Reference in New Issue
Block a user