Vaishak G Kumar
committed on
Update src/agentics/agents.py
src/agentics/agents.py  CHANGED  (+116 -111)
@@ -22,121 +22,126 @@ llm_config = {
     "temperature": 0,
 }
 
-def termination_msg(x):
+def termination_msg(self, x):
     return isinstance(x, dict) and "TERMINATE" == str(x.get("content", ""))[-9:].upper()
 
+class AgentsFactory:
+    def __init__(self, llm_config, db_path):
+        self.llm_config = llm_config
+        self.db_path = db_path
+
+
+
+    def tonic(self) :
+        return autogen.UserProxyAgent(
+            name="Boss",
+            is_termination_msg=termination_msg,
+            human_input_mode="NEVER",
+            system_message="The boss who asks questions and gives tasks.",
+            code_execution_config=False,
+            default_auto_reply="Reply `TERMINATE` if the task is done.",
+        )
+
+    # Create the RetrieveUserProxyAgent (Boss Assistant)
+    def scitonic(self) :
+        return RetrieveUserProxyAgent(
+            name="Boss_Assistant",
+            is_termination_msg=termination_msg,
+            system_message="Assistant who has extra content retrieval power for solving difficult problems.",
+            human_input_mode="NEVER",
+            max_consecutive_auto_reply=3,
+            retrieve_config={
+                "task": "QuoraRetrieval",
+                "docs_path": self.db_path,
+                "chunk_token_size": 1000,
+                "model": llm_config["config_list"][0]["model"],
+                "client": chromadb.PersistentClient(path="/tmp/chromadb"),
+                "collection_name": "groupchat",
+                "get_or_create": True,
+            },
+            code_execution_config=False,
+        )
+    # Placeholder definitions for agents used in team functions
+    def coder(self) :
+        return AssistantAgent(
+            name="Coder",
+            system_message="You are a coder. Help in writing and reviewing code.",
+            llm_config=llm_config
+        )
+
+    def pm(self) :
+        return AssistantAgent(
+            name="Project_Manager",
+            system_message="You are a project manager. Coordinate tasks and ensure project success.",
+            llm_config=llm_config
+        )
+
+    def reviewer(self) :
+        return AssistantAgent(
+            name="Reviewer",
+            system_message="You are a code reviewer. Provide feedback on code quality.",
+            llm_config=llm_config
+        )
+
+    # Define more agents for each team
+    def finance_expert(self) :
+        return AssistantAgent(
+            name="Finance_Expert",
+            system_message="You are a finance expert. Provide insights on financial matters.",
+            llm_config=llm_config
+        )
+
+    def debate_champion(self) :
+        return AssistantAgent(
+            name="Debate_Champion",
+            system_message="You are a debate champion. Contribute to meaningful debates.",
+            llm_config=llm_config
+        )
+
+    def academic_whiz(self) :
+        return AssistantAgent(
             name="Academic_Whiz",
             system_message="You are an academic whiz. Offer solutions to academic challenges.",
             llm_config=llm_config
         )
 
-def consulting_pro():
+    def consulting_pro(self) :
+        return AssistantAgent(
+            name="Consulting_Pro",
+            system_message="You are a consulting professional. Offer professional advice and solutions.",
+            llm_config=llm_config
+        )
+    def covid19_scientist(self) :
+        return AssistantAgent(
+            name="Covid19_Scientist",
+            system_message="You are a scientist studying Covid-19 trends. Provide analysis and insights.",
+            llm_config=llm_config
+        )
+
+    def healthcare_expert(self) :
+        return AssistantAgent(
+            name="Healthcare_Expert",
+            system_message="You are a healthcare expert focused on managing and mitigating the impact of Covid-19.",
+            llm_config=llm_config
+        )
+
+    def finance_analyst(self) :
+        return AssistantAgent(
+            name="Finance_Analyst",
+            system_message="You are a finance analyst. Provide insights on the economic impact of Covid-19.",
+            llm_config=llm_config
+        )
+
+    def debate_expert(self) :
+        return AssistantAgent(
+            name="Debate_Expert",
+            system_message="You are an expert in debate strategies and communication. Participate in meaningful debates.",
+            llm_config=llm_config
+        )
+
+    def academic_expert(self) :
+        return AssistantAgent(
+            name="Academic_Expert",
+            system_message="You are an academic expert. Provide assistance and insights for academic challenges.",
+            llm_config=llm_config
+        )
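
Below is a minimal usage sketch (not part of this commit) showing how the new AgentsFactory could be wired into an AutoGen group chat. It assumes the module-level llm_config and the autogen/AssistantAgent imports already present in agents.py; the import path agentics.agents, the db_path value, and the team composition are illustrative assumptions.

# Hypothetical usage sketch (not in this commit): build a small team from
# AgentsFactory and run it as an AutoGen group chat.
import autogen

from agentics.agents import AgentsFactory, llm_config  # assumed import path

factory = AgentsFactory(llm_config=llm_config, db_path="docs/")  # db_path is a placeholder

boss = factory.tonic()         # UserProxyAgent that issues the task
coder = factory.coder()        # AssistantAgent that writes code
pm = factory.pm()              # AssistantAgent that coordinates the work
reviewer = factory.reviewer()  # AssistantAgent that reviews the code

groupchat = autogen.GroupChat(
    agents=[boss, coder, pm, reviewer],
    messages=[],
    max_round=12,
)
manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)

# The boss kicks off the task; turns continue until the chat terminates
# (see termination_msg / default_auto_reply above) or max_round is reached.
boss.initiate_chat(
    manager,
    message="Write and review a Python function that parses a CSV file.",
)

The retrieval-augmented Boss_Assistant returned by scitonic() could stand in for the plain boss when the task needs lookups from the documents under db_path.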