Spaces:
Sleeping
Sleeping
[update] add function calling test
Browse files- examples.json +34 -0
- examples/test/dynamic_import_package.py +51 -0
- examples/test/test_assistant/test_assistant_function_call.py +218 -0
- examples/test/test_assistant/test_function_call.py +118 -0
- functions/__init__.py +6 -0
- functions/get_current_weather.py +22 -0
- main.py +183 -220
examples.json
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[
|
2 |
+
[
|
3 |
+
"Math Tutor",
|
4 |
+
"You are a personal math tutor. Write and run code to answer math questions.",
|
5 |
+
"Official math test case",
|
6 |
+
null,
|
7 |
+
null,
|
8 |
+
"gpt-4-1106-preview",
|
9 |
+
"123 * 524 等于多少?",
|
10 |
+
"Math Tutor 数学导师,一个最简单的 Agent,即没有函数调用,也没有文本检索。"
|
11 |
+
],
|
12 |
+
[
|
13 |
+
"小说专家",
|
14 |
+
"根据小说内容回答问题。",
|
15 |
+
"三国演义文档问答测试",
|
16 |
+
"{\"type\": \"retrieval\"}",
|
17 |
+
[
|
18 |
+
"data/三国演义.txt"
|
19 |
+
],
|
20 |
+
"gpt-4-1106-preview",
|
21 |
+
"刘备和张飞是什么关系。",
|
22 |
+
"基于文本检索的问答机器人,目前我也没有找到调整 chunk_size 的方法。"
|
23 |
+
],
|
24 |
+
[
|
25 |
+
"Weather Bot",
|
26 |
+
"You are a weather bot. Use the provided functions to answer questions.",
|
27 |
+
"Function calling test case",
|
28 |
+
"{\"type\": \"function\", \"function\": {\"name\": \"get_current_weather\", \"description\": \"Get the current weather in a given location\", \"parameters\": {\"type\": \"object\", \"properties\": {\"location\": {\"type\": \"string\", \"description\": \"The city and state, e.g. San Francisco, CA\"}, \"unit\": {\"type\": \"string\", \"enum\": [\"celsius\", \"fahrenheit\"]}}, \"required\": [\"location\"]}}}",
|
29 |
+
null,
|
30 |
+
"gpt-4-1106-preview",
|
31 |
+
"What's the weather san francisco",
|
32 |
+
"Weather Bot 天气查询,一个基于函数调用的机器人。\n在左边栏的 function script 标签中可以查看有哪些函数可用。\n例如有一个函数叫作 “get_current_weather” 则有一个名为 “get_current_weather.py” 的 python 脚本,其中包含一个名为 “get_current_weather” 的函数。\n当函数被调用时,程序会自动导入该函数并调用。\n因此,你也可以上传自己的函数。"
|
33 |
+
]
|
34 |
+
]
|
examples/test/dynamic_import_package.py
ADDED
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/python3
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import argparse
|
4 |
+
import importlib
|
5 |
+
import time
|
6 |
+
|
7 |
+
from openai import OpenAI
|
8 |
+
from openai.pagination import SyncCursorPage
|
9 |
+
from openai.types.beta.threads import ThreadMessage
|
10 |
+
|
11 |
+
from project_settings import environment, project_path
|
12 |
+
|
13 |
+
|
14 |
+
def get_args():
    """Parse CLI options selecting which package/function to import and call.

    Both options default to "get_current_weather", matching the sample
    module shipped in the ``functions`` package.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--package_name", default="get_current_weather", type=str)
    parser.add_argument("--function_name", default="get_current_weather", type=str)
    return parser.parse_args()
|
28 |
+
|
29 |
+
|
30 |
+
def main():
    """Demonstrate importing a tool function by name and invoking it.

    The final import of ``functions.not_exist`` is intentional: it exercises
    the ModuleNotFoundError path for a package that does not exist.
    """
    args = get_args()

    # Resolve "functions.<package_name>" at runtime, then look up the callable.
    module = importlib.import_module("functions.{}".format(args.package_name))
    print(module)

    function_to_call = getattr(module, args.function_name)
    print(function_to_call)

    result = function_to_call("beijing")
    print(result)

    del module

    # NOTE(review): this import always fails by design — it demonstrates the
    # missing-module error path.
    module = importlib.import_module("functions.not_exist")
    print(module)

    return


if __name__ == '__main__':
    main()
|
examples/test/test_assistant/test_assistant_function_call.py
ADDED
@@ -0,0 +1,218 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/python3
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
"""
|
4 |
+
https://platform.openai.com/docs/assistants/tools/function-calling
|
5 |
+
"""
|
6 |
+
import argparse
|
7 |
+
import json
|
8 |
+
import time
|
9 |
+
|
10 |
+
from openai import OpenAI
|
11 |
+
from openai.pagination import SyncCursorPage
|
12 |
+
from openai.types.beta.threads import ThreadMessage
|
13 |
+
from openai.types.beta.assistant import Assistant
|
14 |
+
|
15 |
+
from project_settings import environment, project_path
|
16 |
+
|
17 |
+
|
18 |
+
def get_args():
    """Parse CLI options; the OpenAI API key falls back to project environment config."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str,
    )
    return parser.parse_args()
|
27 |
+
|
28 |
+
|
29 |
+
def get_current_weather(location, unit="fahrenheit"):
    """Return a canned weather report for *location* as a JSON string.

    The *unit* argument is accepted for tool-schema compatibility but the
    reported unit is fixed per city (mirrors the OpenAI docs example).
    """
    place = location.lower()
    if "tokyo" in place:
        temperature, reported_unit = "10", "celsius"
    elif "san francisco" in place:
        temperature, reported_unit = "72", "fahrenheit"
    else:
        temperature, reported_unit = "22", "celsius"
    return json.dumps({"location": location, "temperature": temperature, "unit": reported_unit})
|
36 |
+
|
37 |
+
|
38 |
+
# Registry mapping tool names (as advertised to the model) to local callables.
available_functions = {
    "get_current_weather": get_current_weather,
}
|
41 |
+
|
42 |
+
|
43 |
+
def main():
    """Create an assistant with a weather tool and drive a full run end to end.

    Ids observed in a previous session:
        assistant.id: asst_9iUOSeG3dUgzBxYqfygvtKLi
        thread.id: thread_9C4dDj5i4jDCtkMCujyBleOc
    """
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

    # Tool schema exposing the local get_current_weather stub to the model.
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA",
                        },
                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                    },
                    "required": ["location"],
                },
            },
        }
    ]
    # NOTE(review): debug gate — prints an escaped JSON form of the tools
    # schema (for pasting into examples.json) and stops; every statement
    # below exit(0) is unreachable until this gate is removed.
    tools_ = json.dumps(tools, ensure_ascii=False)
    print(tools_.replace("\"", "\\\""))
    exit(0)

    assistant = client.beta.assistants.create(
        instructions="You are a weather bot. Use the provided functions to answer questions.",
        model="gpt-4-1106-preview",
        tools=tools
    )
    print(f"assistant.id: {assistant.id}")

    thread = client.beta.threads.create()
    print(f"thread.id: {thread.id}")

    message = client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content="what's the whether San Francisco"
    )

    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant.id,
        instructions="Please address the user as Jane Doe. The user has a premium account."
    )

    delta_time = 0.3

    # Poll until the run asks us to execute tool calls; give up after
    # max_no_update_count consecutive polls without a required action.
    no_update_count = 0
    max_no_update_count = 10
    while True:
        if no_update_count > max_no_update_count:
            break
        time.sleep(delta_time)

        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id
        )
        print("run.required_action: {}".format(run.required_action))
        if run.required_action is None:
            no_update_count += 1
            continue

        if run.required_action.type != "submit_tool_outputs":
            raise AssertionError

        # Execute each requested tool call locally and collect its output.
        tool_outputs = list()
        for tool_call in run.required_action.submit_tool_outputs.tool_calls:
            function_name = tool_call.function.name
            function_to_call = available_functions[function_name]
            function_args = json.loads(tool_call.function.arguments)
            function_response = function_to_call(
                location=function_args.get("location"),
                unit=function_args.get("unit"),
            )
            tool_outputs.append({
                "tool_call_id": tool_call.id,
                "output": function_response,
            })

        run = client.beta.threads.runs.submit_tool_outputs(
            thread_id=thread.id,
            run_id=run.id,
            tool_outputs=tool_outputs
        )
        no_update_count = 0

    # Wait until the run reaches a terminal state.
    while True:
        time.sleep(delta_time)
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id
        )
        print(run.created_at)
        print(run.started_at)
        print(run.completed_at)
        print(run.failed_at)
        print(run.expires_at)
        print(run.cancelled_at)

        if run.completed_at is not None:
            break
        if run.failed_at is not None:
            break
        if run.expires_at is not None:
            break
        if run.cancelled_at is not None:
            break

    # Dump the full conversation transcript as pretty-printed JSON.
    messages = client.beta.threads.messages.list(
        thread_id=thread.id
    )
    messages = messages.model_dump(mode="json")
    messages = json.dumps(messages, indent=4, ensure_ascii=False)
    print(messages)

    return
|
176 |
+
|
177 |
+
|
178 |
+
def main2():
    """Inspect an existing thread/run pair left over from a previous session.

    Observed output:
        assistant.id: asst_OrPcAueQLrLYxtksFaPVVeJo
        thread.id: thread_2oJCtoSCYgguOhdssafJM7ab
        run: run_cA8DtX8EnoVGhmvu4VrvF63O
        run.required_action: None
        run id: 2090622954288
        run: run_cA8DtX8EnoVGhmvu4VrvF63O
        run.required_action: None
        run id: 2090623149056

        run.required_action: RequiredAction(submit_tool_outputs=RequiredActionSubmitToolOutputs(tool_calls=[RequiredActionFunctionToolCall(id='call_jalze5uKemfrnkPiJPRehVt0', function=Function(arguments='{"location":"San Francisco, CA"}', name='getCurrentWeather'), type='function')]), type='submit_tool_outputs')
    """
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

    # Hard-coded ids captured from an earlier run of main().
    thread_id = "thread_2oJCtoSCYgguOhdssafJM7ab"
    run_id = "run_cA8DtX8EnoVGhmvu4VrvF63O"

    run = client.beta.threads.runs.retrieve(
        thread_id=thread_id,
        run_id=run_id
    )
    print("run: {}".format(run.id))
    print("run.required_action: {}".format(run.required_action))
    # id() shows each retrieve() returns a fresh object, not a cached one.
    print("run id: {}".format(id(run)))

    messages = client.beta.threads.messages.list(
        thread_id=thread_id
    )
    print(messages)

    return


if __name__ == '__main__':
    main()
|
examples/test/test_assistant/test_function_call.py
ADDED
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/python3
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
"""
|
4 |
+
https://platform.openai.com/docs/guides/function-calling
|
5 |
+
"""
|
6 |
+
import argparse
|
7 |
+
import json
|
8 |
+
import time
|
9 |
+
|
10 |
+
import openai
|
11 |
+
from openai import OpenAI
|
12 |
+
from openai.pagination import SyncCursorPage
|
13 |
+
from openai.types.beta.threads import ThreadMessage
|
14 |
+
from openai.types.beta.assistant import Assistant
|
15 |
+
|
16 |
+
from project_settings import environment, project_path
|
17 |
+
|
18 |
+
|
19 |
+
def get_args():
    """Parse CLI options; the OpenAI API key falls back to project environment config."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str,
    )
    return parser.parse_args()
|
28 |
+
|
29 |
+
|
30 |
+
def get_current_weather(location, unit="fahrenheit"):
    """Return a canned weather report for *location* as a JSON string.

    The *unit* argument is accepted for tool-schema compatibility but the
    reported unit is fixed per city (mirrors the OpenAI docs example).
    """
    place = location.lower()
    if "tokyo" in place:
        temperature, reported_unit = "10", "celsius"
    elif "san francisco" in place:
        temperature, reported_unit = "72", "fahrenheit"
    else:
        temperature, reported_unit = "22", "celsius"
    return json.dumps({"location": location, "temperature": temperature, "unit": reported_unit})
|
37 |
+
|
38 |
+
|
39 |
+
# Registry mapping tool names (as advertised to the model) to local callables.
available_functions = {
    "get_current_weather": get_current_weather,
}
|
42 |
+
|
43 |
+
|
44 |
+
def main():
    """Run the chat-completions function-calling round trip from the OpenAI guide.

    Sends a multi-city weather question, executes any tool calls the model
    requests against the local ``get_current_weather`` stub, then asks the
    model for a final answer incorporating the tool outputs.

    Raises whatever the OpenAI client raises on network/auth failures.
    """
    args = get_args()

    client = OpenAI(
        api_key=args.openai_api_key
    )

    messages = [{"role": "user", "content": "What's the weather like in San Francisco, Tokyo, and Paris?"}]

    # Tool schema exposing the local get_current_weather stub to the model.
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA",
                        },
                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                    },
                    "required": ["location"],
                },
            },
        }
    ]

    # Bug fix: call through the configured ``client`` (which carries
    # --openai_api_key) instead of the module-level ``openai`` object, which
    # only works when the OPENAI_API_KEY environment variable happens to be set.
    response = client.chat.completions.create(
        model="gpt-3.5-turbo-1106",
        messages=messages,
        tools=tools,
        tool_choice="auto",  # auto is default, but we'll be explicit
    )
    print(response.choices)
    response_message = response.choices[0].message
    tool_calls = response_message.tool_calls
    print(tool_calls)

    if tool_calls:
        # Echo the assistant turn back, then append one "tool" message per call.
        messages.append(response_message)
        for tool_call in tool_calls:
            function_name = tool_call.function.name
            function_to_call = available_functions[function_name]
            function_args = json.loads(tool_call.function.arguments)
            function_response = function_to_call(
                location=function_args.get("location"),
                unit=function_args.get("unit"),
            )
            messages.append(
                {
                    "tool_call_id": tool_call.id,
                    "role": "tool",
                    "name": function_name,
                    "content": function_response,
                }
            )
        # Second round: let the model phrase a final answer from the tool outputs.
        second_response = client.chat.completions.create(
            model="gpt-3.5-turbo-1106",
            messages=messages,
        )
        print("second_response: {}".format(second_response))

    return


if __name__ == '__main__':
    main()
|
functions/__init__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""Package marker for dynamically importable tool-function modules."""

if __name__ == '__main__':
    pass
|
functions/get_current_weather.py
ADDED
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/python3
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import json
|
4 |
+
|
5 |
+
|
6 |
+
def get_current_weather(location, unit="fahrenheit"):
    """Return a canned weather report for *location* as a JSON string.

    The *unit* argument is accepted for tool-schema compatibility but the
    reported unit is fixed per city (mirrors the OpenAI docs example).
    """
    place = location.lower()
    if "tokyo" in place:
        temperature, reported_unit = "10", "celsius"
    elif "san francisco" in place:
        temperature, reported_unit = "72", "fahrenheit"
    else:
        temperature, reported_unit = "22", "celsius"
    return json.dumps({"location": location, "temperature": temperature, "unit": reported_unit})
|
13 |
+
|
14 |
+
|
15 |
+
def main():
    """Smoke-test the stub with a city that hits the default branch."""
    report = get_current_weather("beijing")
    print(report)
    return


if __name__ == '__main__':
    main()
|
main.py
CHANGED
@@ -5,13 +5,17 @@ https://huggingface.co/spaces/fffiloni/langchain-chat-with-pdf-openai
|
|
5 |
"""
|
6 |
import argparse
|
7 |
import httpx
|
|
|
8 |
import json
|
9 |
import logging
|
|
|
|
|
|
|
10 |
import time
|
11 |
from typing import List, Tuple
|
12 |
|
13 |
logging.basicConfig(
|
14 |
-
level=logging.DEBUG,
|
15 |
format="%(asctime)s %(levelname)s %(message)s",
|
16 |
datefmt="%Y-%m-%d %H:%M:%S",
|
17 |
)
|
@@ -31,6 +35,11 @@ logger = logging.getLogger(__name__)
|
|
31 |
|
32 |
def get_args():
|
33 |
parser = argparse.ArgumentParser()
|
|
|
|
|
|
|
|
|
|
|
34 |
parser.add_argument(
|
35 |
"--openai_api_key",
|
36 |
default=settings.environment.get("openai_api_key", default=None, dtype=str),
|
@@ -40,10 +49,16 @@ def get_args():
|
|
40 |
return args
|
41 |
|
42 |
|
43 |
-
def
|
44 |
-
|
45 |
-
|
46 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
47 |
|
48 |
|
49 |
def click_create_assistant(openai_api_key: str,
|
@@ -98,11 +113,7 @@ def click_create_assistant(openai_api_key: str,
|
|
98 |
)
|
99 |
assistant_id = assistant.id
|
100 |
|
101 |
-
|
102 |
-
thread = client.beta.threads.create()
|
103 |
-
thread_id = thread.id
|
104 |
-
|
105 |
-
return assistant_id, thread_id
|
106 |
|
107 |
|
108 |
def click_list_assistant(openai_api_key: str) -> str:
|
@@ -138,6 +149,17 @@ def click_delete_assistant(openai_api_key: str,
|
|
138 |
return result
|
139 |
|
140 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
141 |
def click_list_file(openai_api_key: str):
|
142 |
client = OpenAI(
|
143 |
api_key=openai_api_key,
|
@@ -197,140 +219,38 @@ def click_upload_files(openai_api_key: str,
|
|
197 |
return result
|
198 |
|
199 |
|
200 |
-
def
|
201 |
-
""
|
202 |
-
|
203 |
-
|
204 |
-
|
205 |
-
|
206 |
-
|
207 |
-
|
208 |
-
MessageContentText(
|
209 |
-
text=Text(
|
210 |
-
annotations=[
|
211 |
-
TextAnnotationFileCitation(
|
212 |
-
end_index=44,
|
213 |
-
file_citation=TextAnnotationFileCitationFileCitation(
|
214 |
-
file_id='file-IwzwXQkixMu7fvgGoC1alIWu',
|
215 |
-
quote='念刘备、关羽、张飞,虽然异姓,既结为兄弟,则同心协力,救困扶危;上报国家,下安黎庶。不求同年同月同日生,只愿同年同月同日死。皇天后土,实鉴此心,背义忘恩,天人共戮!”誓毕,拜玄德为兄,关羽次之,张飞为弟'
|
216 |
-
),
|
217 |
-
start_index=34,
|
218 |
-
text='【7†source】',
|
219 |
-
type='file_citation'
|
220 |
-
)
|
221 |
-
],
|
222 |
-
value='刘备和张飞虽然是异姓,但他们结为了兄弟,其中刘备被拜为兄,而张飞为弟【7†source】。'
|
223 |
-
),
|
224 |
-
type='text'
|
225 |
-
)
|
226 |
-
],
|
227 |
-
created_at=1699493845,
|
228 |
-
file_ids=[],
|
229 |
-
metadata={},
|
230 |
-
object='thread.message',
|
231 |
-
role='assistant',
|
232 |
-
run_id='run_zJYZX0KFEvEh2VG5x5zSLq9s',
|
233 |
-
thread_id='thread_3JWRdjvZDJTBgZ0tlrrKXnrt'
|
234 |
-
),
|
235 |
-
|
236 |
-
ThreadMessage(
|
237 |
-
id='msg_tc5Tit7q19S5TSgvmBauME3H',
|
238 |
-
assistant_id=None,
|
239 |
-
content=[
|
240 |
-
MessageContentText(
|
241 |
-
text=Text(
|
242 |
-
annotations=[],
|
243 |
-
value='刘备和张飞是什么关系。'
|
244 |
-
),
|
245 |
-
type='text'
|
246 |
-
)
|
247 |
-
],
|
248 |
-
created_at=1699493838,
|
249 |
-
file_ids=[],
|
250 |
-
metadata={},
|
251 |
-
object='thread.message',
|
252 |
-
role='user',
|
253 |
-
run_id=None,
|
254 |
-
thread_id='thread_3JWRdjvZDJTBgZ0tlrrKXnrt'
|
255 |
-
)
|
256 |
-
|
257 |
-
],
|
258 |
-
object='list',
|
259 |
-
first_id='msg_kb0f2fyDC6OwMyXxKbUpcuBS',
|
260 |
-
last_id='msg_tc5Tit7q19S5TSgvmBauME3H',
|
261 |
-
has_more=False
|
262 |
-
)
|
263 |
-
"""
|
264 |
-
messages = client.beta.threads.messages.list(
|
265 |
-
thread_id=thread_id
|
266 |
-
)
|
267 |
-
# print(messages)
|
268 |
-
|
269 |
-
result = list()
|
270 |
-
for message in messages.data:
|
271 |
-
|
272 |
-
content = list()
|
273 |
-
for msg in message.content:
|
274 |
-
annotations = list()
|
275 |
-
for annotation in msg.text.annotations:
|
276 |
-
a = {
|
277 |
-
"start_index": annotation.start_index,
|
278 |
-
"end_index": annotation.end_index,
|
279 |
-
"text": annotation.text,
|
280 |
-
"type": annotation.type,
|
281 |
-
}
|
282 |
-
|
283 |
-
if annotation.type == "file_citation":
|
284 |
-
a["file_citation"] = {
|
285 |
-
"file_id": annotation.file_citation.file_id,
|
286 |
-
"quote": annotation.file_citation.quote,
|
287 |
-
}
|
288 |
-
|
289 |
-
annotations.append(a)
|
290 |
-
|
291 |
-
content.append({
|
292 |
-
"text": {
|
293 |
-
"annotations": annotations,
|
294 |
-
"value": msg.text.value,
|
295 |
-
},
|
296 |
-
"type": msg.type,
|
297 |
-
|
298 |
-
})
|
299 |
-
|
300 |
-
result.append({
|
301 |
-
"id": message.id,
|
302 |
-
"assistant_id": message.assistant_id,
|
303 |
-
"content": content,
|
304 |
-
"created_at": message.created_at,
|
305 |
-
"file_ids": message.file_ids,
|
306 |
-
"metadata": message.metadata,
|
307 |
-
"object": message.object,
|
308 |
-
"role": message.role,
|
309 |
-
"run_id": message.run_id,
|
310 |
-
"thread_id": message.thread_id,
|
311 |
-
|
312 |
-
})
|
313 |
-
|
314 |
-
result = list(sorted(result, key=lambda x: x["created_at"]))
|
315 |
return result
|
316 |
|
317 |
|
318 |
-
def
|
319 |
-
|
320 |
-
|
321 |
-
|
322 |
-
|
|
|
|
|
323 |
|
324 |
-
for c in content:
|
325 |
-
if c["type"] != "text":
|
326 |
-
continue
|
327 |
-
text: dict = c["text"]
|
328 |
-
msg = "{}: \n{}\n".format(role, text["value"])
|
329 |
-
response += msg
|
330 |
-
response += "-" * 80
|
331 |
-
response += "\n"
|
332 |
|
333 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
334 |
|
335 |
|
336 |
def convert_message_list_to_conversation(message_list: List[dict]) -> List[Tuple[str, str]]:
|
@@ -370,30 +290,6 @@ def convert_message_list_to_conversation(message_list: List[dict]) -> List[Tuple
|
|
370 |
return conversation
|
371 |
|
372 |
|
373 |
-
def streaming_refresh(openai_api_key: str,
|
374 |
-
thread_id: str,
|
375 |
-
queue: Queue,
|
376 |
-
):
|
377 |
-
delta_time = 0.3
|
378 |
-
last_response = None
|
379 |
-
no_updates_count = 0
|
380 |
-
max_no_updates_count = 5
|
381 |
-
while True:
|
382 |
-
time.sleep(delta_time)
|
383 |
-
|
384 |
-
this_response = refresh(openai_api_key, thread_id)
|
385 |
-
|
386 |
-
if this_response == last_response:
|
387 |
-
no_updates_count += 1
|
388 |
-
if no_updates_count >= max_no_updates_count:
|
389 |
-
break
|
390 |
-
last_response = this_response
|
391 |
-
|
392 |
-
queue.put(this_response, block=True, timeout=2)
|
393 |
-
|
394 |
-
return last_response
|
395 |
-
|
396 |
-
|
397 |
def refresh(openai_api_key: str,
|
398 |
thread_id: str,
|
399 |
):
|
@@ -401,9 +297,14 @@ def refresh(openai_api_key: str,
|
|
401 |
api_key=openai_api_key,
|
402 |
)
|
403 |
|
404 |
-
message_list =
|
405 |
-
|
406 |
-
|
|
|
|
|
|
|
|
|
|
|
407 |
conversation = convert_message_list_to_conversation(message_list)
|
408 |
return conversation
|
409 |
|
@@ -424,7 +325,7 @@ def add_and_run(openai_api_key: str,
|
|
424 |
api_key=openai_api_key,
|
425 |
)
|
426 |
if assistant_id is None or len(assistant_id.strip()) == 0:
|
427 |
-
assistant_id = click_create_assistant(
|
428 |
openai_api_key,
|
429 |
name, instructions, description, tools, files, file_ids, model
|
430 |
)
|
@@ -433,6 +334,8 @@ def add_and_run(openai_api_key: str,
|
|
433 |
thread = client.beta.threads.create()
|
434 |
thread_id = thread.id
|
435 |
|
|
|
|
|
436 |
message = client.beta.threads.messages.create(
|
437 |
thread_id=thread_id,
|
438 |
role="user",
|
@@ -442,42 +345,57 @@ def add_and_run(openai_api_key: str,
|
|
442 |
thread_id=thread_id,
|
443 |
assistant_id=assistant_id,
|
444 |
)
|
445 |
-
run = client.beta.threads.runs.retrieve(
|
446 |
-
thread_id=thread_id,
|
447 |
-
run_id=run.id
|
448 |
-
)
|
449 |
-
|
450 |
-
response_queue = Queue(maxsize=10)
|
451 |
-
refresh_kwargs = dict(
|
452 |
-
openai_api_key=openai_api_key,
|
453 |
-
thread_id=thread_id,
|
454 |
-
queue=response_queue,
|
455 |
-
)
|
456 |
-
thread = Thread(target=streaming_refresh, kwargs=refresh_kwargs)
|
457 |
-
thread.start()
|
458 |
|
459 |
delta_time = 0.1
|
460 |
-
|
461 |
no_updates_count = 0
|
462 |
-
max_no_updates_count =
|
463 |
while True:
|
464 |
time.sleep(delta_time)
|
|
|
|
|
|
|
|
|
465 |
|
466 |
-
|
467 |
-
|
468 |
-
|
469 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
470 |
|
471 |
-
if this_response == last_response:
|
472 |
-
no_updates_count += 1
|
473 |
if no_updates_count >= max_no_updates_count:
|
474 |
break
|
475 |
-
|
476 |
|
477 |
result = [
|
478 |
assistant_id, thread_id,
|
479 |
-
|
480 |
-
[]
|
481 |
]
|
482 |
yield result
|
483 |
|
@@ -489,6 +407,16 @@ def main():
|
|
489 |
OpenAI Assistant
|
490 |
"""
|
491 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
492 |
# ui
|
493 |
with gr.Blocks() as blocks:
|
494 |
gr.Markdown(value=gr_description)
|
@@ -520,23 +448,35 @@ def main():
|
|
520 |
# create assistant
|
521 |
create_assistant_button = gr.Button("create assistant")
|
522 |
|
523 |
-
with gr.TabItem("
|
524 |
list_assistant_button = gr.Button("list assistant")
|
525 |
assistant_list = gr.TextArea(label="assistant_list")
|
526 |
|
527 |
-
delete_assistant_id = gr.Textbox(label="delete_assistant_id")
|
528 |
delete_assistant_button = gr.Button("delete assistant")
|
529 |
|
530 |
-
|
|
|
|
|
531 |
list_file_button = gr.Button("list file")
|
532 |
file_list = gr.TextArea(label="file_list")
|
533 |
|
534 |
-
delete_file_id = gr.Textbox(label="delete_file_id")
|
535 |
-
delete_file_button = gr.Button("delete file")
|
536 |
-
|
537 |
upload_files = gr.Files(label="upload_files")
|
538 |
upload_files_button = gr.Button("upload file")
|
539 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
540 |
# chat
|
541 |
with gr.Column(scale=5):
|
542 |
chat_bot = gr.Chatbot(label="conversation", height=600)
|
@@ -553,34 +493,15 @@ def main():
|
|
553 |
assistant_id = gr.Textbox(value=None, label="assistant_id")
|
554 |
thread_id = gr.Textbox(value=None, label="thread_id")
|
555 |
|
|
|
|
|
556 |
# examples
|
557 |
with gr.Row():
|
558 |
gr.Examples(
|
559 |
-
examples=
|
560 |
-
[
|
561 |
-
"Math Tutor",
|
562 |
-
"You are a personal math tutor. Write and run code to answer math questions.",
|
563 |
-
"Official math test cases",
|
564 |
-
None,
|
565 |
-
None,
|
566 |
-
"gpt-4-1106-preview",
|
567 |
-
"123 * 524 等于多少?"
|
568 |
-
],
|
569 |
-
[
|
570 |
-
"小说专家",
|
571 |
-
"根据小说内容回答问题。",
|
572 |
-
"三国演义文档问答测试",
|
573 |
-
"{\"type\": \"retrieval\"}",
|
574 |
-
[
|
575 |
-
(project_path / "data/三国演义.txt").as_posix()
|
576 |
-
],
|
577 |
-
"gpt-4-1106-preview",
|
578 |
-
"刘备和张飞是什么关系。"
|
579 |
-
],
|
580 |
-
],
|
581 |
inputs=[
|
582 |
name, instructions, description, tools, retrieval_files, model,
|
583 |
-
query,
|
584 |
],
|
585 |
examples_per_page=5
|
586 |
)
|
@@ -620,6 +541,17 @@ def main():
|
|
620 |
]
|
621 |
)
|
622 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
623 |
# list file
|
624 |
list_file_button.click(
|
625 |
click_list_file,
|
@@ -655,6 +587,37 @@ def main():
|
|
655 |
]
|
656 |
)
|
657 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
658 |
# add and run
|
659 |
add_and_run_button.click(
|
660 |
add_and_run,
|
|
|
5 |
"""
|
6 |
import argparse
|
7 |
import httpx
|
8 |
+
import importlib
|
9 |
import json
|
10 |
import logging
|
11 |
+
import os
|
12 |
+
import platform
|
13 |
+
import shutil
|
14 |
import time
|
15 |
from typing import List, Tuple
|
16 |
|
17 |
logging.basicConfig(
|
18 |
+
level=logging.INFO if platform.system() == "Windows" else logging.DEBUG,
|
19 |
format="%(asctime)s %(levelname)s %(message)s",
|
20 |
datefmt="%Y-%m-%d %H:%M:%S",
|
21 |
)
|
|
|
35 |
|
36 |
def get_args():
|
37 |
parser = argparse.ArgumentParser()
|
38 |
+
parser.add_argument(
|
39 |
+
"--examples_json_file",
|
40 |
+
default="examples.json",
|
41 |
+
type=str
|
42 |
+
)
|
43 |
parser.add_argument(
|
44 |
"--openai_api_key",
|
45 |
default=settings.environment.get("openai_api_key", default=None, dtype=str),
|
|
|
49 |
return args
|
50 |
|
51 |
|
52 |
+
def dynamic_import_function(package_name: str, function_name: str):
|
53 |
+
try:
|
54 |
+
lib = importlib.import_module("functions.{}".format(package_name))
|
55 |
+
print(lib)
|
56 |
+
except ModuleNotFoundError as e:
|
57 |
+
raise e
|
58 |
+
|
59 |
+
function = getattr(lib, function_name)
|
60 |
+
|
61 |
+
return function
|
62 |
|
63 |
|
64 |
def click_create_assistant(openai_api_key: str,
|
|
|
113 |
)
|
114 |
assistant_id = assistant.id
|
115 |
|
116 |
+
return assistant_id, None
|
|
|
|
|
|
|
|
|
117 |
|
118 |
|
119 |
def click_list_assistant(openai_api_key: str) -> str:
|
|
|
149 |
return result
|
150 |
|
151 |
|
152 |
+
def click_delete_all_assistant(openai_api_key: str):
|
153 |
+
client = OpenAI(
|
154 |
+
api_key=openai_api_key,
|
155 |
+
)
|
156 |
+
assistant_list = client.beta.assistants.list()
|
157 |
+
|
158 |
+
for a in assistant_list.data:
|
159 |
+
client.beta.assistants.delete(a.id)
|
160 |
+
return None
|
161 |
+
|
162 |
+
|
163 |
def click_list_file(openai_api_key: str):
|
164 |
client = OpenAI(
|
165 |
api_key=openai_api_key,
|
|
|
219 |
return result
|
220 |
|
221 |
|
222 |
+
def click_list_function_python_script():
|
223 |
+
function_script_dir = project_path / "functions"
|
224 |
+
result = ""
|
225 |
+
for script in function_script_dir.glob("*.py"):
|
226 |
+
if script.name == "__init__.py":
|
227 |
+
continue
|
228 |
+
result += script.name
|
229 |
+
result += "\n"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
230 |
return result
|
231 |
|
232 |
|
233 |
+
def click_upload_function_python_script(files: List[str]):
|
234 |
+
tgt = project_path / "functions"
|
235 |
+
if files is None:
|
236 |
+
return None
|
237 |
+
for file in files:
|
238 |
+
shutil.copy(file, tgt.as_posix())
|
239 |
+
return None
|
240 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
241 |
|
242 |
+
def click_delete_function_python_script(filename: str):
|
243 |
+
function_script_dir = project_path / "functions"
|
244 |
+
filename = function_script_dir / filename.strip()
|
245 |
+
filename = filename.as_posix()
|
246 |
+
try:
|
247 |
+
os.remove(filename)
|
248 |
+
result = "success"
|
249 |
+
except FileNotFoundError as e:
|
250 |
+
result = str(e)
|
251 |
+
except Exception as e:
|
252 |
+
result = str(e)
|
253 |
+
return result
|
254 |
|
255 |
|
256 |
def convert_message_list_to_conversation(message_list: List[dict]) -> List[Tuple[str, str]]:
|
|
|
290 |
return conversation
|
291 |
|
292 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
293 |
def refresh(openai_api_key: str,
|
294 |
thread_id: str,
|
295 |
):
|
|
|
297 |
api_key=openai_api_key,
|
298 |
)
|
299 |
|
300 |
+
message_list = client.beta.threads.messages.list(
|
301 |
+
thread_id=thread_id
|
302 |
+
)
|
303 |
+
message_list = message_list.model_dump(mode="json")
|
304 |
+
message_list = message_list["data"]
|
305 |
+
message_list = list(sorted(message_list, key=lambda x: x["created_at"]))
|
306 |
+
|
307 |
+
logger.debug("message_list: {}".format(message_list))
|
308 |
conversation = convert_message_list_to_conversation(message_list)
|
309 |
return conversation
|
310 |
|
|
|
325 |
api_key=openai_api_key,
|
326 |
)
|
327 |
if assistant_id is None or len(assistant_id.strip()) == 0:
|
328 |
+
assistant_id, _ = click_create_assistant(
|
329 |
openai_api_key,
|
330 |
name, instructions, description, tools, files, file_ids, model
|
331 |
)
|
|
|
334 |
thread = client.beta.threads.create()
|
335 |
thread_id = thread.id
|
336 |
|
337 |
+
logger.info(f"assistant_id: {assistant_id}, thread_id: {thread_id}")
|
338 |
+
|
339 |
message = client.beta.threads.messages.create(
|
340 |
thread_id=thread_id,
|
341 |
role="user",
|
|
|
345 |
thread_id=thread_id,
|
346 |
assistant_id=assistant_id,
|
347 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
348 |
|
349 |
delta_time = 0.1
|
350 |
+
last_conversation = None
|
351 |
no_updates_count = 0
|
352 |
+
max_no_updates_count = 5
|
353 |
while True:
|
354 |
time.sleep(delta_time)
|
355 |
+
run = client.beta.threads.runs.retrieve(
|
356 |
+
thread_id=thread_id,
|
357 |
+
run_id=run.id,
|
358 |
+
)
|
359 |
|
360 |
+
# required action
|
361 |
+
if run.required_action is not None:
|
362 |
+
if run.required_action.type == "submit_tool_outputs":
|
363 |
+
tool_outputs = list()
|
364 |
+
for tool_call in run.required_action.submit_tool_outputs.tool_calls:
|
365 |
+
function_name = tool_call.function.name
|
366 |
+
function_to_call = dynamic_import_function(function_name, function_name)
|
367 |
+
function_args = json.loads(tool_call.function.arguments)
|
368 |
+
function_response = function_to_call(
|
369 |
+
location=function_args.get("location"),
|
370 |
+
unit=function_args.get("unit"),
|
371 |
+
)
|
372 |
+
tool_outputs.append({
|
373 |
+
"tool_call_id": tool_call.id,
|
374 |
+
"output": function_response,
|
375 |
+
})
|
376 |
+
|
377 |
+
run = client.beta.threads.runs.submit_tool_outputs(
|
378 |
+
thread_id=thread_id,
|
379 |
+
run_id=run.id,
|
380 |
+
tool_outputs=tool_outputs
|
381 |
+
)
|
382 |
+
|
383 |
+
# get message
|
384 |
+
conversation = refresh(openai_api_key, thread_id)
|
385 |
+
if conversation == last_conversation:
|
386 |
+
if any([run.completed_at is not None,
|
387 |
+
run.cancelled_at is not None,
|
388 |
+
run.failed_at is not None,
|
389 |
+
run.expires_at is not None]):
|
390 |
+
no_updates_count += 1
|
391 |
|
|
|
|
|
392 |
if no_updates_count >= max_no_updates_count:
|
393 |
break
|
394 |
+
last_conversation = conversation
|
395 |
|
396 |
result = [
|
397 |
assistant_id, thread_id,
|
398 |
+
conversation,
|
|
|
399 |
]
|
400 |
yield result
|
401 |
|
|
|
407 |
OpenAI Assistant
|
408 |
"""
|
409 |
|
410 |
+
# example json
|
411 |
+
with open(args.examples_json_file, "r", encoding="utf-8") as f:
|
412 |
+
examples = json.load(f)
|
413 |
+
for example in examples:
|
414 |
+
files: List[str] = example[4]
|
415 |
+
if files is None:
|
416 |
+
continue
|
417 |
+
files = [(project_path / file).as_posix() for file in files]
|
418 |
+
example[4] = files
|
419 |
+
|
420 |
# ui
|
421 |
with gr.Blocks() as blocks:
|
422 |
gr.Markdown(value=gr_description)
|
|
|
448 |
# create assistant
|
449 |
create_assistant_button = gr.Button("create assistant")
|
450 |
|
451 |
+
with gr.TabItem("assistants"):
|
452 |
list_assistant_button = gr.Button("list assistant")
|
453 |
assistant_list = gr.TextArea(label="assistant_list")
|
454 |
|
455 |
+
delete_assistant_id = gr.Textbox(max_lines=1, label="delete_assistant_id")
|
456 |
delete_assistant_button = gr.Button("delete assistant")
|
457 |
|
458 |
+
delete_all_assistant_button = gr.Button("delete all assistant")
|
459 |
+
|
460 |
+
with gr.TabItem("files"):
|
461 |
list_file_button = gr.Button("list file")
|
462 |
file_list = gr.TextArea(label="file_list")
|
463 |
|
|
|
|
|
|
|
464 |
upload_files = gr.Files(label="upload_files")
|
465 |
upload_files_button = gr.Button("upload file")
|
466 |
|
467 |
+
delete_file_id = gr.Textbox(max_lines=1, label="delete_file_id")
|
468 |
+
delete_file_button = gr.Button("delete file")
|
469 |
+
|
470 |
+
with gr.TabItem("function script"):
|
471 |
+
list_function_python_script_button = gr.Button("list python script")
|
472 |
+
list_function_python_script_list = gr.TextArea(label="python_script_list")
|
473 |
+
|
474 |
+
upload_function_python_script_files = gr.Files(label="upload_python_script_files")
|
475 |
+
upload_function_python_script_button = gr.Button("upload python script")
|
476 |
+
|
477 |
+
delete_function_python_script_file = gr.Textbox(max_lines=1, label="delete_python_script_file")
|
478 |
+
delete_function_python_script_button = gr.Button("delete python script")
|
479 |
+
|
480 |
# chat
|
481 |
with gr.Column(scale=5):
|
482 |
chat_bot = gr.Chatbot(label="conversation", height=600)
|
|
|
493 |
assistant_id = gr.Textbox(value=None, label="assistant_id")
|
494 |
thread_id = gr.Textbox(value=None, label="thread_id")
|
495 |
|
496 |
+
tips = gr.TextArea(value=None, label="tips")
|
497 |
+
|
498 |
# examples
|
499 |
with gr.Row():
|
500 |
gr.Examples(
|
501 |
+
examples=examples,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
502 |
inputs=[
|
503 |
name, instructions, description, tools, retrieval_files, model,
|
504 |
+
query, tips
|
505 |
],
|
506 |
examples_per_page=5
|
507 |
)
|
|
|
541 |
]
|
542 |
)
|
543 |
|
544 |
+
# delete all assistant
|
545 |
+
delete_all_assistant_button.click(
|
546 |
+
click_delete_all_assistant,
|
547 |
+
inputs=[
|
548 |
+
openai_api_key
|
549 |
+
],
|
550 |
+
outputs=[
|
551 |
+
file_list
|
552 |
+
]
|
553 |
+
)
|
554 |
+
|
555 |
# list file
|
556 |
list_file_button.click(
|
557 |
click_list_file,
|
|
|
587 |
]
|
588 |
)
|
589 |
|
590 |
+
# list python script
|
591 |
+
list_function_python_script_button.click(
|
592 |
+
click_list_function_python_script,
|
593 |
+
inputs=[],
|
594 |
+
outputs=[
|
595 |
+
list_function_python_script_list
|
596 |
+
]
|
597 |
+
)
|
598 |
+
|
599 |
+
# upload function python script
|
600 |
+
upload_function_python_script_button.click(
|
601 |
+
click_upload_function_python_script,
|
602 |
+
inputs=[
|
603 |
+
upload_function_python_script_files
|
604 |
+
],
|
605 |
+
outputs=[
|
606 |
+
upload_function_python_script_files
|
607 |
+
]
|
608 |
+
)
|
609 |
+
|
610 |
+
# delete function python script
|
611 |
+
delete_function_python_script_button.click(
|
612 |
+
click_delete_function_python_script,
|
613 |
+
inputs=[
|
614 |
+
delete_function_python_script_file
|
615 |
+
],
|
616 |
+
outputs=[
|
617 |
+
delete_function_python_script_file
|
618 |
+
]
|
619 |
+
)
|
620 |
+
|
621 |
# add and run
|
622 |
add_and_run_button.click(
|
623 |
add_and_run,
|