Spaces:
Sleeping
Sleeping
use streaming
Browse files
- app.py +31 -13
- requirements.txt +1 -1
app.py
CHANGED
@@ -6,6 +6,7 @@ from pydantic import BaseModel
|
|
6 |
from typing import List
|
7 |
import ast
|
8 |
import os
|
|
|
9 |
|
10 |
|
11 |
QUEUE_MAX_SIZE = int(os.getenv("QUEUE_MAX_SIZE", 20))
|
@@ -21,6 +22,7 @@ class LearningBotRequest(BaseModel):
|
|
21 |
session_id: str
|
22 |
context: dict
|
23 |
user_serial: str
|
|
|
24 |
|
25 |
|
26 |
def generate_uuid():
|
@@ -41,15 +43,13 @@ def construct_message(list_message):
|
|
41 |
|
42 |
|
43 |
def send_message(url, request):
|
44 |
-
|
45 |
-
if response.status_code != 200:
|
46 |
-
raise gr.Error(response.text)
|
47 |
-
else:
|
48 |
-
result = response.json()["data"]["reply"]
|
49 |
-
return result
|
50 |
|
51 |
|
52 |
-
def respond(chat_history, message, session_id, user_serial, persona, context, endpoint):
|
|
|
|
|
|
|
53 |
if session_id is None:
|
54 |
session_id = generate_uuid()
|
55 |
|
@@ -64,13 +64,30 @@ def respond(chat_history, message, session_id, user_serial, persona, context, en
|
|
64 |
persona=persona,
|
65 |
session_id=session_id,
|
66 |
context=context,
|
67 |
-
user_serial=user_serial
|
|
|
|
|
68 |
)
|
69 |
|
70 |
response = send_message(endpoint, request)
|
71 |
|
|
|
|
|
|
|
|
|
|
|
|
|
72 |
|
73 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
74 |
|
75 |
|
76 |
def reset_textbox():
|
@@ -88,10 +105,11 @@ with gr.Blocks() as demo:
|
|
88 |
with gr.Accordion("Parameters", open=False):
|
89 |
user_serial = gr.Textbox(label="User serial")
|
90 |
context = gr.Textbox(label="context", value={})
|
91 |
-
persona = gr.Textbox(label="persona")
|
92 |
|
93 |
chatbot = gr.Chatbot()
|
94 |
message = gr.Textbox(placeholder="Halo kak, aku mau bertanya", label="Chat Here")
|
|
|
95 |
with gr.Row():
|
96 |
with gr.Column(scale=5):
|
97 |
send = gr.Button("Send")
|
@@ -99,12 +117,12 @@ with gr.Blocks() as demo:
|
|
99 |
status_box = gr.Textbox(label="Status code from OpenAI server")
|
100 |
session = gr.Textbox(label="session_id")
|
101 |
|
102 |
-
message.submit(respond, [chatbot, message, session_id, user_serial, persona, context, endpoint], [chatbot, status_box, session, session_id])
|
103 |
|
104 |
clear.click(lambda: None, None, chatbot, queue=False)
|
105 |
clear.click(lambda: None, None, session_id, queue=False)
|
106 |
|
107 |
-
send.click(respond, [chatbot, message, session_id, user_serial, persona, context, endpoint], [chatbot, status_box, session, session_id])
|
108 |
send.click(reset_textbox, [], [message])
|
109 |
message.submit(reset_textbox, [], [message])
|
110 |
|
@@ -112,4 +130,4 @@ with gr.Blocks() as demo:
|
|
112 |
demo
|
113 |
.queue(max_size=QUEUE_MAX_SIZE, concurrency_count=QUEUE_CONCURENCY_COUNT)
|
114 |
.launch(auth=(USERNAME, PASSWORD), debug=True)
|
115 |
-
)
|
|
|
6 |
from typing import List
|
7 |
import ast
|
8 |
import os
|
9 |
+
import sseclient
|
10 |
|
11 |
|
12 |
QUEUE_MAX_SIZE = int(os.getenv("QUEUE_MAX_SIZE", 20))
|
|
|
22 |
session_id: str
|
23 |
context: dict
|
24 |
user_serial: str
|
25 |
+
stream: bool
|
26 |
|
27 |
|
28 |
def generate_uuid():
|
|
|
43 |
|
44 |
|
45 |
def send_message(url, request):
    """POST *request* to *url* and return the raw streaming response.

    The response is opened with ``stream=True`` so the caller can wrap it
    in an SSE client and iterate server-sent events incrementally.

    Args:
        url: Endpoint URL of the chat backend.
        request: Pydantic model (``LearningBotRequest``); serialized via
            its ``.json()`` method.

    Returns:
        ``requests.Response`` with an open streaming body.

    Raises:
        requests.HTTPError: if the server answers with a non-2xx status,
            instead of letting the SSE parser choke on an error body
            (the pre-streaming version raised on non-200 too).
        requests.Timeout: if no response headers arrive within 60s.
    """
    response = requests.post(
        url,
        stream=True,
        data=request.json(),
        # The body is JSON produced by pydantic's .json(); without this
        # header the server may not parse it as JSON.
        headers={"Content-Type": "application/json"},
        timeout=60,
    )
    # Fail fast on HTTP errors before the caller starts SSE iteration.
    response.raise_for_status()
    return response
|
|
|
|
|
|
|
|
|
|
|
47 |
|
48 |
|
49 |
+
def respond(chat_history, message, history, session_id, user_serial, persona, context, endpoint):
|
50 |
+
if history is None:
|
51 |
+
history = []
|
52 |
+
history.append(message)
|
53 |
if session_id is None:
|
54 |
session_id = generate_uuid()
|
55 |
|
|
|
64 |
persona=persona,
|
65 |
session_id=session_id,
|
66 |
context=context,
|
67 |
+
user_serial=user_serial,
|
68 |
+
stream=True,
|
69 |
+
product="learning_companion"
|
70 |
)
|
71 |
|
72 |
response = send_message(endpoint, request)
|
73 |
|
74 |
+
token_counter = 0
|
75 |
+
partial_reply = ""
|
76 |
+
client = sseclient.SSEClient(response)
|
77 |
+
for event in client.events():
|
78 |
+
data = json.loads(event.data)["data"]
|
79 |
+
partial_reply = partial_reply + data["data"]["reply"]
|
80 |
|
81 |
+
if token_counter == 0:
|
82 |
+
history.append(" " + partial_reply)
|
83 |
+
else:
|
84 |
+
history[-1] = partial_reply
|
85 |
+
|
86 |
+
chat = [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)] # convert to tuples of list
|
87 |
+
|
88 |
+
token_counter += 1
|
89 |
+
|
90 |
+
yield chat, history, "Success", session_id, session_id
|
91 |
|
92 |
|
93 |
def reset_textbox():
|
|
|
105 |
with gr.Accordion("Parameters", open=False):
|
106 |
user_serial = gr.Textbox(label="User serial")
|
107 |
context = gr.Textbox(label="context", value={})
|
108 |
+
persona = gr.Textbox(label="persona", value="a493700848d84d0dab8d0095c2477c1e")
|
109 |
|
110 |
chatbot = gr.Chatbot()
|
111 |
message = gr.Textbox(placeholder="Halo kak, aku mau bertanya", label="Chat Here")
|
112 |
+
state = gr.State([])
|
113 |
with gr.Row():
|
114 |
with gr.Column(scale=5):
|
115 |
send = gr.Button("Send")
|
|
|
117 |
status_box = gr.Textbox(label="Status code from OpenAI server")
|
118 |
session = gr.Textbox(label="session_id")
|
119 |
|
120 |
+
message.submit(respond, [chatbot, message, state, session_id, user_serial, persona, context, endpoint], [chatbot, state, status_box, session, session_id])
|
121 |
|
122 |
clear.click(lambda: None, None, chatbot, queue=False)
|
123 |
clear.click(lambda: None, None, session_id, queue=False)
|
124 |
|
125 |
+
send.click(respond, [chatbot, message, state, session_id, user_serial, persona, context, endpoint], [chatbot, state, status_box, session, session_id])
|
126 |
send.click(reset_textbox, [], [message])
|
127 |
message.submit(reset_textbox, [], [message])
|
128 |
|
|
|
130 |
demo
|
131 |
.queue(max_size=QUEUE_MAX_SIZE, concurrency_count=QUEUE_CONCURENCY_COUNT)
|
132 |
.launch(auth=(USERNAME, PASSWORD), debug=True)
|
133 |
+
)
|
requirements.txt
CHANGED
@@ -1,2 +1,2 @@
|
|
1 |
openai==0.27.2
|
2 |
-
|
|
|
1 |
openai==0.27.2
|
2 |
+
sseclient-py==1.7.2
|