Materialize the chunks with `async for` over the stream before yielding them
Browse files
app.py
CHANGED
@@ -143,20 +143,12 @@ async def bot_comms(message, history):
|
|
143 |
message=message,
|
144 |
history=history,
|
145 |
)
|
146 |
-
|
147 |
-
|
148 |
-
|
149 |
-
|
150 |
-
|
151 |
-
|
152 |
-
yield "".join(results)
|
153 |
-
else:
|
154 |
-
gpt_outputs = []
|
155 |
-
for chunk in stream:
|
156 |
-
if chunk.choices[0].delta.content is not None:
|
157 |
-
text = chunk.choices[0].delta.content
|
158 |
-
gpt_outputs.append(text)
|
159 |
-
yield "".join(gpt_outputs)
|
160 |
|
161 |
chatbot = gr.Chatbot(height=600, label="Chimera AI")
|
162 |
chat_input = gr.MultimodalTextbox(interactive=True, file_types=["images"], placeholder="Enter your question or upload an image.", show_label=False)
|
|
|
143 |
message=message,
|
144 |
history=history,
|
145 |
)
|
146 |
+
gpt_outputs = []
|
147 |
+
async for chunk in stream:
|
148 |
+
if chunk.choices[0].delta.content is not None:
|
149 |
+
text = chunk.choices[0].delta.content
|
150 |
+
gpt_outputs.append(text)
|
151 |
+
yield "".join(gpt_outputs)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
152 |
|
153 |
chatbot = gr.Chatbot(height=600, label="Chimera AI")
|
154 |
chat_input = gr.MultimodalTextbox(interactive=True, file_types=["images"], placeholder="Enter your question or upload an image.", show_label=False)
|