sandz7 committed on
Commit
c266f85
·
verified ·
1 Parent(s): 7e71487

Moved the whole interface into Blocks instead of ChatInterface

Browse files
Files changed (1) hide show
  1. app.py +22 -9
app.py CHANGED
@@ -210,11 +210,24 @@ def bot_comms(message, history):
210
  buffer += text
211
  yield "".join(gpt_outputs)
212
 
213
- chatbot = gr.Chatbot(height=600, label="Chimera AI")
214
- chat_input = gr.MultimodalTextbox(interactive=True, file_types=["images"], placeholder="Enter your question or upload an image.", show_label=False)
215
 
216
  with gr.Blocks(fill_height=True) as demo:
217
  gr.Markdown(DESCRIPTION)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
218
  # if mode_gradio == "imagery":
219
  # gr.Interface()
220
  # Customize chatinterface to handle tuples
@@ -228,13 +241,13 @@ with gr.Blocks(fill_height=True) as demo:
228
  # else:
229
  # output.append(item)
230
  # return output
231
- gr.ChatInterface(
232
- fn=bot_comms,
233
- chatbot=chatbot,
234
- fill_height=True,
235
- multimodal=True,
236
- textbox=chat_input,
237
- )
238
 
239
  if __name__ == "__main__":
240
  demo.launch()
 
210
  buffer += text
211
  yield "".join(gpt_outputs)
212
 
 
 
213
 
214
  with gr.Blocks(fill_height=True) as demo:
215
  gr.Markdown(DESCRIPTION)
216
+
217
+ chatbot = gr.Chatbot(height=600, label="Chimera AI")
218
+ chat_input = gr.MultimodalTextbox(interactive=True, file_types=["images"], placeholder="Enter your question or upload an image.", show_label=False)
219
+ image_output = gr.Image(type="pil", label="Generated Image")
220
+
221
+ def process_response(message, history):
222
+ response = bot_comms(message, history)
223
+ if isinstance(response, tuple) and len(response) == 2:
224
+ text, image = response
225
+ return text, image
226
+ return response, None
227
+
228
+ chatbot_output = gr.Chatbot(height=600, label="Chimera AI")
229
+
230
+ chat_input.submit(process_response, inputs=[chat_input, chatbot], outputs=[chat_output, image_output])
231
  # if mode_gradio == "imagery":
232
  # gr.Interface()
233
  # Customize chatinterface to handle tuples
 
241
  # else:
242
  # output.append(item)
243
  # return output
244
+ # gr.ChatInterface(
245
+ # fn=bot_comms,
246
+ # chatbot=chatbot,
247
+ # fill_height=True,
248
+ # multimodal=True,
249
+ # textbox=chat_input,
250
+ # )
251
 
252
  if __name__ == "__main__":
253
  demo.launch()