import gradio as gr
from transformers import pipeline

# Load the hotdog-not-hotdog image classification model
hotdog_pipeline = pipeline(task="image-classification", model="julien-c/hotdog-not-hotdog")

# Load the chatbot model (DialoGPT)
chatbot_pipeline = pipeline(task="text-generation", model="microsoft/DialoGPT-medium")


def predict(input_img, chat_input):
    # Classify the image as hotdog or not
    hotdog_predictions = hotdog_pipeline(input_img)

    # Generate a chatbot response from the text input
    chatbot_response = chatbot_pipeline(chat_input, max_length=50)[0]["generated_text"]

    # Return one value per output component: image, label scores, chatbot text
    return (
        input_img,
        {p["label"]: p["score"] for p in hotdog_predictions},
        chatbot_response,
    )


gradio_app = gr.Interface(
    fn=predict,
    inputs=[
        gr.Image(label="Select hot dog candidate", sources=["upload", "webcam"], type="pil"),
        gr.Textbox(label="Chatbot Input", placeholder="Type something to chat with the bot"),
    ],
    outputs=[
        gr.Image(label="Processed Image"),
        gr.Label(label="Hotdog Classification", num_top_classes=2),
        gr.Textbox(label="Chatbot Response"),
    ],
    title="Hot Dog? Or Not? with Chatbot",
)

if __name__ == "__main__":
    gradio_app.launch()