# This is a sample Python script.
# Press ⌃R to execute it or replace it with your code.
# Press Double ⇧ to search everywhere for classes, files, tool windows, actions, and settings.
import gradio as gr
from transformers import pipeline
from transformers import Conversation


def chatwith_blenderbot400m():
    """Two-turn chat with BlenderBot-400M via the conversational pipeline.

    Note: the "conversational" task and the Conversation class are deprecated in newer
    transformers releases, so this function may require an older transformers version.
    """
    pipe = pipeline(task="conversational", model="facebook/blenderbot-400M-distill")
    user_message = "What are some fun activities I can do in the winter?"
    conversation = Conversation(user_message)
    print(conversation)
    print(type(conversation))
    conversation = pipe(conversation)
    print(conversation)
    conversation.add_message(
        {"role": "user", "content": "I would like to do outdoor activities. Which activities can I do?"})
    conversation = pipe(conversation)
    print(conversation)


def chatwith_qwen2_1point5b_instruct():
    """Two-turn chat with Qwen2-1.5B-Instruct via the text-generation pipeline.

    Passing a list of role/content messages makes the pipeline apply the model's chat
    template; the returned "generated_text" is the full message list including the reply.
    """
    pipe = pipeline(task="text-generation", model="Qwen/Qwen2-1.5B-Instruct")
    messages = [{"role": "user", "content": "What are some fun activities I can do in the winter?"}]
    messages = pipe(messages, max_new_tokens=50)[0]["generated_text"]
    print(messages)
    messages.append({"role": "user", "content": "I would like to do outdoor activities. Which activities can I do?"})
    print(messages)
    messages = pipe(messages, max_new_tokens=50)[0]["generated_text"]
    print(messages)


# chatwith_qwen2_1point5b_instruct()


def chatwith_qwen2_1point5b_instruct_prompt(prompt, max_new_tokens):
    """Single-turn variant: send one prompt to Qwen2-1.5B-Instruct and return the message list."""
    pipe = pipeline(task="text-generation", model="Qwen/Qwen2-1.5B-Instruct")
    messages = [{"role": "user", "content": prompt}]
    messages = pipe(messages, max_new_tokens=max_new_tokens)[0]["generated_text"]
    return messages


# Load the model once at module level so every Gradio request reuses the same pipeline.
pipe = pipeline(task="text-generation", model="Qwen/Qwen2-1.5B-Instruct")


def chatbot_handler(user_message, history):
    """Gradio ChatInterface callback: rebuild the conversation, call the model, return the reply."""
    bot_response = "I don't think so"
    messages = []
    user_message = {"role": "user", "content": user_message}
    # Convert the Gradio history (a list of [user, assistant] pairs) into role/content
    # messages, then append the new user message before calling the model.
    for message in history:
        messages.append({"role": "user", "content": message[0]})
        messages.append({"role": "assistant", "content": message[1]})
    messages.append(user_message)
    print(f"messages before sending to model {messages}")
    messages = pipe(messages, max_new_tokens=512)[0]["generated_text"]
    print(f"messages after sending to model {messages}")
    if messages:  # messages has at least one item
        # The pipeline returns the whole conversation; the last message is the assistant's reply.
        print(f"the last message is: {messages[-1]}")
        bot_response = messages[-1]["content"]
        print(bot_response)
    return bot_response


chatbot = gr.ChatInterface(chatbot_handler)
chatbot.launch(share=False)
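

# A minimal, optional sketch of how chatbot_handler could be exercised directly from
# Python without launching the Gradio UI, assuming the default tuple-style history that
# gr.ChatInterface passes to its callback. The function name and the example history
# below are hypothetical and only illustrate the expected input shape.
def smoke_test_chatbot_handler():
    history = [
        ["What are some fun activities I can do in the winter?",
         "You could try skiing, ice skating, or building a snowman."],
    ]
    reply = chatbot_handler("I would like to do outdoor activities. Which activities can I do?", history)
    print(reply)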