# Chatbot demo: two famous personas converse with each other via the OpenAI
# chat-completions API, presented in a Gradio interface.
import os
from openai import OpenAI
import gradio as gr
import json
from dotenv import load_dotenv
# Load environment variables from the local .env file
load_dotenv()
import base64
# Embed the Ceibal logo as base64 so it can be inlined as a data URI in the
# Gradio HTML header below.
with open("Iso_Logotipo_Ceibal.png", "rb") as image_file:
    encoded_image = base64.b64encode(image_file.read()).decode()
# Two OpenAI clients, one per persona.  NOTE(review): both use the same API
# key, so a single shared client would behave identically — confirm before
# consolidating.
client1= OpenAI(api_key=os.environ['OPENAI_API_KEY'])
client2= OpenAI(api_key=os.environ['OPENAI_API_KEY'])
def clear_chat(message, chat_history):
    """Reset the conversation: blank the textbox and empty the history."""
    cleared_text, cleared_history = "", []
    return cleared_text, cleared_history
def add_new_message_client1(message, person, chat_history):
    """Build the OpenAI message list for persona 1's next completion.

    Args:
        message: The newest message to append as the final "user" turn.
        person: Name of the famous person the model must impersonate.
        chat_history: Prior turns as (user, bot) pairs.

    Returns:
        A list of {"role", "content"} dicts: the system persona prompt,
        the replayed history, then the new user message.
    """
    # BUG FIX: the original system prompt contained mojibake ("tendr谩s",
    # "c贸mo") from UTF-8 text decoded as GBK; restored the Spanish accents.
    new_chat = [
        {
            "role": "system",
            "content": "Sos {} y tendrás que responder preguntas, las respuestas tienen que ser cómo si las hubiera dicho {} ".format(person, person),
        }
    ]
    for user, bot in chat_history:
        new_chat.append({"role": "user", "content": user})
        new_chat.append({"role": "assistant", "content": bot})
    new_chat.append({"role": "user", "content": message})
    return new_chat
def add_new_message_client2(message, person, chat_history):
    """Build the OpenAI message list for persona 2's next completion.

    Args:
        message: The newest message to append as the final "user" turn.
        person: Name of the famous person the model must impersonate.
        chat_history: Prior turns as (user, bot) pairs.

    Returns:
        A list of {"role", "content"} dicts: the system persona prompt,
        the replayed history, then the new user message.
    """
    # BUG FIX: restored Spanish accents that were mojibake (UTF-8 read as
    # GBK) in the original prompt string.
    new_chat = [
        {
            "role": "system",
            "content": "Sos {} y tendrás que responder preguntas, las respuestas tienen que ser cómo si las hubiera dicho {} ".format(person, person),
        }
    ]
    for user, bot in chat_history:
        new_chat.append({"role": "user", "content": user})
        new_chat.append({"role": "assistant", "content": bot})
    new_chat.append({"role": "user", "content": message})
    return new_chat
# Global turn counter used by respond(): after the seed turn it alternates —
# even values make person1 open a new history pair, odd values make person2
# fill in the answer slot of the last pair.
counter2 =1
def respond(person1, person2, chat_history):
    """Advance the two-persona conversation by one model turn.

    On the first call the chat is seeded with "Hola" and person1 replies.
    Afterwards the global ``counter2`` alternates turns: on even counts
    person1 produces a new message (appended as a half-filled history
    pair), on odd counts person2 answers into that pair's empty slot.

    Args:
        person1: Name of the first famous persona.
        person2: Name of the second famous persona.
        chat_history: Gradio chat history, a list of [user, bot] pairs.

    Returns:
        Tuple of ("" to clear the input textbox, the updated history).
    """
    global counter2
    if len(chat_history) < 1:
        # Seed turn: greet and let person1 answer.
        message = "Hola"
        prompt = add_new_message_client1(message, person1, chat_history)
        response = client1.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=prompt,
            temperature=0.5,
            max_tokens=1000,
            stream=False,
        )
        # BUG FIX: append a mutable list — the original appended tuples,
        # which would make the item assignment in the odd-turn branch below
        # raise TypeError when called with an unconverted history.
        chat_history.append([message, response.choices[0].message.content])
    else:
        counter2 += 1
        if counter2 % 2 == 0:
            # person1 speaks next, prompted by person2's latest answer;
            # start a new half-filled pair.
            prompt = add_new_message_client1(chat_history[-1][1], person1, chat_history)
            response = client1.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=prompt,
                temperature=0.5,
                max_tokens=1000,
                stream=False,
            )
            chat_history.append([response.choices[0].message.content, ""])
        else:
            # person2 answers person1's latest message, filling the empty
            # slot of the last pair in place.
            prompt = add_new_message_client2(chat_history[-1][1], person2, chat_history)
            response = client2.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=prompt,
                temperature=0.5,
                max_tokens=1000,
                stream=False,
            )
            chat_history[-1][1] = response.choices[0].message.content
    return "", chat_history
# NOTE: removed a large block of commented-out experimental code that handled
# streaming responses (stream=True chunk accumulation); restore from version
# control if streaming output is reintroduced.
# Build and launch the Gradio UI.  Fixes over the original: mojibake in the
# Spanish UI strings restored, "perosnaje" typo corrected, the two persona
# textboxes given distinct labels, and the logo data URI declared as
# image/png (the embedded file is a PNG, not a JPEG).
with gr.Blocks() as demo:
    gr.Markdown("""
    <center>
    <h1>
    Uso de AI para un chatbot.
    </h1>
    <img src='data:image/png;base64,{}' width=200px>
    <h3>
    Con este espacio podrás hablar en formato conversación con el personaje famoso que quieras, puede ser Albert Einstein, Marie Curie o el/la que quieras!
    </h3>
    </center>
    """.format(encoded_image))
    with gr.Row():
        person1 = gr.Textbox(label="Escribí el nombre del primer personaje famoso:")
        person2 = gr.Textbox(label="Escribí el nombre del segundo personaje famoso:")
    with gr.Row():
        chatbot = gr.Chatbot(height=550)  # just to fit the notebook
    with gr.Row():
        with gr.Row():
            with gr.Column(scale=4):
                msg = gr.Textbox(label="Texto de entrada")
            with gr.Column(scale=1):
                btn = gr.Button("Enviar")
                clear = gr.ClearButton(components=[msg, chatbot], value="Borrar chat")
    btn.click(respond, inputs=[person1, person2, chatbot], outputs=[msg, chatbot])
    # msg.submit(respond, inputs=[person1, person2, chatbot], outputs=[msg, chatbot])  # press Enter to submit
    clear.click(clear_chat, inputs=[msg, chatbot], outputs=[msg, chatbot])

demo.queue()
demo.launch()