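# Gradio Space: an interactive story chat driven by meta-llama/Meta-Llama-3-8B-Instruct.
# The model is expected to answer with JSON ({"msg": ..., "options": n, "ops": [...]}),
# which is turned into a message plus up to four clickable option buttons.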
import gradio as gr
from huggingface_hub import InferenceClient
from os import environ
from json import loads
from random import randint
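
# Inference API client for the chat model; the access token and the system prompt
# are read from environment variables (on Spaces these are typically set as secrets).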
client = InferenceClient(
    "meta-llama/Meta-Llama-3-8B-Instruct",
    token=environ["token"],
)
main_prompt = environ["prompt"]
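
# Custom CSS for the Blocks layout: container, message textbox and option buttons.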
css = """
.gradio-container {
    background-color: #F0F0F0;
    border: 2px solid #333;
    padding: 20px;
    border-radius: 10px;
}
.feedback textarea {
    font-size: 18px;
    padding: 10px;
    border: 1px solid #999;
    border-radius: 5px;
}
.gradio-button {
    background-color: #0074D9;
    color: white;
    font-weight: bold;
    border: none;
    border-radius: 5px;
    padding: 10px 20px;
    margin-right: 10px;
}
"""
def slice_per(source, step):
    return [source[idx:idx + step] for idx in range(0, len(source), step)]
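

# Convert a list of {"role", "content"} messages into Chatbot-style [user, assistant] pairs.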
def llamatochat(history):
    contents = [message["content"] for message in history]
    return slice_per(contents, 2)
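

# Inverse of llamatochat: expand Chatbot [user, assistant] pairs into role-based message dicts.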
def chatollama(historias):
    history = []
    for user_msg, assistant_msg in historias:
        history.append({"role": "user", "content": user_msg})
        history.append({"role": "assistant", "content": assistant_msg})
    return history
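

# Handle a click on one of the option buttons: the clicked button's label is sent as the
# next user message, the model's JSON reply updates the message box and the hidden
# Chatbot history, and the option buttons are rebuilt from the "ops" list.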
def reply(msg, history):
    historias = [{"role": "system", "content": main_prompt}]
    historias.extend(chatollama(history))
    historias.append({"role": "user", "content": msg})
    m = client.chat_completion(
        messages=historias,
        max_tokens=1024,
        stream=False,
        seed=randint(100_000_000, 999_999_999),
    )
    p = m.choices[0].message.content
    historias.append({"role": "assistant", "content": p})
    try:
        l = loads(p)
    except Exception:
        l = {"msg": "Ocurrió un error", "options": 0}
        print(p, "->")  # log the raw reply when it is not valid JSON
    historias.pop(0)  # drop the system prompt before rebuilding the chat history
    resp = [
        gr.Textbox(value=l.get("msg")),
        gr.Chatbot(value=llamatochat(historias), visible=False),
    ]
    n_options = l.get("options", 0)
    for c in range(n_options):
        resp.append(gr.Button(value=l.get("ops")[c], visible=True))
    for _ in range(4 - n_options):
        resp.append(gr.Button(value="", visible=False))
    return resp
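

# Start a new story: the theme typed by the user is sent as the first message,
# the theme textbox and submit button are hidden, and the first message plus
# option buttons are shown.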
def set_theme(theme):
    historias = [{"role": "system", "content": main_prompt}]
    historias.append({"role": "user", "content": f"Tema: {theme}"})
    m = client.chat_completion(
        messages=historias,
        max_tokens=1024,
        stream=False,
        seed=randint(100_000_000, 999_999_999),
    )
    historias.pop(0)  # drop the system prompt before rebuilding the chat history
    p = m.choices[0].message.content
    historias.append({"role": "assistant", "content": p})
    try:
        l = loads(p)
    except Exception:
        l = {"msg": "Ocurrió un error", "options": 0}
        print(p, "<-")  # log the raw reply when it is not valid JSON
    cc = gr.Chatbot(value=llamatochat(historias), visible=False)
    resp = [
        gr.Textbox(visible=False),  # hide the theme textbox
        gr.Button(visible=False),   # hide the submit button
        gr.Textbox(visible=True, value=l.get("msg")),
        cc,
    ]
    n_options = l.get("options", 0)
    for c in range(n_options):
        resp.append(gr.Button(value=l.get("ops")[c], visible=True))
    for _ in range(4 - n_options):
        resp.append(gr.Button(value="", visible=False))
    return resp
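

# UI wiring: a hidden Chatbot stores the running history, up to four option buttons
# are shown at a time, and clicking one feeds its label back into reply().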
with gr.Blocks(css=css, fill_height=True) as ia:
    cbox = gr.Chatbot(visible=False)
    gr.Markdown("""
    """)  # empty Markdown placeholder
    tbox = gr.Textbox(label="Tema de la historia (Ej: Amistad); puede incluir más detalles")
    mbox = gr.Textbox(label="Mensaje", visible=False, elem_classes="feedback")
    tsub = gr.Button(value="Enviar")
    opciones = [gr.Button(value="", visible=False, elem_classes="gradio-button") for _ in range(4)]
    for opcion in opciones:
        opcion.click(reply, [opcion, cbox], [mbox, cbox, *opciones])
    tsub.click(set_theme, [tbox], [tbox, tsub, mbox, cbox, *opciones])

ia.launch(show_api=False)