import gradio as gr
import json
import os

from openai import OpenAI

# OpenAI client and model configuration
api_key = os.getenv("OPENAI_KEY")
client = OpenAI(api_key=api_key)
modelx = 'gpt-3.5-turbo-0125'
# Define the JSON structure to fill
predefined_json = {
    "name": None,
    "age": None,
    "email": None,
    "address": None
}
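# Example (hypothetical) of what a fully extracted structure could look like:
# {"name": "Ana Pérez", "age": 34, "email": "ana@example.com", "address": "Av. Arequipa 123, Lima"}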
'''
# Alternative: load a local Hugging Face model instead of calling the OpenAI API
from transformers import pipeline
model = pipeline("text2text-generation", model="google/flan-t5-small", max_length=256)
model = pipeline("text-generation", model="tiiuae/falcon-7b-instruct", max_length=512)
'''
# Generation function: send a system prompt plus the user text to the chat API
def generacion_llm(mensaje_sistema, mensaje_usuario):
    response = client.chat.completions.create(
        model=modelx,
        messages=[
            {"role": "system", "content": mensaje_sistema},
            {"role": "user", "content": mensaje_usuario}
        ],
        temperature=0.8,
        max_tokens=300,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0
    )
    return response
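# Optional variant (an assumption, not part of the original app): gpt-3.5-turbo-0125
# supports JSON mode, which constrains the model to return a valid JSON object and
# can make the parsing in convert_text_to_json more reliable. Minimal sketch, unused here:
def generacion_llm_json(mensaje_sistema, mensaje_usuario):
    return client.chat.completions.create(
        model=modelx,
        messages=[
            {"role": "system", "content": mensaje_sistema},
            {"role": "user", "content": mensaje_usuario}
        ],
        response_format={"type": "json_object"},  # assumption: JSON mode available for this model
        temperature=0.8,
        max_tokens=300
    )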
def convert_text_to_json(input_text):
    # Ask the LLM to extract the predefined fields from the free-form input
    mensaje_sistema = f"Extrae los campos incluidos en el siguiente formato JSON: {list(predefined_json.keys())}"
    response = generacion_llm(mensaje_sistema, input_text).choices[0].message.content

    # Attempt to parse the response into a JSON object
    try:
        generated_json = json.loads(response)  # Safer than eval()
    except json.JSONDecodeError:
        return {}, "Error: El modelo no retornó un formato JSON válido."

    # Report any fields the model left empty or missing
    missing_fields = [key for key in predefined_json if key not in generated_json or not generated_json[key]]
    missing_message = f"Campos faltantes: {', '.join(missing_fields)}" if missing_fields else "Todos los campos están completos."
    return generated_json, missing_message
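# Example (hypothetical) call: for input text such as
#   "Me llamo Ana, tengo 34 años y mi correo es ana@example.com"
# the model could return {"name": "Ana", "age": 34, "email": "ana@example.com"},
# and the app would report "Campos faltantes: address". Actual output depends on the model.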
# Define the Gradio app
interface = gr.Interface(
    fn=convert_text_to_json,
    inputs="text",
    outputs=["json", "text"],
    title="Convertidor Texto en JSON",
    description="Ingrese el texto para extraer información en un formato JSON predefinido."
)

interface.launch()