import gradio as gr
from transformers import TextIteratorStreamer
from threading import Thread
import spaces
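# Assumed runtime for this Space (a sketch, exact pins not verified): a CUDA/ZeroGPU
# instance with roughly these packages installed via requirements.txt:
#   gradio, spaces, transformers, torch, unsloth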


@spaces.GPU  # allocate a GPU for this call when the Space runs on ZeroGPU hardware (assumed from the `spaces` import)
def food_safety_query(prompt):
    # Imported inside the function so CUDA-dependent setup happens within the GPU-allocated call
    from unsloth import FastLanguageModel

    # Model configuration
    model_name = "yasserrmd/food_safety"
    max_seq_length = 512
    load_in_4bit = True
    dtype = None  # None = auto-detect; pass torch.float16 or torch.bfloat16 to force a dtype
    # Load model and tokenizer
    model, tokenizer = FastLanguageModel.from_pretrained(
        model_name=model_name,
        max_seq_length=max_seq_length,
        dtype=dtype,
        load_in_4bit=load_in_4bit,
    )
    FastLanguageModel.for_inference(model)  # Enable faster inference

    messages = [
        {"role": "system", "content": "You are an AI assistant with expertise in food safety. Your primary goal is to provide precise, actionable, and scientifically accurate responses to queries about food safety practices, standards, and regulations. Focus on offering guidance based on global food safety standards, such as HACCP, ISO 22000, and FDA guidelines. Your responses should address hygiene, contamination prevention, food handling, storage, production processes, and safety protocols with practical and specific advice."},
        {"role": "user", "content": prompt},
    ]
    # Apply chat template and obtain inputs
    inputs = tokenizer.apply_chat_template(
        messages,
        tokenize=True,
        add_generation_prompt=True,
        return_tensors="pt",
    ).to("cuda")

    # Check if inputs is a dictionary and contains 'input_ids'
    if isinstance(inputs, dict) and "input_ids" in inputs:
        input_ids = inputs["input_ids"]
    else:
        input_ids = inputs  # Assume inputs is already a tensor of token ids
    # Stream generated tokens back as they are produced
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = dict(
        input_ids=input_ids,
        max_new_tokens=2048,
        use_cache=True,
        temperature=1.5,
        min_p=0.1,
        streamer=streamer,
    )

    # Run generation in a background thread so this generator can yield partial output
    thread = Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()

    output = ""
    for new_text in streamer:
        output += new_text
        yield output  # yield the cumulative response so Gradio updates the UI live
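
# A hypothetical local smoke test (not part of the Space; needs a CUDA device and
# downloads the model on first use). The generator yields the cumulative answer text:
#
#     for partial in food_safety_query("How long can cooked rice be kept refrigerated?"):
#         pass
#     print(partial)
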
with gr.Blocks() as app:
    gr.Markdown(
        """# Food Safety App
Enter your questions related to food safety, and the assistant will provide detailed responses."""
    )
    prompt = gr.Textbox(
        label="Enter your query:",
        placeholder="E.g., What hygiene practices should milk-producing factories follow?",
    )
    submit_button = gr.Button("Get Response")
    response = gr.Markdown()  # Markdown component renders the streamed response

    submit_button.click(
        fn=food_safety_query,
        inputs=[prompt],
        outputs=[response],
    )
# Launch the app
if __name__ == "__main__":
    app.launch(debug=True)