import importlib
import subprocess
import sys

import streamlit as st
from transformers import pipeline


# Ensure PyTorch is installed (the text-generation pipeline needs it as its backend)
def install_pytorch():
    subprocess.check_call([sys.executable, "-m", "pip", "install", "torch"])


try:
    import torch
except ImportError:
    install_pytorch()
    importlib.invalidate_caches()  # make the freshly installed package discoverable
    import torch

st.title("Hugging Face Model Demo")


@st.cache_resource
def load_model():
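    # st.cache_resource keeps a single pipeline instance per server process,
    # so the ~7B-parameter model is downloaded and loaded only once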
    return pipeline("text-generation", model="klyang/MentaLLaMA-chat-7B")


model = load_model()

user_input = st.text_input("Enter your text:")
if user_input:
    with st.spinner("Generating response..."):
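        # The pipeline returns a list of dicts; "generated_text" holds the prompt plus the completion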
        result = model(user_input, max_new_tokens=128)  # cap on new tokens is an assumed default; tune as needed
        st.success(result[0]["generated_text"])