|
import streamlit as st |
|
from huggingface_hub import InferenceClient |
|
import random |
|
|
|
|
|
# Page header for the chat UI.
st.title("ChatGPT-like clone")

# Serverless HF Inference endpoint for the chat model; `client` and
# `model_name` are referenced by the rest of the script.
model_name = "HuggingFaceH4/zephyr-7b-beta"
client = InferenceClient(model=model_name)
|
|
|
|
|
# Seed the conversation exactly once per browser session with the system
# prompt; subsequent reruns keep the accumulated history.
st.session_state.setdefault(
    "messages",
    [{"role": "system", "content": "You are a helpful assistant."}],
)
|
|
|
|
|
# Replay the conversation so far. BUG FIX: the original `else` branch also
# rendered the system prompt as a "Zephyr 7B:" message; the system entry is
# configuration, not a visible turn, so it is skipped here.
for message in st.session_state.messages:
    role = message["role"]
    if role == "system":
        continue
    speaker = "You" if role == "user" else "Zephyr 7B"
    st.markdown(f"**{speaker}:** {message['content']}")
|
|
|
|
|
# Free-text prompt box; keyed so the widget's value lives in session state.
user_input = st.text_input(label="Type your message here...", key="user_input")
|
|
|
|
|
if st.button("Send"):
    # Ignore clicks with an empty / whitespace-only prompt.
    if user_input.strip():
        # Record the user's turn first so the history sent to the model
        # includes it.
        st.session_state.messages.append({"role": "user", "content": user_input})

        with st.spinner("Zephyr is thinking..."):
            # BUG FIX: text_generation(user_input) sent only the latest
            # message, discarding the system prompt and all prior turns, so
            # the bot had no conversation memory. chat_completion() applies
            # the model's chat template and sends the full history.
            response = client.chat_completion(
                messages=st.session_state.messages,
                max_tokens=512,
            )

        assistant_reply = response.choices[0].message.content.strip()

        st.session_state.messages.append({"role": "assistant", "content": assistant_reply})

        # BUG FIX: the original `st.session_state.user_input = ""` raises
        # StreamlitAPIException — a widget-backed key cannot be assigned
        # after the widget is instantiated. Rerun instead so the history
        # loop above renders the two new turns immediately.
        st.rerun()
|
|
|
|
|
# Sidebar: static usage notes shown alongside the chat.
st.sidebar.title("Instructions")
st.sidebar.info(
    """
    - Type your message in the input box and press "Send."
    - Responses are powered by the HuggingFace `zephyr-7b-beta` model.
    """
)