File size: 1,750 Bytes
c794c25
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ef0d759
c794c25
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
import streamlit as st
import requests

# Set the model ID of your fine-tuned model on Hugging Face
# (repo id in "user/model" form, used to build the Inference API URL below)
MODEL_ID = "Mishal23/fine-tuned-dialoGPT-crm-chatbot"  # Your model ID

# Retrieve your Hugging Face token from secrets
# (read once at import time; raises KeyError at startup if the secret is missing,
# which surfaces the misconfiguration immediately rather than on first request)
HUGGING_FACE_TOKEN = st.secrets["HUGGING_FACE_TOKEN"]

# Function to generate a response from the chatbot using the Hugging Face API
def generate_response(prompt, timeout=30):
    """Send *prompt* to the hosted Inference API and return the generated text.

    Args:
        prompt: User message forwarded verbatim as the model input.
        timeout: Seconds to wait for the API before giving up (new, optional;
            previously the request could block forever).

    Returns:
        The model's generated text on success, otherwise a short apology
        string (errors are also surfaced in the UI via ``st.error``).
    """
    headers = {"Authorization": f"Bearer {HUGGING_FACE_TOKEN}"}
    payload = {"inputs": prompt}

    try:
        # Make the API call to the Hugging Face model.
        # timeout= fixes an indefinite hang when the API stalls; on expiry
        # requests raises Timeout (a RequestException), handled below.
        response = requests.post(
            f"https://api-inference.huggingface.co/models/{MODEL_ID}",
            headers=headers,
            json=payload,
            timeout=timeout,
        )
        response.raise_for_status()  # Raise an error for bad responses
        data = response.json()
        # Successful generation returns [{"generated_text": ...}]. Any other
        # shape (e.g. a model-loading {"error": ...} dict) raises KeyError/
        # IndexError/TypeError and falls through to the generic handler.
        return data[0]['generated_text']
    except requests.exceptions.HTTPError as http_err:
        st.error(f"HTTP error occurred: {http_err}")
        return "Sorry, there was an error with the server."
    except requests.exceptions.RequestException as req_err:
        st.error(f"Request error occurred: {req_err}")
        return "Sorry, there was an issue with your request."
    except Exception as e:
        st.error(f"Error generating response: {e}")
        return "Sorry, I couldn't generate a response."

# Streamlit UI setup — this whole section re-runs top-to-bottom on every
# user interaction (Streamlit's rerun model), so statement order matters.
st.title("Chatbot Powered by Hugging Face")
st.subheader("Talk to the Chatbot")

# User input (single-turn: no chat history is kept between reruns)
user_input = st.text_input("You: ", "")

# Button to submit the input; st.button is True only on the rerun
# triggered by the click itself.
if st.button("Send"):
    if user_input:
        # Spinner covers the blocking network call to the Inference API.
        with st.spinner("Generating response..."):
            bot_response = generate_response(user_input)
            st.text_area("Chatbot:", value=bot_response, height=200)
    else:
        # Empty input: prompt the user instead of calling the API.
        st.warning("Please enter a message before sending.")