# Source: Hugging Face Space by vishalkatheriya — app.py (revision 7e1285f).
import streamlit as st
from huggingface_hub import InferenceClient
from googlesearch import search
@st.cache_resource
def load_model():
    """Create the shared LLM inference client (once per server process).

    ``st.cache_resource`` already memoises the return value across reruns
    and sessions, so the extra ``st.session_state`` bookkeeping the original
    code performed inside the cached function was redundant — the client is
    simply constructed and returned.

    Returns:
        InferenceClient: client bound to Meta-Llama-3-8B-Instruct.
    """
    return InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
# Obtain the (cached) inference client once; reruns reuse the same instance.
client = load_model()
def create_prompt(user_message):
    """Return the Katheriya-persona prompt wrapping *user_message*."""
    template = (
        "\n"
        "You are Katheriya, a skilled data scientist who helps users find the"
        " best information from around the globe. You are highly knowledgeable"
        " and provide insightful, detailed responses.\n"
        "User: {msg}\n"
        "Katheriya:\n"
    )
    return template.format(msg=user_message)
def chat_with_llm(query):
    """Stream a chat answer for *query*, rendering it incrementally in the UI.

    Args:
        query: raw user message; it is wrapped in the persona template first.

    Returns:
        str: the full accumulated response text. On a streaming error a
        Streamlit error box is shown and whatever text arrived so far is
        still returned (the original code implicitly returned ``None``).
    """
    formatted_prompt = create_prompt(query)
    messages = [{"role": "user", "content": formatted_prompt}]

    # Placeholder that is overwritten on every streamed chunk.
    response_container = st.empty()
    response_text = ""
    try:
        response_stream = client.chat_completion(
            messages=messages, stream=True, max_tokens=2048
        )
        for message in response_stream:
            # NOTE(review): assumes stream chunks support dict-style access
            # with a 'choices' list — confirm against the installed
            # huggingface_hub version, which may yield dataclass-style
            # objects (chunk.choices[0].delta.content) instead.
            if 'choices' in message and message['choices']:
                delta_content = message['choices'][0]['delta'].get('content', '')
                response_text += delta_content
                # Re-render the growing answer in real time.
                response_container.write(f"**Katheriya:** {response_text}")
        return response_text
    except Exception as e:
        st.error(f"An error occurred: {e}")
        # Return the partial text instead of None so callers get a string.
        return response_text
def process_query_with_llm(query):
    """Ask the LLM to rewrite *query* into a concise web-search query.

    Handles both observed return shapes of ``client.text_generation`` — a
    plain string, or a list of ``{'generated_text': ...}`` dicts — and falls
    back to a sentinel message when nothing usable comes back. The original
    membership test ``'generated_text' in response[0]`` could raise
    ``IndexError`` on an empty list or ``TypeError`` on non-dict items;
    both cases are guarded here.

    Returns:
        str: the search query text, or "No query generated." as a fallback.
    """
    prompt = f"User asked: '{query}'. What would be the best search query to use?"
    response = client.text_generation(prompt)
    if isinstance(response, str):
        return response.strip()
    if (
        isinstance(response, list)
        and response
        and isinstance(response[0], dict)
        and 'generated_text' in response[0]
    ):
        return response[0]['generated_text'].strip()
    return "No query generated."
def search_web(query):
    """Google-search *query* and return up to ten result URLs.

    Returns an empty list (after surfacing a Streamlit error box) if the
    search backend raises.
    """
    try:
        return list(search(query, num_results=10))
    except Exception as e:
        st.error(f"An error occurred during web search: {e}")
        return []
# --- Streamlit UI -----------------------------------------------------------
st.title("Interactive Chatbot - Powered by Katheriya")

# Phrases signalling the user wants a web search rather than a chat answer.
# An immutable module-level constant: the original list literal was rebuilt
# inside the handler on every Streamlit rerun.
SEARCH_PHRASES = (
    "search", "find", "get me", "give me", "look up", "show me",
    "retrieve", "browse", "where can I find", "search for", "look for",
    "can you find", "find me", "what is", "how to", "who is", "where is",
    "what are", "tell me about", "do you know", "could you find",
    "can you search", "help me find", "explore", "fetch", "locate", "suggest",
)

# Input field for the user's message.
user_input = st.text_input("You:", "")

if user_input:
    st.write(f"**You:** {user_input}")
    # Case-insensitive substring match decides search intent vs. chat.
    if any(keyword in user_input.lower() for keyword in SEARCH_PHRASES):
        # Search intent: let the LLM craft the query, then hit the web.
        search_query = process_query_with_llm(user_input)
        st.write(f"**Processed Query:** {search_query}")
        links = search_web(search_query)
        if links:
            st.write("Here are some links you might find useful:")
            for idx, link in enumerate(links, start=1):
                st.write(f"{idx}. [Link {idx}]({link})")
        else:
            st.write("Sorry, I couldn't find any relevant links.")
    else:
        # General conversation; chat_with_llm streams the reply into the UI
        # itself, so the (previously unused) return value is not bound.
        chat_with_llm(user_input)
# ---------------------------------------------------------------------------
# Legacy implementation — kept below, commented out, for reference only.
# ---------------------------------------------------------------------------
# import streamlit as st
# from huggingface_hub import InferenceClient
# from googlesearch import search
# # Initialize the InferenceClient with the model and token
# if 'client' not in st.session_state:
# st.session_state.client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
# client = st.session_state.client
# # Function to get chat completion from the model with personalization
# def chat_with_llm(message):
# try:
# # Define the prompt with a name and role
# personalized_message = f"""
# You are Vishal, a data scientist working at Inferenz.ai. Respond to the following query as naturally and informatively as possible.
# User: {message}
# Vishal:
# """
# response_stream = client.chat_completion(messages=[{"role": "user", "content": personalized_message}], stream=True, max_tokens=500)
# delta_content = ""
# for message in response_stream:
# if 'choices' in message and message['choices']:
# delta_content += message['choices'][0]['delta'].get('content', '')
# return delta_content.strip()
# except Exception as e:
# return f"An error occurred: {e}"
# # Function to process the query for search intent
# def process_query_with_llm(query):
# response = chat_with_llm(f"User asked: '{query}'. What would be the best search query to use?")
# st.write("Query for search:", response)
# return response
# # Function to perform a Google search using the googlesearch-python package
# def search_web(query):
# search_results = []
# try:
# for result in search(query, num_results=10):
# search_results.append(result)
# except Exception as e:
# st.write(f"An error occurred during the search: {e}")
# return search_results
# # Streamlit UI
# st.title("Interactive Chatbot")
# # Input field for user query
# user_input = st.text_input("You:", "")
# # Check if the input field is not empty
# if user_input:
# st.write(f"**You:** {user_input}")
# search_phrases = [
# "search", "find", "get me", "give me", "look up", "show me", "retrieve",
# "browse", "where can I find", "search for", "look for", "can you find",
# "find me", "what is", "how to", "who is", "where is", "what are",
# "tell me about", "do you know", "could you find", "can you search",
# "help me find", "explore", "fetch", "locate", "suggest me", "suggest"
# ]
# if any(keyword in user_input.lower() for keyword in search_phrases):
# # If the user input indicates a search intent
# search_query = process_query_with_llm(user_input)
# st.write(f"**Processed Query:** {search_query}")
# # Search the web using the processed query
# links = search_web(search_query)
# # Display the search results
# if links:
# st.write("Here are some links you might find useful:")
# for idx, link in enumerate(links):
# st.write(f"{idx + 1}. [Link {idx + 1}]({link})")
# else:
# st.write("Sorry, I couldn't find any relevant links.")
# else:
# # Handle general conversation
# response = chat_with_llm(user_input)
# st.write(f"**Vishal:** {response}")
# # Ensure input field is cleared after processing
# st.text_input("You:", "", key="user_input")