import gradio as gr
from huggingface_hub import InferenceClient
from bs4 import BeautifulSoup
import requests
from typing import Iterator, List, Tuple
from urllib.parse import quote_plus

# Initialize the InferenceClient with the model ID from Hugging Face
client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta")
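# Note: the public Inference API can be rate-limited or require authentication.
# If needed, pass a token explicitly (assumes an HF_TOKEN environment variable and `import os`):
# client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta", token=os.environ["HF_TOKEN"])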
def scrape_yahoo_search(query: str) -> Tuple[str, str]:
    """
    Scrapes Yahoo search results for the given query and returns the top result's snippet and URL.

    Args:
        query (str): The search query.

    Returns:
        Tuple[str, str]: The snippet and URL of the top search result.
    """
    # URL-encode the query so spaces and special characters are handled correctly
    search_url = f"https://search.yahoo.com/search?p={quote_plus(query)}"
    try:
        # A browser-like User-Agent and a timeout make the request less likely to be rejected or hang
        headers = {"User-Agent": "Mozilla/5.0"}
        response = requests.get(search_url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.content, 'html.parser')

        # Find the top search result snippet and URL (class names depend on Yahoo's current markup)
        result = soup.find('div', {'class': 'dd algo algo-sr Sr'})
        if result:
            snippet = result.find('div', {'class': 'compText aAbs'}).get_text(strip=True)
            url = result.find('a')['href']
            return snippet, url
        else:
            return "No results found.", search_url
    except Exception as e:
        return f"An error occurred while scraping Yahoo: {str(e)}", search_url
def respond(
    message: str,
    history: List[Tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
) -> Iterator[str]:
    """
    Generates a response from the AI model based on the user's message, chat history, and optional Yahoo search results.

    Args:
        message (str): The user's input message.
        history (List[Tuple[str, str]]): A list of tuples representing the conversation history (user, assistant).
        system_message (str): A system-level message guiding the AI's behavior.
        max_tokens (int): The maximum number of tokens for the output.
        temperature (float): Sampling temperature for controlling the randomness.
        top_p (float): Top-p (nucleus sampling) for controlling diversity.

    Yields:
        str: The AI's response as it is generated, including the source URL if applicable.
    """
    # Check for the trigger word and activate the search feature if present
    trigger_word = "search"
    if trigger_word in message.lower():
        # Extract the query from the message (everything after the trigger word)
        query = message.lower().split(trigger_word, 1)[-1].strip()
        if query:
            snippet, url = scrape_yahoo_search(query)
            message = f"{message}\n\nWeb Content:\n{snippet}\nSource: {url}"
        else:
            message = "Please provide a search query after the trigger word."
    # Prepare the conversation history for the API call
    messages = [{"role": "system", "content": system_message}]
    for user_input, assistant_response in history:
        if user_input:
            messages.append({"role": "user", "content": user_input})
        if assistant_response:
            messages.append({"role": "assistant", "content": assistant_response})

    # Add the latest user message to the conversation
    messages.append({"role": "user", "content": message})

    # Initialize an empty response
    response = ""
    try:
        # Stream the response from the model, yielding the accumulated text as it grows
        for response_chunk in client.chat_completion(
            messages=messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = response_chunk.choices[0].delta.content
            # The final chunk may carry no content, so guard against None
            if token:
                response += token
            yield response
    except Exception as e:
        yield f"An error occurred: {str(e)}"
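# gr.ChatInterface accepts a generator function: each yielded string replaces the
# previous partial reply in the UI, so the response appears to stream in.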
# Define the ChatInterface with additional input components for user customization
demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
    title="Chatbot Interface",
    description="A customizable chatbot interface using Hugging Face's Inference API with Yahoo search scraping capabilities.",
)
# Launch the Gradio interface
if __name__ == "__main__":
    demo.launch()
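# To run locally (assumes the dependencies are installed):
#   pip install gradio huggingface_hub requests beautifulsoup4
#   python app.py   # or whatever this file is named
# Gradio serves the interface on http://127.0.0.1:7860 by default.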