import gradio as gr
from huggingface_hub import InferenceClient
from bs4 import BeautifulSoup
import requests
import os
from typing import List, Tuple, Optional
from urllib.parse import quote_plus

# Retrieve API key from environment variable
api_key = os.getenv('HF_TOKEN')
if not api_key:
    raise ValueError("API key not found. Please set the HF_TOKEN environment variable.")
# Initialize the InferenceClient with the specified model and API key
client = InferenceClient(
model="meta-llama/Meta-Llama-3.1-405B-Instruct",
token=api_key
)
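# Note: Meta-Llama-3.1-405B-Instruct is a gated model on the Hugging Face Hub;
# the token in HF_TOKEN must have been granted access to it for calls to succeed.
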
def scrape_yahoo_search(query: str, num_results: int = 3) -> Tuple[str, str]:
"""
Scrapes Yahoo search results for the given query and returns detailed snippets and URLs for the top results.
Args:
query (str): The search query.
num_results (int): Number of results to retrieve.
Returns:
Tuple[str, str]: The formatted snippets and URLs of the top search results.
"""
search_url = f"https://search.yahoo.com/search?p={query}"
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
}
try:
response = requests.get(search_url, headers=headers)
response.raise_for_status()
soup = BeautifulSoup(response.content, 'html.parser')
results = []
result_elements = soup.find_all('div', {'class': 'dd algo algo-sr Sr'}, limit=num_results)
if result_elements:
for result in result_elements:
title = result.find('h3').get_text(strip=True) if result.find('h3') else "No title"
snippet = result.find('div', {'class': 'compText aAbs'}).get_text(strip=True) if result.find('div', {'class': 'compText aAbs'}) else "No snippet"
url = result.find('a')['href'] if result.find('a') else "No URL"
results.append((title, snippet, url))
formatted_results = "\n\n".join(
f"Title: {title}\nSnippet: {snippet}\nURL: {url}" for title, snippet, url in results
)
return formatted_results, search_url
else:
return "No results found.", search_url
except requests.RequestException as e:
return f"Request error: {str(e)}", search_url
except Exception as e:
return f"Processing error: {str(e)}", search_url
def extract_search_query(message: str, trigger_word: str) -> Optional[str]:
"""
Extracts the search query from the message based on the trigger word.
Args:
message (str): The user's input message.
trigger_word (str): The word that activates the search feature.
Returns:
Optional[str]: The extracted search query if found, otherwise None.
"""
lower_message = message.lower()
if trigger_word in lower_message:
parts = lower_message.split(trigger_word, 1)
if len(parts) > 1:
query = parts[1].strip()
return query if query else None
return None
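
# Illustrative example: extract_search_query("please search latest AI news", "search")
# returns "latest ai news" (the text after the trigger word, lowercased and stripped).
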
def respond(
    message: str,
    history: List[Tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
) -> str:
"""
Generates a response from the AI model based on the user's message, chat history, and optional Yahoo search results.
Args:
message (str): The user's input message.
history (List[Tuple[str, str]]): A list of tuples representing the conversation history (user, assistant).
system_message (str): A system-level message guiding the AI's behavior.
max_tokens (int): The maximum number of tokens for the output.
temperature (float): Sampling temperature for controlling the randomness.
top_p (float): Top-p (nucleus sampling) for controlling diversity.
Returns:
str: The AI's response as it is generated, including the source URL if applicable.
"""
    # Check for the trigger word and activate the search feature if present
    trigger_word = "search"
    query = extract_search_query(message, trigger_word)
    if query:
        snippet, url = scrape_yahoo_search(query, num_results=3)
        message += f"\n\nWeb Content:\n{snippet}\nSource: {url}"
    elif trigger_word in message.lower():
        # The trigger word was used but no query followed it
        message = "Please provide a search query after the trigger word."

    # Prepare the conversation history for the API call
    messages = [{"role": "system", "content": system_message}]
    for user_input, assistant_response in history:
        if user_input:
            messages.append({"role": "user", "content": user_input})
        if assistant_response:
            messages.append({"role": "assistant", "content": assistant_response})

    # Add the latest user message to the conversation
    messages.append({"role": "user", "content": message})

    # Initialize an empty response
    response = ""
    try:
        # Generate a response from the model with streaming
        for response_chunk in client.chat_completion(
            messages=messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = response_chunk.choices[0].delta.content
            if token:  # the final streamed chunk may carry no content
                response += token
    except Exception as e:
        return f"AI model error: {str(e)}"
    return response
# Define the ChatInterface with additional input components for user customization
demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
    title="Chatbot Interface",
    description="A customizable chatbot interface using Hugging Face's Inference API with Yahoo search scraping capabilities.",
)
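
# The additional_inputs above are passed to `respond` in order, after the message
# and chat history arguments.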
# Launch the Gradio interface
if __name__ == "__main__":
    demo.launch()