# Chatbot with web search — Hugging Face Space app
import gradio as gr
from huggingface_hub import InferenceClient
from bs4 import BeautifulSoup
import requests
import os
from typing import List, Tuple, Optional
# --- Configuration ---
# Hugging Face model repo id used for chat completion.
MODEL_NAME = "meta-llama/Meta-Llama-3.1-405B-Instruct"
# Name of the environment variable that must hold the Hugging Face API token.
API_KEY_ENV_VAR = "HF_TOKEN"
# Keyword in a user message that triggers a web search (matched case-insensitively).
SEARCH_TRIGGER_WORD = "search"
# Number of Yahoo search result snippets to scrape by default.
DEFAULT_NUM_RESULTS = 3
# --- Utility Functions ---
def get_api_key() -> str:
    """Return the Hugging Face API token read from the environment.

    Raises:
        ValueError: If the environment variable is unset or empty.
    """
    token = os.getenv(API_KEY_ENV_VAR)
    if token:
        return token
    raise ValueError(f"API key not found. Please set the {API_KEY_ENV_VAR} environment variable.")
def scrape_yahoo_search(query: str, num_results: int = DEFAULT_NUM_RESULTS) -> Tuple[Optional[str], Optional[str]]:
    """Scrape Yahoo search results for *query*.

    Args:
        query: Search terms, interpolated into the Yahoo search URL.
        num_results: Maximum number of result snippets to return.

    Returns:
        A ``(text, url)`` tuple:
        * ``(formatted_snippets, search_url)`` on success,
        * ``(None, search_url)`` when no parsable results were found,
        * ``(error_message, None)`` when the request or parsing fails.
    """
    search_url = f"https://search.yahoo.com/search?p={query}"
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36'}
    try:
        # Timeout prevents the worker from hanging forever if Yahoo stalls;
        # the original call had no timeout and could block indefinitely.
        response = requests.get(search_url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.content, 'html.parser')
        result_elements = soup.find_all('div', {'class': 'dd algo algo-sr Sr'}, limit=num_results)
        results = [
            f"**Title:** {res.find('h3').get_text(strip=True)}\n**Snippet:** {res.find('div', {'class': 'compText aAbs'}).get_text(strip=True)}\n**URL:** {res.find('a')['href']}"
            for res in result_elements
            if res.find('h3') and res.find('div', {'class': 'compText aAbs'}) and res.find('a')
        ]
        if results:
            return "\n\n".join(results), search_url
        # Covers both "no result elements" and "elements present but none parsable";
        # the original could return "" (joined empty list) in the latter case.
        return None, search_url
    except requests.RequestException as e:
        return f"Request error: {str(e)}", None
    except Exception as e:
        # NOTE(review): broad catch keeps scraping failures from crashing the chat;
        # the error string travels in the "results" slot with url=None, so callers
        # must check the url to tell errors from real snippets.
        return f"Processing error: {str(e)}", None
def extract_search_query(message: str, trigger_word: str = SEARCH_TRIGGER_WORD) -> Optional[str]:
    """Return the text that follows *trigger_word* in *message*, or ``None``.

    Matching is case-insensitive (the message is lower-cased first, so the
    returned query is lower-case too). Returns ``None`` when the trigger is
    absent or is followed only by whitespace.
    """
    lowered = message.lower()
    if trigger_word not in lowered:
        return None
    remainder = lowered.partition(trigger_word)[2].strip()
    return remainder or None
# --- Initialize Inference Client ---
# NOTE: runs at import time — get_api_key() raises ValueError immediately
# if the HF_TOKEN environment variable is unset.
client = InferenceClient(model=MODEL_NAME, token=get_api_key())
# --- Chatbot Logic ---
def respond(
    message: str,
    history: List[Tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
) -> str:
    """Generate a model reply, optionally augmented with web search results.

    Args:
        message: Latest user message; may contain the search trigger word.
        history: Prior ``(user_message, assistant_message)`` turns from Gradio.
        system_message: System prompt prepended to the conversation.
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling cutoff.

    Returns:
        The assistant's full reply, or an error string if the API call fails.
    """
    query = extract_search_query(message)
    if query:
        search_results, search_url = scrape_yahoo_search(query)
        if search_results and search_url:
            message += f"\n\n## Web Search Results:\n{search_results}\n**Source:** {search_url}"
        elif search_results:
            # search_url is None => search_results holds an error message from the
            # scraper; the original appended it as if it were real results with
            # "**Source:** None".
            message += f"\n\n(Web search failed: {search_results})"
        else:
            message += "\n\nI couldn't find any relevant web results for your query."

    # Gradio ChatInterface history is a list of (user, assistant) message pairs,
    # NOT (role, content) pairs; the original unpacked it as roles, sending
    # malformed messages like {"role": "<user text>", "content": "<bot text>"}.
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        for chunk in client.chat_completion(
            messages=messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            # Streamed deltas can carry content=None (e.g. role-only or final
            # chunks); the original `+=` raised TypeError on those.
            delta = chunk.choices[0].delta.content
            if delta:
                response += delta
    except Exception as e:
        return f"AI model error: {str(e)}"
    return response
# --- Gradio Interface ---
# ChatInterface wires `respond` to a chat UI; the widgets below map
# positionally onto respond's system_message/max_tokens/temperature/top_p
# parameters (after message and history, which Gradio supplies itself).
demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(value="You are a helpful and informative AI assistant.", label="System Message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max New Tokens"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (Nucleus Sampling)"),
    ],
    title="Chatbot with Search",
    description="Chat and search the web using the power of Meta-Llama!",
)
# --- Launch the App ---
if __name__ == "__main__":
    # Start the Gradio server (blocks until shutdown). The stray trailing "|"
    # copy-paste artifact on the original line was a syntax error and is removed.
    demo.launch()