# chat-llm / app.py
# NOTE(review): the following lines are Hugging Face web-page chrome captured by
# the scrape, not code; kept as comments so the file remains valid Python.
#   Threatthriver's picture — Update app.py — ccbd1ae verified
#   raw / history blame / 4.36 kB
import os
from typing import List, Optional, Tuple
from urllib.parse import quote_plus

import gradio as gr
import requests
from bs4 import BeautifulSoup
from huggingface_hub import InferenceClient
# --- Configuration ---
MODEL_NAME = "meta-llama/Meta-Llama-3.1-405B-Instruct"  # HF model repo used for chat completion
API_KEY_ENV_VAR = "HF_TOKEN"  # environment variable expected to hold the Hugging Face API token
SEARCH_TRIGGER_WORD = "search"  # substring of a user message that triggers a web search
DEFAULT_NUM_RESULTS = 3  # maximum number of Yahoo results scraped per query
# --- Utility Functions ---
def get_api_key() -> str:
    """Return the Hugging Face API token read from the environment.

    Raises:
        ValueError: if the configured environment variable is unset or empty.
    """
    token = os.environ.get(API_KEY_ENV_VAR)
    if token:
        return token
    raise ValueError(f"API key not found. Please set the {API_KEY_ENV_VAR} environment variable.")
def scrape_yahoo_search(query: str, num_results: int = DEFAULT_NUM_RESULTS) -> Tuple[Optional[str], Optional[str]]:
    """Scrape the top Yahoo search results for *query*.

    Args:
        query: Free-text search query; URL-encoded before the request.
        num_results: Maximum number of result snippets to collect.

    Returns:
        ``(results, url)`` where ``results`` is a markdown-formatted string of
        snippets, ``None`` when nothing matched, or an error description on
        failure; ``url`` is the search URL used (``None`` if the request failed).
    """
    # FIX: URL-encode the query — raw interpolation broke on '&', '#', '+' etc.
    search_url = f"https://search.yahoo.com/search?p={quote_plus(query)}"
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36'}
    try:
        # FIX: a timeout keeps a slow/unreachable network from hanging the chat
        # handler indefinitely (requests has no default timeout).
        response = requests.get(search_url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.content, 'html.parser')
        # Yahoo currently marks organic results with this class combination;
        # the markup is unofficial and may change without notice.
        result_elements = soup.find_all('div', {'class': 'dd algo algo-sr Sr'}, limit=num_results)
        if not result_elements:
            return None, search_url
        snippets = []
        for res in result_elements:
            title = res.find('h3')
            summary = res.find('div', {'class': 'compText aAbs'})
            link = res.find('a')
            # Skip partially-rendered results missing any expected element.
            if title and summary and link:
                snippets.append(
                    f"**Title:** {title.get_text(strip=True)}\n"
                    f"**Snippet:** {summary.get_text(strip=True)}\n"
                    f"**URL:** {link['href']}"
                )
        return "\n\n".join(snippets), search_url
    except requests.RequestException as e:
        return f"Request error: {str(e)}", None
    except Exception as e:
        return f"Processing error: {str(e)}", None
def extract_search_query(message: str, trigger_word: str = SEARCH_TRIGGER_WORD) -> Optional[str]:
    """Extract the text following *trigger_word* in *message*, if present.

    The trigger is matched case-insensitively, but the returned query keeps the
    user's original casing (the previous version returned a lowercased query,
    destroying case in proper nouns, acronyms, etc.).

    Returns:
        The trimmed query text after the trigger word, or ``None`` when the
        trigger is absent or nothing follows it.
    """
    # Locate the trigger in a lowered copy, but slice the ORIGINAL message so
    # the query preserves its case.
    idx = message.lower().find(trigger_word.lower())
    if idx == -1:
        return None
    query = message[idx + len(trigger_word):].strip()
    return query if query else None
# --- Initialize Inference Client ---
# Created at import time: get_api_key() raises ValueError immediately if the
# token env var is unset, so misconfiguration fails fast rather than on the
# first chat request.
client = InferenceClient(model=MODEL_NAME, token=get_api_key())
# --- Chatbot Logic ---
def respond(
    message: str,
    history: List[Tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
) -> str:
    """Generate a model reply, optionally augmented with web search results.

    Args:
        message: Latest user message; if it contains the search trigger word,
            Yahoo results are appended to it before prompting the model.
        history: Gradio tuple-format history ``[(user_msg, assistant_msg), ...]``.
        system_message: System prompt prepended to the conversation.
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability mass.

    Returns:
        The assembled model response, or an error description on failure.
    """
    query = extract_search_query(message)
    if query:
        search_results, search_url = scrape_yahoo_search(query)
        if search_results:
            message += f"\n\n## Web Search Results:\n{search_results}\n**Source:** {search_url}"
        else:
            message += "\n\nI couldn't find any relevant web results for your query."
    # FIX: Gradio tuple history is (user_msg, assistant_msg) pairs. The previous
    # code unpacked them as (role, content), sending the user's text as the
    # "role" field — an invalid request after the first turn.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})
    response = ""
    try:
        for response_chunk in client.chat_completion(
            messages=messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            # FIX: the final streamed chunk may carry delta.content == None;
            # guard so we never do `str += None`.
            delta = response_chunk.choices[0].delta.content
            if delta:
                response += delta
    except Exception as e:
        return f"AI model error: {str(e)}"
    return response
# --- Gradio Interface ---
# ChatInterface wires `respond` to a chat UI; the additional_inputs appear as
# adjustable controls and are passed positionally after (message, history).
demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(value="You are a helpful and informative AI assistant.", label="System Message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max New Tokens"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (Nucleus Sampling)"),
    ],
    title="Chatbot with Search",
    description="Chat and search the web using the power of Meta-Llama!",
)
# --- Launch the App ---
# Guarded so importing this module (e.g. by the Spaces runtime) doesn't start
# a second server.
if __name__ == "__main__":
    demo.launch()