import requests
import gradio as gr
from bs4 import BeautifulSoup

# API configuration (adjust to match your Space setup)
API_TOKEN = "your_api_token"  # replace with your token (see the note at the end of the script)
API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
headers = {"Authorization": f"Bearer {API_TOKEN}"}


def query(payload):
    """Send a request to the Inference API and return the parsed JSON response."""
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()


def analyze_sentiment(text):
    """Classify a post as 'chat bot', 'AI dev', or 'other' using Llama 3."""
    output = query({
        "inputs": f'''<|begin_of_text|><|start_header_id|>system<|end_header_id|>

You will analyze the message I give you and classify it as talking about "chat bot", "AI dev", or "other". Reply with the category name only.<|eot_id|><|start_header_id|>user<|end_header_id|>

{text}<|eot_id|><|start_header_id|>assistant<|end_header_id|>

''',
        # Without return_full_text=False the API echoes the prompt back; since the
        # prompt itself contains "chat bot", every post would match that category.
        "parameters": {"return_full_text": False, "max_new_tokens": 10},
    })
    if isinstance(output, list) and len(output) > 0:
        response = output[0].get('generated_text', '').strip().lower()
        if "chat bot" in response:
            return "chat bot"
        elif "ai dev" in response:
            return "AI dev"
    # Reached on an unrecognized reply or an API error (e.g. an {"error": ...} dict);
    # the original fell through and returned None here, which crashed the counter below.
    return "other"


def scrape_and_analyze(url):
    try:
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        # Adjust this selector to the structure of the target site
        posts = soup.find_all('div', class_='post')
        categories = {"chat bot": 0, "AI dev": 0, "other": 0}
        for post in posts:
            content_div = post.find('div', class_='content')
            content = content_div.text.strip() if content_div else "No content"
            category = analyze_sentiment(content)
            categories[category] += 1
        total_posts = sum(categories.values())
        result = f"Total posts analyzed: {total_posts}\n"
        result += f"chat bot: {categories['chat bot']}\n"
        result += f"AI dev: {categories['AI dev']}\n"
        result += f"other: {categories['other']}"
        return result
    except Exception as e:
        return f"An error occurred: {str(e)}"


iface = gr.Interface(
    fn=scrape_and_analyze,
    inputs="text",
    outputs="text",
    title="Blog post analysis",
    description="Enter a blog URL to analyze its posts.",
)

iface.launch()
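
# Note on the token: hard-coding API_TOKEN is risky if the Space repo is public.
# A minimal alternative sketch, assuming you store the token as a Space secret
# named HF_API_TOKEN (Spaces expose secrets to the app as environment variables):
#
#     import os
#     API_TOKEN = os.environ["HF_API_TOKEN"]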