alex-abb committed
Commit 0658229 · verified · 1 Parent(s): 2d06ca9

Update app.py

Files changed (1):
  1. app.py +35 -42
app.py CHANGED
@@ -1,12 +1,13 @@
-import os
 import requests
-import bs4
-from bs4 import BeautifulSoup
 import gradio as gr
+import bs4
+from bs4 import BeautifulSoup
+
 
-api_token = os.environ.get("TOKEN")
+# API configuration (adjust to your setup in the Space)
+API_TOKEN = "your_api_token"
 API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
-headers = {"Authorization": f"Bearer {api_token}"}
+headers = {"Authorization": f"Bearer {API_TOKEN}"}
 
 def query(payload):
     response = requests.post(API_URL, headers=headers, json=payload)
@@ -35,45 +36,37 @@ you are going to analyse the prompt that i'll give to you and tell me if they ar
     else:
         return "autre"
 
-def scrape_huggingface_posts(url):
-    response = requests.get(url)
-    soup = BeautifulSoup(response.text, 'html.parser')
-
-    # Adjust this selector to the page's actual structure
-    posts = soup.find_all('div', class_='space-y-3 pl-7')
-
-    extracted_posts = []
-    for post in posts:
-        # Extract the relevant information from each post
-        title = post.find('h2', class_='post-title').text.strip()
-        content = post.find('div', class_='post-content').text.strip()
-        author = post.find('span', class_='post-author').text.strip()
+def scrape_and_analyze(url):
+    try:
+        response = requests.get(url)
+        soup = BeautifulSoup(response.text, 'html.parser')
 
-        extracted_posts.append({
-            'title': title,
-            'content': content,
-            'author': author
-        })
-
-    return extracted_posts
-
-# Using the functions
-url = "https://huggingface.co/posts"
-all_posts = scrape_huggingface_posts(url)
-
-# Analyze each post
-for post in all_posts:
-    category = analyze_sentiment(post['content'])
-    print(f"Post title: {post['title']}")
-    print(f"Author: {post['author']}")
-    print(f"Category: {category}")
-    print("---")
+        # Adjust this selector to the target site's structure
+        posts = soup.find_all('div', class_='post')
+
+        categories = {"chat bot": 0, "AI dev": 0, "autre": 0}
+
+        for post in posts:
+            content = post.find('div', class_='content').text.strip() if post.find('div', class_='content') else "No content"
+            category = analyze_sentiment(content)
+            categories[category] += 1
+
+        total_posts = sum(categories.values())
+        result = f"Total posts analyzed: {total_posts}\n"
+        result += f"chat bot: {categories['chat bot']}\n"
+        result += f"AI dev: {categories['AI dev']}\n"
+        result += f"autre: {categories['autre']}"
+
+        return result
+    except Exception as e:
+        return f"An error occurred: {str(e)}"
 
-# Gradio interface (if you want to keep it)
-demo = gr.Interface(
-    fn=analyze_sentiment,
+iface = gr.Interface(
+    fn=scrape_and_analyze,
     inputs="text",
-    outputs="text"
+    outputs="text",
+    title="Blog post analysis",
+    description="Enter a blog URL to analyze its posts."
 )
 
-demo.launch()
+iface.launch()
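
One note on the new hunk: the previous version read its token from the environment (os.environ.get("TOKEN")), while the updated file hardcodes a placeholder (API_TOKEN = "your_api_token"). A minimal sketch of keeping the environment-based approach when running the Space, assuming an HF_TOKEN variable (the variable name and the sample prompt below are illustrative, not part of the commit):

import os
import requests

API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
# Read the token from the environment instead of hardcoding it.
headers = {"Authorization": f"Bearer {os.environ.get('HF_TOKEN', '')}"}

def query(payload):
    # Same shape as the committed helper: POST the payload as JSON
    # and return the decoded response.
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()

if __name__ == "__main__":
    # The serverless Inference API expects an "inputs" field for this model.
    print(query({"inputs": "Classify this post: chat bot, AI dev, or autre?"}))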