Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,18 +1,34 @@
|
|
1 |
import streamlit as st
|
2 |
from transformers import pipeline
|
|
|
|
|
3 |
|
4 |
# Initialize a text generation pipeline
|
5 |
generator = pipeline('text-generation', model='dbmdz/german-gpt2')
|
6 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
7 |
# Define the pages
|
8 |
def page_trending_niche():
|
|
|
|
|
9 |
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
st.
|
|
|
16 |
|
17 |
|
18 |
# Define the pages
|
|
|
1 |
import streamlit as st
|
2 |
from transformers import pipeline
|
3 |
+
import requests
|
4 |
+
from bs4 import BeautifulSoup
|
5 |
|
6 |
# Initialize a text generation pipeline.
# Loads the German-language GPT-2 checkpoint once at import time; the first
# run downloads the model weights, so startup can be slow on a cold cache.
generator = pipeline('text-generation', model='dbmdz/german-gpt2')
|
8 |
|
9 |
+
# Define a function to fetch trending news related to a specific niche
def fetch_trending_news(niche):
    """Scrape Google Search for last-day news headlines about *niche*.

    Args:
        niche: Search topic entered by the user, e.g. "German clinics".

    Returns:
        A list of up to 5 headline strings. May be empty when Google
        changes its markup or blocks the request.

    Raises:
        requests.RequestException: on network failure or timeout.
    """
    # tbs=qdr:d restricts results to the past day.
    url = f"https://www.google.com/search?q={niche}+news&tbs=qdr:d"
    # Present a desktop-browser User-Agent; Google serves different (or no)
    # scrapable markup to unknown clients.
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"}
    # Fix: a timeout is mandatory — requests.get without one can block the
    # Streamlit app indefinitely if Google stalls the connection.
    response = requests.get(url, headers=headers, timeout=10)
    soup = BeautifulSoup(response.content, "html.parser")
    # NOTE(review): "BNeawe vvjwJb AP7Wnd" is a Google-internal class name
    # that changes without notice; when it does, find_all() returns [] and
    # so does this function — confirm the selector still matches.
    news_items = soup.find_all("div", class_="BNeawe vvjwJb AP7Wnd")
    trending_news = [item.text for item in news_items[:5]]  # Extract the top 5 news items
    return trending_news
|
19 |
+
|
20 |
# Define the pages
def page_trending_niche():
    """Render the "What is trending in my niche?" page.

    Prompts the user for a niche, fetches recent headlines for it via
    fetch_trending_news(), and lists them numbered from 1.
    """
    st.title("What is trending in my niche?")
    st.image('Robot.png', use_column_width=True)

    # Pre-fill the input with the app's example niche.
    topic = st.text_input('Enter your niche', 'German clinics')
    st.write(f"Trending news in {topic}:")

    headlines = fetch_trending_news(topic)
    for rank, headline in enumerate(headlines, start=1):
        st.write(f"{rank}. {headline}")
|
31 |
+
|
32 |
|
33 |
|
34 |
# Define the pages
|