Update app.py
app.py CHANGED
@@ -3,8 +3,46 @@ import requests
 import json
 import os
 from datetime import datetime, timedelta
+from huggingface_hub import InferenceClient
 
 API_KEY = os.getenv("SERPHOUSE_API_KEY")
+hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
+
+# 국가별 언어 코드 매핑
+COUNTRY_LANGUAGES = {
+    "South Korea": "ko",
+    "Japan": "ja",
+    "China": "zh",
+    "Russia": "ru",
+    "France": "fr",
+    "Germany": "de",
+    "Spain": "es",
+    "Italy": "it",
+    "Netherlands": "nl",
+    "Portugal": "pt",
+    "Thailand": "th",
+    "Vietnam": "vi",
+    "Indonesia": "id",
+    "Malaysia": "ms",
+    "Saudi Arabia": "ar",
+    "United Arab Emirates": "ar",
+    "Egypt": "ar",
+    "Morocco": "ar",
+    "Greece": "el",
+    "Poland": "pl",
+    "Czech Republic": "cs",
+    "Hungary": "hu",
+    "Turkey": "tr",
+    "Romania": "ro",
+    "Bulgaria": "bg",
+    "Croatia": "hr",
+    "Serbia": "sr",
+    "Slovakia": "sk",
+    "Slovenia": "sl",
+    "Estonia": "et",
+    "Latvia": "lv",
+    "Lithuania": "lt"
+}
 
 COUNTRY_LOCATIONS = {
     "United States": "United States",
@@ -78,16 +116,37 @@ COUNTRY_LOCATIONS = {
 
 MAJOR_COUNTRIES = list(COUNTRY_LOCATIONS.keys())
 
+def translate_query(query, country):
+    try:
+        if country in COUNTRY_LANGUAGES:
+            target_lang = COUNTRY_LANGUAGES[country]
+            prompt = f"Translate the following English text to {target_lang} language. Only output the translated text without any explanations or quotes: {query}"
+
+            translated = hf_client.text_generation(
+                prompt,
+                max_new_tokens=100,
+                temperature=0.3
+            )
+            return translated.strip()
+        return query
+    except Exception as e:
+        print(f"Translation error: {str(e)}")
+        return query
 
 def search_serphouse(query, country, page=1, num_result=10):
     url = "https://api.serphouse.com/serp/live"
 
+    # 검색어 번역
+    translated_query = translate_query(query, country)
+    print(f"Original query: {query}")
+    print(f"Translated query: {translated_query}")
+
     payload = {
         "data": {
-            "q": query,
+            "q": translated_query,
             "domain": "google.com",
-            "loc": COUNTRY_LOCATIONS.get(country, "United States"),
-            "lang": "en",
+            "loc": COUNTRY_LOCATIONS.get(country, "United States"),
+            "lang": COUNTRY_LANGUAGES.get(country, "en"),
             "device": "desktop",
             "serp_type": "news",
             "page": "1",
@@ -109,11 +168,11 @@ def search_serphouse(query, country, page=1, num_result=10):
         print("Response status:", response.status_code)
 
         response.raise_for_status()
-        return response.json()
+        return response.json(), translated_query
     except requests.RequestException as e:
-        return {"error": f"Error: {str(e)}"}
+        return {"error": f"Error: {str(e)}"}, query
 
-def format_results_from_raw(results):
+def format_results_from_raw(results, translated_query):
     if isinstance(results, dict) and "error" in results:
         return "Error: " + results["error"], []
 
@@ -131,15 +190,16 @@ def format_results_from_raw(results):
                 "snippet": result.get("snippet", "내용 없음"),
                 "channel": result.get("channel", result.get("source", "알 수 없음")),
                 "time": result.get("time", result.get("date", "알 수 없는 시간")),
-                "image_url": result.get("img", result.get("thumbnail", ""))
+                "image_url": result.get("img", result.get("thumbnail", "")),
+                "translated_query": translated_query
             })
         return "", articles
     except Exception as e:
         return f"결과 처리 중 오류 발생: {str(e)}", []
 
 def serphouse_search(query, country):
-    results = search_serphouse(query, country)
-    return format_results_from_raw(results)
+    results, translated_query = search_serphouse(query, country)
+    return format_results_from_raw(results, translated_query)
 
 css = """
 footer {visibility: hidden;}
@@ -154,7 +214,6 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI 서비스") as
     country = gr.Dropdown(MAJOR_COUNTRIES, label="국가", value="South Korea")
     search_button = gr.Button("검색", variant="primary")
 
-    # 프로그레스바 추가
    progress = gr.Progress()
    status_message = gr.Markdown(visible=False)
    articles_state = gr.State([])
@@ -200,9 +259,11 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI 서비스") as
             image_url = article['image_url']
             image_update = gr.update(value=image_url, visible=True) if image_url and not image_url.startswith('data:image') else gr.update(value=None, visible=False)
 
+            translated_info = f"\n**번역된 검색어:** {article['translated_query']}" if article['translated_query'] != query else ""
+
             outputs.extend([
                 gr.update(visible=True),
-                gr.update(value=f"### [{article['title']}]({article['link']})"),
+                gr.update(value=f"### [{article['title']}]({article['link']}){translated_info}"),
                 image_update,
                 gr.update(value=f"**요약:** {article['snippet']}"),
                 gr.update(value=f"**출처:** {article['channel']} | **시간:** {article['time']}")
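The translation step added in this change can be exercised on its own. The sketch below mirrors `translate_query` from the diff (same model, prompt, and generation parameters) with a trimmed language map; it is only a sketch, assuming `HF_TOKEN` is set in the environment and the Hugging Face Inference API is reachable, and the sample query is illustrative.

```python
import os
from huggingface_hub import InferenceClient

# Trimmed copy of the COUNTRY_LANGUAGES mapping from the diff.
COUNTRY_LANGUAGES = {"South Korea": "ko", "Japan": "ja"}

# Same model as in the diff; requires HF_TOKEN in the environment.
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))

def translate_query(query, country):
    """Translate an English query into the target country's language, falling back to the original query."""
    try:
        if country in COUNTRY_LANGUAGES:
            target_lang = COUNTRY_LANGUAGES[country]
            prompt = (
                f"Translate the following English text to {target_lang} language. "
                f"Only output the translated text without any explanations or quotes: {query}"
            )
            translated = hf_client.text_generation(prompt, max_new_tokens=100, temperature=0.3)
            return translated.strip()
        return query  # no mapping for this country -> search with the English query as-is
    except Exception as e:
        # Any auth/network/model failure degrades gracefully to the untranslated query.
        print(f"Translation error: {str(e)}")
        return query

if __name__ == "__main__":
    print(translate_query("semiconductor export news", "South Korea"))  # sample query, illustrative only
```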
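`search_serphouse` now returns a `(results, translated_query)` pair, and `format_results_from_raw` threads the translated query into every article dict. The following is a minimal offline sketch of that contract using the error branch visible in the diff; the search call is replaced by a stub so no HTTP request is made, and the stub's error text is made up.

```python
# Offline sketch of the new 2-tuple contract; no network is involved.

def search_serphouse_stub(query, country):
    # Stands in for the except-branch of search_serphouse in the diff:
    # on a request failure it now returns the raw error dict plus the untranslated query.
    return {"error": "Error: connection timed out"}, query  # error text is illustrative

def format_results_from_raw(results, translated_query):
    # Trimmed copy of the function in the diff: only the error branch is shown here.
    if isinstance(results, dict) and "error" in results:
        return "Error: " + results["error"], []
    return "", []  # success branch elided

results, translated_query = search_serphouse_stub("chip exports", "South Korea")
error, articles = format_results_from_raw(results, translated_query)
print(error)     # -> "Error: Error: connection timed out"
print(articles)  # -> []
```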
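On the UI side, the translated query is appended to the article's title markdown only when it differs from what the user typed. A small sketch of that formatting follows, using the same expressions as the diff; the query, article fields, and URL are placeholders.

```python
# Sketch of the title-markdown change; all values below are placeholders.
query = "semiconductor news"                     # what the user typed
article = {
    "title": "삼성전자, 반도체 수출 확대",
    "link": "https://example.com/article",
    "translated_query": "반도체 뉴스",
}

# Show the translated query only when it differs from the original input.
translated_info = (
    f"\n**번역된 검색어:** {article['translated_query']}"
    if article["translated_query"] != query
    else ""
)
title_markdown = f"### [{article['title']}]({article['link']}){translated_info}"
print(title_markdown)
```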