File size: 4,852 Bytes
f2842d4
 
 
1ee305b
f2842d4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1ee305b
 
 
 
f2842d4
 
 
 
 
 
 
 
 
 
1ee305b
 
f2842d4
 
 
 
 
 
 
 
 
 
 
 
 
 
4e396cb
 
 
 
f2842d4
 
4e396cb
1ee305b
4e396cb
 
 
 
 
 
1ee305b
f2842d4
 
 
 
 
 
 
 
 
1ee305b
f2842d4
4e396cb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f2842d4
4e396cb
1ee305b
f2842d4
 
1ee305b
4e396cb
f2842d4
1ee305b
f2842d4
 
 
1ee305b
 
f2842d4
 
 
 
 
1ee305b
 
 
 
 
 
f2842d4
 
 
 
 
 
 
 
 
 
 
1ee305b
 
f2842d4
4e396cb
 
f2842d4
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
import gradio as gr
import requests
import json
from datetime import datetime, timedelta

API_KEY = "V38CNn4HXpLtynJQyOeoUensTEYoFy8PBUxKpDqAW1pawT1vfJ2BWtPQ98h6"

MAJOR_COUNTRIES = [
    "United States", "United Kingdom", "Canada", "Australia", "Germany", 
    "France", "Japan", "South Korea", "China", "India", 
    "Brazil", "Mexico", "Russia", "Italy", "Spain", 
    "Netherlands", "Sweden", "Switzerland", "Norway", "Denmark", 
    "Finland", "Belgium", "Austria", "New Zealand", "Ireland", 
    "Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia", 
    "South Africa", "Turkey", "Egypt", "Poland", "Czech Republic", 
    "Hungary", "Greece", "Portugal", "Argentina", "Chile", 
    "Colombia", "Peru", "Venezuela", "Thailand", "Malaysia", 
    "Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
]

def search_serphouse(query, country, page, num_result):
    url = "https://api.serphouse.com/serp/live"
    
    now = datetime.utcnow()
    yesterday = now - timedelta(days=1)
    
    payload = {
        "data": {
            "q": query,
            "domain": "google.com",
            "loc": country,
            "lang": "en",
            "device": "desktop",
            "serp_type": "news",
            "page": str(page),
            "verbatim": "1",
            "num": str(num_result),
            "date_range": f"{yesterday.strftime('%Y-%m-%d')}:{now.strftime('%Y-%m-%d')}"
        }
    }
    
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}"
    }
    
    try:
        response = requests.post(url, json=payload, headers=headers)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        error_msg = f"Error: {str(e)}"
        if response.text:
            error_msg += f"\nResponse content: {response.text}"
        return {"error": error_msg}

def format_results(results):
    all_results = "<h2>๋ชจ๋“  ๋‰ด์Šค ๊ฒฐ๊ณผ (24์‹œ๊ฐ„ ์ด๋‚ด)</h2>"
    debug_info = "<h2>๋””๋ฒ„๊ทธ ์ •๋ณด</h2>"
    
    if isinstance(results, dict) and "error" in results:
        all_results += f"<p>์˜ค๋ฅ˜ ๋ฐœ์ƒ: {results['error']}</p>"
        debug_info += f"<pre>{results['error']}</pre>"
        return all_results, debug_info
    
    debug_info += f"<pre>{json.dumps(results, indent=2, ensure_ascii=False)}</pre>"
    
    try:
        if not isinstance(results, dict):
            raise ValueError("๊ฒฐ๊ณผ๊ฐ€ ์‚ฌ์ „ ํ˜•์‹์ด ์•„๋‹™๋‹ˆ๋‹ค.")
        
        if "results" not in results:
            raise ValueError("'results' ํ‚ค๊ฐ€ ์‘๋‹ต์— ์—†์Šต๋‹ˆ๋‹ค.")
        
        news_results = results["results"].get("news", [])
        debug_info += f"<p>๋‰ด์Šค ๊ฒฐ๊ณผ ์ˆ˜: {len(news_results)}</p>"
        
        if not news_results:
            all_results += "<p>๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.</p>"
        else:
            all_results += "<ol>"
            for result in news_results:
                title = result.get("title", "์ œ๋ชฉ ์—†์Œ")
                url = result.get("url", "#")
                snippet = result.get("snippet", "๋‚ด์šฉ ์—†์Œ")
                channel = result.get("channel", "์•Œ ์ˆ˜ ์—†์Œ")
                time_str = result.get("time", "์•Œ ์ˆ˜ ์—†๋Š” ์‹œ๊ฐ„")
                
                article_info = f"""
                <li>
                    <h3><a href="{url}" target="_blank">{title}</a></h3>
                    <p>{snippet}</p>
                    <p><strong>์ถœ์ฒ˜:</strong> {channel} - {time_str}</p>
                </li>
                """
                all_results += article_info
            
            all_results += "</ol>"
        
    except Exception as e:
        error_message = f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
        debug_info += f"<p>{error_message}</p>"
        all_results += f"<p>{error_message}</p>"
    
    return all_results, debug_info

def serphouse_search(query, country, page, num_result):
    results = search_serphouse(query, country, page, num_result)
    all_results, debug_info = format_results(results)
    return all_results, debug_info

css = """
footer {
    visibility: hidden;
}
ol {
    padding-left: 20px;
}
li {
    margin-bottom: 20px;
}
"""

iface = gr.Interface(
    fn=serphouse_search,
    inputs=[
        gr.Textbox(label="๊ฒ€์ƒ‰์–ด"),
        gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ€"),
        gr.Slider(1, 10, 1, label="ํŽ˜์ด์ง€"),
        gr.Slider(1, 100, 10, label="๊ฒฐ๊ณผ ์ˆ˜")
    ],
    outputs=[
        gr.HTML(label="๋ชจ๋“  ๊ฒฐ๊ณผ"),
        gr.HTML(label="๋””๋ฒ„๊ทธ ์ •๋ณด")
    ],
    title="24์‹œ๊ฐ„ ์ด๋‚ด ๋‰ด์Šค ๊ฒ€์ƒ‰ ์ธํ„ฐํŽ˜์ด์Šค",
    description="๊ฒ€์ƒ‰์–ด๋ฅผ ์ž…๋ ฅํ•˜๊ณ  ๊ตญ๊ฐ€๋ฅผ ์„ ํƒํ•˜์—ฌ 24์‹œ๊ฐ„ ์ด๋‚ด์˜ ๋‰ด์Šค ๊ฒฐ๊ณผ๋ฅผ ๊ฐ€์ ธ์˜ต๋‹ˆ๋‹ค.",
    theme="Nymbo/Nymbo_Theme",
    css=css
)

iface.launch()