File size: 8,240 Bytes
f2842d4
 
 
8aede25
1ee305b
5313f78
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c7302ea
5313f78
f2842d4
9395df5
f2842d4
5313f78
 
f4723df
 
 
eb43e13
f2842d4
 
 
5313f78
 
f2842d4
 
 
 
 
1ee305b
f4723df
f2842d4
 
eb43e13
5528b6f
 
 
 
 
 
 
 
 
 
 
5313f78
5528b6f
606cff4
5313f78
 
46b3475
5313f78
 
f7bc11a
8aede25
46b3475
8aede25
359bc71
8aede25
f10671e
5313f78
 
 
 
 
 
8aede25
 
f2842d4
5313f78
f2842d4
9178c02
5313f78
 
8aede25
 
 
f10671e
8aede25
82853cb
5313f78
8aede25
5313f78
f2842d4
 
5313f78
f2842d4
 
c7302ea
0e37fb4
12d3fa4
8aede25
12d3fa4
 
 
8aede25
12d3fa4
c7302ea
5313f78
695d9f1
f10671e
5313f78
f10671e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
aaf8e65
f10671e
 
1ac6a1a
c7302ea
 
5313f78
c7302ea
 
 
 
5313f78
 
c7302ea
 
 
 
 
 
 
 
5313f78
c7302ea
 
5313f78
 
 
 
 
 
c7302ea
 
f10671e
5313f78
 
f10671e
5313f78
 
c7302ea
 
1ac6a1a
 
5313f78
1ac6a1a
5313f78
 
 
1ac6a1a
 
 
 
c7302ea
5313f78
1ac6a1a
 
 
 
 
c7302ea
 
 
 
5313f78
1ac6a1a
aaf8e65
1ac6a1a
 
 
c7302ea
5313f78
1ac6a1a
5528b6f
9395df5
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
import gradio as gr
import requests
import json
import os
from datetime import datetime, timedelta
from huggingface_hub import InferenceClient

API_KEY = os.getenv("SERPHOUSE_API_KEY")
HF_TOKEN = os.getenv("HF_TOKEN")

COUNTRY_DOMAINS = {
    "United States": "google.com",
    "United Kingdom": "google.co.uk",
    "Canada": "google.ca",
    "Australia": "google.com.au",
    "Germany": "google.de",
    "France": "google.fr",
    "Japan": "google.co.jp",
    "South Korea": "google.co.kr",
    "China": "google.com.hk",
    "India": "google.co.in",
    "Brazil": "google.com.br",
    "Mexico": "google.com.mx",
    "Russia": "google.ru",
    "Italy": "google.it",
    "Spain": "google.es",
    "Netherlands": "google.nl",
    "Singapore": "google.com.sg",
    "Hong Kong": "google.com.hk"
}

MAJOR_COUNTRIES = list(COUNTRY_DOMAINS.keys())

def search_serphouse(query, country, page=1, num_result=100):
    url = "https://api.serphouse.com/serp/live"
    domain = COUNTRY_DOMAINS.get(country, "google.com")
    
    now = datetime.utcnow()
    yesterday = now - timedelta(days=1)
    date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"

    payload = {
        "data": {
            "q": query,
            "domain": domain,
            "loc": country,
            "lang": "en",
            "device": "desktop",
            "serp_type": "news",
            "page": str(page),
            "verbatim": "1",
            "num": str(num_result),
            "date_range": date_range
        }
    }

    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}"
    }

    try:
        response = requests.post(url, json=payload, headers=headers)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        return {"error": f"Error: {str(e)}"}

def format_results_from_raw(results):
    if isinstance(results, dict) and "error" in results:
        return "Error: " + results["error"], []

    try:
        news_results = results.get('results', {}).get('results', {}).get('news', [])
        if not news_results:
            return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", []

        articles = []
        for idx, result in enumerate(news_results, 1):
            articles.append({
                "index": idx,
                "title": result.get("title", "์ œ๋ชฉ ์—†์Œ"),
                "link": result.get("url", result.get("link", "#")),
                "snippet": result.get("snippet", "๋‚ด์šฉ ์—†์Œ"),
                "channel": result.get("channel", result.get("source", "์•Œ ์ˆ˜ ์—†์Œ")),
                "time": result.get("time", result.get("date", "์•Œ ์ˆ˜ ์—†๋Š” ์‹œ๊ฐ„")),
                "image_url": result.get("img", result.get("thumbnail", ""))
            })
        return "", articles
    except Exception as e:
        return f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}", []

def serphouse_search(query, country):
    results = search_serphouse(query, country)
    return format_results_from_raw(results)

hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)

def summarize_article(title, snippet):
    try:
        prompt = f"๋‹ค์Œ ๋‰ด์Šค ์ œ๋ชฉ๊ณผ ์š”์•ฝ์„ ๋ฐ”ํƒ•์œผ๋กœ ํ•œ๊ตญ์–ด๋กœ 1๋ฌธ์žฅ์œผ๋กœ ๊ธ์ • ๋˜๋Š” ์ค‘๋ฆฝ ๋˜๋Š” ๋ถ€์ •์  ์„ฑ๊ฒฉ์˜ ๊ธฐ์‚ฌ์ธ์ง€ ํŒ๋‹จํ•˜๋ผ. ์ ˆ๋Œ€ ํ”„๋กฌํ”„ํŠธ ๋ฐ ์ง€์‹œ๋ฌธ ๋“ฑ์„ ๋…ธ์ถœํ•˜์ง€ ๋ง๊ณ  ์ค‘๋ณต์—†์ด ์˜ค๋กœ์ง€ 1๋ฌธ์žฅ์˜ ๊ฒฐ๊ณผ๊ฐ’๋งŒ ์ถœ๋ ฅํ•˜๋ผ.:\n์ œ๋ชฉ: {title}\n์š”์•ฝ: {snippet}"
        return hf_client.text_generation(prompt, max_new_tokens=500)
    except Exception as e:
        return f"๋ถ„์„ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"

css = """
footer {visibility: hidden;}
"""

with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as iface:
    gr.Markdown("๊ฒ€์ƒ‰์–ด๋ฅผ ์ž…๋ ฅํ•˜๊ณ  ์›ํ•˜๋Š” ๊ตญ๊ฐ€๋ฅผ ์„ ํƒํ•˜๋ฉด, ๊ฒ€์ƒ‰์–ด์™€ ์ผ์น˜ํ•˜๋Š” 24์‹œ๊ฐ„ ์ด๋‚ด ๋‰ด์Šค๋ฅผ ์ตœ๋Œ€ 100๊ฐœ ์ถœ๋ ฅํ•ฉ๋‹ˆ๋‹ค.")

    with gr.Column():
        with gr.Row():
            query = gr.Textbox(label="๊ฒ€์ƒ‰์–ด")
            country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ€", value="South Korea")
            search_button = gr.Button("๊ฒ€์ƒ‰")

        status_message = gr.Markdown(visible=False)
        articles_state = gr.State([])

        article_components = []
        for i in range(100):
            with gr.Group(visible=False) as article_group:
                title = gr.Markdown()
                image = gr.Image(width=200, height=150)
                snippet = gr.Markdown()
                info = gr.Markdown()
                analyze_button = gr.Button("๋ถ„์„")
                summary_output = gr.Markdown(visible=False)

                article_components.append({
                    'group': article_group,
                    'title': title,
                    'image': image,
                    'snippet': snippet,
                    'info': info,
                    'analyze_button': analyze_button,
                    'summary_output': summary_output,
                    'index': i,
                })

    def search_and_display(query, country, articles_state):
        error_message, articles = serphouse_search(query, country)
        outputs = []
        
        if error_message:
            outputs.append(gr.update(value=error_message, visible=True))
            for comp in article_components:
                outputs.extend([
                    gr.update(visible=False), gr.update(), gr.update(),
                    gr.update(), gr.update(), gr.update(visible=False),
                ])
            articles_state = []
        else:
            outputs.append(gr.update(value="", visible=False))
            for idx, comp in enumerate(article_components):
                if idx < len(articles):
                    article = articles[idx]
                    image_url = article['image_url']
                    image_update = gr.update(value=image_url, visible=True) if image_url and not image_url.startswith('data:image') else gr.update(value=None, visible=False)

                    outputs.extend([
                        gr.update(visible=True),
                        gr.update(value=f"### [{article['title']}]({article['link']})"),
                        image_update,
                        gr.update(value=f"**์š”์•ฝ:** {article['snippet']}"),
                        gr.update(value=f"**์ถœ์ฒ˜:** {article['channel']} | **์‹œ๊ฐ„:** {article['time']}"),
                        gr.update(visible=False),
                    ])
                else:
                    outputs.extend([
                        gr.update(visible=False), gr.update(), gr.update(),
                        gr.update(), gr.update(), gr.update(visible=False),
                    ])
            articles_state = articles

        outputs.append(articles_state)
        outputs.append(gr.update(visible=False))
        return outputs

    search_outputs = [gr.Markdown(visible=False)]
    for comp in article_components:
        search_outputs.extend([comp['group'], comp['title'], comp['image'],
                             comp['snippet'], comp['info'], comp['summary_output']])
    search_outputs.extend([articles_state, status_message])

    search_button.click(
        search_and_display,
        inputs=[query, country, articles_state],
        outputs=search_outputs,
        show_progress=False
    )

    for idx, comp in enumerate(article_components):
        def create_analyze_function(index=idx):
            def analyze_article(articles):
                if articles and index < len(articles):
                    article = articles[index]
                    summary = summarize_article(article['title'], article['snippet'])
                    return gr.update(value=summary, visible=True), gr.update(visible=False)
                return gr.update(value="๊ธฐ์‚ฌ ์ •๋ณด๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", visible=True), gr.update(visible=False)
            return analyze_article

        comp['analyze_button'].click(
            create_analyze_function(),
            inputs=[articles_state],
            outputs=[comp['summary_output'], status_message],
            show_progress=True
        )

iface.launch()