File size: 8,357 Bytes
f2842d4
 
 
8aede25
1ee305b
8aede25
 
f2842d4
8aede25
 
 
 
 
 
f2842d4
 
e420980
 
 
 
 
 
 
 
 
f2842d4
 
 
9178c02
f2842d4
eb43e13
f4723df
 
 
eb43e13
f2842d4
 
 
 
 
 
 
 
 
 
1ee305b
f4723df
f2842d4
 
eb43e13
f2842d4
 
 
 
 
eb43e13
f2842d4
 
 
 
 
4e396cb
 
 
 
f2842d4
606cff4
f2842d4
57624e4
8aede25
57624e4
f2842d4
 
46b3475
359bc71
f7bc11a
 
359bc71
 
 
 
 
 
 
f7bc11a
 
 
 
46b3475
f7bc11a
8aede25
46b3475
8aede25
359bc71
 
e420980
 
 
eb43e13
 
 
4172426
8aede25
 
 
 
 
 
 
 
 
359bc71
8aede25
46b3475
f2842d4
fbd003b
8aede25
f2842d4
9178c02
 
 
 
f2842d4
8aede25
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f2842d4
 
 
 
 
 
 
12d3fa4
8aede25
45eb115
12d3fa4
8aede25
12d3fa4
 
 
8aede25
12d3fa4
8aede25
 
021f211
8aede25
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
021f211
8aede25
021f211
8aede25
021f211
8aede25
 
 
 
 
 
021f211
 
 
9178c02
8aede25
 
 
 
 
 
 
021f211
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
import gradio as gr
import requests
import json
import os
from datetime import datetime, timedelta
from bs4 import BeautifulSoup  # ์›น ํŽ˜์ด์ง€์—์„œ ํ…์ŠคํŠธ๋ฅผ ์ถ”์ถœํ•˜๊ธฐ ์œ„ํ•ด ์‚ฌ์šฉ
from huggingface_hub import InferenceClient  # LLM ์‚ฌ์šฉ์„ ์œ„ํ•ด ํ•„์š”

# ํ•„์š”ํ•œ ํŒจํ‚ค์ง€ ์„ค์น˜ (ํ•„์š”ํ•œ ๊ฒฝ์šฐ ์ฃผ์„์„ ์ œ๊ฑฐํ•˜๊ณ  ์‹คํ–‰)
# !pip install bs4 huggingface_hub

# ํ™˜๊ฒฝ ๋ณ€์ˆ˜์—์„œ API ํ‚ค ๊ฐ€์ ธ์˜ค๊ธฐ (API ํ‚ค๋Š” ์•ˆ์ „ํ•˜๊ฒŒ ๊ด€๋ฆฌ๋˜์–ด์•ผ ํ•ฉ๋‹ˆ๋‹ค)
API_KEY = os.getenv("SERPHOUSE_API_KEY")  # ๋ณธ์ธ์˜ SerpHouse API ํ‚ค๋ฅผ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋กœ ์„ค์ •ํ•˜์„ธ์š”.
HF_TOKEN = os.getenv("HF_TOKEN")  # Hugging Face API ํ† ํฐ์„ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋กœ ์„ค์ •ํ•˜์„ธ์š”.

MAJOR_COUNTRIES = [
    "United States", "United Kingdom", "Canada", "Australia", "Germany",
    "France", "Japan", "South Korea", "China", "India",
    "Brazil", "Mexico", "Russia", "Italy", "Spain",
    "Netherlands", "Sweden", "Switzerland", "Norway", "Denmark",
    "Finland", "Belgium", "Austria", "New Zealand", "Ireland",
    "Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia",
    "South Africa", "Turkey", "Egypt", "Poland", "Czech Republic",
    "Hungary", "Greece", "Portugal", "Argentina", "Chile",
    "Colombia", "Peru", "Venezuela", "Thailand", "Malaysia",
    "Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
]

def search_serphouse(query, country, page=1, num_result=100):
    """Query the SerpHouse live SERP API for news from the last 24 hours.

    Args:
        query: Search keywords.
        country: Location string sent as the API's "loc" field.
        page: Result page number (serialized as a string for the API).
        num_result: Maximum number of results to request.

    Returns:
        The decoded JSON response dict on success, or a dict with a
        single "error" key describing the failure.
    """
    url = "https://api.serphouse.com/serp/live"

    # Restrict results to the last 24 hours (UTC-based date window).
    now = datetime.utcnow()
    yesterday = now - timedelta(days=1)
    date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"

    payload = {
        "data": {
            "q": query,
            "domain": "google.com",
            "loc": country,
            "lang": "en",
            "device": "desktop",
            "serp_type": "news",
            "page": str(page),
            "verbatim": "1",
            "num": str(num_result),
            "date_range": date_range
        }
    }

    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}"
    }

    response = None  # predefine so the except block can safely inspect it
    try:
        # timeout added so a stalled API call cannot hang the app forever.
        response = requests.post(url, json=payload, headers=headers, timeout=30)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        # Bug fix: previously `response` was unbound when requests.post()
        # itself raised (e.g. a connection error), turning the error path
        # into a NameError instead of the intended {"error": ...} dict.
        error_msg = f"Error: {str(e)}"
        if response is not None and response.text:
            error_msg += f"\nResponse content: {response.text}"
        return {"error": error_msg}

def format_results_from_raw(results):
    """Normalize a raw SerpHouse response into a list of article dicts.

    Args:
        results: Parsed JSON from search_serphouse(), or an
            {"error": ...} dict produced on request failure.

    Returns:
        A (error_message, articles) tuple. error_message is "" on
        success and a Korean/English error string otherwise; articles
        is a list of dicts with index/title/link/snippet/channel/time/
        image_url keys (empty on error or when nothing was found).
    """
    try:
        if isinstance(results, dict) and "error" in results:
            return "Error: " + results["error"], []

        if not isinstance(results, dict):
            raise ValueError("๊ฒฐ๊ณผ๊ฐ€ ์‚ฌ์ „ ํ˜•์‹์ด ์•„๋‹™๋‹ˆ๋‹ค.")

        # The API nests the payload as results -> results -> news; chained
        # .get() calls replace the original if-ladder and default to [].
        news_results = (
            results.get("results", {}).get("results", {}).get("news", [])
        )

        if not news_results:
            return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", []

        # Each field falls back through the API's alternate key names,
        # then to a Korean placeholder string.
        articles = [
            {
                "index": idx,
                "title": item.get("title", "์ œ๋ชฉ ์—†์Œ"),
                "link": item.get("url", item.get("link", "#")),
                "snippet": item.get("snippet", "๋‚ด์šฉ ์—†์Œ"),
                "channel": item.get("channel", item.get("source", "์•Œ ์ˆ˜ ์—†์Œ")),
                "time": item.get("time", item.get("date", "์•Œ ์ˆ˜ ์—†๋Š” ์‹œ๊ฐ„")),
                "image_url": item.get("img", item.get("thumbnail", "")),
            }
            for idx, item in enumerate(news_results, 1)
        ]

        return "", articles

    except Exception as e:
        error_message = f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
        return "Error: " + error_message, []

def serphouse_search(query, country):
    """Run a one-page news search (up to 100 results) and format it.

    Returns the (error_message, articles) tuple produced by
    format_results_from_raw().
    """
    raw = search_serphouse(query, country, page=1, num_result=100)
    return format_results_from_raw(raw)

# LLM setup: Hugging Face inference client used to summarize articles.
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)

def summarize_article(url):
    """Fetch an article page and summarize it in Korean via the LLM.

    Args:
        url: Article URL to download and summarize.

    Returns:
        The generated summary string, or a Korean notice/error message
        when the page has no paragraph text or any step fails.
    """
    try:
        # Extract text from the web page. Bug fix: a timeout is required
        # so an unresponsive site cannot block the UI thread indefinitely.
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        # Simple extraction: concatenate the text of all <p> tags.
        text = ' '.join(p.get_text() for p in soup.find_all('p'))
        if not text.strip():
            return "๊ธฐ์‚ฌ ๋‚ด์šฉ์„ ๊ฐ€์ ธ์˜ฌ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."

        # Ask the LLM for a 3-sentence Korean summary of the English article.
        prompt = f"๋‹ค์Œ ์˜์–ด ๊ธฐ์‚ฌ๋ฅผ ํ•œ๊ตญ์–ด๋กœ 3๋ฌธ์žฅ์œผ๋กœ ์š”์•ฝํ•˜์„ธ์š”:\n{text}"
        summary = hf_client.text_generation(prompt, max_new_tokens=500)
        return summary
    except Exception as e:
        return f"์š”์•ฝ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"

css = """
footer {
    visibility: hidden;
}
"""

# Gradio ์ธํ„ฐํŽ˜์ด์Šค ๊ตฌ์„ฑ
with gr.Blocks(css=css, title="NewsAI ์„œ๋น„์Šค") as iface:
    gr.Markdown("๊ฒ€์ƒ‰์–ด๋ฅผ ์ž…๋ ฅํ•˜๊ณ  ์›ํ•˜๋Š” ๊ตญ๊ฐ€๋ฅผ ์„ ํƒํ•˜๋ฉด, ๊ฒ€์ƒ‰์–ด์™€ ์ผ์น˜ํ•˜๋Š” 24์‹œ๊ฐ„ ์ด๋‚ด ๋‰ด์Šค๋ฅผ ์ตœ๋Œ€ 100๊ฐœ ์ถœ๋ ฅํ•ฉ๋‹ˆ๋‹ค.")

    with gr.Column():
        with gr.Row():
            query = gr.Textbox(label="๊ฒ€์ƒ‰์–ด")
            country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ€", value="South Korea")
            search_button = gr.Button("๊ฒ€์ƒ‰")

        output_table = gr.HTML()
        summary_output = gr.Markdown(visible=False)

    def search_and_display(query, country):
        error_message, articles = serphouse_search(query, country)
        if error_message:
            return f"<p>{error_message}</p>", gr.update(visible=False)
        else:
            # ๊ธฐ์‚ฌ ๋ชฉ๋ก์„ HTML ํ…Œ์ด๋ธ”๋กœ ์ƒ์„ฑ
            table_html = "<table border='1' style='width:100%; text-align:left;'><tr><th>๋ฒˆํ˜ธ</th><th>์ œ๋ชฉ</th><th>์ถœ์ฒ˜</th><th>์‹œ๊ฐ„</th><th>๋ถ„์„</th></tr>"
            for article in articles:
                # ๊ฐ ๊ธฐ์‚ฌ์— ๋Œ€ํ•ด ๋ฒ„ํŠผ์— ํ•ด๋‹นํ•˜๋Š” JavaScript ์ฝ”๋“œ๋ฅผ ์‚ฝ์ž…
                analyze_button = f"""<button onclick="analyzeArticle('{article['link']}')">๋ถ„์„</button>"""
                row = f"""
                <tr>
                    <td>{article['index']}</td>
                    <td><a href="{article['link']}" target="_blank">{article['title']}</a></td>
                    <td>{article['channel']}</td>
                    <td>{article['time']}</td>
                    <td>{analyze_button}</td>
                </tr>
                """
                table_html += row
            table_html += "</table>"

            # JavaScript ํ•จ์ˆ˜ ์ •์˜
            js_code = """
            <script>
            function analyzeArticle(url) {
                // Gradio์˜ handle_function์„ ์‚ฌ์šฉํ•˜์—ฌ Python ํ•จ์ˆ˜ ํ˜ธ์ถœ
                gradioApp().querySelector('#article_url_input textarea').value = url;
                gradioApp().querySelector('#analyze_button').click();
            }
            </script>
            """

            full_html = table_html + js_code

            return full_html, gr.update(visible=True, value="")  # summary_output ์ดˆ๊ธฐํ™”

    def analyze_article(url):
        summary = summarize_article(url)
        return summary

    article_url_input = gr.Textbox(visible=False, elem_id="article_url_input")
    analyze_button = gr.Button("๋ถ„์„", visible=False, elem_id="analyze_button")

    search_button.click(
        search_and_display,
        inputs=[query, country],
        outputs=[output_table, summary_output]
    )

    analyze_button.click(
        analyze_article,
        inputs=[article_url_input],
        outputs=[summary_output]
    )

iface.launch(auth=("gini", "pick"))