Update app.py
app.py
CHANGED
@@ -1,6 +1,7 @@
 import gradio as gr
 import requests
 import json
+from datetime import datetime, timedelta
 
 API_KEY = "V38CNn4HXpLtynJQyOeoUensTEYoFy8PBUxKpDqAW1pawT1vfJ2BWtPQ98h6"
 
@@ -19,6 +20,10 @@ MAJOR_COUNTRIES = [
 
 def search_serphouse(query, country, page, num_result):
     url = "https://api.serphouse.com/serp/live"
+
+    now = datetime.utcnow()
+    yesterday = now - timedelta(days=1)
+
     payload = {
         "data": {
             "q": query,
@@ -29,7 +34,8 @@ def search_serphouse(query, country, page, num_result):
             "serp_type": "news",
             "page": str(page),
             "verbatim": "1",
-            "num": str(num_result)
+            "num": str(num_result),
+            "date_range": f"{yesterday.strftime('%Y-%m-%d')}:{now.strftime('%Y-%m-%d')}"
         }
     }
 
@@ -47,13 +53,9 @@ def search_serphouse(query, country, page, num_result):
         return f"Error: {str(e)}"
 
 def format_results(results):
-    all_results = "
-
-
-    result_table += "|------|------|------|------|-------------|\n"
-
-    debug_info = "## Debug Info\n\n"
-    debug_info += f"Raw API Response:\n```json\n{json.dumps(results, indent=2)}\n```\n\n"
+    all_results = "<h2>All News Results (Last 24 Hours)</h2><ol>"
+    debug_info = "<h2>Debug Info</h2>"
+    debug_info += f"<pre>{json.dumps(results, indent=2, ensure_ascii=False)}</pre>"
 
     try:
         if not isinstance(results, dict):
@@ -63,46 +65,48 @@ def format_results(results):
             raise ValueError("The 'results' key is missing from the response.")
 
         news_results = results["results"].get("news", [])
-        debug_info += f"
+        debug_info += f"<p>Number of news results: {len(news_results)}</p>"
 
-        for
+        for result in news_results:
             title = result.get("title", "No title")
             url = result.get("url", "#")
             snippet = result.get("snippet", "No content")
             channel = result.get("channel", "Unknown")
             time_str = result.get("time", "Unknown time")
 
-            single_debug_info = f"News {idx + 1} - Title: {title}, Link: {url}, Time: {time_str}, Source: {channel}"
-
-            result_table += f"| {title[:30]}... | [{url[:30]}...]({url}) | {time_str} | {channel} | {single_debug_info[:50]}... |\n"
-
             article_info = f"""
-
-            {
-
-
-
+            <li>
+                <h3><a href="{url}" target="_blank">{title}</a></h3>
+                <p>{snippet}</p>
+                <p><strong>Source:</strong> {channel} - {time_str}</p>
+            </li>
+            """
             all_results += article_info
 
+        all_results += "</ol>"
+
     except Exception as e:
         error_message = f"Error while processing results: {str(e)}"
-        debug_info += error_message
-        all_results = error_message
-
-        result_table += f"| Error | - | - | - | {error_message} |\n"
+        debug_info += f"<p>{error_message}</p>"
+        all_results = f"<p>{error_message}</p>"
 
-    return all_results,
-
+    return all_results, debug_info
 
 def serphouse_search(query, country, page, num_result):
     results = search_serphouse(query, country, page, num_result)
-    all_results,
-    return all_results,
+    all_results, debug_info = format_results(results)
+    return all_results, debug_info
 
 css = """
 footer {
     visibility: hidden;
 }
+ol {
+    padding-left: 20px;
+}
+li {
+    margin-bottom: 20px;
+}
 """
 
 iface = gr.Interface(
@@ -114,11 +118,11 @@ iface = gr.Interface(
         gr.Slider(1, 100, 10, label="Number of results")
     ],
     outputs=[
-        gr.
-        gr.
+        gr.HTML(label="All results"),
+        gr.HTML(label="Debug info")
     ],
-    title="SERPHouse News Search Interface",
-    description="Enter a search query and select a country to fetch news results from the SERPHouse API.",
+    title="SERPHouse 24-Hour News Search Interface",
+    description="Enter a search query and select a country to fetch news results from the last 24 hours via the SERPHouse API.",
     theme="Nymbo/Nymbo_Theme",
     css=css
 )
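The hunks above show only the changed lines; the HTTP request itself (headers and the POST to the live endpoint) is unchanged context that the diff omits. Below is a minimal standalone sketch of how the updated 24-hour request is likely assembled. The Bearer-token header, the extra payload fields (domain, location, language, device), and the placeholder key are illustrative assumptions, not code from this commit; only the endpoint, the news serp_type, and the date_range field are taken from the diff.

```python
import json
from datetime import datetime, timedelta

import requests

API_KEY = "YOUR_SERPHOUSE_API_KEY"  # placeholder; avoid hard-coding real keys


def search_news_last_24h(query: str, country: str, page: int, num_result: int) -> dict:
    """Sketch of the updated request: a news SERP restricted to the last 24 hours."""
    url = "https://api.serphouse.com/serp/live"

    # Same window construction as the commit: UTC "yesterday" through "now".
    now = datetime.utcnow()
    yesterday = now - timedelta(days=1)

    payload = {
        "data": {
            "q": query,
            "domain": "google.com",   # assumption: fields elided from the diff
            "loc": country,           # assumption: country name from MAJOR_COUNTRIES
            "lang": "en",             # assumption
            "device": "desktop",      # assumption
            "serp_type": "news",
            "page": str(page),
            "verbatim": "1",
            "num": str(num_result),
            "date_range": f"{yesterday.strftime('%Y-%m-%d')}:{now.strftime('%Y-%m-%d')}",
        }
    }

    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}",  # assumption: bearer auth for the live endpoint
    }

    try:
        response = requests.post(url, json=payload, headers=headers, timeout=30)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        return {"error": str(e)}


if __name__ == "__main__":
    results = search_news_last_24h("artificial intelligence", "United States", 1, 10)
    news_items = results.get("results", {}).get("news", [])
    print(json.dumps(news_items[:2], indent=2, ensure_ascii=False))
```

Note the date_range format the commit uses: a start:end pair of YYYY-MM-DD dates, which restricts the news SERP to roughly the last 24 hours in UTC.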