import gradio as gr
import requests
import json
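
# SERPHouse API key. Hard-coding it works for a quick demo, but reading it from an
# environment variable (for example os.environ.get("SERPHOUSE_API_KEY"), an
# illustrative name) is usually the safer choice.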
API_KEY = "V38CNn4HXpLtynJQyOeoUensTEYoFy8PBUxKpDqAW1pawT1vfJ2BWtPQ98h6"
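
# Countries available in the UI dropdown; the selected value is sent to SERPHouse as the "loc" parameter.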
MAJOR_COUNTRIES = [
    "United States", "United Kingdom", "Canada", "Australia", "Germany",
    "France", "Japan", "South Korea", "China", "India",
    "Brazil", "Mexico", "Russia", "Italy", "Spain",
    "Netherlands", "Sweden", "Switzerland", "Norway", "Denmark",
    "Finland", "Belgium", "Austria", "New Zealand", "Ireland",
    "Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia",
    "South Africa", "Turkey", "Egypt", "Poland", "Czech Republic",
    "Hungary", "Greece", "Portugal", "Argentina", "Chile",
    "Colombia", "Peru", "Venezuela", "Thailand", "Malaysia",
    "Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
]


def search_serphouse(query, country, page, num_result):
    """Send a news search request to the SERPHouse live SERP endpoint and return the parsed JSON."""
    url = "https://api.serphouse.com/serp/live"
    payload = {
        "data": {
            "q": query,
            "domain": "google.com",
            "loc": country,
            "lang": "en",
            "device": "desktop",
            "serp_type": "news",
            "page": str(page),
            "verbatim": "1",
            "num": str(num_result)
        }
    }

    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}"
    }

    try:
        response = requests.post(url, json=payload, headers=headers)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        return f"Error: {str(e)}"
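
# format_results expects the SERPHouse response to contain results["results"]["news"],
# a list of items with "title", "url", "snippet", "channel" and "time" fields; if the
# response is not a dict or lacks a "results" key, the error branch below reports it.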


def format_results(results):
    """Format the SERPHouse response as Markdown: a full article list plus a summary table and debug info."""
    all_results = "## All News Results\n\n"
    result_table = "## News Results Table\n\n"
    result_table += "| Title | Link | Time | Source | Debug Info |\n"
    result_table += "|------|------|------|------|-------------|\n"

    debug_info = "## Debug Info\n\n"
    debug_info += f"Raw API Response:\n```json\n{json.dumps(results, indent=2)}\n```\n\n"

    try:
        if not isinstance(results, dict):
            raise ValueError("The response is not a dictionary.")

        if "results" not in results:
            raise ValueError("The response does not contain a 'results' key.")

        news_results = results["results"].get("news", [])
        debug_info += f"Number of news results: {len(news_results)}\n\n"

        for idx, result in enumerate(news_results):
            title = result.get("title", "No title")
            url = result.get("url", "#")
            snippet = result.get("snippet", "No content")
            channel = result.get("channel", "Unknown source")
            time_str = result.get("time", "Unknown time")

            single_debug_info = f"News {idx + 1} - Title: {title}, Link: {url}, Time: {time_str}, Source: {channel}"

            result_table += f"| {title[:30]}... | [{url[:30]}...]({url}) | {time_str} | {channel} | {single_debug_info[:50]}... |\n"

            article_info = f"""
### [{title}]({url})
{snippet}
**Source:** {channel} - {time_str}
---
"""
            all_results += article_info

    except Exception as e:
        error_message = f"Error while processing results: {str(e)}"
        debug_info += error_message + "\n"
        all_results = error_message + "\n\n"
        result_table += f"| Error | - | - | - | {error_message} |\n"

    return all_results, result_table + "\n\n" + debug_info


def serphouse_search(query, country, page, num_result):
    """Gradio callback: run the search and return (article list, results table + debug info) as Markdown."""
    results = search_serphouse(query, country, page, num_result)
    all_results, result_table_and_debug = format_results(results)
    return all_results, result_table_and_debug
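
# Example direct call (query values are illustrative only):
#   all_md, table_md = serphouse_search("climate change", "United States", 1, 10)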


css = """
footer {
    visibility: hidden;
}
"""

iface = gr.Interface(
    fn=serphouse_search,
    inputs=[
        gr.Textbox(label="Search term"),
        gr.Dropdown(MAJOR_COUNTRIES, label="Country"),
        gr.Slider(1, 10, 1, label="Page"),
        gr.Slider(1, 100, 10, label="Number of results")
    ],
    outputs=[
        gr.Markdown(label="All results"),
        gr.Markdown(label="Results table and debug info")
    ],
    title="SERPHouse News Search Interface",
    description="Enter a search term and select a country to fetch news results from the SERPHouse API.",
    theme="Nymbo/Nymbo_Theme",
    css=css
)
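
# launch() starts the local Gradio server; share=True could be passed to expose a public link.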

iface.launch()