import gradio as gr
import requests
import json
from datetime import datetime, timedelta
import pandas as pd

# SERPHouse API key (hard-coded here; ideally this would come from an environment variable).
API_KEY = "V38CNn4HXpLtynJQyOeoUensTEYoFy8PBUxKpDqAW1pawT1vfJ2BWtPQ98h6"
# Country names accepted by the SERPHouse "loc" parameter, shown in the dropdown.
MAJOR_COUNTRIES = [
"United States", "United Kingdom", "Canada", "Australia", "Germany",
"France", "Japan", "South Korea", "China", "India",
"Brazil", "Mexico", "Russia", "Italy", "Spain",
"Netherlands", "Sweden", "Switzerland", "Norway", "Denmark",
"Finland", "Belgium", "Austria", "New Zealand", "Ireland",
"Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia",
"South Africa", "Turkey", "Egypt", "Poland", "Czech Republic",
"Hungary", "Greece", "Portugal", "Argentina", "Chile",
"Colombia", "Peru", "Venezuela", "Thailand", "Malaysia",
"Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
]
def search_serphouse(query, country, page, num_result):
    """Query the SERPHouse live SERP API for news results from the last 24 hours."""
    url = "https://api.serphouse.com/serp/live"

    # Restrict results to the window from yesterday to now.
    now = datetime.utcnow()
    yesterday = now - timedelta(days=1)
    date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"

    payload = {
        "data": {
            "q": query,
            "domain": "google.com",
            "loc": country,
            "lang": "en",
            "device": "desktop",
            "serp_type": "news",
            "page": str(page),
            "verbatim": "1",
            "num": str(num_result),
            "date_range": date_range
        }
    }

    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}"
    }

    response = None
    try:
        response = requests.post(url, json=payload, headers=headers)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        error_msg = f"Error: {str(e)}"
        # response stays None if the request never completed (e.g. a connection error),
        # so guard against an UnboundLocalError before reading response.text.
        if response is not None and response.text:
            error_msg += f"\nResponse content: {response.text}"
        return {"error": error_msg}
def format_results(results):
    """Convert the raw API response into a results DataFrame plus a debug string."""
    debug_info = f"Raw API Response:\n{json.dumps(results, indent=2, ensure_ascii=False)}\n\n"

    if isinstance(results, dict) and "error" in results:
        return pd.DataFrame({"Error": [results["error"]]}), debug_info

    try:
        if not isinstance(results, dict):
            raise ValueError("Result is not a dictionary.")
        if "results" not in results:
            raise ValueError("The response has no 'results' key.")

        # The news list may be nested under results["results"]["news"] or be a bare list.
        news_results = results.get("results", {})
        if isinstance(news_results, dict):
            news_results = news_results.get("news", [])
        elif not isinstance(news_results, list):
            news_results = []

        debug_info += f"Number of news results: {len(news_results)}\n"
        debug_info += f"News results structure: {type(news_results)}\n"

        if not news_results:
            return pd.DataFrame({"Message": ["No search results found."]}), debug_info

        formatted_results = []
        for result in news_results:
            formatted_results.append({
                "Title": result.get("title", "No title"),
                "Link": result.get("url", "#"),
                "Content": result.get("snippet", "No content"),
                "Source": result.get("channel", "Unknown"),
                "Time": result.get("time", "Unknown time")
            })
        return pd.DataFrame(formatted_results), debug_info

    except Exception as e:
        error_message = f"Error while processing results: {str(e)}"
        debug_info += f"Error: {error_message}\n"
        debug_info += f"Results structure: {type(results)}\n"
        if isinstance(results, dict):
            debug_info += f"Results keys: {results.keys()}\n"
        return pd.DataFrame({"Error": [error_message]}), debug_info
def serphouse_search(query, country, page, num_result):
    """Gradio callback: run the search and return (results DataFrame, debug string)."""
    results = search_serphouse(query, country, page, num_result)
    df_results, debug_info = format_results(results)
    return df_results, debug_info
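
# Minimal sketch of calling the pipeline directly (outside Gradio), assuming a
# valid API key; the arguments mirror the interface inputs defined below:
#
#     df, debug = serphouse_search("artificial intelligence", "United States", 1, 10)
#     print(df.head())
#     print(debug)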
css = """
footer {
visibility: hidden;
}
"""
iface = gr.Interface(
    fn=serphouse_search,
    inputs=[
        gr.Textbox(label="Search query"),
        gr.Dropdown(MAJOR_COUNTRIES, label="Country"),
        gr.Slider(1, 10, 1, label="Page"),
        gr.Slider(1, 100, 10, label="Number of results")
    ],
    outputs=[
        gr.Dataframe(label="News results"),
        gr.Textbox(label="Debug info", lines=10)
    ],
    title="News Search Interface (Last 24 Hours)",
    description="Enter a search query and select a country to fetch news results from the past 24 hours.",
    theme="Nymbo/Nymbo_Theme",
    css=css
)

iface.launch(auth=("gini", "pick"))