Update app.py
app.py CHANGED
@@ -36,17 +36,16 @@ def get_current_time_in_timezone(timezone: str) -> str:
         return f"Error fetching time for timezone '{timezone}': {str(e)}"
 
 @tool
-def get_boxScores() -> list:
+def get_box_score_links() -> list:
     """A tool that fetches the URLs to the boxscores for each of last nights nba games
     Args:
-
     """
     # Base URL of the main page containing box score links
     BASE_URL = "https://www.basketball-reference.com"
     MAIN_URL = f"{BASE_URL}/boxscores/"
+    response = requests.get(MAIN_URL)
     try:
-
-        soup = bs4.BeautifulSoup(response.text, 'html.parser')
+        soup = BeautifulSoup(response.text, 'html.parser')
         box_score_links = []
         # Find all box score links
         for link in soup.find_all('a', href=True):
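Note: the reworked tool now fetches the page before the `try:` block and uses the bare `BeautifulSoup` name instead of `bs4.BeautifulSoup`, so it leans on `requests`, `bs4`, `re`, and `pandas` being imported at the top of app.py. The import section is not part of this diff; a minimal sketch of what it would need to include (hedged, since some or all of these may already be present):

# Hedged sketch of the imports the new tools rely on; the top of app.py is not
# shown in this diff, so some (or all) of these may already be there.
import re                      # team-name pattern matching in get_box_score_data
import requests                # HTTP calls to basketball-reference.com
import pandas as pd            # pd.read_html() parses the box score tables
from bs4 import BeautifulSoup  # this hunk drops the bs4. prefix on BeautifulSoup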
@@ -55,10 +54,35 @@ def get_boxScores() -> list:
             box_score_links.append(BASE_URL + href)
         return list(set(box_score_links))
     except Exception as e:
-        return f"Error fetching
+        return f"Error fetching boxScore links: {str(e)}"
+
+@tool
+def get_box_score_data(links: list) -> dict:
+    """A tool that fetches the URLs to the boxscores for each of last nights nba games
+    Args:
+        links: A list of strings representing the URLs to the box score of each of last nights games
+    """
+    # Base URL of the main page containing box score links
+    BASE_URL = "https://www.basketball-reference.com"
+    MAIN_URL = f"{BASE_URL}/boxscores/"
+    response = requests.get(MAIN_URL)
+    try:
+        box_scores = {}
+        for _ in links:
+            response = requests.get(_)
+            soup = BeautifulSoup(response.text, 'html.parser')
+            pattern = r"<h1>(.*?) at (.*?) Box Score"
+            match = re.search(pattern, str(soup.find('div', id="content").find('h1')))
+
+            box_scores[match.group(1)] = pd.read_html(_)[0].to_dict(orient='records')
+            box_scores[match.group(2)] = pd.read_html(_)[8].to_dict(orient='records')
+        return box_scores
+    except Exception as e:
+        return f"Error fetching boxScore data: {str(e)}"
 
 
-
+box_score_links = get_box_score_links()
+box_score_data = get_box_score_data()
 final_answer = FinalAnswerTool()
 search_tool = DuckDuckGoSearchTool()
 visit_webpage_tool = VisitWebpageTool()
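A few things in this hunk are worth flagging: `get_box_score_data(links: list)` requires a `links` argument, but the module-level call added at the end of the hunk (`box_score_data = get_box_score_data()`) passes none, so it will fail as soon as app.py runs; the `response = requests.get(MAIN_URL)` before the `try:` is immediately overwritten inside the loop; the docstring is copied from `get_box_score_links`, so it still describes fetching URLs rather than box score data; and `pd.read_html(_)[0]` / `pd.read_html(_)[8]` index the parsed tables by fixed position, which is fragile if the page layout changes. A minimal sketch of how the two tools are presumably meant to chain (hedged: whether these module-level results are used elsewhere in app.py is not visible in this diff):

# Hedged sketch: chain the two new tools directly, passing the links through.
# (Assumes this replaces the eager calls added at the end of the hunk above.)
box_score_links = get_box_score_links()               # list of box score URLs
box_score_data = get_box_score_data(box_score_links)  # dict keyed by team name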
@@ -84,7 +108,7 @@ with open("prompts.yaml", 'r') as stream:
 
 agent = CodeAgent(
     model=model,
-    tools=[final_answer, image_generation_tool, search_tool, visit_webpage_tool, user_input_tool,
+    tools=[final_answer, image_generation_tool, search_tool, visit_webpage_tool, user_input_tool, get_box_score_links, get_box_score_data], ## add your tools here (don't remove final answer)
     max_steps=6,
     verbosity_level=1,
     grammar=None,
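With `get_box_score_links` and `get_box_score_data` registered in `tools=[...]`, the `CodeAgent` can decide when to call them itself, which also makes the eager module-level calls above unnecessary. A minimal usage sketch, assuming the rest of app.py builds `model` and the other tools as in the course template (the prompt text is illustrative, not taken from the Space):

# Hedged usage sketch: let the agent drive the scraping tools on demand.
result = agent.run(
    "Get the links to last night's NBA box scores, pull the box score data, "
    "and report the top scorer for each team."
)
print(result)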
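Because the new tools pull in libraries beyond the course template, the Space's requirements.txt (not shown in this commit) presumably needs matching entries; `pd.read_html` also needs an HTML parser backend such as `lxml`. A hedged sketch of what those entries might look like:

# Hedged sketch of extra requirements.txt entries the new tools imply.
requests
beautifulsoup4
pandas
lxml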