SergeyO7 committed
Commit ff43791 · verified · Parent: 87110c1

Update agent.py

Files changed (1):
  1. agent.py (+18 -2)
agent.py CHANGED
@@ -53,7 +53,7 @@ class VisitWebpageTool(Tool):
 
     def forward(self, url: str) -> str:
         try:
-            response = requests.get(url, timeout=30)
+            response = requests.get(url, timeout=50)
             response.raise_for_status()
             markdown_content = markdownify(response.text).strip()
             markdown_content = re.sub(r"\n{3,}", "\n\n", markdown_content)
@@ -89,7 +89,7 @@ class DownloadTaskAttachmentTool(Tool):
         while not self.rate_limiter.consume(1):
             print(f"Rate limit reached for downloading file for task {task_id}. Waiting...")
             time.sleep(4)  # Assuming 15 RPM
-        response = requests.get(file_url, stream=True, timeout=25)
+        response = requests.get(file_url, stream=True, timeout=50)
         response.raise_for_status()
 
         # Determine file extension based on Content-Type
@@ -235,6 +235,21 @@ class RetryDuckDuckGoSearchTool(DuckDuckGoSearchTool):
     def forward(self, query: str) -> str:
         return super().forward(query)
 
+@tool
+def search_arxiv(query: str) -> str:
+    """Search Arxiv for a query and return maximum 3 result.
+
+    Args:
+        query: The search query."""
+    search_docs = ArxivLoader(query=query, load_max_docs=3).load()
+    formatted_search_docs = "\n\n---\n\n".join(
+        [
+            f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content[:1000]}\n</Document>'
+            for doc in search_docs
+        ])
+    return {"arvix_results": formatted_search_docs}
+
+
 
 class MagAgent:
     def __init__(self, rate_limiter: Optional[Limiter] = None):
@@ -272,6 +287,7 @@ class MagAgent:
             ExcelReaderTool(),
             VisitWebpageTool(),
             PythonCodeReaderTool(),
+            search_arxiv(),
             # PNG2FENTool,
             # ChessEngineTool(),
             # GoogleSearchTool,
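
For reference, the arXiv helper added by this commit can be exercised on its own. The sketch below is a minimal standalone version, assuming the @tool decorator comes from smolagents and ArxivLoader from langchain_community.document_loaders (both imports live outside the hunks shown above); the function name and the simplified output format are illustrative, not the committed implementation.

# Minimal sketch, not the committed code: assumes smolagents' @tool and
# langchain_community's ArxivLoader are what agent.py imports.
from langchain_community.document_loaders import ArxivLoader
from smolagents import tool

@tool
def arxiv_search_sketch(query: str) -> str:
    """Search arXiv and return up to 3 results as one formatted string.

    Args:
        query: The search query.
    """
    docs = ArxivLoader(query=query, load_max_docs=3).load()
    # Keep only the first 1000 characters of each paper, as the commit does.
    return "\n\n---\n\n".join(
        f'<Document source="{doc.metadata.get("source", "")}">\n{doc.page_content[:1000]}\n</Document>'
        for doc in docs
    )

if __name__ == "__main__":
    # With smolagents, @tool wraps the function in a Tool instance,
    # so calling it here runs the tool's forward() directly.
    print(arxiv_search_sketch(query="retrieval augmented generation"))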