from smolagents import CodeAgent, DuckDuckGoSearchTool, WikipediaSearchTool, LiteLLMModel, tool, load_tool  # HfApiModel, OpenAIServerModel
from tools.final_answer import FinalAnswerTool
import asyncio
import os
import yaml


class MagAgent:
    def __init__(self):
        """Initialize the MagAgent with search tools."""
        print("Initializing MagAgent with search tools...")
        model = LiteLLMModel(
            model_id="gemini/gemini-2.0-flash",
            api_key=os.environ.get("GEMINI_KEY"),
            max_tokens=8192
        )
        # Load prompt templates
        with open("prompts.yaml", 'r') as stream:
            prompt_templates = yaml.safe_load(stream)
        self.agent = CodeAgent(
            model=model,
            tools=[
                DuckDuckGoSearchTool(),
                WikipediaSearchTool(),
                FinalAnswerTool()
            ],
            prompt_templates=prompt_templates  # pass the templates loaded from prompts.yaml
        )
        print("MagAgent initialized.")
    async def __call__(self, question: str) -> str:
        """Process a question asynchronously using the MagAgent."""
        print(f"MagAgent received question (first 50 chars): {question[:50]}...")
        try:
            # Define a task with fallback search logic
            task = (
                f"Answer the following question accurately and concisely: {question}\n"
                "First, try searching Wikipedia with 'Mercedes Sosa'. If that fails, "
                "use DuckDuckGo to search 'Mercedes Sosa discography 2000-2009'."
            )
            # Run the synchronous agent in a worker thread so the event loop is not blocked
            response = await asyncio.to_thread(
                self.agent.run,
                task=task
            )
            # agent.run may return a non-string final answer; normalize before checking it
            response = "" if response is None else str(response)
            if not response or "No Wikipedia page found" in response:
                # Fallback response if search fails
                response = "Unable to retrieve exact data. Please refine the question or check external sources."
            print(f"MagAgent response: {response[:50]}...")
            return response
        except Exception as e:
            error_msg = f"Error processing question: {str(e)}. Check API key or network connectivity."
            print(error_msg)
            return error_msg
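

# Example usage: a minimal sketch of how the agent might be invoked from an async
# entry point. It assumes GEMINI_KEY is set in the environment and that prompts.yaml
# and tools/final_answer.py sit next to this file; the sample question is illustrative only.
if __name__ == "__main__":
    async def main():
        agent = MagAgent()
        answer = await agent(
            "How many studio albums did Mercedes Sosa release between 2000 and 2009?"
        )
        print(answer)

    asyncio.run(main())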