from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
@tool
def sum_tool(math_expression: str) -> float:
    """A tool that evaluates a simple arithmetic expression and returns the numeric result.
    Args:
        math_expression: A string containing a Python arithmetic expression (e.g., '2 + 3 * 4').
    """
    # Note: eval is convenient for a demo agent, but it executes arbitrary code;
    # a real deployment should use a safe expression parser instead.
    return eval(math_expression)
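# Quick sanity check for sum_tool (hypothetical example, kept commented out so it never
# runs when the Space starts):
#   sum_tool("2 + 3 * 4")  # -> 14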
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Create timezone object
        tz = pytz.timezone(timezone)
        # Get the current local time in that timezone
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"
final_answer = FinalAnswerTool()
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-3B-Instruct',  # it is possible that this model may be overloaded
    custom_role_conversions=None,
)
# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
search_tool = DuckDuckGoSearchTool()
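# Note: image_generation_tool is loaded here but not passed to the CodeAgent below;
# add it to the agent's tools list if image generation should be available to the agent.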
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)
agent = CodeAgent(
    model=model,
    tools=[final_answer, search_tool, sum_tool],  # add your tools here (don't remove final_answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates
)
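# Optional smoke test before launching the UI (assumption: a single run also populates
# last_input_token_count used below); commented out to avoid spending tokens on every restart.
#   agent.run("What time is it right now in Europe/Paris?")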
# Track input-token usage; before the first model call, last_input_token_count is None,
# so start the counter at zero and only add to it when a count is available.
total_input_tokens = 0
if agent.model.last_input_token_count is not None:
    total_input_tokens += agent.model.last_input_token_count
GradioUI(agent).launch()