Ferocious0xide committed on
Commit 3121027 · verified · 1 parent: 5df5164

Update app.py


updating the agent for the new process.

Files changed (1)
  1. app.py +163 -43
app.py CHANGED
@@ -1,57 +1,177 @@
-from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
+"""
+HuggingFace and Gradio Agent Template
+Requirements:
+pip install -r requirements.txt
+"""
+
+import os
+from smolagents import CodeAgent, HfApiModel, load_tool, tool
 import datetime
-import requests
 import pytz
 import yaml
+import gradio as gr
 from tools.final_answer import FinalAnswerTool
+from Gradio_UI import GradioUI
+from typing import Dict, Any
+from huggingface_hub import InferenceClient
+
+# Example requirements.txt content (save this separately)
+REQUIREMENTS = """
+gradio>=4.0.0
+huggingface-hub>=0.19.0
+smolagents
+pytz
+pyyaml
+"""
 
+# Basic working tool example
 @tool
-def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return type
-    #Keep this format for the description / args / args description but feel free to modify the tool
-    """A tool that does nothing yet
-    Args:
-        arg1: the first argument
-        arg2: the second argument
-    """
-    return "What magic will you build ?"
+def calculator(operation: str) -> str:
+    """A simple calculator tool that safely evaluates basic math expressions."""
+    try:
+        allowed_chars = set("0123456789+-*/ .()")
+        if not all(c in allowed_chars for c in operation):
+            return "Error: Only basic math operations allowed"
+        result = eval(operation, {"__builtins__": {}})
+        return f"Result: {result}"
+    except Exception as e:
+        return f"Error calculating {operation}: {str(e)}"
 
 @tool
-def get_current_time_in_timezone(timezone: str) -> str:
-    """A tool that fetches the current local time in a specified timezone.
+def get_time(timezone: str = "UTC") -> str:
+    """Get current time in specified timezone."""
+    try:
+        tz = pytz.timezone(timezone)
+        current_time = datetime.datetime.now(tz)
+        return f"Current time in {timezone}: {current_time.strftime('%Y-%m-%d %H:%M:%S %Z')}"
+    except Exception as e:
+        return f"Error getting time for {timezone}: {str(e)}"
+
+# Example HuggingFace tool
+@tool
+def text_generation(prompt: str) -> str:
+    """Generate text using HuggingFace model.
+
     Args:
-        timezone: A string representing a valid timezone (e.g., 'America/New_York').
+        prompt: Text prompt for generation
+
+    Returns:
+        str: Generated text or error message
     """
     try:
-        # Create timezone object
-        tz = pytz.timezone(timezone)
-        # Get current time in that timezone
-        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
-        return f"The current local time in {timezone} is: {local_time}"
+        # Using HF Inference API
+        client = InferenceClient()
+        # You can change the model to any available on HF
+        response = client.text_generation(
+            prompt,
+            model="google/gemma-7b-it",  # Example model
+            max_new_tokens=100,
+            temperature=0.7
+        )
+        return response
     except Exception as e:
-        return f"Error fetching time for timezone '{timezone}': {str(e)}"
+        return f"Error generating text: {str(e)}"
+
+# Create default prompts.yaml
+DEFAULT_PROMPTS = """
+system_prompt: |-
+  You are an expert assistant who can solve tasks using Python code and available tools.
+  You proceed step by step using 'Thought:', 'Code:', and 'Observation:' sequences.
+
+  Here's an example:
+  Task: "Calculate 23 * 45 and generate a short story about the number"
+
+  Thought: First, I'll calculate the multiplication.
+  Code:
+  ```py
+  result = calculator("23 * 45")
+  print(result)
+  ```<end_code>
+  Observation: Result: 1035
 
-final_answer = FinalAnswerTool()
-model = HfApiModel(
-    max_tokens=2096,
-    temperature=0.5,
-    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
-    custom_role_conversions=None,
-)
-
-with open("prompts.yaml", 'r') as stream:
-    prompt_templates = yaml.safe_load(stream)
-
-# We're creating our CodeAgent
-agent = CodeAgent(
-    model=model,
-    tools=[final_answer], ## add your tools here (don't remove final answer)
-    max_steps=6,
-    verbosity_level=1,
-    grammar=None,
-    planning_interval=None,
-    name=None,
-    description=None,
-    prompt_templates=prompt_templates
-)
-
-GradioUI(agent).launch()
+  Thought: Now I'll generate a short story about this number.
+  Code:
+  ```py
+  story = text_generation(f"Write a very short story about the number {1035}")
+  final_answer(f"The calculation result is 1035.\\nHere's a story about it:\\n{story}")
+  ```<end_code>
+
+  You have access to these tools:
+  - calculator: Evaluates basic math expressions
+  - get_time: Gets current time in any timezone
+  - text_generation: Generates text using HuggingFace model
+  - final_answer: Returns the final answer to the user
+
+  Rules:
+  1. Always use 'Thought:', 'Code:', and end with '<end_code>'
+  2. Only use defined variables
+  3. Pass arguments directly to tools
+  4. Use print() to save intermediate results
+  5. End with final_answer tool
+
+  [... rest of the prompts.yaml content remains the same ...]
+"""
+
+def ensure_files():
+    """Create necessary files if they don't exist."""
+    if not os.path.exists("prompts.yaml"):
+        with open("prompts.yaml", "w") as f:
+            f.write(DEFAULT_PROMPTS)
+
+    if not os.path.exists("requirements.txt"):
+        with open("requirements.txt", "w") as f:
+            f.write(REQUIREMENTS)
+
+def initialize_agent() -> CodeAgent:
+    """Initialize and return a working CodeAgent."""
+
+    # Ensure necessary files exist
+    ensure_files()
+
+    # Initialize tools
+    final_answer = FinalAnswerTool()
+
+    # Initialize model
+    model = HfApiModel(
+        max_tokens=2096,
+        temperature=0.5,
+        model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
+        custom_role_conversions=None,
+    )
+
+    # Load prompts
+    with open("prompts.yaml", "r") as f:
+        prompt_templates = yaml.safe_load(f)
+
+    # Create agent
+    agent = CodeAgent(
+        model=model,
+        tools=[
+            final_answer,
+            calculator,
+            get_time,
+            text_generation,
+            # Add new tools here
+        ],
+        max_steps=6,
+        verbosity_level=1,
+        grammar=None,
+        planning_interval=None,
+        name=None,
+        description=None,
+        prompt_templates=prompt_templates
+    )
+
+    return agent
+
+def main():
+    """Run the agent with Gradio UI."""
+    try:
+        agent = initialize_agent()
+        GradioUI(agent).launch()
+    except Exception as e:
+        print(f"Error starting agent: {str(e)}")
+        raise
+
+if __name__ == "__main__":
+    main()
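
For anyone pulling this commit, a minimal way to sanity-check the new structure without launching the UI is to import the helpers the change introduces. The sketch below is illustrative and not part of the commit: it assumes requirements.txt has been installed, that this repository's tools/final_answer.py and Gradio_UI.py sit next to app.py, that the installed smolagents version accepts the tool docstrings as written, and that a Hugging Face token with Inference API access is available in the environment (e.g. HF_TOKEN).

# smoke_test.py -- hypothetical local check, not included in this commit
from app import ensure_files, initialize_agent

ensure_files()              # writes prompts.yaml / requirements.txt if they are missing
agent = initialize_agent()  # CodeAgent wired with calculator, get_time, text_generation

# One end-to-end run without Gradio; this call does hit the Inference API.
print(agent.run("What is 23 * 45? Use the calculator tool and return the result."))

If that prints a step-by-step trace ending in a final answer, the new initialize_agent()/main() split behaves like the old module-level setup, and GradioUI(agent).launch() in main() should work the same way on the Space.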