Update app.py
app.py CHANGED
@@ -1,9 +1,13 @@
-from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
+from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool,LiteLLMModel
 import datetime
 import requests
 import pytz
 import yaml
 from tools.final_answer import FinalAnswerTool
+import os
+
+# Retrieve your "secret" google api key named GEMINI_API_KEY for LiteLLMModel
+os.environ["GEMINI_API_KEY"] = os.getenv('GEMINI_API_KEY')
 
 from Gradio_UI import GradioUI
 
@@ -36,13 +40,16 @@ def get_current_time_in_timezone(timezone: str) -> str:
 
 
 final_answer = FinalAnswerTool()
-model = HfApiModel(
-max_tokens=2096,
-temperature=0.5,
-# model_id='https://wxknx1kg971u7k1n.us-east-1.aws.endpoints.huggingface.cloud',# it is possible that this model may be overloaded
-model_id = 'mistralai/Mistral-7B-Instruct',
-custom_role_conversions=None,
-)
+
+model = LiteLLMModel(model_id="gemini/gemini-2.0-flash")
+
+# model = HfApiModel(
+# max_tokens=2096,
+# temperature=0.5,
+# # model_id='https://wxknx1kg971u7k1n.us-east-1.aws.endpoints.huggingface.cloud',# it is possible that this model may be overloaded
+# model_id = 'mistralai/Mistral-7B-Instruct',
+# custom_role_conversions=None,
+# )
 
 
 # Import tool from Hub
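
A note on the new key line: os.getenv('GEMINI_API_KEY') returns None when the GEMINI_API_KEY secret is not configured for the Space, and assigning None to os.environ raises a TypeError at startup. A minimal, more defensive variant (an assumption, not part of this commit) would fail early with a clearer message:

import os

# Hypothetical guard, not in the commit: surface a missing Space secret
# explicitly instead of letting os.environ[...] = None raise a TypeError.
gemini_key = os.getenv("GEMINI_API_KEY")
if not gemini_key:
    raise RuntimeError("GEMINI_API_KEY secret is not set for this Space")
os.environ["GEMINI_API_KEY"] = gemini_key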
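
For context, the commit only touches the imports and the model definition; the agent construction further down in app.py is not part of the diff. A minimal sketch of how the new LiteLLMModel presumably plugs into the rest of the file, assuming the standard smolagents first-agent template (the tool list and step limit here are illustrative):

from smolagents import CodeAgent, LiteLLMModel
from tools.final_answer import FinalAnswerTool

final_answer = FinalAnswerTool()

# LiteLLM routes "gemini/..." model ids to Google's Gemini API and reads the
# GEMINI_API_KEY environment variable set above for authentication.
model = LiteLLMModel(model_id="gemini/gemini-2.0-flash")

agent = CodeAgent(
    model=model,
    tools=[final_answer],  # illustrative: the real app may register more tools
    max_steps=6,
)

Since LiteLLMModel is a thin wrapper around the litellm package, the Space's requirements presumably include litellm (or the smolagents[litellm] extra) for the new model to load.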