Update app.py
app.py CHANGED
@@ -4,6 +4,7 @@ import requests
 import pytz
 import yaml
 from tools.final_answer import FinalAnswerTool
+from smolagents import OpenAIServerModel
 
 from Gradio_UI import GradioUI
 
@@ -39,13 +40,28 @@ final_answer = FinalAnswerTool()
 # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
 # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
 
-model = HfApiModel(
-max_tokens=2096,
-temperature=0.5,
-model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
-custom_role_conversions=None,
-)
+'''
+HfApiModel has limited api calls
+'''
+# model = HfApiModel(
+#     max_tokens=2096,
+#     temperature=0.5,
+#     model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
+#     custom_role_conversions=None,
+# )
+
+'''
+Switched to a self-hosted ollama server instead; the model can also be swapped there
+'''
 
+model = OpenAIServerModel(
+    model_id="hf.co/Qwen/Qwen2.5-Coder-32B-Instruct-GGUF:latest",
+    max_tokens=2048,
+    temperature=0.5,
+    custom_role_conversions=None,
+    api_base="http://120.105.97.88:11434/v1",
+    api_key=None,
+)
 
 # Import tool from Hub
 image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
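Below is a minimal, hypothetical sketch of how the new OpenAIServerModel could be exercised end to end. It assumes the rest of app.py still follows the agents-course template (a CodeAgent handed to GradioUI) and that the Ollama server at the api_base above already has the GGUF model pulled (for example with: ollama pull hf.co/Qwen/Qwen2.5-Coder-32B-Instruct-GGUF). The /v1/models connectivity check and the agent wiring are illustrative assumptions, not part of this commit.

# Sketch only (not part of the commit): assumes smolagents and requests are installed
# and the Ollama server above is reachable from this machine.
import requests
from smolagents import CodeAgent, OpenAIServerModel
from tools.final_answer import FinalAnswerTool

OLLAMA_BASE = "http://120.105.97.88:11434/v1"  # same api_base as in the diff

# Sanity check: Ollama's OpenAI-compatible API lists pulled models at /v1/models.
available = requests.get(f"{OLLAMA_BASE}/models", timeout=10).json()
print([m["id"] for m in available.get("data", [])])

model = OpenAIServerModel(
    model_id="hf.co/Qwen/Qwen2.5-Coder-32B-Instruct-GGUF:latest",
    max_tokens=2048,
    temperature=0.5,
    api_base=OLLAMA_BASE,
    api_key="ollama",  # Ollama ignores the key, but OpenAI-style clients expect a non-empty value
)

# Minimal agent wiring; the template version also passes prompt templates and extra tools.
agent = CodeAgent(tools=[FinalAnswerTool()], model=model, max_steps=6)
print(agent.run("Write a Python function that reverses a string."))

Pointing api_base at any other OpenAI-compatible server (vLLM, llama.cpp server, and so on) would work the same way, which is the flexibility the new docstring alludes to.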