Update agent.py
agent.py CHANGED
@@ -173,13 +173,7 @@ tools = [
 def build_graph(provider: str = "huggingface"):
     """Build the graph"""
     # Load environment variables from .env file
-    if provider == "google":
-        # Google Gemini
-        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
-    elif provider == "groq":
-        # Groq https://console.groq.com/docs/models
-        llm = ChatGroq(model="qwen-qwq-32b", temperature=0)  # optional : qwen-qwq-32b gemma2-9b-it
-    elif provider == "huggingface":
+    if provider == "huggingface":
         # TODO: Add huggingface endpoint
         """
         llm = ChatHuggingFace(
@@ -191,16 +185,21 @@ def build_graph(provider: str = "huggingface"):
             ),
         )
         """
-
-
-
-
+        llm = ChatHuggingFace(
+            llm=HuggingFaceEndpoint(
+                repo_id="TinyLlama/TinyLlama-1.1B-Chat-v1.0",
+                task="text-generation",  # for chat-style use "text-generation"
+                max_new_tokens=1024,
+                do_sample=False,
+                repetition_penalty=1.03,
+                temperature=0,
+            ),
+            verbose=True,
         )
-
-        llm=ChatHuggingFace(llm=llm_id)
 
     else:
-        raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
+        raise ValueError("Invalid provider. Choose 'huggingface'.")
+
     # Bind tools to LLM
     llm_with_tools = llm.bind_tools(tools)
 
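For reference, the updated huggingface branch wraps a HuggingFaceEndpoint in ChatHuggingFace and then binds the tools to it. The sketch below shows one way the new build_graph could be exercised, assuming the rest of agent.py compiles a LangGraph over a "messages" state and that a Hugging Face token is available in the environment; the HumanMessage import and the sample question are illustrative, not part of this commit.

from langchain_core.messages import HumanMessage

# Assumes build_graph() returns the compiled LangGraph and that
# HUGGINGFACEHUB_API_TOKEN is set so HuggingFaceEndpoint can authenticate.
graph = build_graph(provider="huggingface")
result = graph.invoke({"messages": [HumanMessage(content="What is 2 + 2?")]})
print(result["messages"][-1].content)  # the last message holds the model's reply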