Update agent.py
agent.py
CHANGED
@@ -9,7 +9,8 @@ from serpapi import GoogleSearch
 
 # 1️⃣ Switch Graph → StateGraph
 from langgraph.graph import StateGraph
-from langchain_core.language_models.llms import LLM
+#from langchain_core.language_models.llms import LLM
+from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
 from langchain_core.messages import SystemMessage, HumanMessage
 from langchain_core.tools import tool
 from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
@@ -136,7 +137,15 @@ def build_graph(provider: str = "huggingface") -> StateGraph:
     hf_token = os.getenv("HF_TOKEN")
     if not hf_token:
         raise ValueError("HF_TOKEN missing in env")
-    llm =
+    llm = ChatHuggingFace(
+        llm=HuggingFaceEndpoint(
+            url=f"https://api-inference.huggingface.co/models/{os.getenv('HF_MODEL_ID')}",
+            temperature=0.0,
+            huggingfacehub_api_token=hf_token,
+
+        )
+    )
+
 
     # 6.1) Node: init → seed system prompt
     def init_node(_: AgentState) -> AgentState:
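For context, the change swaps the unused `LLM` base-class import for `ChatHuggingFace`/`HuggingFaceEndpoint` and fills in the previously empty `llm =` assignment inside `build_graph`. Below is a minimal, self-contained sketch of that wiring, assuming `HF_TOKEN` and `HF_MODEL_ID` are set in the environment. It passes `repo_id` instead of the raw Inference API `url` used in the commit, since current `langchain_huggingface` releases expect `repo_id` or `endpoint_url`, and the invoke call at the end is an illustrative usage example, not part of the commit.

import os

from langchain_core.messages import HumanMessage, SystemMessage
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint

hf_token = os.getenv("HF_TOKEN")
model_id = os.getenv("HF_MODEL_ID")
if not hf_token or not model_id:
    raise ValueError("HF_TOKEN and HF_MODEL_ID must be set in env")

# Chat wrapper around a Hugging Face Inference API endpoint for the chosen model.
llm = ChatHuggingFace(
    llm=HuggingFaceEndpoint(
        repo_id=model_id,                   # model id from env; the commit builds the URL instead
        temperature=0.0,                    # deterministic decoding, as in the commit
        huggingfacehub_api_token=hf_token,
    )
)

# Illustrative call only: one system turn plus one user turn.
reply = llm.invoke(
    [
        SystemMessage(content="You are a concise assistant."),
        HumanMessage(content="Say hello in one short sentence."),
    ]
)
print(reply.content)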