Spaces:
Sleeping
Sleeping
apple muncy
committed on
Commit
·
7b40222
1
Parent(s):
cf5d9a5
switch from ollama to HF
Browse files
Signed-off-by: apple muncy <[email protected]>
app.py
CHANGED
@@ -4,7 +4,11 @@ from langchain_core.messages import AnyMessage, HumanMessage, AIMessage, SystemM
|
|
4 |
from langgraph.prebuilt import ToolNode
|
5 |
from langgraph.graph import START, StateGraph
|
6 |
from langgraph.prebuilt import tools_condition
|
7 |
-
from langchain_ollama.chat_models import ChatOllama
|
|
|
|
|
|
|
|
|
8 |
|
9 |
|
10 |
from tools import search_tool, weather_info_tool, hub_stats_tool
|
@@ -27,8 +31,15 @@ with open("prompts.yaml", 'r') as stream:
|
|
27 |
system_prompt = prompt_templates["system_prompt"]
|
28 |
|
29 |
# Initialize the chat model
|
30 |
-
|
31 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
32 |
|
33 |
# Define available tools
|
34 |
tools = [
|
|
|
4 |
from langgraph.prebuilt import ToolNode
|
5 |
from langgraph.graph import START, StateGraph
|
6 |
from langgraph.prebuilt import tools_condition
|
7 |
+
#from langchain_ollama.chat_models import ChatOllama
|
8 |
+
|
9 |
+
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
|
10 |
+
|
11 |
+
|
12 |
|
13 |
|
14 |
from tools import search_tool, weather_info_tool, hub_stats_tool
|
|
|
31 |
system_prompt = prompt_templates["system_prompt"]
|
32 |
|
33 |
# Initialize the chat model
|
34 |
+
llm = HuggingFaceEndpoint(
|
35 |
+
repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
|
36 |
+
huggingfacehub_api_token=HF_TOKEN,
|
37 |
+
)
|
38 |
+
|
39 |
+
chat = ChatHuggingFace(llm=llm, verbose=True)
|
40 |
+
|
41 |
+
#chat = ChatOllama(model="qwen2:7b",
|
42 |
+
# verbose=True)
|
43 |
|
44 |
# Define available tools
|
45 |
tools = [
|