Commit · d9b08a9
1 Parent(s): 71f3bea
feat: openai
agent.py CHANGED

@@ -5,6 +5,7 @@ from langgraph.graph import START, StateGraph, MessagesState
 from langgraph.prebuilt import tools_condition
 from langgraph.prebuilt import ToolNode
 from langchain_google_genai import ChatGoogleGenerativeAI
+from langchain_openai import ChatOpenAI
 from langchain_groq import ChatGroq
 from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
 from langchain_community.tools.tavily_search import TavilySearchResults

@@ -158,6 +159,12 @@ def build_graph(provider: str = "groq"):
     if provider == "google":
         # Google Gemini
         llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
+    elif provider == "openai":
+        # OpenAI
+        llm = ChatOpenAI(
+            model=os.environ.get("OPENAI_MODEL_NAME", "gpt-4o"),
+            temperature=0.0,
+        )
     elif provider == "groq":
         # Groq https://console.groq.com/docs/models
         llm = ChatGroq(model="qwen-qwq-32b", temperature=0)  # optional: qwen-qwq-32b, gemma2-9b-it
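For context, a minimal usage sketch of the new provider branch, assuming agent.py exposes build_graph() as a compiled LangGraph graph over MessagesState and that OPENAI_API_KEY (and optionally OPENAI_MODEL_NAME) are set in the environment; the import path and invocation pattern outside the diff are assumptions, not part of this commit:

# Minimal usage sketch (assumed, not part of the commit): exercises the new
# "openai" branch of build_graph(). Requires OPENAI_API_KEY in the environment;
# OPENAI_MODEL_NAME is optional and falls back to "gpt-4o" per the diff above.
import os
from langchain_core.messages import HumanMessage
from agent import build_graph  # assumed module/function layout

os.environ.setdefault("OPENAI_MODEL_NAME", "gpt-4o")

graph = build_graph(provider="openai")
result = graph.invoke({"messages": [HumanMessage(content="What is the capital of France?")]})
print(result["messages"][-1].content)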