Update pipeline.py
pipeline.py +4 -2
pipeline.py CHANGED
@@ -27,6 +27,7 @@ from langchain.chains import RetrievalQA, LLMChain
 from langchain.prompts import PromptTemplate
 from langchain.docstore.document import Document
 from langchain_core.caches import BaseCache
+from langchain_core.callbacks import Callbacks
 # from langchain_core.callbacks import CallbackManager
 # from langchain.callbacks.base import BaseCallbacks # Updated import
 # from langchain.callbacks.manager import CallbackManager
@@ -216,11 +217,12 @@ try:
     if not fallback_groq_api_key:
         logger.warning("No Groq API key found for fallback LLM")
     groq_fallback_llm = ChatGroq(
-        model="default",  # Replace with your actual model name if different
+        model=GROQ_MODELS["default"],  # Replace with your actual model name if different
         temperature=0.7,
         groq_api_key=fallback_groq_api_key,
         max_tokens=2048,
-        cache=NoCache()  # Set cache explicitly
+        cache=NoCache(),  # Set cache explicitly
+        callbacks=[]  # Explicitly set callbacks to an empty list
     )
 except Exception as e:
     logger.error(f"Failed to initialize fallback Groq LLM: {e}")
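
For context, here is a minimal, self-contained sketch of how the patched fallback block might fit together in pipeline.py. The GROQ_MODELS mapping, the NoCache class, the build_fallback_llm wrapper, and the model ID below are assumptions for illustration; only the ChatGroq call itself comes from this diff.

# Sketch of the patched fallback initialization (assumptions noted in comments).
import logging
from typing import Any, Optional

from langchain_core.caches import BaseCache
from langchain_groq import ChatGroq

logger = logging.getLogger(__name__)

# Assumed mapping of logical names to Groq model IDs; not shown in the diff.
GROQ_MODELS = {"default": "llama-3.1-8b-instant"}


class NoCache(BaseCache):
    """Assumed no-op cache: never stores or returns cached generations."""

    def lookup(self, prompt: str, llm_string: str) -> Optional[Any]:
        return None

    def update(self, prompt: str, llm_string: str, return_val: Any) -> None:
        pass

    def clear(self, **kwargs: Any) -> None:
        pass


def build_fallback_llm(fallback_groq_api_key: str) -> Optional[ChatGroq]:
    """Illustrative wrapper mirroring the patched block."""
    try:
        if not fallback_groq_api_key:
            logger.warning("No Groq API key found for fallback LLM")
        return ChatGroq(
            model=GROQ_MODELS["default"],
            temperature=0.7,
            groq_api_key=fallback_groq_api_key,
            max_tokens=2048,
            cache=NoCache(),  # Set cache explicitly
            callbacks=[],     # Explicitly set callbacks to an empty list
        )
    except Exception as e:
        logger.error(f"Failed to initialize fallback Groq LLM: {e}")
        return None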