redfernstech committed on
Commit
6b539de
·
verified ·
1 Parent(s): 0259037

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -2
app.py CHANGED
@@ -239,6 +239,7 @@ from langchain_core.prompts import ChatPromptTemplate
239
  from llama_index.core import StorageContext, VectorStoreIndex, SimpleDirectoryReader, Settings
240
  from llama_index.core import load_index_from_storage
241
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
 
242
 
243
  # Configure logging
244
  logging.basicConfig(level=logging.INFO)
@@ -275,7 +276,7 @@ for var in required_env_vars:
275
  GROQ_API_KEY = os.getenv("CHATGROQ_API_KEY")
276
  GROQ_MODEL = "llama3-8b-8192"
277
  try:
278
- llm = ChatGroq(
279
  model_name=GROQ_MODEL,
280
  api_key=GROQ_API_KEY,
281
  temperature=0.1,
@@ -287,6 +288,7 @@ except Exception as e:
287
 
288
  # Configure LlamaIndex settings
289
  Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
 
290
 
291
  # Salesforce credentials
292
  username = os.getenv("username")
@@ -387,7 +389,7 @@ def handle_query(query: str) -> str:
387
 
388
  # Query Groq model
389
  try:
390
- response = llm.invoke(prompt)
391
  response_text = response.content.strip()
392
  if not response_text or response_text.lower() == "unknown":
393
  response_text = "I'm sorry, I don't have the information to answer that."
 
239
  from llama_index.core import StorageContext, VectorStoreIndex, SimpleDirectoryReader, Settings
240
  from llama_index.core import load_index_from_storage
241
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
242
+ from llama_index.llms.langchain import LangChainLLM # Added for Groq integration
243
 
244
  # Configure logging
245
  logging.basicConfig(level=logging.INFO)
 
276
  GROQ_API_KEY = os.getenv("CHATGROQ_API_KEY")
277
  GROQ_MODEL = "llama3-8b-8192"
278
  try:
279
+ groq_llm = ChatGroq(
280
  model_name=GROQ_MODEL,
281
  api_key=GROQ_API_KEY,
282
  temperature=0.1,
 
288
 
289
  # Configure LlamaIndex settings
290
  Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
291
+ Settings.llm = LangChainLLM(llm=groq_llm) # Use Groq LLM for LlamaIndex
292
 
293
  # Salesforce credentials
294
  username = os.getenv("username")
 
389
 
390
  # Query Groq model
391
  try:
392
+ response = groq_llm.invoke(prompt)
393
  response_text = response.content.strip()
394
  if not response_text or response_text.lower() == "unknown":
395
  response_text = "I'm sorry, I don't have the information to answer that."