Update app.py
app.py
CHANGED
@@ -114,7 +114,7 @@ def rerank_with_bm25(docs, query):
     tokenized_query = clean_and_tokenize(query, lang)
     scores = bm25.get_scores(tokenized_query)
 
-    top_indices = sorted(range(len(scores)), key=lambda i: scores[i], reverse=True)[:
+    top_indices = sorted(range(len(scores)), key=lambda i: scores[i], reverse=True)[:2]
     return [docs[i] for i in top_indices]
 
 
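The first hunk caps the BM25 rerank at the two highest-scoring documents. As a rough illustration of how that step could sit inside the surrounding function, here is a minimal sketch assuming the rank_bm25 package and the clean_and_tokenize helper referenced in the diff but defined elsewhere in app.py; the language handling and the corpus construction are assumptions, not part of the commit:

from rank_bm25 import BM25Okapi  # assumed dependency; the diff only shows bm25.get_scores()

def rerank_with_bm25(docs, query):
    lang = "en"  # hard-coded for the sketch; the real app presumably determines lang elsewhere
    # Build a BM25 index over the candidate documents returned by the vector store.
    tokenized_corpus = [clean_and_tokenize(doc, lang) for doc in docs]
    bm25 = BM25Okapi(tokenized_corpus)

    tokenized_query = clean_and_tokenize(query, lang)
    scores = bm25.get_scores(tokenized_query)

    # Keep only the two highest-scoring documents, matching the new [:2] cap.
    top_indices = sorted(range(len(scores)), key=lambda i: scores[i], reverse=True)[:2]
    return [docs[i] for i in top_indices]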
@@ -140,7 +140,7 @@ def retrieve_from_rag(user_query):
     print("Searching in RAG with history context...")
 
     query_embedding = embedding_model.encode(user_query)
-    results = collection.query(query_embeddings=[query_embedding], n_results=
+    results = collection.query(query_embeddings=[query_embedding], n_results=3) # Get top 5 first
 
     if not results or not results.get('documents'):
         return None
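The second hunk sets the Chroma query to return the three nearest chunks. Below is a minimal sketch of how the retrieval path might be wired around it, assuming a chromadb collection and a sentence-transformers embedding model initialized elsewhere in app.py; the model name, storage path, collection name, and the final rerank call are illustrative assumptions, not taken from the commit:

import chromadb
from sentence_transformers import SentenceTransformer

embedding_model = SentenceTransformer("all-MiniLM-L6-v2")        # assumed model name
chroma_client = chromadb.PersistentClient(path="./chroma_db")    # assumed storage path
collection = chroma_client.get_or_create_collection("rag_docs")  # assumed collection name

def retrieve_from_rag(user_query):
    print("Searching in RAG with history context...")

    query_embedding = embedding_model.encode(user_query)
    # Fetch the 3 nearest chunks; the diff passes the array directly, converted to a list here.
    results = collection.query(query_embeddings=[query_embedding.tolist()], n_results=3)

    if not results or not results.get('documents'):
        return None

    # Chroma returns one result list per query embedding.
    docs = results['documents'][0]
    # Optionally narrow the candidates further with the BM25 reranker above (assumed wiring).
    return rerank_with_bm25(docs, user_query)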