Spaces:
Running
Running
Update rag_engine.py
Browse files · rag_engine.py (+8 −8)
rag_engine.py
CHANGED
@@ -29,7 +29,7 @@ def setup_gcp_client():
|
|
29 |
|
30 |
# Get bucket name from secrets - required
|
31 |
try:
|
32 |
-
bucket_name_gcs = st.secrets["
|
33 |
except KeyError:
|
34 |
print("❌ Error: GCS bucket name not found in secrets")
|
35 |
return None
|
@@ -67,9 +67,9 @@ def load_model():
|
|
67 |
|
68 |
# Get embedding model path from secrets
|
69 |
try:
|
70 |
-
embedding_model = st.secrets["
|
71 |
except KeyError:
|
72 |
-
print("❌ Error: Embedding model not found in secrets")
|
73 |
return None, None
|
74 |
|
75 |
# Load tokenizer and model
|
@@ -131,10 +131,10 @@ def load_data_files():
|
|
131 |
|
132 |
# Get GCS paths from secrets - required
|
133 |
try:
|
134 |
-
metadata_file_gcs = st.secrets["
|
135 |
-
embeddings_file_gcs = st.secrets["
|
136 |
-
faiss_index_file_gcs = st.secrets["
|
137 |
-
text_chunks_file_gcs = st.secrets["
|
138 |
except KeyError as e:
|
139 |
print(f"❌ Error: Required GCS path not found in secrets: {e}")
|
140 |
return None, None, None
|
@@ -338,7 +338,7 @@ def answer_with_llm(query, context=None, word_limit=100):
|
|
338 |
|
339 |
# Get LLM model from secrets
|
340 |
try:
|
341 |
-
llm_model = st.secrets["
|
342 |
except KeyError:
|
343 |
print("❌ Error: LLM model not found in secrets")
|
344 |
return "I apologize, but I'm unable to answer at the moment."
|
|
|
29 |
|
30 |
# Get bucket name from secrets - required
|
31 |
try:
|
32 |
+
bucket_name_gcs = st.secrets["BUCKET_NAME_GCS"]
|
33 |
except KeyError:
|
34 |
print("❌ Error: GCS bucket name not found in secrets")
|
35 |
return None
|
|
|
67 |
|
68 |
# Get embedding model path from secrets
|
69 |
try:
|
70 |
+
embedding_model = st.secrets["EMBEDDING_MODEL"]
|
71 |
except KeyError:
|
72 |
+
print("❌ Error: Embedding model path not found in secrets")
|
73 |
return None, None
|
74 |
|
75 |
# Load tokenizer and model
|
|
|
131 |
|
132 |
# Get GCS paths from secrets - required
|
133 |
try:
|
134 |
+
metadata_file_gcs = st.secrets["METADATA_PATH_GCS"]
|
135 |
+
embeddings_file_gcs = st.secrets["EMBEDDINGS_PATH_GCS"]
|
136 |
+
faiss_index_file_gcs = st.secrets["INDICES_PATH_GCS"]
|
137 |
+
text_chunks_file_gcs = st.secrets["CHUNKS_PATH_GCS"]
|
138 |
except KeyError as e:
|
139 |
print(f"❌ Error: Required GCS path not found in secrets: {e}")
|
140 |
return None, None, None
|
|
|
338 |
|
339 |
# Get LLM model from secrets
|
340 |
try:
|
341 |
+
llm_model = st.secrets["LLM_MODEL"]
|
342 |
except KeyError:
|
343 |
print("❌ Error: LLM model not found in secrets")
|
344 |
return "I apologize, but I'm unable to answer at the moment."
|