Update app.py
app.py
CHANGED
@@ -86,25 +86,26 @@ parameters = {
 }
 
 project_id = os.getenv("IBM_PROJECT_ID")
-space_id = os.getenv("IBM_SPACE_ID")
+#space_id = os.getenv("IBM_SPACE_ID")
 
 from ibm_watsonx_ai.foundation_models import ModelInference
 
-model = ModelInference(
-    model_id = model_id,
-    params = parameters,
-    credentials = get_credentials(),
-    project_id = project_id,
-    space_id = space_id
-)
+#model = ModelInference(
+#    model_id = model_id,
+#    params = parameters,
+#    credentials = get_credentials(),
+#    project_id = project_id,
+#    space_id = space_id
+#)
 
 from ibm_watsonx_ai.client import APIClient
 
-
-
+if "client" not in st.session_state:
+    wml_credentials = get_credentials()
+    st.session_state.client = APIClient(credentials=wml_credentials, project_id=project_id)
 
 vector_index_id = VECTOR_DB
-vector_index_details = client.data_assets.get_details(vector_index_id)
+vector_index_details = st.session_state.client.data_assets.get_details(vector_index_id)
 vector_index_properties = vector_index_details["entity"]["vector_index"]
 
 top_n = 20 if vector_index_properties["settings"].get("rerank") else int(vector_index_properties["settings"]["top_k"])
@@ -145,7 +146,7 @@ import random
 import string
 
 def hydrate_chromadb():
-    data = client.data_assets.get_content(vector_index_id)
+    data = st.session_state.client.data_assets.get_content(vector_index_id)
     content = gzip.decompress(data)
     stringified_vectors = str(content, "utf-8")
     vectors = json.loads(stringified_vectors)
@@ -192,11 +193,12 @@ def hydrate_chromadb():
     )
     return collection
 
-chroma_collection
+if "chroma_collection" not in st.session_state:
+    st.session_state.chroma_collection = hydrate_chromadb()
 
 def proximity_search( question ):
     query_vectors = emb.embed_query(question)
-    query_result = chroma_collection.query(
+    query_result = st.session_state.chroma_collection.query(
         query_embeddings=query_vectors,
         n_results=top_n,
         include=["documents", "metadatas", "distances"]
@@ -205,7 +207,7 @@ def proximity_search( question ):
     documents = list(reversed(query_result["documents"][0]))
 
     if vector_index_properties["settings"].get("rerank"):
-        documents = rerank(client, documents, question, vector_index_properties["settings"]["top_k"])
+        documents = rerank(st.session_state.client, documents, question, vector_index_properties["settings"]["top_k"])
 
     return "\n".join(documents)
 
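The diff above replaces module-level construction of the watsonx.ai client and the Chroma collection with per-session caching in st.session_state, so Streamlit reruns reuse the same objects instead of recreating them on every interaction. Below is a minimal, self-contained sketch of that caching pattern; get_resource is an illustrative helper rather than code from app.py, and the commented usage lines assume the app's existing get_credentials() and hydrate_chromadb() helpers.

# Minimal sketch of the session-state caching pattern applied in this commit.
# `get_resource` is illustrative only; it is not part of app.py.
import streamlit as st

def get_resource(key, factory):
    """Return a session-cached object, building it with `factory` on first use."""
    if key not in st.session_state:
        st.session_state[key] = factory()
    return st.session_state[key]

# Roughly how app.py applies the same idea (names taken from the diff above):
#   client = get_resource("client", lambda: APIClient(credentials=get_credentials(),
#                                                     project_id=project_id))
#   chroma_collection = get_resource("chroma_collection", hydrate_chromadb)

st.session_state is scoped to a single browser session; if the client and collection were safe to share across users, Streamlit's st.cache_resource decorator would be an alternative that builds them once per process.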