Commit a964a99 · Parent(s): 5ccbefb
Update main.py
main.py CHANGED

@@ -29,16 +29,6 @@ import datetime

 os.environ["TOKENIZERS_PARALLELISM"] = os.environ["TOKENIZERS_PARALLELISM"]
 os.environ['ANTHROPIC_API_KEY'] = os.environ['ANTHROPIC_API_KEY']
-index_name = os.environ['PINECONE_INDEX_NAME']
-embeddings = HuggingFaceEmbeddings()
-pinecone.init(
-    api_key=os.environ['PINECONE_API_KEY'],
-    environment=os.environ['PINECONE_ENVIRONMENT']
-)
-vectorstore = Pinecone.from_existing_index(
-    index_name=index_name, embedding=embeddings
-)
-retriever = vectorstore.as_retriever(search_type="similarity_score_threshold", search_kwargs={"score_threshold": .7, "k": 60,"filter": {'categorie': {'$eq': 'OF'}}})

 @cl.author_rename
 def rename(orig_author: str):
@@ -89,6 +79,20 @@ async def on_action(action):
     ]
     await cl.Message(author="πππ",content="Fermer le panneau d'information", actions=others).send()

+@cl.cache
+def retriever_to_cache():
+    index_name = os.environ['PINECONE_INDEX_NAME']
+    embeddings = HuggingFaceEmbeddings()
+    pinecone.init(
+        api_key=os.environ['PINECONE_API_KEY'],
+        environment=os.environ['PINECONE_ENVIRONMENT']
+    )
+    vectorstore = Pinecone.from_existing_index(
+        index_name=index_name, embedding=embeddings
+    )
+    retriever = vectorstore.as_retriever(search_type="similarity_score_threshold", search_kwargs={"score_threshold": .7, "k": 60,"filter": {'categorie': {'$eq': 'OF'}}})
+    return retriever
+
 @cl.cache
 def to_cache(file):
     #time.sleep(5) # Simulate a time-consuming process
@@ -170,7 +174,7 @@ async def start():
     qa = ConversationalRetrievalChain.from_llm(
         streaming_llm,
         chain_type="stuff",
-        retriever=
+        retriever=retriever_to_cache(),
         #combine_docs_chain=doc_chain,
         #question_generator=question_generator,
         memory=memory,
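The pattern shown in the diff is to build the Pinecone retriever inside a function decorated with Chainlit's @cl.cache, so the index connection, embeddings, and retriever are created once and reused instead of being initialized at import time. Below is a minimal, self-contained sketch of that pattern. It is not the full app: it assumes the chainlit, langchain, and pinecone-client (v2, pinecone.init-style) packages, and it reuses only the environment variable names and retriever settings visible in the diff; the LLM, memory, and Chainlit handlers are omitted.

# Minimal sketch of the cached-retriever pattern from this commit.
# Assumes chainlit, langchain, and pinecone-client (v2 API) are installed
# and the same environment variables the diff reads are set.
import os

import chainlit as cl
import pinecone
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import Pinecone


@cl.cache  # Chainlit caches the return value, so Pinecone is initialized only once
def retriever_to_cache():
    pinecone.init(
        api_key=os.environ["PINECONE_API_KEY"],
        environment=os.environ["PINECONE_ENVIRONMENT"],
    )
    embeddings = HuggingFaceEmbeddings()
    vectorstore = Pinecone.from_existing_index(
        index_name=os.environ["PINECONE_INDEX_NAME"],
        embedding=embeddings,
    )
    # Same retriever settings as the diff: similarity score threshold 0.7,
    # up to 60 results, restricted to documents whose 'categorie' metadata is 'OF'.
    return vectorstore.as_retriever(
        search_type="similarity_score_threshold",
        search_kwargs={
            "score_threshold": 0.7,
            "k": 60,
            "filter": {"categorie": {"$eq": "OF"}},
        },
    )

The chain is then wired exactly as the last hunk does, e.g. qa = ConversationalRetrievalChain.from_llm(streaming_llm, chain_type="stuff", retriever=retriever_to_cache(), memory=memory), so every call after the first reuses the cached retriever rather than reconnecting to Pinecone.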