Spaces:
Runtime error
Runtime error
Commit
·
5b024f0
1
Parent(s):
12611d9
change chroma db path
Browse files
- email_ai.py +20 -4
email_ai.py
CHANGED
@@ -9,7 +9,7 @@ from langchain_core.messages import HumanMessage, SystemMessage
|
|
9 |
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
10 |
from langchain_huggingface import HuggingFaceEmbeddings
|
11 |
|
12 |
-
from langchain.vectorstores import Chroma
|
13 |
from langchain.schema.runnable import RunnablePassthrough
|
14 |
|
15 |
# from langchain_openai import ChatOpenAI
|
@@ -18,6 +18,7 @@ from langchain.prompts import ChatPromptTemplate
|
|
18 |
import google.generativeai as genai
|
19 |
import os
|
20 |
from dotenv import load_dotenv
|
|
|
21 |
|
22 |
# Load environment variables
|
23 |
load_dotenv()
|
@@ -30,8 +31,20 @@ def initialize_conversation():
|
|
30 |
|
31 |
llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash-latest", api_key=gemini_api_key)
|
32 |
embeddings_model = HuggingFaceEmbeddings()
|
33 |
-
|
34 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
35 |
|
36 |
return llm, chroma_retriever
|
37 |
# initialize_conversation()
|
@@ -72,4 +85,7 @@ def get_chat_model_completions(llm, chroma_retriever, query):
|
|
72 |
# print(response.content)
|
73 |
return response
|
74 |
# response=get_chat_model_completions(initialize_conversation(), query)
|
75 |
-
# print(response.response)
|
|
|
|
|
|
|
|
9 |
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
10 |
from langchain_huggingface import HuggingFaceEmbeddings
|
11 |
|
12 |
+
# from langchain.vectorstores import Chroma
|
13 |
from langchain.schema.runnable import RunnablePassthrough
|
14 |
|
15 |
# from langchain_openai import ChatOpenAI
|
|
|
18 |
import google.generativeai as genai
|
19 |
import os
|
20 |
from dotenv import load_dotenv
|
21 |
+
import chromadb
|
22 |
|
23 |
# Load environment variables
|
24 |
load_dotenv()
|
|
|
31 |
|
32 |
llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash-latest", api_key=gemini_api_key)
|
33 |
embeddings_model = HuggingFaceEmbeddings()
|
34 |
+
|
35 |
+
# client = chromadb.PersistentClient()
|
36 |
+
# Connect to your ChromaDB instance
|
37 |
+
client = chromadb.PersistentClient(path="chroma_langchain_db1")
|
38 |
+
|
39 |
+
# collection_names = client.list_collections()
|
40 |
+
|
41 |
+
# for name in collection_names:
|
42 |
+
# print(name)
|
43 |
+
|
44 |
+
insurance_collection = client.get_collection(name='langchain', embedding_function=embeddings_model)
|
45 |
+
|
46 |
+
# db = Chroma(persist_directory="/Users/daddy/aimlprojects/email_ai/chroma_langchain_db1", embedding_function=embeddings_model)
|
47 |
+
chroma_retriever = insurance_collection.as_retriever(search_kwargs={"k":50})
|
48 |
|
49 |
return llm, chroma_retriever
|
50 |
# initialize_conversation()
|
|
|
85 |
# print(response.content)
|
86 |
return response
|
87 |
# response=get_chat_model_completions(initialize_conversation(), query)
|
88 |
+
# print(response.response)
|
89 |
+
|
90 |
+
if __name__ == "__main__":
|
91 |
+
initialize_conversation()
|