Commit 7223ff6 · Parent(s): 5b024f0
resolved embedding function issue
Files changed: email_ai.py (+7 -7)

email_ai.py CHANGED
@@ -9,7 +9,7 @@ from langchain_core.messages import HumanMessage, SystemMessage
 from langchain_text_splitters import RecursiveCharacterTextSplitter
 from langchain_huggingface import HuggingFaceEmbeddings
 
-
+from langchain.vectorstores import Chroma
 from langchain.schema.runnable import RunnablePassthrough
 
 # from langchain_openai import ChatOpenAI
@@ -18,7 +18,7 @@ from langchain.prompts import ChatPromptTemplate
 import google.generativeai as genai
 import os
 from dotenv import load_dotenv
-import chromadb
+# import chromadb
 
 # Load environment variables
 load_dotenv()
@@ -34,17 +34,17 @@ def initialize_conversation():
 
     # client = chromadb.PersistentClient()
     # Connect to your ChromaDB instance
-    client = chromadb.PersistentClient(path="chroma_langchain_db1")
-
+    # client = chromadb.PersistentClient(path="chroma_langchain_db1")
+    #
     # collection_names = client.list_collections()
 
     # for name in collection_names:
     # print(name)
 
-    insurance_collection = client.get_collection(name='langchain', embedding_function=embeddings_model)
+    # insurance_collection = client.get_collection(name='langchain', embedding_function=embeddings_model)
 
-
-    chroma_retriever =
+    db = Chroma(persist_directory="chroma_langchain_db1", embedding_function=embeddings_model)
+    chroma_retriever = db.as_retriever(search_kwargs={"k":50})
 
     return llm, chroma_retriever
 # initialize_conversation()
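
For reference, a minimal standalone sketch of the retrieval path this commit switches to, under stated assumptions: the persisted store lives in "chroma_langchain_db1" (as in the diff), while the embedding model name below is a guess, since the commit does not show how embeddings_model or llm are constructed. LangChain's Chroma wrapper accepts a LangChain embeddings object directly, whereas chromadb's native get_collection() expects a chromadb-style embedding function, which is likely the embedding function issue the commit message refers to.

from langchain_huggingface import HuggingFaceEmbeddings
from langchain.vectorstores import Chroma

# Assumption: the persisted collection was built with this sentence-transformers
# model; swap in whatever model the Space actually uses.
embeddings_model = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Load the persisted Chroma store and expose it as a retriever,
# mirroring the new lines in the diff above.
db = Chroma(persist_directory="chroma_langchain_db1", embedding_function=embeddings_model)
chroma_retriever = db.as_retriever(search_kwargs={"k": 50})

# Example query against the retriever (hypothetical question text).
docs = chroma_retriever.get_relevant_documents("What does my policy cover for water damage?")
print(len(docs), "documents retrieved")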