Technocoloredgeek committed
Commit 2fc54b2 · verified · 1 parent: 6b2e19d

Update app.py
Files changed (1):
  app.py: +6 -22
app.py CHANGED

@@ -1,26 +1,15 @@
 import streamlit as st
 import os
-import langchain
-import langchain_community
-import langchain_openai
-import qdrant_client
 from langchain_community.document_loaders import PyMuPDFLoader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
-from langchain_openai import ChatOpenAI
-from langchain_community.vectorstores import Qdrant
+from langchain_openai import OpenAIEmbeddings, ChatOpenAI
+from langchain_qdrant import QdrantVectorStore
 from langchain.prompts import ChatPromptTemplate
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.runnables import RunnablePassthrough
 from qdrant_client import QdrantClient
 from qdrant_client.http.models import Distance, VectorParams
 from operator import itemgetter
-from langchain_community.embeddings import HuggingFaceEmbeddings
-
-# Print version information
-print(f"langchain version: {langchain.__version__}")
-print(f"langchain_community version: {langchain_community.__version__}")
-print(f"langchain_openai version: {langchain_openai.__version__}")
-print(f"qdrant_client version: {qdrant_client.__version__}")
 
 # Set up API keys
 os.environ["OPENAI_API_KEY"] = st.secrets["OPENAI_API_KEY"]
@@ -51,14 +40,9 @@ def load_and_process_pdfs(pdf_links):
 def setup_vectorstore():
     LOCATION = ":memory:"
     COLLECTION_NAME = "AI_Ethics_Framework"
-
-    qdrant_client = QdrantClient(location=LOCATION)
+    VECTOR_SIZE = 1536
 
-    # Create the embeddings
-    embeddings = HuggingFaceEmbeddings(model_name="Technocoloredgeek/midterm-finetuned-embedding")
-
-    # Get the vector size from the embeddings
-    VECTOR_SIZE = len(embeddings.embed_query("test"))
+    qdrant_client = QdrantClient(location=LOCATION)
 
     # Create the collection
     qdrant_client.create_collection(
@@ -67,10 +51,10 @@ def setup_vectorstore():
     )
 
     # Create the vector store
-    qdrant_vector_store = Qdrant(
+    qdrant_vector_store = QdrantVectorStore(
         client=qdrant_client,
         collection_name=COLLECTION_NAME,
-        embedding_function=embeddings
+        embedding=OpenAIEmbeddings()
     )
 
     # Load and add documents
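For readers following along, here is a minimal sketch of how setup_vectorstore reads after this commit, reassembled from the hunks above. The create_collection arguments and the document-loading step are elided by the diff, so the vectors_config/Distance.COSINE settings, the commented-out load_and_process_pdfs/add_documents calls, and the return value are assumptions for illustration, not the file's actual code.

# Sketch only: reconstructed from the diff; elided pieces are marked as assumptions.
# Requires OPENAI_API_KEY in the environment (app.py sets it from st.secrets).
from langchain_openai import OpenAIEmbeddings
from langchain_qdrant import QdrantVectorStore
from qdrant_client import QdrantClient
from qdrant_client.http.models import Distance, VectorParams


def setup_vectorstore():
    LOCATION = ":memory:"                    # in-memory Qdrant instance
    COLLECTION_NAME = "AI_Ethics_Framework"
    VECTOR_SIZE = 1536                       # dimension of OpenAI's default embeddings

    qdrant_client = QdrantClient(location=LOCATION)

    # Create the collection (arguments assumed; the diff does not show them)
    qdrant_client.create_collection(
        collection_name=COLLECTION_NAME,
        vectors_config=VectorParams(size=VECTOR_SIZE, distance=Distance.COSINE),
    )

    # Create the vector store backed by OpenAI embeddings
    qdrant_vector_store = QdrantVectorStore(
        client=qdrant_client,
        collection_name=COLLECTION_NAME,
        embedding=OpenAIEmbeddings(),
    )

    # Load and add documents (step truncated in the diff; hypothetical call shown)
    # documents = load_and_process_pdfs(pdf_links)
    # qdrant_vector_store.add_documents(documents)

    return qdrant_vector_store  # assumed return value

Hard-coding VECTOR_SIZE = 1536 is consistent with OpenAIEmbeddings' default model (text-embedding-ada-002, which returns 1536-dimensional vectors; text-embedding-3-small is also 1536), which is what lets this commit drop the embeddings.embed_query("test") probe that previously measured the vector size at runtime.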