Technocoloredgeek committed on
Commit 3f6c71c · verified · 1 Parent(s): 611ca5d

Update langchain

Files changed (1): app.py +4 -5
app.py CHANGED
@@ -3,9 +3,8 @@ import asyncio
 import os
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate
-from langchain.vectorstores import Chroma
-from langchain.embeddings import OpenAIEmbeddings
-from langchain.chat_models import ChatOpenAI
+from langchain_community.vectorstores import Chroma
+from langchain_openai import OpenAIEmbeddings, ChatOpenAI
 from PyPDF2 import PdfReader
 import aiohttp
 from io import BytesIO
@@ -38,8 +37,8 @@ class RetrievalAugmentedQAPipeline:
         formatted_system_prompt = system_role_prompt.format()
         formatted_user_prompt = user_role_prompt.format(question=user_query, context=context_prompt)
 
-        response = await self.llm.agenerate([formatted_system_prompt, formatted_user_prompt])
-        return {"response": response.generations[0][0].text, "context": context_list}
+        response = await self.llm.ainvoke([formatted_system_prompt, formatted_user_prompt])
+        return {"response": response.content, "context": context_list}
 
 # PDF processing functions
 async def fetch_pdf(session, url):
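For reference, the commit tracks the LangChain 0.1+ package split (community vector stores moved to langchain_community, OpenAI integrations to langchain_openai) and swaps the chat model's agenerate call, which returns an LLMResult indexed via .generations, for ainvoke, which returns a single AIMessage whose text is on .content. Below is a minimal sketch of the new call pattern; it assumes OPENAI_API_KEY is set in the environment, and the model name and prompt text are placeholders, not values from this repo:

```python
import asyncio

from langchain.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate
from langchain_openai import ChatOpenAI

# Illustrative prompt templates; the actual templates live elsewhere in app.py.
system_role_prompt = SystemMessagePromptTemplate.from_template(
    "You are a helpful assistant that answers questions using the provided context."
)
user_role_prompt = HumanMessagePromptTemplate.from_template(
    "Question: {question}\n\nContext: {context}"
)

async def main() -> None:
    llm = ChatOpenAI(model="gpt-3.5-turbo")  # placeholder model name

    # .format() on a message prompt template returns a SystemMessage /
    # HumanMessage, so this list is a valid chat-message sequence for ainvoke().
    messages = [
        system_role_prompt.format(),
        user_role_prompt.format(question="What is RAG?", context="..."),
    ]

    # ainvoke() replaces the removed agenerate() call: it returns one
    # AIMessage, so no .generations[0][0].text indexing is needed.
    response = await llm.ainvoke(messages)
    print(response.content)

asyncio.run(main())
```

This is also why the return statement in the diff changes from response.generations[0][0].text to response.content: the two calls return different result types, not just different method names.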