Spaces:
Sleeping
Sleeping
File size: 1,958 Bytes
29971f8 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 |
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.prompts.prompt import PromptTemplate
class Chatbot:
    """Conversational QA chatbot over a CSV-backed vector store.

    Two-step LangChain pipeline: (1) condense the follow-up question plus the
    chat history into a standalone question, (2) answer it from documents
    retrieved out of ``self.vectors``.  The running conversation is kept in
    ``st.session_state["history"]`` as a list of (question, answer) tuples.
    """

    # Prompt that rewrites a follow-up question (given the chat history)
    # into a standalone question.
    # NOTE(review): the Korean text below appears mojibake-garbled in this
    # copy of the file — confirm the source file's encoding before shipping.
    _template = """๋ค์ ๋ํ์ ํ์ ์ง๋ฌธ์ด ์ฃผ์ด์ง๋ฉด ํ์ ์ง๋ฌธ์ ๋
๋ฆฝํ ์ง๋ฌธ์ผ๋ก ๋ฐ๊พธ์ญ์์ค.
์ง๋ฌธ์ด CSV ํ์ผ์ ์ ๋ณด์ ๊ดํ ๊ฒ์ด๋ผ๊ณ ๊ฐ์ ํ ์ ์์ต๋๋ค.
Chat History:
{chat_history}
Follow-up entry: {question}
Standalone question:"""
    CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

    # Prompt used by the combine-docs (QA) step of the chain.
    # BUG FIX: the original opened this literal with four quotes (""""...),
    # which injected a stray leading '"' character into every QA prompt.
    qa_template = """csv ํ์ผ์ ์ ๋ณด๋ฅผ ๊ธฐ๋ฐ์ผ๋ก ์ง๋ฌธ์ ๋ตํ๋ AI ๋ํ ๋น์์
๋๋ค.
csv ํ์ผ์ ๋ฐ์ดํฐ์ ์ง๋ฌธ์ด ์ ๊ณต๋๋ฉฐ ์ฌ์ฉ์๊ฐ ํ์ํ ์ ๋ณด๋ฅผ ์ฐพ๋๋ก ๋์์ผ ํฉ๋๋ค.
์๊ณ ์๋ ์ ๋ณด์ ๋ํด์๋ง ์๋ตํ์ญ์์ค. ๋ต์ ์ง์ด๋ด๋ ค๊ณ ํ์ง ๋ง์ธ์.
๊ทํ์ ๋ต๋ณ์ ์งง๊ณ ์น๊ทผํ๋ฉฐ ๋์ผํ ์ธ์ด๋ก ์์ฑ๋์ด์ผ ํฉ๋๋ค.
question: {question}
=========
{context}
=======
"""
    QA_PROMPT = PromptTemplate(template=qa_template, input_variables=["question", "context"])

    def __init__(self, model_name, temperature, vectors):
        """Store chain configuration.

        Args:
            model_name: OpenAI chat model name passed to ChatOpenAI.
            temperature: Sampling temperature for the model.
            vectors: Vector store exposing ``as_retriever()``.
        """
        self.model_name = model_name
        self.temperature = temperature
        self.vectors = vectors

    def conversational_chat(self, query):
        """Run one conversational turn against the retrieval chain.

        Args:
            query: The user's question.

        Returns:
            The model's answer string.  Side effect: appends
            ``(query, answer)`` to ``st.session_state["history"]``
            (assumed to be initialized by the caller — TODO confirm).
        """
        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(model_name=self.model_name, temperature=self.temperature),
            condense_question_prompt=self.CONDENSE_QUESTION_PROMPT,
            # BUG FIX: from_llm() has no `qa_prompt` parameter; passing it
            # as a stray kwarg fails chain validation at runtime.  The custom
            # QA prompt must be routed to the combine-docs chain like this:
            combine_docs_chain_kwargs={"prompt": self.QA_PROMPT},
            retriever=self.vectors.as_retriever(),
        )
        result = chain({"question": query, "chat_history": st.session_state["history"]})
        st.session_state["history"].append((query, result["answer"]))
        return result["answer"]