File size: 1,958 Bytes
29971f8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.prompts.prompt import PromptTemplate


class Chatbot:
    """Conversational QA assistant over a CSV-backed vector store.

    Wraps a LangChain ``ConversationalRetrievalChain`` around an OpenAI chat
    model and a vector store built from a CSV file. Chat history is kept in
    Streamlit's ``st.session_state["history"]`` as a list of
    ``(question, answer)`` tuples; the caller must initialize it (e.g. to
    ``[]``) before the first call to :meth:`conversational_chat`.
    """

    # Condense prompt: rewrites a follow-up question plus the chat history
    # into a standalone question. The instructions are in Korean and are
    # runtime prompt text, so they are preserved verbatim.
    _template = """다음 대화와 후속 질문이 주어지면 후속 질문을 독립형 질문으로 바꾸십시오.
    질문이 CSV 파일의 정보에 관한 것이라고 가정할 수 있습니다.
    Chat History:
    {chat_history}
    Follow-up entry: {question}
    Standalone question:"""

    CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

    # Answer prompt fed to the combine-documents chain.
    # FIX: the original opened with FOUR quotes (`""""csv`), which made the
    # triple-quoted string start with a stray literal `"` character that was
    # then sent to the model as part of the prompt.
    qa_template = """csv 파일의 정보를 기반으로 질문에 답하는 AI 대화 비서입니다.
    csv 파일의 데이터와 질문이 제공되며 사용자가 필요한 정보를 찾도록 도와야 합니다. 
    알고 있는 정보에 대해서만 응답하십시오. 답을 지어내려고 하지 마세요.
    귀하의 답변은 짧고 친근하며 동일한 언어로 작성되어야 합니다.
    question: {question}
    =========
    {context}
    =======
    """

    QA_PROMPT = PromptTemplate(template=qa_template, input_variables=["question", "context"])

    def __init__(self, model_name, temperature, vectors):
        # model_name:  OpenAI chat model identifier passed to ChatOpenAI.
        # temperature: sampling temperature for the LLM.
        # vectors:     a LangChain vector store exposing .as_retriever().
        self.model_name = model_name
        self.temperature = temperature
        self.vectors = vectors

    def conversational_chat(self, query):
        """Run one conversational turn via LangChain and return the answer.

        Reads the running history from ``st.session_state["history"]``,
        appends the new ``(query, answer)`` pair to it, and returns the
        answer string.
        """

        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(model_name=self.model_name, temperature=self.temperature),
            condense_question_prompt=self.CONDENSE_QUESTION_PROMPT,
            # FIX: from_llm has no `qa_prompt` keyword in current LangChain;
            # the answer prompt must be forwarded to the underlying
            # combine-documents ("stuff") chain via combine_docs_chain_kwargs.
            # NOTE(review): if this project pins a pre-0.0.1xx LangChain that
            # still accepted qa_prompt, verify against the pinned version.
            combine_docs_chain_kwargs={"prompt": self.QA_PROMPT},
            retriever=self.vectors.as_retriever(),
        )
        result = chain({"question": query, "chat_history": st.session_state["history"]})

        st.session_state["history"].append((query, result["answer"]))

        return result["answer"]