"""Simple RAG chain implementation (for debugging)."""

import os

from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI


class SimpleRAGChain:
    def __init__(self, vector_store):
        """Initialize the simple RAG chain."""
        print("Initializing simple RAG chain...")
        self.vector_store = vector_store

        # Read the OpenAI API key from the environment; for debugging, only
        # report whether it is set.
        openai_api_key = os.environ.get("OPENAI_API_KEY", "")
        print(f"API key set: {bool(openai_api_key)}")

        self.llm = ChatOpenAI(
            model="gpt-3.5-turbo",
            temperature=0.2,
            api_key=openai_api_key,
        )

        # Prompt that keeps answers grounded in the retrieved context.
        template = """
        Answer the question accurately based on the information below.

        Question: {question}

        Reference information:
        {context}

        If the reference information does not contain the answer, reply with
        "I could not find that information in the provided documents."
        """

        self.prompt = PromptTemplate.from_template(template)

        # LCEL pipeline: retrieve context, fill in the prompt, call the LLM,
        # and parse the response into a plain string.
        self.chain = (
            {"context": self._retrieve, "question": RunnablePassthrough()}
            | self.prompt
            | self.llm
            | StrOutputParser()
        )
        print("Simple RAG chain initialization complete")

    def _retrieve(self, query):
        """Retrieve relevant documents and join them into a single context string."""
        try:
            docs = self.vector_store.similarity_search(query, k=3)
            return "\n\n".join(doc.page_content for doc in docs)
        except Exception as e:
            print(f"Error during retrieval: {e}")
            return "An error occurred while searching the documents."

    def run(self, query):
        """Process a single query through the chain."""
        try:
            return self.chain.invoke(query)
        except Exception as e:
            print(f"Error during execution: {e}")
            return f"An error occurred: {str(e)}"