File size: 1,574 Bytes
68051dd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import time
from typing import List

from .crawler import crawl
from .vector_database import add_data, get_chunks, query_db
from langchain.chains import LLMChain
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq
from pydantic import StrictStr
from qdrant_client.fastembed_common import QueryResponse

# Module-level LLM client shared by all requests: Groq-hosted Llama 3 70B.
# NOTE(review): no API key is passed explicitly — presumably ChatGroq reads
# GROQ_API_KEY from the environment; confirm deployment config.
chat = ChatGroq(
    model="llama3-70b-8192",
    verbose=True,  # log model traffic for debugging
)  # type: ignore



def get_response(query: str, character: str) -> tuple[str, str]:
    """Crawl the web for `query`, index the pages, and answer in-character.

    Pipeline: crawl links for the query -> split each page into chunks ->
    add the chunks to the vector DB -> retrieve the best-matching chunk ->
    ask the LLM to answer as `character` using that chunk as context.

    Args:
        query: The user's question; also used as the crawl/search term.
        character: Persona the model should adopt when answering.

    Returns:
        A ``(answer, elapsed_seconds)`` tuple, both rendered as strings.

    Raises:
        ValueError: If the vector DB returns no results for the query.
    """
    start_time: float = time.time()
    print("Query:", query, "Character:", character)
    print("start crawling")
    links: List[StrictStr] = crawl(query=query)
    print("start getting chunks")
    # NOTE(review): the original annotated this as List[Element], but
    # `Element` is never imported (undefined name); the concrete chunk type
    # is whatever get_chunks() yields.
    chunks: list = []
    for link in links:
        chunks.extend(get_chunks(url=link))
    print("start adding data to db")
    add_data(chunks=chunks)
    print("start querying db")
    results: List[QueryResponse] = query_db(query=query)
    if not results:
        # Fail with a clear message instead of an opaque IndexError below.
        raise ValueError(f"No vector-DB results for query: {query!r}")
    prompt: ChatPromptTemplate = ChatPromptTemplate.from_messages(
        messages=[
            (
                "system",
                "Act as {character}. Answer in one statement. Answer the question using the provided context. Context: {context}",
            ),
            ("human", "{text}"),
        ]
    )
    # LCEL composition replaces the deprecated LLMChain.  LLMChain.invoke()
    # returns a dict, so the original `response.content` would have raised
    # AttributeError; a runnable chain of prompt | chat_model returns an
    # AIMessage, which does expose `.content`.
    chain = prompt | chat
    response = chain.invoke(
        # NOTE(review): assumes each QueryResponse supports ["text"]
        # subscripting — confirm against query_db's actual return type.
        {"text": query, "character": character, "context": results[0]["text"]}
    )
    end_time: float = time.time()
    return str(response.content), str(end_time - start_time)