from generator.llm_inference import generate_answer
from type.state_schema import RAGState

def call_llm(state: RAGState) -> RAGState:
    """Generate the final answer for the prompt prepared in the RAG state.

    Sends ``state.prompt`` to the LLM and returns an updated copy of the
    state carrying the model's response and a success status.
    """
    prompt = state.prompt
    response = generate_answer(prompt)
    # Return an updated copy rather than mutating the incoming state.
    return state.model_copy(update={
        "final_response": response,
        "status": "success",
    })
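

# Usage sketch (hypothetical, not part of the original file): assumes RAGState
# is a Pydantic model whose remaining fields have defaults, so a state can be
# built from a prompt alone and passed through this node directly.
if __name__ == "__main__":
    demo_state = RAGState(prompt="What is retrieval-augmented generation?")
    result = call_llm(demo_state)
    print(result.final_response)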