docker-api / nodes /call_llm.py
dasomaru's picture
Upload folder using huggingface_hub
06696b5 verified
raw
history blame contribute delete
318 Bytes
from generator.llm_inference import generate_answer
from type.state_schema import RAGState
def call_llm(state: RAGState) -> RAGState:
    """Generate an LLM answer for the prompt carried on *state*.

    Feeds ``state.prompt`` to :func:`generate_answer` and returns an
    updated copy of the state (original left untouched) with the model
    output stored under ``final_response`` and ``status`` set to
    ``"success"``.
    """
    answer = generate_answer(state.prompt)
    # model_copy(update=...) keeps the incoming state immutable.
    changes = {"final_response": answer, "status": "success"}
    return state.model_copy(update=changes)