from models.local_llm import run_llm

# Rolling conversation history; each entry records a role ("user" or "assistant") and its text.
conversation_memory = []

def process_text(input_text: str) -> str:
    """Append the user turn, build a prompt from the full history, and return the model's reply."""
    conversation_memory.append({"role": "user", "content": input_text})
    # Include both user and assistant turns so the model sees the whole conversation.
    # (Keying every entry by role also avoids the KeyError that mixed {"user": ...} /
    # {"assistant": ...} dicts would cause here on the second call.)
    context = "\n".join(f"{m['role'].capitalize()}: {m['content']}" for m in conversation_memory)
    prompt = f"You are a telecom AI assistant. Context:\n{context}\nRespond:"
    response = run_llm(prompt)
    conversation_memory.append({"role": "assistant", "content": response})
    return response
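

# Illustrative usage sketch (an assumption, not part of the module): it presumes
# models.local_llm.run_llm accepts a prompt string and returns a string reply.
# Two turns are run so the second prompt carries the first exchange as context.
if __name__ == "__main__":
    first = process_text("What does a RAN outage alarm usually indicate?")
    print("Assistant:", first)
    follow_up = process_text("How should I escalate it?")
    print("Assistant:", follow_up)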