Spaces: Runtime error
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
from memory import update_memory, check_memory

# Load persona instructions
with open("persona.txt", "r", encoding="utf-8") as f:
    personality = f.read()

# Load fine-tuned model
model_name = "./MoinRomanticBot-Lora"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
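# Note: if ./MoinRomanticBot-Lora contains only LoRA adapter weights rather than a full
# merged model, AutoModelForCausalLM.from_pretrained will fail at load time; in that case
# the adapter would need to be loaded with peft (PeftModel.from_pretrained) on top of its
# base model instead.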
# Function to generate a response with memory
def chatbot(input_text):
    # First check whether a response for this input is already stored in memory
    memory_response = check_memory(input_text)
    if memory_response:
        return memory_response

    # Generate a new response
    prompt = f"{personality}\nMoin: {input_text}\nAI:"
    inputs = tokenizer(prompt, return_tensors="pt")
    # max_new_tokens bounds only the generated part, so a long persona prompt is not cut off
    outputs = model.generate(**inputs, max_new_tokens=150)
    # Decode only the newly generated tokens, not the persona/prompt that was fed in
    response = tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)

    # Store the exchange in memory
    update_memory(input_text, response)
    return response
# Gradio interface
iface = gr.Interface(fn=chatbot, inputs="text", outputs="text", title="MoinRomanticBot")

# Launch app
if __name__ == "__main__":
    iface.launch(server_name="0.0.0.0", server_port=7860)
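The memory module imported above is not part of this file. Below is a minimal sketch of what check_memory and update_memory might look like, assuming a simple JSON file keyed by the exact user input; the file name memory.json and the exact-match lookup are assumptions, not the Space's actual implementation.

# memory.py — hypothetical sketch of the imported helpers (assumed JSON-backed store)
import json
import os

MEMORY_FILE = "memory.json"  # assumed file name

def _load_memory():
    # Return the stored input -> response pairs, or an empty dict if none exist yet
    if os.path.exists(MEMORY_FILE):
        with open(MEMORY_FILE, "r", encoding="utf-8") as f:
            return json.load(f)
    return {}

def check_memory(input_text):
    # Return a previously stored response for this exact input, else None
    return _load_memory().get(input_text)

def update_memory(input_text, response):
    # Persist the new input/response pair
    memory = _load_memory()
    memory[input_text] = response
    with open(MEMORY_FILE, "w", encoding="utf-8") as f:
        json.dump(memory, f, ensure_ascii=False, indent=2)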