# PsyBot API: FastAPI service wrapping a causal-LM chat model.
# (The lines originally here -- "Spaces:" / "Runtime error" -- appear to be
# pasted Hugging Face Spaces status/log output, not code, and were removed.)
from fastapi import FastAPI
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

app = FastAPI()

# Hugging Face identifier (or local path) of the PsyBot causal LM.
# Original comment: "replace with the correct path".
# NOTE(review): a Hub repo id normally has two segments ("user/repo");
# "fatmata/psyboy/psybot_model" has three -- confirm this path resolves.
MODEL_PATH = "fatmata/psyboy/psybot_model"

# Load tokenizer and model once at import time so every request reuses
# the same in-memory instances instead of reloading per call.
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
model = AutoModelForCausalLM.from_pretrained(MODEL_PATH)
@app.get("/")
def read_root():
    """Health-check endpoint: confirms the PsyBot API is up.

    Returns:
        dict: a static greeting payload.
    """
    # The route decorator was evidently lost in the paste: the function
    # returns an API greeting but was never registered on `app`.
    return {"message": "Hello from PsyBot API!"}
def generate_response(prompt: str):
    """Generate a model reply for *prompt*.

    Args:
        prompt: user text fed directly to the tokenizer.

    Returns:
        dict: {"response": <decoded text>} suitable as a JSON payload.

    NOTE(review): this looks intended to be a FastAPI route (it returns a
    JSON-shaped dict like ``read_root``) but no decorator survives in the
    source -- confirm the intended path/method and register it if so.
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    # Inference only: disable autograd bookkeeping to save memory and time.
    with torch.no_grad():
        # max_length=150 caps the combined prompt + generated token count.
        output = model.generate(**inputs, max_length=150)
    response = tokenizer.decode(output[0], skip_special_tokens=True)
    return {"response": response}