# minimalFlaskAPI / inference.py
from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline

# Load the tokenizer and the sequence-classification model
# (FinBertPTBR: Portuguese financial-sentiment BERT).
tokenizer = AutoTokenizer.from_pretrained("turing-usp/FinBertPTBR")
model = AutoModelForSequenceClassification.from_pretrained("turing-usp/FinBertPTBR")

# Reusable sentiment-analysis pipeline built from the model and tokenizer above.
classifier = pipeline("sentiment-analysis", model=model, tokenizer=tokenizer)

def predict(data):
    # Accepts a string or a list of strings; returns label/score predictions.
    return classifier(data)
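
# --- Usage sketch (assumption: not part of the original file) ---
# A minimal local check of predict(); in the minimalFlaskAPI repo this function
# would presumably be called from a Flask route instead.
if __name__ == "__main__":
    # Illustrative Portuguese financial-news sentence.
    print(predict("O lucro da empresa superou as expectativas do mercado."))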