import spaces
from transformers import pipeline as tpipeline
# from optimum.pipelines import pipeline as opipeline
#@spaces.GPU(duration=60)
def classify(tweet, event_model, hftoken, threshold):
    """Predict the event type of a tweet with a text-classification model.

    Returns a dict with the original text, the predicted event label
    (or "none" when the confidence is at or below `threshold`), and the score.
    """
    results = {"text": None, "event": None, "score": None}
    # event type prediction with transformers pipeline
    event_predictor = tpipeline(task="text-classification", model=event_model,
                                batch_size=512, token=hftoken, device="cpu")
    tokenizer_kwargs = {'padding': True, 'truncation': True, 'max_length': 512}
    prediction = event_predictor(tweet, **tokenizer_kwargs)[0]
    # with onnx pipeline
    # onnx_classifier = opipeline("text-classification", model=event_model, accelerator="ort",
    #                             batch_size=512, token=hftoken, device="cpu")
    # prediction = onnx_classifier(tweet)[0]
    results["text"] = tweet
    # fall back to "none" when the model is not confident enough
    if prediction["label"] != "none" and round(prediction["score"], 2) <= threshold:
        results["event"] = "none"
        results["score"] = prediction["score"]
    else:
        results["event"] = prediction["label"]
        results["score"] = prediction["score"]
    return results
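
# Example usage (a minimal sketch; the model id, token, tweet, and threshold
# below are placeholders for illustration, not values from this Space):
#
# out = classify(
#     "Severe flooding reported downtown after last night's storm",
#     event_model="your-org/your-event-model",  # hypothetical model id
#     hftoken="hf_xxx",                         # your Hugging Face access token
#     threshold=0.5,
# )
# print(out)  # {"text": "...", "event": "...", "score": ...}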