from transformers import pipeline


def classify(tweet, event_model, hftoken):
    """Classify a tweet's event type with a Hugging Face text-classification model.

    Args:
        tweet: The tweet text to classify.
        event_model: Name or path of the event-type classification model on the Hub.
        hftoken: Hugging Face access token used to download the model.

    Returns:
        A dict with the original text, the predicted event label, and its score.
    """
    # Build the event-type prediction pipeline from the given model.
    event_predictor = pipeline(task="text-classification", model=event_model,
                               batch_size=512, token=hftoken)
    # Tokenizer settings: pad and truncate inputs to at most 512 tokens.
    tokenizer_kwargs = {"padding": True, "truncation": True, "max_length": 512}

    # Run the pipeline on the tweet and take the top prediction.
    prediction = event_predictor(tweet, **tokenizer_kwargs)[0]

    results = {
        "text": tweet,
        "event": prediction["label"],
        "score": prediction["score"],
    }

    return results
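

# Example usage: a minimal sketch only. The model id and HF_TOKEN environment
# variable below are placeholders and are not defined in this repository.
if __name__ == "__main__":
    import os

    example = classify(
        tweet="Flooding reported downtown after heavy rain.",
        event_model="username/event-type-model",   # hypothetical model id
        hftoken=os.environ.get("HF_TOKEN"),         # assumed token location
    )
    print(example)  # {"text": ..., "event": <predicted label>, "score": <confidence>}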