jayebaku committed on
Commit 6dc4db2 · verified · 1 Parent(s): 0232ca4

Update classifier.py

Files changed (1)
  1. classifier.py +5 -2
classifier.py CHANGED
@@ -1,13 +1,16 @@
 import spaces
 from transformers import pipeline as tpipeline
-from optimum.pipelines import pipeline as opipeline
+# from optimum.pipelines import pipeline as opipeline
+from optimum.onnxruntime import ORTModelForSequenceClassification
 
 #@spaces.GPU(duration=60)
 def classify(tweet, event_model, hftoken, threshold):
     results = {"text": None, "event": None, "score": None}
+
+    model = ORTModelForSequenceClassification.from_pretrained(event_model)
 
     # event type prediction with transformers pipeline
-    event_predictor = tpipeline(task="text-classification", model=event_model,
+    event_predictor = tpipeline(task="text-classification", model=model, #model=event_model
                                 batch_size=512, token=hftoken, device="cpu")
     tokenizer_kwargs = {'padding': True, 'truncation': True, 'max_length': 512}
     prediction = event_predictor(tweet, **tokenizer_kwargs)[0]
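For context, a minimal sketch of the pattern this commit moves to: loading the checkpoint as an ONNX Runtime model via optimum and wrapping it in a standard transformers pipeline. The model id, the export=True flag, and the explicit tokenizer below are illustrative assumptions, not part of the committed code.

# Hedged sketch, not the committed code: ORTModel + transformers pipeline pattern.
from transformers import AutoTokenizer, pipeline
from optimum.onnxruntime import ORTModelForSequenceClassification

event_model = "org/event-type-classifier"  # hypothetical model id (assumption)

# export=True converts the PyTorch checkpoint to ONNX at load time
# (assumption: the hub repo does not already ship ONNX weights).
model = ORTModelForSequenceClassification.from_pretrained(event_model, export=True)
tokenizer = AutoTokenizer.from_pretrained(event_model)

# When a model object (rather than a repo id string) is passed to pipeline,
# the tokenizer must be supplied explicitly.
classifier = pipeline(task="text-classification", model=model,
                      tokenizer=tokenizer, device="cpu")

print(classifier("Flooding reported downtown after heavy rain",
                 padding=True, truncation=True, max_length=512)[0])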