Rob Caamano committed
Commit 4388ac9 · unverified · 1 Parent(s): 32c15df
Files changed (1)
  1. app.py +2 -8
app.py CHANGED
@@ -1,15 +1,10 @@
 import streamlit as st
 import pandas as pd
-import numpy as np
 from transformers import AutoTokenizer
 from transformers import (
     TFAutoModelForSequenceClassification as AutoModelForSequenceClassification,
 )
 
-def softmax(x):
-    e_x = np.exp(x - np.max(x))
-    return e_x / e_x.sum(axis=0)
-
 st.title("Detecting Toxic Tweets")
 
 demo = """Your words are like poison. They seep into my mind and make me feel worthless."""
@@ -36,11 +31,10 @@ def get_highest_toxicity_class(prediction):
     return model.config.id2label[max_index], prediction[max_index]
 
 input = tokenizer(text, return_tensors="tf")
-logits = model(input, return_dict=True).logits.numpy()[0]
-probabilities = softmax(logits)
+prediction = model(input, return_dict=True).logits.numpy()[0]
 
 if st.button("Submit", type="primary"):
-    label, probability = get_highest_toxicity_class(probabilities)
+    label, probability = get_highest_toxicity_class(prediction)
 
     tweet_portion = text[:50] + "..." if len(text) > 50 else text
 
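Note: with the hand-rolled NumPy softmax removed, get_highest_toxicity_class now receives raw logits, so the probability it returns is an unnormalized score rather than a value in [0, 1]. If normalized probabilities are still wanted, TensorFlow's built-in softmax can stand in for the deleted helper. A minimal sketch, assuming the prediction variable from the new code above:

import tensorflow as tf

# prediction holds the raw logits row extracted in the diff:
#   prediction = model(input, return_dict=True).logits.numpy()[0]
# tf.nn.softmax reproduces what the removed NumPy helper computed,
# rescaling the logits into probabilities that sum to 1.
probabilities = tf.nn.softmax(prediction, axis=-1).numpy()
label, probability = get_highest_toxicity_class(probabilities)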