Oliver Li committed
Commit 6e993f1 · 1 Parent(s): c483e0d

add initial table data and changed column format
app.py CHANGED
@@ -48,6 +48,59 @@ selected_model = st.selectbox("Choose a fine-tuned model:", model_options)
 st.write("### Model Information")
 st.write(f"**Description:** {model_options[selected_model]['description']}")
 
+table_df = pd.DataFrame(columns=["Text (portion)", "Toxicity class 1", "Class 1 probability", "Toxicity class 2", "Class 2 probability"])
+initial_table_data = [{'Text (portion)': ["who's speaking? \n you goddamn cocksucker you know "],
+                       'Toxicity class 1': ['obscene'],
+                       'Class 1 probability': 0.7282997369766235,
+                       'Toxicity class 2': ['toxic'],
+                       'Class 2 probability': 0.2139672487974167},
+                      {'Text (portion)': ['::Here is another source: Melissa Sue Halverson (2'],
+                       'Toxicity class 1': ['toxic'],
+                       'Class 1 probability': 0.24484945833683014,
+                       'Toxicity class 2': ['obscene'],
+                       'Class 2 probability': 0.1627064049243927},
+                      {'Text (portion)': [', 8 November 2007 (UTC) \n\n All I can say is, havin'],
+                       'Toxicity class 1': ['toxic'],
+                       'Class 1 probability': 0.7277262806892395,
+                       'Toxicity class 2': ['obscene'],
+                       'Class 2 probability': 0.2502792477607727},
+                      {'Text (portion)': ['::::I only see that at birth two persons are given'],
+                       'Toxicity class 1': ['toxic'],
+                       'Class 1 probability': 0.2711867094039917,
+                       'Toxicity class 2': ['insult'],
+                       'Class 2 probability': 0.15477754175662994},
+                      {'Text (portion)': ["* There you have it: one man's Barnstar is another"],
+                       'Toxicity class 1': ['toxic'],
+                       'Class 1 probability': 0.5408656001091003,
+                       'Toxicity class 2': ['insult'],
+                       'Class 2 probability': 0.12563346326351166},
+                      {'Text (portion)': ['" \n\n == Fact == \n\n Could just be abit of trivial f'],
+                       'Toxicity class 1': ['toxic'],
+                       'Class 1 probability': 0.35239243507385254,
+                       'Toxicity class 2': ['obscene'],
+                       'Class 2 probability': 0.1686778962612152},
+                      {'Text (portion)': ['HE IS A GHAY ASS FUCKER@@!!'],
+                       'Toxicity class 1': ['obscene'],
+                       'Class 1 probability': 0.7819343209266663,
+                       'Toxicity class 2': ['toxic'],
+                       'Class 2 probability': 0.16951803863048553},
+                      {'Text (portion)': ["I'VE SEEN YOUR CRIMES AGAINST CHILDREN AND I'M ASH"],
+                       'Toxicity class 1': ['toxic'],
+                       'Class 1 probability': 0.8491994738578796,
+                       'Toxicity class 2': ['threat'],
+                       'Class 2 probability': 0.04749392718076706},
+                      {'Text (portion)': [':While with a lot of that essay says, general time'],
+                       'Toxicity class 1': ['toxic'],
+                       'Class 1 probability': 0.282654732465744,
+                       'Toxicity class 2': ['obscene'],
+                       'Class 2 probability': 0.15901680290699005},
+                      {'Text (portion)': ['== Help == \n\n Please members of wiki, help me. My '],
+                       'Toxicity class 1': ['toxic'],
+                       'Class 1 probability': 0.3118911385536194,
+                       'Toxicity class 2': ['obscene'],
+                       'Class 2 probability': 0.16506287455558777}]
+for d in initial_table_data:
+    pd.concat([table_df, pd.DataFrame(d)], ignore_index=True)
 # Load the model and perform toxicity analysis
 if st.button("Analyze"):
     if not text:
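Note on the for d in initial_table_data loop at the end of this hunk: pd.concat returns a new DataFrame rather than modifying table_df in place, and the loop discards that return value, so the ten seeded rows never actually land in table_df. A minimal sketch of the intended pattern, with two made-up placeholder rows standing in for the full initial_table_data list:

    import pandas as pd

    columns = ["Text (portion)", "Toxicity class 1", "Class 1 probability",
               "Toxicity class 2", "Class 2 probability"]

    # Placeholder rows in the same shape as the initial_table_data entries above
    # (single-element lists for text/class fields, scalars for probabilities).
    initial_table_data = [
        {"Text (portion)": ["example comment one"],
         "Toxicity class 1": ["obscene"], "Class 1 probability": 0.73,
         "Toxicity class 2": ["toxic"], "Class 2 probability": 0.21},
        {"Text (portion)": ["example comment two"],
         "Toxicity class 1": ["toxic"], "Class 1 probability": 0.24,
         "Toxicity class 2": ["obscene"], "Class 2 probability": 0.16},
    ]

    table_df = pd.DataFrame(columns=columns)
    for d in initial_table_data:
        # Assign the result back; pd.concat does not mutate table_df.
        table_df = pd.concat([table_df, pd.DataFrame(d)], ignore_index=True)

    print(table_df)  # two seeded rows

A single pd.DataFrame(initial_table_data) call would also work if each row dict held plain scalars instead of single-element lists.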
@@ -65,10 +118,12 @@ if st.button("Analyze"):
         # Create a table with the input text (or a portion of it), the highest toxicity class, and its probability
         table_data = {
             "Text (portion)": [text[:50]],
-
-            f"
+            "Toxicity class 1": [results[0][0]],
+            f"Class 1 probability": results[0][1],
+            "Toxicity class 2": [results[1][0]],
+            f"Class 2 probability": results[1][1]
         }
-        table_df = pd.DataFrame(table_data)
+        table_df = pd.concat([table_df, pd.DataFrame(table_data)], ignore_index=True)
         st.table(table_df)
     else:
         sentiment_pipeline = load_model(selected_model)
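For reference, a self-contained sketch of how the reshaped table_data in this hunk fills one row of the table. The structure of results is an assumption here (a list of (label, probability) pairs ordered by score); the diff only shows the results[i][0] / results[i][1] indexing. The f-string prefixes on the probability keys contain no placeholders, so plain string keys behave identically, and wrapping the probabilities in single-element lists keeps every column the same length:

    import pandas as pd

    # Assumed shape of the pipeline output: top-2 (label, probability) pairs.
    results = [("obscene", 0.73), ("toxic", 0.21)]
    text = "an example comment to analyze"

    table_data = {
        "Text (portion)": [text[:50]],
        "Toxicity class 1": [results[0][0]],
        "Class 1 probability": [results[0][1]],
        "Toxicity class 2": [results[1][0]],
        "Class 2 probability": [results[1][1]],
    }

    # Append the new row to the (possibly pre-seeded) table and display it.
    table_df = pd.DataFrame(columns=list(table_data))
    table_df = pd.concat([table_df, pd.DataFrame(table_data)], ignore_index=True)
    print(table_df)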