Update app.py
app.py CHANGED
@@ -2,12 +2,16 @@ import gradio as gr
 import nltk
 from nltk.sentiment.vader import SentimentIntensityAnalyzer
 from transformers import pipeline
+from joblib import load
+from transformers import AutoModelForSequenceClassification, AutoTokenizer
+import torch.nn.functional as F
 
 # global variables to load models
 lr_model = load("lr_model.joblib")
 lr_vectorizer = load("vectorizer.joblib")
 sentiment_pipe = pipeline("text-classification", model="finiteautomata/bertweet-base-sentiment-analysis")
-
+bert_model = AutoModelForSequenceClassification.from_pretrained("./imdb-bert")
+bert_tokenizer = AutoTokenizer.from_pretrained("./imdb-bert")
 
 
 def greet(name):
@@ -28,6 +32,9 @@ def predict_sentiment(text, model):
         x = lr_vectorizer.transform([text])
         pred = lr_model.predict_proba(x)[0]
         return {"neg": pred[0], "pos": pred[1]}
+    elif model == "custom BERT":
+        pred = F.softmax(bert_model(**bert_tokenizer("I love you", return_tensors="pt")).logits[0], dim=0).tolist()
+        return {"neg": pred[0], "pos": pred[1]}
 
 
 demo = gr.Blocks()
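One thing worth flagging in the new "custom BERT" branch: the tokenizer is called on the literal string "I love you" instead of the function's text argument, which looks like leftover test input, so this path would score the same sentence regardless of what the user types. A minimal sketch of the branch with the user text passed through (assuming the same bert_model and bert_tokenizer globals and the neg/pos index order used above; the torch.no_grad() wrapper and the helper name bert_predict are additions here, not part of the commit):

    import torch
    import torch.nn.functional as F

    def bert_predict(text):
        # Tokenize the user-supplied text rather than a hard-coded sentence.
        inputs = bert_tokenizer(text, return_tensors="pt")
        # Inference only, so no gradients are needed.
        with torch.no_grad():
            logits = bert_model(**inputs).logits[0]
        # Softmax over the two class logits; assumes index 0 = negative, 1 = positive.
        pred = F.softmax(logits, dim=0).tolist()
        return {"neg": pred[0], "pos": pred[1]}

Inside predict_sentiment, the elif model == "custom BERT": branch could then simply return bert_predict(text).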
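A related assumption: the returned dict {"neg": pred[0], "pos": pred[1]} takes class index 0 as negative and 1 as positive. That ordering comes from how the ./imdb-bert checkpoint was fine-tuned, so it is worth confirming against the saved config rather than relying on convention (a quick check, assuming the same checkpoint path):

    from transformers import AutoModelForSequenceClassification

    model = AutoModelForSequenceClassification.from_pretrained("./imdb-bert")
    # id2label is written into the config when the classifier head is created,
    # e.g. {0: "NEGATIVE", 1: "POSITIVE"} or the default {0: "LABEL_0", 1: "LABEL_1"}.
    print(model.config.id2label)

If the mapping turns out to be reversed, the two indices in the returned dict should be swapped.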