change probs
- __pycache__/model.cpython-312.pyc +0 -0
- app.py +2 -2
- confidence_chart.png +0 -0
__pycache__/model.cpython-312.pyc CHANGED
Binary files a/__pycache__/model.cpython-312.pyc and b/__pycache__/model.cpython-312.pyc differ
app.py CHANGED
@@ -1,6 +1,7 @@
 import gradio as gr
 from transformers import AutoTokenizer
 import torch
+import pandas as pd
 import matplotlib.pyplot as plt
 from model import EnergySmellsDetector
 from config import SMELLS, BEST_THRESHOLD
@@ -12,8 +13,7 @@ model = EnergySmellsDetector.load_model_from_hf()
 def get_predictions(code_snippet):
     inputs = tokenizer(code_snippet, return_tensors="pt", truncation=True)
     with torch.no_grad():
-
-        probs = torch.sigmoid(logits).cpu().numpy().flatten()
+        probs = model(**inputs)[0].cpu().numpy().flatten()  # Model output is already sigmoid applied
     rounded_logits = (probs > BEST_THRESHOLD).astype(int)
 
     # Prepare results in a dictionary
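For orientation, below is a minimal sketch of how `get_predictions` could look after this change, assembled from the hunks above. The tokenizer checkpoint name and the shape of the returned dictionary are assumptions (they are not visible in this diff); only the probability computation and thresholding mirror the changed lines.

```python
import torch
from transformers import AutoTokenizer

from config import SMELLS, BEST_THRESHOLD
from model import EnergySmellsDetector

# Setup as implied by the hunk context; the tokenizer checkpoint below is a
# placeholder, since the real name is not shown in this diff.
tokenizer = AutoTokenizer.from_pretrained("placeholder/tokenizer-checkpoint")
model = EnergySmellsDetector.load_model_from_hf()


def get_predictions(code_snippet):
    inputs = tokenizer(code_snippet, return_tensors="pt", truncation=True)
    with torch.no_grad():
        # The model's forward pass already applies sigmoid, so its first output
        # can be used directly as per-smell probabilities.
        probs = model(**inputs)[0].cpu().numpy().flatten()
    # Binarize each probability against the tuned decision threshold.
    rounded_logits = (probs > BEST_THRESHOLD).astype(int)
    # Prepare results in a dictionary (assumed shape: smell -> (flag, probability)).
    return {smell: (int(flag), float(prob))
            for smell, flag, prob in zip(SMELLS, rounded_logits, probs)}
```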
confidence_chart.png ADDED
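The added `confidence_chart.png` together with the new `pandas` import suggests the app renders the per-smell probabilities as a chart. A minimal sketch of how such a chart could be produced is shown below; the helper name `plot_confidence_chart` and all figure details are hypothetical and not part of this commit.

```python
import pandas as pd
import matplotlib.pyplot as plt


def plot_confidence_chart(probs, smells, path="confidence_chart.png"):
    """Hypothetical helper: save a horizontal bar chart of per-smell probabilities."""
    df = pd.DataFrame({"smell": smells, "probability": probs}).sort_values("probability")
    fig, ax = plt.subplots(figsize=(8, 4))
    ax.barh(df["smell"], df["probability"])
    ax.set_xlabel("Predicted probability")
    ax.set_xlim(0, 1)
    fig.tight_layout()
    fig.savefig(path)
    plt.close(fig)
    return path
```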