Removed unnecessary inputs.
app.py
CHANGED
@@ -7,21 +7,22 @@ from numpy import int64
 from pandarallel import pandarallel
 from sklearn.preprocessing import RobustScaler
 import gradio as gr
+from huggingface_hub import InferenceClient
+
+"""
+For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
+"""
+client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
 
 def respond(
     message,
-
-    threshold
+    threshold
 ):
-    for val in history:
-        if val[0].lower().strip() == message.lower().strip():
-            yield val[1]
 
     for message in is_malicious_sql(message, threshold
 ):
         response = message
-        history.append((message.lower().strip(), response))
         yield response
 
 """
@@ -30,7 +31,6 @@ For information on how to customize the ChatInterface, peruse the gradio docs: h
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-        gr.Textbox(value="Check whether a SQL is malicious or not.", label="System message"),
         gr.Slider(minimum=0.01, maximum=0.99, value=0.75, step=0.01, label="Detection Probability Threshold "),
     ],
 )