Update app.py
app.py
CHANGED
@@ -1,8 +1,12 @@
 import gradio as gr
-from fastai.text.all import load_learner
+from fastai.text.all import load_learner, TextBlock, CategoryBlock
 from huggingface_hub import hf_hub_download
 
-# Step 1:
+# Step 1: Redefine Custom Functions
+def get_x(x): return x['input']
+def get_y(x): return x['output']
+
+# Step 2: Load the model from Hugging Face
 def load_model():
     try:
         # Download the .pkl file from Hugging Face
@@ -26,7 +30,7 @@ learn = load_model()
 if learn is None:
     raise ValueError("Failed to load the model")
 
-# Step
+# Step 3: Define the Gradio Interface
 def predict(input_text):
     try:
         # Get prediction from the model
@@ -35,5 +39,5 @@ def predict(input_text):
     except Exception as e:
         return f"Error during prediction: {e}"
 
-# Step
+# Step 4: Create Gradio Interface
 gr.Interface(fn=predict, inputs="text", outputs="text").launch()
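Note on Steps 1 and 2: load_learner unpickles the exported Learner, and pickle resolves get_x and get_y by name, so those functions must be defined in the loading module exactly as they were at export time, otherwise loading fails with an AttributeError; the extra TextBlock/CategoryBlock imports presumably mirror what the exported DataBlock references. The download-and-load body elided between the hunks above might look like the sketch below; repo_id and filename are placeholders, not the actual values used in this Space.

from fastai.text.all import load_learner, TextBlock, CategoryBlock
from huggingface_hub import hf_hub_download

# Step 1: custom functions referenced by the pickled Learner must exist
# here, exactly as they were defined when the model was exported.
def get_x(x): return x['input']
def get_y(x): return x['output']

# Step 2: download the exported .pkl from the Hub and load it.
def load_model():
    try:
        # Placeholder repo_id/filename for illustration only.
        model_path = hf_hub_download(
            repo_id="your-username/your-model-repo",
            filename="model.pkl",
        )
        return load_learner(model_path)
    except Exception as e:
        print(f"Error loading model: {e}")
        return None

learn = load_model()
if learn is None:
    raise ValueError("Failed to load the model")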
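For Steps 3 and 4, the diff does not show the body of predict. A minimal continuation of the sketch above, assuming predict calls learn.predict on the raw input string (fastai returns a (decoded class, class index, probabilities) tuple) and hands the decoded class back to Gradio as text:

import gradio as gr

# Step 3: wrap the model call; `learn` is the Learner loaded in the
# load_model() sketch above.
def predict(input_text):
    try:
        # Get prediction from the model:
        # (decoded_class, class_index, probabilities).
        pred_class, pred_idx, probs = learn.predict(input_text)
        return str(pred_class)
    except Exception as e:
        return f"Error during prediction: {e}"

# Step 4: text-in/text-out interface, matching the diff above.
gr.Interface(fn=predict, inputs="text", outputs="text").launch()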