import gradio as gr
from fastai.text.all import load_learner
from huggingface_hub import hf_hub_download
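# Assumption: when this script runs as a Hugging Face Space, the Space's
# requirements.txt is expected to list the packages imported above, e.g.:
#   fastai
#   gradio
#   huggingface_hub
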
# Step 8: Download the model from Hugging Face and load it
def load_model():
    try:
        # Download the .pth file from Hugging Face
        model_path = hf_hub_download(
            repo_id="rahul7star/fastai-rahul-text-model-v02",
            filename="rahul9star.pth"
        )
        # Load the model using FastAI's load_learner method
        learn = load_learner(model_path)
        print("Model loaded successfully from Hugging Face.")
        return learn
    except Exception as e:
        print(f"Error loading the model: {e}")
        return None
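# Note: fastai's load_learner() reads a Learner exported with learn.export()
# (a pickled Learner object), so the rahul9star.pth file on the Hub is assumed
# to have been produced that way in the earlier training/upload steps, rather
# than with torch.save(model.state_dict()).
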
# Load the model
learn = load_model()
# Check if the model is loaded successfully
if learn is None:
raise ValueError("Failed to load the model")
# Step 9: Define the Gradio Interface
def predict(input_text):
    try:
        # Get prediction from the model
        pred, _, probs = learn.predict(input_text)
        return f"Prediction: {pred}, Confidence: {probs.max():.2f}"
    except Exception as e:
        return f"Error during prediction: {e}"

# Step 10: Create Gradio Interface
gr.Interface(fn=predict, inputs="text", outputs="text").launch()
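
# For reference, a minimal sketch of how the learner file could have been
# exported and pushed to the Hub in an earlier step (assumes a trained fastai
# Learner named `learn` from those steps; kept commented out so it does not
# run as part of this app):
#
#   from huggingface_hub import upload_file
#   learn.export("rahul9star.pth")  # serialize the whole Learner to one file
#   upload_file(
#       path_or_fileobj="rahul9star.pth",
#       path_in_repo="rahul9star.pth",
#       repo_id="rahul7star/fastai-rahul-text-model-v02",
#   )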