import gradio as gr
import torch
from fastai.text.all import load_learner
from huggingface_hub import hf_hub_download

# Step 8: Download the model from Hugging Face and load it
def load_model():
    try:
        # Download the model .pth file from Hugging Face
        model_path = hf_hub_download(
            repo_id="rahul7star/fastai-rahul-text-model-v02", 
            filename="model.pth"
        )
        
        # Load the model using FastAI's load_learner method
        learn = load_learner(model_path)
        print("Model loaded successfully from Hugging Face.")
        return learn
    except Exception as e:
        print(f"Error loading the model: {e}")
        return None

# Load the model
learn = load_model()
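
# Note (context, not part of the original steps): load_learner() expects a
# Learner serialized with learn.export(); the "model.pth" file downloaded in
# load_model() is assumed to be such an export. The sketch below is
# illustrative only and is never called; it shows one way that file could
# have been produced and uploaded. The local filename and authentication
# handling are assumptions, not taken from this repo.
def _export_and_upload_sketch(trained_learner):
    from huggingface_hub import HfApi

    # Serialize the trained Learner; load_learner() reads this format back.
    trained_learner.export("model.pth")

    # Push the exported file to the model repo queried by load_model() above
    # (requires being logged in, e.g. via `huggingface-cli login`).
    HfApi().upload_file(
        path_or_fileobj="model.pth",
        path_in_repo="model.pth",
        repo_id="rahul7star/fastai-rahul-text-model-v02",
    )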

# Step 9: Define the prediction function used by the Gradio interface
def predict(input_text):
    if learn is None:
        return "Model is not available; check the startup logs for loading errors."
    try:
        # Get the predicted label and per-class probabilities from the model
        pred, _, probs = learn.predict(input_text)
        return f"Prediction: {pred}, Confidence: {probs.max():.2f}"
    except Exception as e:
        return f"Error during prediction: {e}"

# Step 10: Create and launch the Gradio interface
gr.Interface(fn=predict, inputs="text", outputs="text").launch()
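
# When this script runs (e.g. as app.py on Hugging Face Spaces or locally),
# launch() serves the interface; locally it is reachable at
# http://127.0.0.1:7860 by default, and launch(share=True) would create a
# temporary public link.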