"""Gradio Space: text-prediction demo backed by a fastai learner downloaded from the Hugging Face Hub."""
import gradio as gr
from fastai.text.all import load_learner
from huggingface_hub import hf_hub_download
# Step 1: Redefine Custom Functions
def get_x(x):
    """Return the 'input' field of a sample row (needed to unpickle the exported learner)."""
    return x['input']
def get_y(x):
    """Return the 'output' field of a sample row (needed to unpickle the exported learner)."""
    return x['output']
# Step 2: Load the model from Hugging Face
def load_model():
    """Fetch the exported fastai learner from the Hugging Face Hub and load it.

    Returns the loaded ``Learner`` on success, or ``None`` when either the
    download or the unpickling fails (best-effort: the error is printed,
    not raised, so the caller decides how to react).
    """
    try:
        # Download (or reuse the local cache of) the pickled learner.
        downloaded_path = hf_hub_download(
            repo_id="rahul7star/fastai-rahul-text-model-v02",
            filename="rahul9star_full_learner.pkl"
        )
        learner = load_learner(downloaded_path)
    except Exception as err:
        # Broad catch on purpose: any network/deserialization failure is
        # reported the same way and signalled to the caller via None.
        print(f"Error loading the model: {err}")
        return None
    print("Model loaded successfully from Hugging Face.")
    return learner
# Load the learner once at import time; predictions reuse this global.
learn = load_model()
# Check if the model is loaded successfully
if learn is None:
    # Fail fast at startup rather than serving an app that cannot predict.
    raise ValueError("Failed to load the model")
# Step 3: Define the Gradio Interface
def predict(input_text):
    """Run the global learner on *input_text*.

    Returns a human-readable string with the predicted label and the top
    class probability, or an error message if inference fails.
    """
    try:
        label, _, confidences = learn.predict(input_text)
        # Report the highest class probability as the confidence score.
        return f"Prediction: {label}, Confidence: {confidences.max():.2f}"
    except Exception as err:
        return f"Error during prediction: {err}"
# Step 4: Create Gradio Interface with Examples
sample_questions = [
    ["Who is rahul7star?"],     # Example 1
    ["What does Rahul7star do?"],  # Example 2
    ["Tell me about Rahul7star"]   # Example 3
]
demo = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
    examples=sample_questions
)
demo.launch()