# Gradio app for the rahul7star fastai text model, hosted as a Hugging Face Space.
import gradio as gr
from fastai.text.all import load_learner
from huggingface_hub import hf_hub_download
# Step 1: Redefine the custom accessor functions referenced by the pickled
# Learner. load_learner unpickles the model, and unpickling fails unless these
# exact names are importable from __main__, as they were at training time.
def get_x(x):
    """Return the model input field from a dataset row (dict-like)."""
    return x['input']


def get_y(x):
    """Return the target/label field from a dataset row (dict-like)."""
    return x['output']
# Step 2: Load the model from Hugging Face
def load_model():
    """Download the pickled fastai Learner from the Hugging Face Hub and load it.

    Returns:
        The loaded ``Learner`` on success, or ``None`` if the download or
        unpickling fails (the error is printed so it shows up in Space logs).
    """
    try:
        model_path = hf_hub_download(
            repo_id="rahul7star/fastai-rahul-text-model-v02",
            filename="rahul9star_full_learner.pkl",
        )
        learn = load_learner(model_path)
        print("Model loaded successfully from Hugging Face.")
        return learn
    except Exception as e:
        # Broad catch is deliberate: any failure mode (network, auth, pickle)
        # should be logged and signalled via None instead of crashing here;
        # the caller decides whether a missing model is fatal.
        print(f"Error loading the model: {e}")
        return None
# Load the model once at import time; Gradio handlers reuse this global.
learn = load_model()

# Fail fast at startup: the app is useless without the model, so raise a clear
# error now rather than letting every prediction fail later.
if learn is None:
    raise ValueError("Failed to load the model")
# Step 3: Define the prediction handler for the Gradio interface.
def predict(input_text):
    """Run the globally loaded learner on *input_text*.

    Returns a human-readable string with the predicted label and the maximum
    class probability, or an error message if inference fails.
    """
    try:
        # fastai predict returns (decoded prediction, prediction index, probabilities)
        pred, _, probs = learn.predict(input_text)
        return f"Prediction: {pred}, Confidence: {probs.max():.2f}"
    except Exception as e:
        # Return (not raise) the error text so Gradio displays it in the
        # output box instead of showing a generic failure.
        return f"Error during prediction: {e}"
# Step 4: Build and launch the Gradio UI: a single text box in, a single text
# box out, with clickable example prompts.
gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
    examples=[
        ["Who is rahul7star?"],
        ["What does Rahul7star do?"],
        ["Tell me about Rahul7star"],
    ],
).launch()