rahul7star committed (verified)
Commit 8979de3 · Parent(s): 883dab7

Update app.py

Files changed (1):
  1. app.py +22 -47
app.py CHANGED
@@ -1,56 +1,32 @@
-import torch
-from fastai.text.all import *
+import gradio as gr
+from fastai.text.all import load_learner
 from huggingface_hub import hf_hub_download

-# Step 1: Recreate the DataLoaders used during training
-def create_dataloaders():
-    data = [
-        {"input": "Who is rahul7star?", "output": "Rahul7star is a software developer living in NSW."},
-        {"input": "What is Rahul7star's profession?", "output": "Rahul7star is an AI creator and software developer."},
-        {"input": "What does Rahul7star do?", "output": "Rahul7star works as a developer and enjoys solving complex coding problems."},
-        {"input": "Tell me about Rahul7star", "output": "Rahul7star is a talented developer, AI creator, and a fan of traveling."},
-        {"input": "What is Rahul7star known for?", "output": "Rahul7star is known for his work in AI, software development, and his love for coding."}
-    ]
-
-    # Split into training and validation sets
-    train_data = data[:4]  # First 4 examples for training
-    valid_data = data[4:]  # Last example for validation
-
-    # Define the DataBlock (use the same structure as during training)
-    dblock = DataBlock(
-        blocks=(TextBlock.from_df(text_cols='input'), CategoryBlock),
-        get_x=lambda x: x['input'],
-        get_y=lambda x: x['output'],
-        splitter=RandomSplitter(valid_pct=0.2, seed=42)
-    )
-
-    # Create DataLoaders
-    dls = dblock.dataloaders(train_data, bs=2)  # Batch size of 2 for quick experimentation
-    return dls
-
-# Step 2: Load the model weights
-def load_model_weights():
-    model_path = hf_hub_download(
-        repo_id="rahul7star/fastai-rahul-text-model-v02",
-        filename="rahul9star.pth"
-    )
-
-    # Load the model architecture (you need to define the same architecture used during training)
-    learn = text_classifier_learner(create_dataloaders(), AWD_LSTM, metrics=accuracy)
-
-    # Load the weights into the model
-    learn.load(model_path)
-    print("Model loaded successfully from Hugging Face.")
-    return learn
+# Step 1: Load the model from Hugging Face
+def load_model():
+    try:
+        # Download the .pkl file from Hugging Face
+        model_path = hf_hub_download(
+            repo_id="rahul7star/fastai-rahul-text-model-v02",
+            filename="rahul9star_full_learner.pkl"  # File name in the repo
+        )
+
+        # Load the model using FastAI's load_learner method
+        learn = load_learner(model_path)
+        print("Model loaded successfully from Hugging Face.")
+        return learn
+    except Exception as e:
+        print(f"Error loading the model: {e}")
+        return None

-# Step 3: Use the model in your Gradio interface
-learn = load_model_weights()
+# Load the model
+learn = load_model()

 # Check if the model is loaded successfully
 if learn is None:
     raise ValueError("Failed to load the model")

-# Step 4: Define the Gradio Interface
+# Step 2: Define the Gradio Interface
 def predict(input_text):
     try:
         # Get prediction from the model
@@ -59,6 +35,5 @@ def predict(input_text):
     except Exception as e:
         return f"Error during prediction: {e}"

-# Step 5: Create Gradio Interface
-import gradio as gr
+# Step 3: Create Gradio Interface
 gr.Interface(fn=predict, inputs="text", outputs="text").launch()
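
Note (not part of the commit): the updated app.py expects an exported learner file named rahul9star_full_learner.pkl to already exist in the rahul7star/fastai-rahul-text-model-v02 repo. The diff does not show how that file was produced; the sketch below is one plausible way to create and upload it from the training side, assuming the trained fastai Learner is bound to `learn` and the account has write access to the repo (e.g. after `huggingface-cli login`).

from huggingface_hub import HfApi

# `learn` is assumed to be the trained fastai Learner from the training session.
learn.export("rahul9star_full_learner.pkl")  # pickles the model together with its data pipeline

# Upload the exported file so hf_hub_download in app.py can fetch it.
HfApi().upload_file(
    path_or_fileobj="rahul9star_full_learner.pkl",
    path_in_repo="rahul9star_full_learner.pkl",
    repo_id="rahul7star/fastai-rahul-text-model-v02",
)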
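
Note (hypothetical): the second hunk's context elides the body of predict (new lines 33-34 are not shown in the diff). A typical fastai classifier call at that point would look like the sketch below; the actual lines in the commit may differ.

# Hypothetical predict body, for illustration only; the real lines 33-34 are not in the diff.
def predict(input_text):
    try:
        # Learner.predict returns (decoded_label, label_index, probabilities) for a classifier.
        pred, pred_idx, probs = learn.predict(input_text)
        return str(pred)
    except Exception as e:
        return f"Error during prediction: {e}"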