mariaasoriaano committed on
Commit de8b4eb · 1 Parent(s): a36b86c

Uploading app

Files changed (2)
  1. app.py +64 -0
  2. requirements.txt +5 -0
app.py ADDED
@@ -0,0 +1,64 @@
+ # Import the necessary libraries
+ import gradio as gr  # Gradio is a library to quickly build and share demos for ML models
+ import joblib  # joblib is used here to load the trained model from a file
+ import numpy as np  # NumPy for numerical operations (array manipulation)
+ from huggingface_hub import hf_hub_download
+
+ HF_TOKEN = 'hf_your_token_here'  # Only needed for private repos; pass it as token=HF_TOKEN to hf_hub_download
+
+ # Replace with your actual Hugging Face model repo ID and file names
+ # For example, repo_id="username/iris-decision-tree"
+ # Use repo_type="model" if it's a model repository
+ model_path = hf_hub_download(
+     repo_id="brjapon/iris-dt",
+     filename="iris_dt.joblib",  # The model file stored in the HF repo
+     repo_type="model"  # Could also be 'dataset' if you're storing it that way
+ )
+
+ # Load the trained model
+ pipeline = joblib.load(model_path)
+
+ # Define a function that takes the four iris measurements as input
+ # and returns the predicted iris species label.
+ def predict_iris(sepal_length, sepal_width, petal_length, petal_width):
+     # Convert the input parameters into a 2D array because
+     # scikit-learn's predict() expects a shape of (n_samples, n_features)
+     features = np.array([[sepal_length, sepal_width, petal_length, petal_width]])
+     prediction = pipeline.predict(features)
+
+     # Convert the predicted class index to its string label
+     if prediction[0] == 0:
+         return 'Iris-setosa'
+     elif prediction[0] == 1:
+         return 'Iris-versicolor'
+     elif prediction[0] == 2:
+         return 'Iris-virginica'
+     else:
+         return "Invalid prediction"
+
+ # Create a Gradio Interface:
+ # - fn: the function to call for inference
+ # - inputs: a list of component types to collect user input (in this case, four numeric values)
+ # - outputs: how the prediction is displayed (in this case, as text)
+ # - live: whether to update the output in real time as the user types
+ interface = gr.Interface(
+     fn=predict_iris,
+     inputs=["number", "number", "number", "number"],
+     outputs="text",
+     live=True,
+     title="Iris Species Identifier",
+     description="Enter the four measurements to predict the Iris species."
+ )
+
+ # Run the interface when this script is executed directly.
+ # This will launch a local Gradio server and open a user interface in the browser.
+ if __name__ == "__main__":
+     # To create a public link, set the parameter share=True
+     interface.launch()
+
+ '''
+ # The Flag button allows users (or testers) to mark or "flag"
+ # a particular input-output interaction for later review.
+ # When someone clicks Flag, Gradio saves the input values (and usually the output) to a log.csv file,
+ # letting you keep track of interesting or potentially problematic cases for later debugging or analysis.
+ '''
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ scikit-learn
+ huggingface_hub
+ datasets
+ gradio
+ joblib
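A quick, UI-free way to confirm that these dependencies and the model file work together is to download and call the pipeline directly; a minimal sketch, reusing the same repo_id and filename as app.py:

import joblib
import numpy as np
from huggingface_hub import hf_hub_download

model_path = hf_hub_download(repo_id="brjapon/iris-dt", filename="iris_dt.joblib")
pipeline = joblib.load(model_path)

sample = np.array([[5.1, 3.5, 1.4, 0.2]])  # sepal_length, sepal_width, petal_length, petal_width
print(pipeline.predict(sample))  # should print class 0 (Iris-setosa) for this classic setosa sample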