File size: 5,161 Bytes
b029a33
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bb8ef20
b029a33
bb8ef20
 
 
 
 
 
 
 
b029a33
 
bb8ef20
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b029a33
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bb8ef20
b029a33
 
 
 
 
 
 
 
bb8ef20
b029a33
 
 
bb8ef20
 
 
 
 
 
 
 
 
 
 
 
 
b029a33
bb8ef20
 
 
 
 
 
 
 
 
 
b029a33
bb8ef20
b029a33
 
bb8ef20
 
 
b029a33
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
from flask import Flask, render_template, request
import numpy as np
import pandas as pd
import pickle
import os
from config import IMAGE_BASE_URL
from image_utils import get_base64_images

app = Flask(__name__)

# Decode the bundled images a single time at import; templates reuse them.
IMAGES = get_base64_images()

@app.context_processor
def utility_processor():
    """Inject image helpers into every Jinja template context.

    Exposes:
        get_image_url(path): prefix *path* with the configured base URL.
        images: the base64-encoded images loaded at startup.
    """
    def get_image_url(path):
        # Simple prefix join; IMAGE_BASE_URL comes from config.py.
        return IMAGE_BASE_URL + path

    return {"get_image_url": get_image_url, "images": IMAGES}

# Load and prepare data for scalers.
# Strategy (three tiers):
#   1. Unpickle pre-trained model + scalers from disk.
#   2. If that fails, re-fit everything from Crop_recommendation.csv.
#   3. If that also fails, fit tiny default models on two hard-coded samples.
# NOTE(review): unpickling model.pkl executes arbitrary code — only ship
# pickles you produced yourself.
try:
    # Try to load the pre-trained models first.
    with open('model.pkl', 'rb') as f:
        model = pickle.load(f)
    with open('standscaler.pkl', 'rb') as f:
        sc = pickle.load(f)
    with open('minmaxscaler.pkl', 'rb') as f:
        ms = pickle.load(f)
    print("Models loaded successfully")
except Exception as e:
    print(f"Error loading models: {e}")
    # Import sklearn here, BEFORE the CSV read: previously these imports
    # lived inside the inner try after pd.read_csv, so a missing CSV caused
    # a NameError in the fallback branch below instead of default models.
    from sklearn.preprocessing import StandardScaler, MinMaxScaler
    from sklearn.ensemble import RandomForestClassifier
    try:
        # Load the dataset and fit scalers.
        data = pd.read_csv('Crop_recommendation.csv')
        features = ['N', 'P', 'K', 'temperature', 'humidity', 'ph', 'rainfall']
        X = data[features].values
        # NOTE(review): 'label' holds string crop names, so a model fitted
        # here predicts strings (the prediction route handles both cases).
        y = data['label'].values

        # Pipeline order must match prediction time: MinMax first, then Standard.
        ms = MinMaxScaler()
        ms.fit(X)
        X_minmax = ms.transform(X)

        sc = StandardScaler()
        sc.fit(X_minmax)

        # Fit the classifier on the fully transformed features.
        model = RandomForestClassifier(n_estimators=100, random_state=42)
        model.fit(sc.transform(X_minmax), y)
        print("Models fitted successfully with dataset")

        # Persist the fitted models so the next startup takes tier 1.
        with open('model.pkl', 'wb') as f:
            pickle.dump(model, f)
        with open('standscaler.pkl', 'wb') as f:
            pickle.dump(sc, f)
        with open('minmaxscaler.pkl', 'wb') as f:
            pickle.dump(ms, f)

    except Exception as e:
        print(f"Error fitting models: {e}")
        # Last-resort fallback: fit on two representative samples so the
        # app can still start and respond (predictions will be crude).
        sample_data = np.array([[90, 40, 40, 20, 80, 7, 200],
                                [20, 30, 10, 25, 60, 6, 100]])

        ms = MinMaxScaler()
        ms.fit(sample_data)
        sample_minmax = ms.transform(sample_data)

        # Fit sc on the MinMax output and the model on the scaled output,
        # mirroring the ms -> sc -> model pipeline used at prediction time
        # (previously both were fitted on raw data, skewing predictions).
        sc = StandardScaler()
        sc.fit(sample_minmax)

        model = RandomForestClassifier()
        model.fit(sc.transform(sample_minmax), [1, 2])
        print("Using default models")

# Define routes
@app.route('/')
def home():
    """Serve the landing page."""
    template = 'home.html'
    return render_template(template)

@app.route('/info')
def info():
    """Serve the static information page."""
    template = 'info.html'
    return render_template(template)

@app.route('/recommendation', methods=['GET', 'POST'])
def recommendation():
    """Render the recommendation form (GET) or predict a crop (POST).

    On POST, reads seven soil/climate features from the form, scales them
    through the MinMax -> Standard pipeline, and asks the model for the
    best crop. Form values are echoed back so the form stays populated.
    """
    result = None
    N = P = K = temp = humidity = ph = rainfall = None

    if request.method == 'POST':
        try:
            # Field names must match the template exactly — including the
            # historical 'Phosporus' misspelling. float() raises on blanks
            # or non-numeric input, handled by the outer except.
            N = float(request.form['Nitrogen'])
            P = float(request.form['Phosporus'])
            K = float(request.form['Potassium'])
            temp = float(request.form['Temperature'])
            humidity = float(request.form['Humidity'])
            ph = float(request.form['Ph'])
            rainfall = float(request.form['Rainfall'])

            # Shape (1, 7): a single sample for the sklearn pipeline.
            feature_list = [N, P, K, temp, humidity, ph, rainfall]
            single_pred = np.array(feature_list).reshape(1, -1)

            try:
                # Apply the same transform order used at fit time.
                scaled_features = ms.transform(single_pred)
                final_features = sc.transform(scaled_features)

                prediction = model.predict(final_features)

                # The pickled model predicts integer class codes mapped here;
                # a model re-fitted from the CSV predicts string crop names.
                crop_dict = {1: "Rice", 2: "Maize", 3: "Jute", 4: "Cotton", 5: "Coconut", 6: "Papaya", 7: "Orange",
                            8: "Apple", 9: "Muskmelon", 10: "Watermelon", 11: "Grapes", 12: "Mango", 13: "Banana",
                            14: "Pomegranate", 15: "Lentil", 16: "Blackgram", 17: "Mungbean", 18: "Mothbeans",
                            19: "Pigeonpeas", 20: "Kidneybeans", 21: "Chickpea", 22: "Coffee"}

                label = prediction[0]
                if label in crop_dict:
                    crop = crop_dict[label]
                    result = "{} is the best crop to be cultivated right there".format(crop)
                elif isinstance(label, str):
                    # CSV-trained model: the prediction IS the crop name.
                    # Previously this fell through to the "Sorry" branch.
                    result = "{} is the best crop to be cultivated right there".format(label.capitalize())
                else:
                    result = "Sorry, we could not determine the best crop to be cultivated with the provided data."

            except Exception as e:
                print(f"Error in prediction: {e}")
                result = "An error occurred during prediction. Please try again."

        except Exception as e:
            print(f"Error processing form data: {e}")
            result = f"An error occurred: {str(e)}"

    return render_template('recommendation.html', result=result, 
                         N=N, P=P, K=K, temp=temp, 
                         humidity=humidity, ph=ph, rainfall=rainfall)

if __name__ == "__main__":
    # Bind on all interfaces; port 7860 is the Hugging Face Spaces default.
    app.run(host="0.0.0.0", port=7860)