from flask import Flask, render_template, request
import numpy as np
import pandas as pd
import pickle
import os
from config import IMAGE_BASE_URL
from image_utils import get_base64_images

app = Flask(__name__)

# Load images once at startup so every request reuses the same base64 blobs.
IMAGES = get_base64_images()


@app.context_processor
def utility_processor():
    """Expose image helpers (URL builder + preloaded images) to all templates."""
    def get_image_url(path):
        return IMAGE_BASE_URL + path
    return dict(get_image_url=get_image_url, images=IMAGES)


# Mapping from the integer class codes used by the pre-trained pickled model
# to human-readable crop names. Hoisted to module level: it is request-invariant.
CROP_DICT = {1: "Rice", 2: "Maize", 3: "Jute", 4: "Cotton", 5: "Coconut",
             6: "Papaya", 7: "Orange", 8: "Apple", 9: "Muskmelon",
             10: "Watermelon", 11: "Grapes", 12: "Mango", 13: "Banana",
             14: "Pomegranate", 15: "Lentil", 16: "Blackgram", 17: "Mungbean",
             18: "Mothbeans", 19: "Pigeonpeas", 20: "Kidneybeans",
             21: "Chickpea", 22: "Coffee"}

# Column order expected by the scalers/model, matching the CSV schema.
FEATURE_COLUMNS = ['N', 'P', 'K', 'temperature', 'humidity', 'ph', 'rainfall']


def _fit_from_dataset():
    """Fit scalers and a RandomForest from Crop_recommendation.csv.

    Mirrors the transform order used at prediction time (MinMaxScaler first,
    then StandardScaler) and persists the fitted objects as pickles so the
    next startup can load them directly.

    Returns:
        (model, sc, ms) — fitted classifier, StandardScaler, MinMaxScaler.
    Raises:
        Any exception from missing/malformed CSV or sklearn fitting.
    """
    from sklearn.preprocessing import StandardScaler, MinMaxScaler
    from sklearn.ensemble import RandomForestClassifier

    data = pd.read_csv('Crop_recommendation.csv')
    X = data[FEATURE_COLUMNS].values
    y = data['label'].values

    ms = MinMaxScaler()
    ms.fit(X)
    X_minmax = ms.transform(X)

    sc = StandardScaler()
    sc.fit(X_minmax)

    model = RandomForestClassifier(n_estimators=100, random_state=42)
    model.fit(sc.transform(X_minmax), y)

    # Persist for faster startup next time.
    with open('model.pkl', 'wb') as f:
        pickle.dump(model, f)
    with open('standscaler.pkl', 'wb') as f:
        pickle.dump(sc, f)
    with open('minmaxscaler.pkl', 'wb') as f:
        pickle.dump(ms, f)
    return model, sc, ms


def _fit_fallback():
    """Last-resort models fitted on two hard-coded sample rows.

    BUG FIX: the original fallback referenced MinMaxScaler/StandardScaler/
    RandomForestClassifier without importing them — the sklearn imports lived
    inside the dataset branch, which may have failed before executing them
    (e.g. missing CSV), so the fallback itself raised NameError. Imports are
    now local to this function.

    BUG FIX: the original fitted ``sc`` and the model on the *raw* sample
    data, while the prediction path applies ``ms.transform`` then
    ``sc.transform``. The fallback now fits on the same doubly-scaled
    representation used at prediction time, consistent with the dataset branch.

    Returns:
        (model, sc, ms) fitted on the two sample rows, labelled 1 and 2.
    """
    from sklearn.preprocessing import StandardScaler, MinMaxScaler
    from sklearn.ensemble import RandomForestClassifier

    sample_data = np.array([[90, 40, 40, 20, 80, 7, 200],
                            [20, 30, 10, 25, 60, 6, 100]])
    ms = MinMaxScaler()
    ms.fit(sample_data)
    sample_minmax = ms.transform(sample_data)

    sc = StandardScaler()
    sc.fit(sample_minmax)

    model = RandomForestClassifier()
    model.fit(sc.transform(sample_minmax), [1, 2])
    return model, sc, ms


def _load_models():
    """Return (model, sc, ms): pickles first, then refit from CSV, then fallback."""
    try:
        with open('model.pkl', 'rb') as f:
            model = pickle.load(f)
        with open('standscaler.pkl', 'rb') as f:
            sc = pickle.load(f)
        with open('minmaxscaler.pkl', 'rb') as f:
            ms = pickle.load(f)
        print("Models loaded successfully")
        return model, sc, ms
    except Exception as e:
        print(f"Error loading models: {e}")

    try:
        model, sc, ms = _fit_from_dataset()
        print("Models fitted successfully with dataset")
        return model, sc, ms
    except Exception as e:
        print(f"Error fitting models: {e}")

    model, sc, ms = _fit_fallback()
    print("Using default models")
    return model, sc, ms


# Module-level model/scaler globals used by the /recommendation route.
model, sc, ms = _load_models()


@app.route('/')
def home():
    return render_template('home.html')


@app.route('/info')
def info():
    return render_template('info.html')


@app.route('/recommendation', methods=['GET', 'POST'])
def recommendation():
    """Render the recommendation form; on POST, predict the best crop.

    Reads seven numeric fields from the form, scales them with the same
    MinMax→Standard pipeline the model was fitted with, and maps the
    prediction to a crop name for display.
    """
    result = None
    N = P = K = temp = humidity = ph = rainfall = None
    if request.method == 'POST':
        try:
            # NOTE: 'Phosporus' matches the (misspelled) field name the form
            # template submits; do not "correct" it here without updating the
            # template as well.
            N = float(request.form['Nitrogen'])
            P = float(request.form['Phosporus'])
            K = float(request.form['Potassium'])
            temp = float(request.form['Temperature'])
            humidity = float(request.form['Humidity'])
            ph = float(request.form['Ph'])
            rainfall = float(request.form['Rainfall'])

            feature_list = [N, P, K, temp, humidity, ph, rainfall]
            single_pred = np.array(feature_list).reshape(1, -1)

            try:
                # Same transform order as fitting: MinMax first, then Standard.
                scaled_features = ms.transform(single_pred)
                final_features = sc.transform(scaled_features)
                prediction = model.predict(final_features)
                label = prediction[0]

                # BUG FIX: a model refit from the CSV predicts *string*
                # labels (data['label']), not the 1..22 integer codes, so the
                # original lookup always fell through to the "Sorry" branch
                # in that case. Accept both representations.
                if label in CROP_DICT:
                    crop = CROP_DICT[label]
                elif isinstance(label, str):
                    crop = label.capitalize()
                else:
                    crop = None

                if crop is not None:
                    result = "{} is the best crop to be cultivated right there".format(crop)
                else:
                    result = "Sorry, we could not determine the best crop to be cultivated with the provided data."
            except Exception as e:
                print(f"Error in prediction: {e}")
                result = "An error occurred during prediction. Please try again."
        except Exception as e:
            print(f"Error processing form data: {e}")
            result = f"An error occurred: {str(e)}"

    return render_template('recommendation.html', result=result,
                           N=N, P=P, K=K, temp=temp, humidity=humidity,
                           ph=ph, rainfall=rainfall)


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)