import gradio as gr
from api import *
from processing import *
import pandas as pd
from indices import indices
# xgboost and the sklearn classes are not referenced directly below; they are
# kept because the pickled scaler/PCA (and, presumably, the saved model)
# come from these libraries.
import xgboost as xgb
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
import pickle as pk
import json
from shapely.geometry import Point
from shapely.geometry.polygon import Polygon
import wget


def predict(location_name, lat, lon):
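    """Estimate biomass and carbon stock for a point from Sentinel-2 imagery.

    `location_name` is only a display label coming from the UI examples; the
    prediction uses `lat`/`lon`. Returns five strings for the Gradio outputs:
    cloud coverage, acquisition age, above-ground biomass density, carbon
    stock density and mean NDVI (or an error message in the first slot when
    the point is outside Ivory Coast or has no Sentinel-2 tile).
    """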
    cord = [lon, lat]
    lon = round(lon, 4)
    lat = round(lat, 4)
    x1 = [lon, lat]
    x2 = [lat, lon]
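    # The country border below is stored as (lon, lat) pairs (GeoJSON order),
    # while the tile lookup expects (lat, lon); hence the two point variants.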
    with open("data/CIV_0.json", "r") as file:
        data = json.load(file)
    # Extract the Ivory Coast border: merge the three parts of the country's
    # MultiPolygon into a single outline.
    features = [data['features'][0]['geometry']['coordinates'][0]
                + data['features'][0]['geometry']['coordinates'][1]
                + data['features'][0]['geometry']['coordinates'][2]]
    data['features'][0]['geometry']['coordinates'] = features
    ci_polygon = data['features'][0]['geometry']['coordinates'][0][0]
    point1 = Point(x1)
    point2 = Point(x2)
    polygon = Polygon(ci_polygon)
    result = polygon.contains(point1)
    if not result:
        return "Please choose a location inside Ivory Coast", "", "", "", ""
    # Find the Sentinel-2 tile that covers the point (404 = no tile found).
    df = pd.read_csv("data/frame.csv")
    name = find_good_tile(df, point2)
    if name == 404:
        return "Sentinel-2 has no data for this location yet", "", "", "", ""
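    # Build the S3 URL of the matching tile archive, unpack it, and keep the
    # acquisition with the lowest cloud coverage.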
    path = "https://data354-public-assets.s3.eu-west-3.amazonaws.com/cisentineldata/"
    url = path + name
    # wget.download(url)  # download disabled in the source; the archive is assumed to be available locally
    unzip()
    name, cld_prob, days_ago = select_best_cloud_coverage_tile()
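    # Cut sub-images around the target point, one band group per native
    # Sentinel-2 resolution (10 m, 20 m, 60 m).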
    bandes_path_10, bandes_path_20, bandes_path_60, tile_path, path_cld_20, path_cld_60 = paths(name)
    images_10 = extract_sub_image(bandes_path_10, tile_path, cord)
    images_20 = extract_sub_image(bandes_path_20, tile_path, cord, 20, 1)
    images_60 = extract_sub_image(bandes_path_60, tile_path, cord, 60)
    feature = images_10.tolist() + images_20.tolist() + images_60.tolist()
    bands = ['B02', 'B03', 'B04', 'B05', 'B06', 'B07', 'B08', 'B8A', 'B11', 'B12', 'B01', 'B09']
    X = pd.DataFrame([feature], columns=bands)
    cord_df = pd.DataFrame({"Latitude": [lat], "Longitude": [lon]})
    sdc_reload = pk.load(open("data/sdc.pkl", 'rb'))
    pca_reload = pk.load(open("data/pca.pkl", 'rb'))
    X_pca = sdc_reload.transform(X)
    principalComponents = pca_reload.transform(X_pca)
    principalDf = pd.DataFrame(data=principalComponents[:, :4],
                               columns=["PC1", "PC2", "PC3", "PC4"])
    X = indices(X)
    X_index = X.drop(X.columns[:12], axis=1)  # drop the 12 raw S2 bands
    X_final = pd.concat([cord_df, principalDf, X_index], axis=1)
    loaded_model = pk.load(open("data/finalized_model3.sav", 'rb'))
    biomass = loaded_model.predict(X_final)[0]
    if biomass < 0:
        biomass = 0.0
    carbon = 0.55 * biomass  # carbon stock taken as 55% of biomass
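    # Mean NDVI around the requested point, reported alongside the estimates.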
    ndvi_index = ndvi(cord, name)
    # delete_tiles()  # optional cleanup of the downloaded tiles
    return (str(cld_prob) + " % cloud coverage",
            str(days_ago) + " days ago",
            str(biomass) + " t/ha",
            str(carbon) + " tC/ha",
            "NDVI: " + str(ndvi_index))


# Title, description and article strings for the Gradio UI
title = "🌴BEEPAS : Biomass estimation to Evaluate the Environmental Performance of Agroforestry Systems🌴"
description = "This application estimates the biomass of certain areas using AI and satellite images (Sentinel-2)."
article = "Created by data354."

example_list = [
    ["Foret du banco :", 5.379913, -4.050445],
    ["Pharmacie Y4 :", 5.363292, -3.9481601],
    ["Treichville Bernabé :", 5.293168, -3.999796],
    ["Adjamé :", 5.346938, -4.027849],
    ["ile boulay :", 5.280498, -4.089883],
]
outputs = [
    gr.Textbox(label="Cloud coverage"),
    gr.Textbox(label="Number of days since sensing"),
    gr.Textbox(label="Above ground biomass density (AGBD) t/ha"),
    gr.Textbox(label="Carbon stock density tC/ha"),
    gr.Textbox(label="Mean NDVI"),
]

demo = gr.Interface(
    fn=predict,
    inputs=["text", "number", "number"],
    outputs=outputs,
    examples=example_list,
    title=title,
    description=description,
    article=article,
)

demo.launch(share=True)