Simon Riezebos committed · Commit 8d6d0f6
Parent(s): 8950d78

Implement all changes

Files changed:
- app/pages/0_π_AOIs.py: +6 -5
- app/pages/1_π§_Flood_extent_analysis.py: +186 -35
- app/src/gfm.py: +158 -121
- app/src/hf_utils.py: +1 -1
- app/src/utils.py: +6 -9
app/pages/0_π_AOIs.py
CHANGED

@@ -2,7 +2,7 @@ import folium
 import streamlit as st
 from folium.plugins import Draw
 from src.config_parameters import params
-from src.gfm import …
+from src.gfm import get_cached_aois, get_cached_gfm_handler
 from src.utils import (
     add_about,
     get_aoi_id_from_selector_preview,
@@ -44,7 +44,8 @@ radio_selection = st.radio(
 # call to render Folium map in Streamlit
 folium_map = folium.Map([39, 0], zoom_start=8)

-…
+gfm = get_cached_gfm_handler()
+aois = get_cached_aois()

 # See Areas will show all areas collected from GFM.
 # Collecting AOIs is done on first page load and when switching from a different radio selection back to See Areas
@@ -140,8 +141,8 @@ elif radio_selection == "Delete Area":
     confirm_delete = st.text_input("Enter area name")
     if st.button("Confirm"):
         if confirm_delete == aoi_name:
-            delete_aoi(selected_area_id)
-            …
+            gfm.delete_aoi(selected_area_id)
+            get_cached_aois.clear()
             st.toast("Area successfully deleted")
             st.rerun()
         else:
@@ -172,7 +173,7 @@ if save_area:
     print("starting to post new area name to gfm api")
     coordinates = selected_area_geojson["geometry"]["coordinates"]

-    create_aoi(new_area_name, coordinates)
+    gfm.create_aoi(new_area_name, coordinates)
     st.toast("Area successfully created")

 st.session_state["prev_page"] = "aois"
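Note: the page now resolves both the GFM handler and the AOI list through Streamlit's resource cache, and explicitly clears the AOI cache after a deletion so the next rerun refetches. A minimal sketch of that pattern, using a hypothetical FakeHandler stand-in rather than the app's real GFMHandler:

import streamlit as st


class FakeHandler:
    """Hypothetical stand-in for the app's GFMHandler."""

    def retrieve_all_aois(self):
        return {"aoi-1": {"name": "demo area"}}

    def delete_aoi(self, aoi_id):
        print(f"would delete {aoi_id} via the API")


@st.cache_resource
def get_cached_handler():
    # Built once per process and shared across pages
    return FakeHandler()


@st.cache_resource
def get_cached_aois():
    # Cached separately so it can be invalidated without rebuilding the handler
    return get_cached_handler().retrieve_all_aois()


handler = get_cached_handler()
aois = get_cached_aois()

# After a mutation the cached AOI dict is stale, so clear it before rerunning;
# the next call to get_cached_aois() fetches fresh data.
handler.delete_aoi("aoi-1")
get_cached_aois.clear()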
app/pages/1_π§_Flood_extent_analysis.py
CHANGED

@@ -1,18 +1,15 @@
 from datetime import date, timedelta

 import folium
+import pandas as pd
 import streamlit as st
 from src import hf_utils
 from src.config_parameters import params
-from src.gfm import (
-    download_flood_product,
-    get_area_products,
-    retrieve_all_aois,
-)
+from src.gfm import get_cached_aois, get_cached_gfm_handler
 from src.utils import (
     add_about,
     get_aoi_id_from_selector_preview,
-    …
+    get_existing_geojson,
     set_tool_page_style,
     toggle_menu_button,
 )
@@ -40,12 +37,13 @@ set_tool_page_style()
 # Create two rows: top and bottom panel
 row1 = st.container()
 row2 = st.container()
-# …
-col1, col2, col3, col4 …
+# Create two columns in the top panel: input map and paramters
+col1, col2, col3, col4 = row1.columns([1, 1, 1, 2])
 col2_1, col2_2 = row2.columns([3, 2])

-# Retrieve AOIs to fill AOI selector
-…
+# Retrieve GFM Handler and AOIs to fill AOI selector
+gfm = get_cached_gfm_handler()
+aois = get_cached_aois()


 if "all_products" not in st.session_state:
@@ -88,71 +86,200 @@ with col4:
         it will not trigger any product downloads.
         """,
     )
-    show_available_products = st.button("Show …
+    show_available_products = st.button("Show GFM products")

 # If button above is triggered, get products from GFM
 # Then save all products to the session state and rerun the app to display them
 if show_available_products:
-    products = get_area_products(selected_area_id, start_date, end_date)
+    products = gfm.get_area_products(selected_area_id, start_date, end_date)
     st.session_state["all_products"] = products
     st.rerun()

 # Contains the product checkboxes if they exist after pushing the "Show available products" button
 with col2_2:
+    row_checkboxes = st.container()
+    row_buttons = st.container()
+
+    with row_checkboxes:
         checkboxes = list()
         # Products are checked against the index to check whether they are already downloaded
         index_df = hf_utils.get_geojson_index_df()
         if st.session_state["all_products"]:
+            # Get unique product time groups
+            unique_time_groups = set()
             for product in st.session_state["all_products"]:
-                …
+                unique_time_groups.add(product["product_time_group"])
+
+            # Create dataframe for the table
+            product_data = []
+            for time_group in sorted(unique_time_groups):
+                # Check if any product in this group is already downloaded
+                products_in_group = [
+                    p
+                    for p in st.session_state["all_products"]
+                    if p["product_time_group"] == time_group
+                ]
+
+                available_status = ""
+                for product in products_in_group:
+                    if product["product_id"] in index_df["product"].values:
+                        available_status = "Available in Floodmap"
+                        flood_geojson_path = index_df.loc[
+                            index_df["product"] == product["product_id"],
+                            "flood_geojson_path",
+                        ].values[0]
+                        dataset_link = f"https://huggingface.co/datasets/rodekruis/flood-mapping/resolve/main/{flood_geojson_path}"
+
+                product_data.append(
+                    {
+                        "Check": False,
+                        "Product time": time_group,
+                        "Available": dataset_link,
+                    }
+                )
+
+            product_groups_df = pd.DataFrame(product_data)
+
+            # Create the data editor with checkbox column
+            product_groups_st_df = st.data_editor(
+                product_groups_df,
+                column_config={
+                    "Check": st.column_config.CheckboxColumn(
+                        "Select",
+                        help="Select products to process",
+                        default=False,
+                    ),
+                    "Product time": st.column_config.TextColumn(
+                        "Product Time Group", disabled=True
+                    ),
+                    "Available": st.column_config.LinkColumn(
+                        "Available in dataset", display_text="Dataset link"
+                    ),
+                },
+                hide_index=True,
+                disabled=["Product time", "Available"],
+            )
+
+            # Convert checkbox states to list for compatibility with existing code
+            checkboxes = product_groups_st_df["Check"].tolist()
+
+    with row_buttons:
+        below_checkbox_col1, below_checkbox_col2 = row_buttons.columns([1, 1])

     # Contains the "Download Products" button
-    with …
+    with below_checkbox_col1:
         st.text(
             "Button info",
             help=""
             """
-            Will download the selected products …
-            first if there are none). …
-            been downloaded can be left checked, …
+            Will download the selected products from GFM to the Floodmap app
+            (click "Show available products" first if there are none).
+            Products that show that they have already been downloaded can be left checked,
+            they will be skipped.
             """,
         )
-        download_products = st.button("Download …
+        download_products = st.button("Download to Floodmap")

 # If the button is clicked download all checked products that have not been downloaded yet
 if download_products:
     index_df = hf_utils.get_geojson_index_df()
-    …
+    # Get selected time groups from the table
+    selected_time_groups = product_groups_st_df[product_groups_st_df["Check"]][
+        "Product time"
+    ].tolist()
+
+    # For each selected time group
+    for time_group in selected_time_groups:
+        # Get all products for this time group
+        products_in_group = [
+            p
+            for p in st.session_state["all_products"]
+            if p["product_time_group"] == time_group
+        ]
+
+        # Download each product in the group that hasn't been downloaded yet
+        for product_to_download in products_in_group:
             if product_to_download["product_id"] not in index_df["product"].values:
                 with st.spinner(
                     f"Getting GFM files for {product_to_download['product_time']}, this may take a couple of minutes"
                 ):
-                    download_flood_product( …
+                    gfm.download_flood_product(
+                        selected_area_id, product_to_download
+                    )
     st.rerun()

 # For all the selected products add them to the map if they are available
 feature_groups = []
+selected_geojsons = []
 if st.session_state["all_products"]:
     index_df = hf_utils.get_geojson_index_df()
+    # Get unique time groups
+    unique_time_groups = sorted(
+        set(p["product_time_group"] for p in st.session_state["all_products"])
+    )
+
+    # For each checkbox (which corresponds to a time group)
     for i, checkbox in enumerate(checkboxes):
         if checkbox:
-            …
+            time_group = unique_time_groups[i]
+            # Get all products for this time group
+            products_in_group = [
+                p
+                for p in st.session_state["all_products"]
+                if p["product_time_group"] == time_group
+            ]
+
+            # Create a feature group for this time group
+            flood_featuregroup = folium.FeatureGroup(name=time_group)
+            footprint_featuregroup = folium.FeatureGroup(name="Sentinel footprint")
+            group_has_features = False
+
+            # Add all available products from this group to the feature group
+            for product in products_in_group:
+                if product["product_id"] in index_df["product"].values:
+                    # Get the raw geojsons for further usage in the app
+                    flood_geojson = get_existing_geojson(product["product_id"], "flood")
+                    selected_geojsons.append(flood_geojson)
+                    # Convert geojsons to folium features to display on the map
+                    flood_folium_geojson = folium.GeoJson(
+                        flood_geojson,
+                        style_function=lambda x: {
+                            "fillColor": "#ff0000",
+                            "color": "#ff0000",
+                            "fillOpacity": 0.2,
+                        },
+                    )
+                    flood_featuregroup.add_child(flood_folium_geojson)
+
+                    footprint_geojson = get_existing_geojson(
+                        product["product_id"], "footprint"
+                    )
+                    footprint_folium_geojson = folium.GeoJson(
+                        footprint_geojson,
+                        style_function=lambda x: {
+                            "fillColor": "yellow",
+                            "color": "yellow",
+                            "fillOpacity": 0.2,
+                            "weight": 0,
+                        },
+                    )
+                    footprint_featuregroup.add_child(footprint_folium_geojson)
+                    group_has_features = True
+
+            # Only add the feature group if it contains any features
+            if group_has_features:
+                feature_groups.append(flood_featuregroup)
+                feature_groups.append(footprint_featuregroup)

 # Contains the map
 with col2_1:
     if selected_area_id:
         # display the bounding box
         bounding_box = aois[selected_area_id]["bbox"]
-        geojson_selected_area = folium.GeoJson( …
+        geojson_selected_area = folium.GeoJson(
+            bounding_box,
+            style_function=lambda x: {"fillOpacity": 0.2, "weight": 1},
+        )
         feat_group_selected_area = folium.FeatureGroup(name="selected_area")
         feat_group_selected_area.add_child(geojson_selected_area)
         feature_groups.append(feat_group_selected_area)
@@ -162,11 +289,35 @@ with col2_1:
     folium_map.fit_bounds(feat_group_selected_area.get_bounds())

     m = st_folium(
-        folium_map,
-        width=800,
-        height=450,
-        feature_group_to_add=feature_groups,
+        folium_map, width=800, height=450, feature_group_to_add=feature_groups
     )

+    if flood_featuregroup:
+        flood_part_of_legend = """
+        <div style="display: flex; align-items: center;">
+            <div style="width: 20px; height: 20px; background: rgba(255, 0, 0, .2); border: 1px solid red;"></div>
+            <div style="margin-left: 5px;">Floods</div>
+        </div>
+        <div style="display: flex; align-items: center;">
+            <div style="width: 20px; height: 20px; background: rgba(255, 255, 0, .2); border: 1px solid yellow;"></div>
+            <div style="margin-left: 5px;">Sentinel Footprint</div>
+        </div>
+        """
+    else:
+        flood_part_of_legend = ""
+    st.markdown(
+        f"""
+        <div style="display: flex; align-items: center; gap: 20px;">
+            <div style="display: flex; align-items: center;">
+                <div style="width: 20px; height: 20px; background: rgba(51, 136, 255, .2); border: 1px solid #3388ff;"></div>
+                <div style="margin-left: 5px;">AOI</div>
+            </div>
+            {flood_part_of_legend}
+        </div>
+        """,
+        unsafe_allow_html=True,
+    )
+
+
 # Keep track of which page we're currently on for page switch events
 st.session_state["prev_page"] = "flood_extent"
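Note: the selection logic depends on the data editor rows and the later map-building loop both iterating the sorted unique "product_time_group" values, so checkboxes[i] refers to sorted(unique_time_groups)[i]. A small self-contained sketch of that alignment, with made-up product records instead of real GFM output:

# Made-up product records; the real ones come from gfm.get_area_products()
products = [
    {"product_id": "p1", "product_time_group": "2024-05-01T05:00:00"},
    {"product_id": "p2", "product_time_group": "2024-05-01T05:00:00"},
    {"product_id": "p3", "product_time_group": "2024-05-02T05:01:00"},
]

# Same ordering is used when building the table rows and when reading them back
unique_time_groups = sorted({p["product_time_group"] for p in products})

# One boolean per table row, e.g. product_groups_st_df["Check"].tolist()
checkboxes = [True, False]

for i, checked in enumerate(checkboxes):
    if not checked:
        continue
    time_group = unique_time_groups[i]
    selected = [p for p in products if p["product_time_group"] == time_group]
    print(time_group, [p["product_id"] for p in selected])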
app/src/gfm.py
CHANGED

@@ -1,7 +1,7 @@
 import io
 import os
 import zipfile
-from …
+from datetime import datetime, timedelta

 import pandas as pd
 import requests
@@ -32,145 +32,182 @@ def get_gfm_user_and_token():
     return user_id, access_token


-def …
-…
-    user_id, access_token = get_gfm_user_and_token()
-    header = {"Authorization": f"bearer {access_token}"}
-…
-    base_file_path = "./output"
-
-    product_id = product["product_id"]
-    product_time = product["product_time"]
-
-    output_file_path = f"{base_file_path}"
-    Path(output_file_path).mkdir(parents=True, exist_ok=True)
-
-    print(f"Downloading product: {product_id}")
-
-    download_url = f"{base_url}/download/product/{product_id}"
-    response = requests.get(download_url, headers=header)
-    download_link = response.json()["download_link"]
-
-    # Download and unzip file
-    r = requests.get(download_link)
-    buffer = io.BytesIO(r.content)
-    hf_api = hf_utils.get_hf_api()
-
-    with zipfile.ZipFile(buffer, "r") as z:
-        namelist = z.namelist()
-        for name in namelist:
-            if "FLOOD" in name and ".geojson" in name:
-                flood_filename = name
-                path_in_repo = f"flood-geojson/{flood_filename}"
-                break
-        with z.open(flood_filename) as f:
-            hf_api.upload_file(
-                path_or_fileobj=f,
-                path_in_repo=path_in_repo,
-                repo_id="rodekruis/flood-mapping",
-                repo_type="dataset",
-            )
-
-    df = pd.DataFrame(
-        {
-            "aoi_id": [area_id],
-            "datetime": [product_time],
-            "product": [product_id],
-            "path_in_repo": [path_in_repo],
-        }
-    )
-…
-    print("Retrieving all AOIs from GFM API")
-    user_id, access_token = get_gfm_user_and_token()
-    header = {"Authorization": f"bearer {access_token}"}
-…
-        }
-        for aoi in aois
-    }
-
-    return aois
-…
-    header = {"Authorization": f"bearer {access_token}"}
-
-    base_url = "https://api.gfm.eodc.eu/v1"
-…
-        "…
-        "description": new_area_name,
-        "user_id": user_id,
-        "geoJSON": {"type": "Polygon", "coordinates": coordinates},
-    }
-…
-# Deletes AOI on GFM using API
-def delete_aoi(aoi_id):
-    user_id, access_token = get_gfm_user_and_token()
-    header = {"Authorization": f"bearer {access_token}"}
-
-    # Create area of impact
-    print(f"Deleting area of impact {aoi_id}")
-    delete_aoi_url = f"{base_url}/aoi/delete/id/{aoi_id}"
-    print(delete_aoi_url)
-…
+class GFMHandler:
+    def __init__(self):
+        self.base_url = "https://api.gfm.eodc.eu/v1"
+        self.user_id, self.access_token = self._get_gfm_user_and_token()
+        self.header = {"Authorization": f"bearer {self.access_token}"}
+
+    def _get_gfm_user_and_token(self):
+        username = os.environ["gfm_username"]
+        password = os.environ["gfm_password"]
+
+        # Get token, setup header
+        token_url = f"{self.base_url}/auth/login"
+        payload = {"email": username, "password": password}
+
+        response = requests.post(token_url, json=payload)
+        user_id = response.json()["client_id"]
+        access_token = response.json()["access_token"]
+        print("retrieved user id and access token")
+
+        return user_id, access_token
+
+    def _refresh_token(self):
+        """Refresh the access token and update the authorization header"""
+        self.user_id, self.access_token = self._get_gfm_user_and_token()
+        self.header = {"Authorization": f"bearer {self.access_token}"}
+        print("Refreshed access token")
+
+    def _make_request(self, method, url, **kwargs):
+        """Make an API request with automatic token refresh on authentication failure"""
+        try:
+            response = requests.request(method, url, headers=self.header, **kwargs)
+            if response.status_code == 401:  # Unauthorized
+                print("Token expired, refreshing...")
+                self._refresh_token()
+                # Retry the request with new token
+                response = requests.request(method, url, headers=self.header, **kwargs)
+            response.raise_for_status()
+            return response
+        except requests.exceptions.RequestException as e:
+            print(f"Request failed: {e}")
+            raise
+
+    def get_area_products(self, area_id, from_date, to_date):
+        params = {
+            "time": "range",
+            "from": f"{from_date}T00:00:00",
+            "to": f"{to_date}T00:00:00",
+        }
+        prod_url = f"{self.base_url}/aoi/{area_id}/products"
+        response = self._make_request("GET", prod_url, params=params)
+        products = response.json()["products"]
+        print(f"Found {len(products)} products for {area_id}")
+
+        # Sort products by timestamp
+        products.sort(key=lambda x: x["product_time"])
+
+        # Group products that are within 1 minute of each other
+        if products:
+            current_group_time = products[0]["product_time"]
+            products[0]["product_time_group"] = current_group_time
+
+            for i in range(1, len(products)):
+                product_time = datetime.fromisoformat(
+                    products[i]["product_time"].replace("Z", "+00:00")
+                )
+                current_time = datetime.fromisoformat(
+                    current_group_time.replace("Z", "+00:00")
+                )
+                time_diff = product_time - current_time
+
+                # If more than 1 minute apart, start a new group
+                if time_diff > timedelta(minutes=1):
+                    current_group_time = products[i]["product_time"]
+
+                products[i]["product_time_group"] = current_group_time
+
+        return products
+
+    def download_flood_product(self, area_id, product):
+        product_id = product["product_id"]
+        product_time = product["product_time"]
+
+        print(f"Downloading product: {product_id}")
+
+        download_url = f"{self.base_url}/download/product/{product_id}"
+        response = self._make_request("GET", download_url)
+        download_link = response.json()["download_link"]
+
+        # Download and unzip file
+        r = requests.get(download_link)
+        buffer = io.BytesIO(r.content)
+        hf_api = hf_utils.get_hf_api()
+
+        data = {
+            "aoi_id": area_id,
+            "datetime": product_time,
+            "product": product_id,
+        }
+
+        with zipfile.ZipFile(buffer, "r") as z:
+            namelist = z.namelist()
+
+            for file_type in ["flood", "footprint"]:
+                filename = next(
+                    name
+                    for name in namelist
+                    if file_type in name.lower() and name.endswith(".geojson")
+                )
+
+                path_in_repo = f"flood-geojson/{filename}"
+
+                with z.open(filename) as f:
+                    hf_api.upload_file(
+                        path_or_fileobj=f,
+                        path_in_repo=path_in_repo,
+                        repo_id="rodekruis/flood-mapping",
+                        repo_type="dataset",
+                    )
+
+                data[f"{file_type}_geojson_path"] = path_in_repo
+
+        df = pd.DataFrame([data])
+
+        index_df = hf_utils.get_geojson_index_df()
+        index_df = pd.concat([index_df, df], ignore_index=True)
+        hf_utils.update_geojson_index_df(index_df)
+
+        print(f"Product {product_id} downloaded succesfully")
+
+    def retrieve_all_aois(self):
+        print("Retrieving all AOIs from GFM API")
+        aoi_url = f"{self.base_url}/aoi/user/{self.user_id}"
+        response = self._make_request("GET", aoi_url)
+
+        aois = response.json()["aois"]
+
+        aois = {
+            aoi["aoi_id"]: {
+                "name": aoi["aoi_name"],
+                "bbox": aoi["geoJSON"],
+                "name_id_preview": f"{aoi['aoi_name']} - {aoi['aoi_id'][:6]}...",
+            }
+            for aoi in aois
+        }
+
+        return aois
+
+    def create_aoi(self, new_area_name, coordinates):
+        print("Creating new area of impact")
+        create_aoi_url = f"{self.base_url}/aoi/create"
+
+        payload = {
+            "aoi_name": new_area_name,
+            "description": new_area_name,
+            "user_id": self.user_id,
+            "geoJSON": {"type": "Polygon", "coordinates": coordinates},
+        }
+
+        self._make_request("POST", create_aoi_url, json=payload)
+        print("Posted new AOI")
+
+    def delete_aoi(self, aoi_id):
+        print(f"Deleting area of impact {aoi_id}")
+        delete_aoi_url = f"{self.base_url}/aoi/delete/id/{aoi_id}"
+        print(delete_aoi_url)
+
+        self._make_request("DELETE", delete_aoi_url)
+        print("AOI deleted")
+
+
+@st.cache_resource
+def get_cached_gfm_handler():
+    return GFMHandler()
+
+
+@st.cache_resource
+def get_cached_aois():
+    gfm = get_cached_gfm_handler()
+    return gfm.retrieve_all_aois()
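Note: _make_request wraps every API call so that a 401 triggers one token refresh and a single retry before errors are raised. A stripped-down sketch of the same idea, with a generic TokenClient and a caller-supplied get_token callable standing in for the GFM login flow (the class name and URL below are invented for illustration):

import requests


class TokenClient:
    """Generic retry-on-401 wrapper; get_token stands in for the GFM login call."""

    def __init__(self, get_token):
        self._get_token = get_token
        self.header = {"Authorization": f"bearer {get_token()}"}

    def request(self, method, url, **kwargs):
        response = requests.request(method, url, headers=self.header, **kwargs)
        if response.status_code == 401:
            # Token likely expired: refresh once and retry the same call
            self.header = {"Authorization": f"bearer {self._get_token()}"}
            response = requests.request(method, url, headers=self.header, **kwargs)
        response.raise_for_status()
        return response


# Usage sketch with a dummy token supplier and a placeholder URL
client = TokenClient(lambda: "example-token")
# client.request("GET", "https://api.example.org/aoi/user/123")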
app/src/hf_utils.py
CHANGED

@@ -6,7 +6,7 @@ from huggingface_hub import HfApi


 @st.cache_resource
-def get_hf_api():
+def get_hf_api() -> HfApi:
     return HfApi()

app/src/utils.py
CHANGED

@@ -2,8 +2,8 @@

 import json
 import os
+from typing import Literal

-import folium
 import streamlit as st

 from src import hf_utils
@@ -157,15 +157,14 @@ def add_about():
     )


-def get_existing_flood_geojson(product_id):
+def get_existing_geojson(product_id, file_type: Literal["flood", "footprint"]):
     """
     Getting a saved GFM flood geojson in an output folder of GFM files. Merge in one feature group if multiple.
     """
     index_df = hf_utils.get_geojson_index_df()
-    path_in_repo = index_df[index_df["product"] == product_id] …
-    …
-    flood_geojson_group = folium.FeatureGroup(name=product_id)
+    path_in_repo = index_df[index_df["product"] == product_id][
+        f"{file_type}_geojson_path"
+    ].values[0]

     hf_api = hf_utils.get_hf_api()
     subfolder, filename = path_in_repo.split("/")
@@ -178,7 +177,5 @@ def get_existing_flood_geojson(product_id):

     with open(geojson_path, "r") as f:
         geojson_data = json.load(f)
-        flood_layer = folium.GeoJson(geojson_data)
-        flood_geojson_group.add_child(flood_layer)

-    return …
+    return geojson_data
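Note: get_existing_geojson now selects the per-type path column from the index instead of a single path_in_repo column. A hypothetical illustration of that lookup against a toy index dataframe (column names taken from the diff, the rows are invented):

import pandas as pd

# Toy index with the two path columns the commit introduces
index_df = pd.DataFrame(
    {
        "product": ["p1"],
        "flood_geojson_path": ["flood-geojson/p1_FLOOD.geojson"],
        "footprint_geojson_path": ["flood-geojson/p1_FOOTPRINT.geojson"],
    }
)

file_type = "footprint"  # or "flood"
path_in_repo = index_df[index_df["product"] == "p1"][
    f"{file_type}_geojson_path"
].values[0]
print(path_in_repo)  # flood-geojson/p1_FOOTPRINT.geojson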