Update app.py
Browse files
app.py
CHANGED
@@ -144,7 +144,7 @@ def get_model(weights: str) -> YOLO | None:
|
|
144 |
# QUALITY-EVALUATION (UNCHANGED from v3)
|
145 |
# ---------------------------------------------------------------------------
|
146 |
# --ββ <Functions qc_integrity / qc_class_balance / qc_image_quality ...>
|
147 |
-
# **(unchanged — omitted
|
148 |
# ---------------------------------------------------------------------------
|
149 |
|
150 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
@@ -152,7 +152,7 @@ def get_model(weights: str) -> YOLO | None:
|
|
152 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
153 |
|
154 |
# -------------------- Roboflow helpers --------------------
|
155 |
-
RF_RE = re.compile(r"https?://universe\.roboflow\.com/([^/]+)/([^/]+)
|
156 |
|
157 |
def parse_roboflow_url(url: str) -> tuple[str, str, int | None]:
|
158 |
"""
|
@@ -432,14 +432,13 @@ with gr.Blocks(css="#classdf td{min-width:120px}") as demo:
|
|
432 |
zips_in = gr.Files(label="One or more dataset ZIPs")
|
433 |
load_btn = gr.Button("Load datasets")
|
434 |
load_log = gr.Markdown()
|
435 |
-
ds_state = gr.State([])
|
436 |
|
437 |
def _load_cb(rf_key, rf_urls_file, zip_files):
|
438 |
global autoinc
|
439 |
info_list = []
|
440 |
log_lines = []
|
441 |
|
442 |
-
# Roboflow URLs via txt
|
443 |
if rf_urls_file is not None:
|
444 |
for url in Path(rf_urls_file.name).read_text().splitlines():
|
445 |
if not url.strip():
|
@@ -451,7 +450,6 @@ with gr.Blocks(css="#classdf td{min-width:120px}") as demo:
|
|
451 |
except Exception as e:
|
452 |
log_lines.append(f"⚠️ RF load failed for {url!r}: {e}")
|
453 |
|
454 |
-
# ZIPs
|
455 |
for f in zip_files or []:
|
456 |
autoinc += 1
|
457 |
tmp = TMP_ROOT / f"zip_{autoinc}"
|
@@ -469,7 +467,6 @@ with gr.Blocks(css="#classdf td{min-width:120px}") as demo:
|
|
469 |
|
470 |
load_btn.click(_load_cb, [rf_key, rf_urls, zips_in], [ds_state, load_log])
|
471 |
|
472 |
-
# ------------- Class map editable table --------------------------
|
473 |
gr.Markdown("### 2️⃣ Edit class mapping / limits / removal")
|
474 |
class_df = gr.Dataframe(
|
475 |
headers=["original_class", "new_name", "max_images", "remove"],
|
@@ -483,17 +480,15 @@ with gr.Blocks(css="#classdf td{min-width:120px}") as demo:
|
|
483 |
for _dloc, names, _spl, _ in ds_info:
|
484 |
class_names_all.extend(names)
|
485 |
class_names_all = sorted(set(class_names_all))
|
486 |
-
|
487 |
"original_class": class_names_all,
|
488 |
"new_name": class_names_all,
|
489 |
"max_images": [99999] * len(class_names_all),
|
490 |
"remove": [False] * len(class_names_all),
|
491 |
})
|
492 |
-
return df
|
493 |
|
494 |
refresh_btn.click(_build_class_df, [ds_state], [class_df])
|
495 |
|
496 |
-
# ------------- Merge button & download ---------------------------
|
497 |
merge_btn = gr.Button("Merge datasets ✨")
|
498 |
zip_out = gr.File(label="Download merged ZIP")
|
499 |
merge_log = gr.Markdown()
|
|
|
144 |
# QUALITY-EVALUATION (UNCHANGED from v3)
|
145 |
# ---------------------------------------------------------------------------
|
146 |
# --ββ <Functions qc_integrity / qc_class_balance / qc_image_quality ...>
|
147 |
+
# **(unchanged — omitted for brevity; same as your previous v3 script)**
|
148 |
# ---------------------------------------------------------------------------
|
149 |
|
150 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
|
|
152 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
153 |
|
154 |
# -------------------- Roboflow helpers --------------------
|
155 |
+
RF_RE = re.compile(r"https?://universe\.roboflow\.com/([^/]+)/([^/]+)/(.*)")
|
156 |
|
157 |
def parse_roboflow_url(url: str) -> tuple[str, str, int | None]:
|
158 |
"""
|
|
|
432 |
zips_in = gr.Files(label="One or more dataset ZIPs")
|
433 |
load_btn = gr.Button("Load datasets")
|
434 |
load_log = gr.Markdown()
|
435 |
+
ds_state = gr.State([])
|
436 |
|
437 |
def _load_cb(rf_key, rf_urls_file, zip_files):
|
438 |
global autoinc
|
439 |
info_list = []
|
440 |
log_lines = []
|
441 |
|
|
|
442 |
if rf_urls_file is not None:
|
443 |
for url in Path(rf_urls_file.name).read_text().splitlines():
|
444 |
if not url.strip():
|
|
|
450 |
except Exception as e:
|
451 |
log_lines.append(f"⚠️ RF load failed for {url!r}: {e}")
|
452 |
|
|
|
453 |
for f in zip_files or []:
|
454 |
autoinc += 1
|
455 |
tmp = TMP_ROOT / f"zip_{autoinc}"
|
|
|
467 |
|
468 |
load_btn.click(_load_cb, [rf_key, rf_urls, zips_in], [ds_state, load_log])
|
469 |
|
|
|
470 |
gr.Markdown("### 2️⃣ Edit class mapping / limits / removal")
|
471 |
class_df = gr.Dataframe(
|
472 |
headers=["original_class", "new_name", "max_images", "remove"],
|
|
|
480 |
for _dloc, names, _spl, _ in ds_info:
|
481 |
class_names_all.extend(names)
|
482 |
class_names_all = sorted(set(class_names_all))
|
483 |
+
return pd.DataFrame({
|
484 |
"original_class": class_names_all,
|
485 |
"new_name": class_names_all,
|
486 |
"max_images": [99999] * len(class_names_all),
|
487 |
"remove": [False] * len(class_names_all),
|
488 |
})
|
|
|
489 |
|
490 |
refresh_btn.click(_build_class_df, [ds_state], [class_df])
|
491 |
|
|
|
492 |
merge_btn = gr.Button("Merge datasets ✨")
|
493 |
zip_out = gr.File(label="Download merged ZIP")
|
494 |
merge_log = gr.Markdown()
|