Update app.py
app.py CHANGED
@@ -131,7 +131,7 @@ def compare_models(
     img = img.convert("RGB")
 
     total_steps = len(models) * 2  # phase 1: load, phase 2: inference
-    progress = gr.Progress(
+    progress = gr.Progress()
 
     # ----- Phase 1: preload weights -----
     detectors: Dict[str, object] = {}
@@ -140,7 +140,7 @@ def compare_models(
             detectors[name] = load_model(name, custom_file)
         except Exception as exc:
             detectors[name] = exc  # store exception for later reporting
-        progress
+        progress(i, total=total_steps, desc=f"Loading {name}")
 
     # ----- Phase 2: run inference -----
     results: List[Image.Image] = []
@@ -154,7 +154,7 @@ def compare_models(
             results.append(Image.new("RGB", img.size, (40, 40, 40)))
             emsg = str(detector_or_err)
             legends[name] = "Unavailable (weights not found)" if "No such file" in emsg or "not found" in emsg else f"ERROR: {emsg.splitlines()[0][:120]}"
-            progress
+            progress(step_index, total=total_steps, desc=f"Skipped {name}")
             continue
         try:
             annotated, latency = run_single_inference(detector_or_err, img, threshold)
@@ -163,7 +163,7 @@ def compare_models(
         except Exception as exc:
             results.append(Image.new("RGB", img.size, (40, 40, 40)))
             legends[name] = f"ERROR: {str(exc).splitlines()[0][:120]}"
-            progress
+            progress(step_index, total=total_steps, desc=f"Inference {name}")
 
     yield results, legends  # final output
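For context, the added calls follow Gradio's progress-tracking pattern: a gr.Progress instance is callable, and progress(i, total=n, desc=...) advances the bar as a long-running handler works through its steps. Below is a minimal, hypothetical sketch of that pattern only; slow_task and its parameters are illustrative stand-ins, not the Space's compare_models, load_model, or run_single_inference.

# Minimal sketch of Gradio progress reporting (assumed names, not the Space's code).
import time
import gradio as gr

def slow_task(n_items: float, progress=gr.Progress()):
    # Gradio binds a live Progress tracker when it appears as a default argument.
    total = int(n_items)
    for i in range(total):
        progress(i, total=total, desc=f"Processing item {i + 1}")
        time.sleep(0.2)  # stand-in for model loading / inference work
    return f"Finished {total} steps"

demo = gr.Interface(fn=slow_task, inputs=gr.Number(value=5), outputs="text")

if __name__ == "__main__":
    demo.launch()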