"""
Gradio app to compare object‑detection models:
  • Ultralytics YOLOv12 (n, s, m, l, x)
  • Ultralytics YOLOv11 (n, s, m, l, x)
  • Roboflow RF‑DETR (Base, Large)
  • Custom fine‑tuned checkpoints for either framework (upload .pt/.pth files)

Changes in this revision (2025‑04‑19):
  • Thinner, semi‑transparent bounding boxes for better visibility in crowded scenes.
  • Legend now shows a clean dict of runtimes (or concise errors) instead of auto‑indexed JSON.
  • File uploader is fully integrated for custom checkpoints.
"""

from __future__ import annotations

import time
from pathlib import Path
from typing import List, Tuple, Dict, Optional

import cv2
import numpy as np
from PIL import Image
import gradio as gr
import supervision as sv
from ultralytics import YOLO
from rfdetr import RFDETRBase, RFDETRLarge
from rfdetr.util.coco_classes import COCO_CLASSES

###############################################################################
# Model registry & lazy loader
###############################################################################

YOLO_MODEL_MAP = {
    # Ultralytics hub IDs — downloaded on first use
    # Note: recent Ultralytics releases publish these weights without the "v"
    # (e.g. "yolo12n.pt", "yolo11n.pt"); adjust the filenames if the download fails.
    "YOLOv12‑n": "yolov12n.pt",
    "YOLOv12‑s": "yolov12s.pt",
    "YOLOv12‑m": "yolov12m.pt",
    "YOLOv12‑l": "yolov12l.pt",
    "YOLOv12‑x": "yolov12x.pt",
    "YOLOv11‑n": "yolov11n.pt",
    "YOLOv11‑s": "yolov11s.pt",
    "YOLOv11‑m": "yolov11m.pt",
    "YOLOv11‑l": "yolov11l.pt",
    "YOLOv11‑x": "yolov11x.pt",
}

RFDETR_MODEL_MAP = {
    "RF‑DETR‑Base (29M)": "base",
    "RF‑DETR‑Large (128M)": "large",
}

ALL_MODELS = list(YOLO_MODEL_MAP.keys()) + list(RFDETR_MODEL_MAP.keys()) + [
    "Custom YOLO (.pt/.pth)",
    "Custom RF‑DETR (.pth)",
]

# Cache of constructed detectors, keyed by model choice (plus checkpoint path for custom uploads).
_loaded: Dict[str, object] = {}

def load_model(choice: str, custom_file: Optional[Path] = None):
    """Lazy‑load and cache a detector. Returns a model instance or raises RuntimeError."""
    # Include the checkpoint path in the cache key so a newly uploaded custom
    # checkpoint is not shadowed by a previously cached model with the same label.
    cache_key = f"{choice}:{custom_file}" if custom_file else choice
    if cache_key in _loaded:
        return _loaded[cache_key]

    try:
        if choice in YOLO_MODEL_MAP:
            mdl = YOLO(YOLO_MODEL_MAP[choice])  # hub download if needed
        elif choice in RFDETR_MODEL_MAP:
            mdl = RFDETRBase() if RFDETR_MODEL_MAP[choice] == "base" else RFDETRLarge()
        elif choice.startswith("Custom YOLO"):
            if not custom_file:
                raise ValueError("Upload a YOLO .pt/.pth checkpoint first.")
            mdl = YOLO(str(custom_file))
        elif choice.startswith("Custom RF‑DETR"):
            if not custom_file:
                raise ValueError("Upload an RF‑DETR .pth checkpoint first.")
            mdl = RFDETRBase(pretrain_weights=str(custom_file))
        else:
            raise ValueError(f"Unsupported model choice: {choice}")
    except Exception as e:
        raise RuntimeError(str(e)) from e

    _loaded[cache_key] = mdl
    return mdl

###############################################################################
# Inference helpers — semi‑transparent, thin boxes
###############################################################################

box_annotator = sv.BoxAnnotator(thickness=2)  # thinner lines
label_annotator = sv.LabelAnnotator()

def blend_overlay(base_np: np.ndarray, overlay_np: np.ndarray, alpha: float = 0.6) -> np.ndarray:
    """Blend two BGR images with given alpha for overlay."""
    return cv2.addWeighted(overlay_np, alpha, base_np, 1 - alpha, 0)

def run_single_inference(model, image: Image.Image, threshold: float) -> Tuple[Image.Image, float]:
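    """Run one detector on a PIL image; return the annotated image and runtime in seconds.

    RF‑DETR's ``predict`` already yields ``sv.Detections``; Ultralytics results are
    converted via ``sv.Detections.from_ultralytics`` so both paths share the annotators.
    """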
    start = time.perf_counter()

    if isinstance(model, (RFDETRBase, RFDETRLarge)):
        detections = model.predict(image, threshold=threshold)
        label_source = COCO_CLASSES
    else:
        result = model.predict(image, conf=threshold, verbose=False)[0]
        detections = sv.Detections.from_ultralytics(result)
        label_source = model.names

    runtime = time.perf_counter() - start

    img_np = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)
    overlay = img_np.copy()
    overlay = box_annotator.annotate(overlay, detections)
    labels = [
        f"{label_source[c]} {p:.2f}"
        for c, p in zip(detections.class_id, detections.confidence)
    ]
    overlay = label_annotator.annotate(overlay, detections, labels)
    blended = blend_overlay(img_np, overlay, alpha=0.6)  # semi‑transparent boxes
    annotated_pil = Image.fromarray(cv2.cvtColor(blended, cv2.COLOR_BGR2RGB))

    return annotated_pil, runtime
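
# Illustrative sketch (not wired into the Gradio UI): how load_model() and
# run_single_inference() compose for a one-off run. The image path argument and
# the output filename below are placeholders, not part of the app itself.
def _example_single_run(image_path: str) -> None:
    image = Image.open(image_path).convert("RGB")  # mirror the RGB conversion done in compare_models
    model = load_model(ALL_MODELS[0])              # first registry entry (a YOLO model)
    annotated, runtime = run_single_inference(model, image, threshold=0.5)
    annotated.save("annotated_example.jpg")        # placeholder output filename
    print(f"Inference took {runtime * 1000:.1f} ms")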

###############################################################################
# Gradio callback
###############################################################################

def compare_models(models: List[str], img: Image.Image, threshold: float, custom_file: Optional[Path]):
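    """Gradio callback: run every selected model on ``img``.

    Returns a list of annotated images for the gallery and a dict mapping each
    model name to its latency (or a concise error message) for the JSON legend.
    """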
    if img is None:
        raise gr.Error("Please upload an image first.")
    if img.mode != "RGB":
        img = img.convert("RGB")

    results: List[Image.Image] = []
    legends: Dict[str, str] = {}

    for m in models:
        try:
            model_obj = load_model(m, custom_file)
            annotated, t = run_single_inference(model_obj, img, threshold)
            results.append(annotated)
            legends[m] = f"{t*1000:.1f} ms"
        except Exception as e:
            # show blank slate if model unavailable
            results.append(Image.new("RGB", img.size, (40, 40, 40)))
            err_msg = str(e)
            # Normalize common weight‑missing errors for clarity
            if "No such file or directory" in err_msg:
                legends[m] = "Unavailable (weights not found)"
            else:
                legends[m] = f"ERROR: {err_msg.splitlines()[0][:120]}"

    return results, legends

###############################################################################
# Build & launch Gradio UI
###############################################################################

def build_demo():
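    """Assemble the Blocks UI: model picker, confidence slider, checkpoint upload, gallery and legend."""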
    with gr.Blocks(title="CV Model Comparison") as demo:
        gr.Markdown("""# 🔍 Compare Object‑Detection Models\nUpload an image, choose detectors, and optionally add a custom checkpoint.\nBounding boxes are thin and 60 % opaque for clarity.""")

        with gr.Row():
            model_select = gr.CheckboxGroup(choices=ALL_MODELS, value=["YOLOv12‑n"], label="Select models")
            threshold_slider = gr.Slider(0.0, 1.0, 0.5, step=0.05, label="Confidence threshold")

        custom_checkpoint = gr.File(label="Upload custom checkpoint (.pt/.pth)", file_types=[".pt", ".pth"], interactive=True)
        image_in = gr.Image(type="pil", label="Image", sources=["upload", "webcam"])

        with gr.Row():
            gallery = gr.Gallery(label="Annotated results", columns=2, height="auto")
        legends_out = gr.JSON(label="Latency / status by model")

        run_btn = gr.Button("Run Inference", variant="primary")
        run_btn.click(compare_models, [model_select, image_in, threshold_slider, custom_checkpoint], [gallery, legends_out])

    return demo

if __name__ == "__main__":
    build_demo().launch()