Spaces: Running on Zero

Commit · 9b35681
Parent(s): 6b70bd5

keypoint examples
- app.py +136 -86
- bad_hands/1.npy +3 -0
- bad_hands/1_kpts.png +3 -0
- bad_hands/3.npy +3 -0
- bad_hands/3_kpts.png +3 -0
- bad_hands/4.npy +3 -0
- bad_hands/4_kpts.png +3 -0
- bad_hands/5.npy +3 -0
- bad_hands/5_kpts.png +3 -0
- bad_hands/6.npy +3 -0
- bad_hands/6_kpts.png +3 -0
- bad_hands/7.npy +3 -0
- bad_hands/7_kpts.png +3 -0
- debug_keypts.npy +3 -0
- prepare_examples.py +99 -0
- tmp.py +0 -18
app.py CHANGED
@@ -546,6 +546,12 @@ def reset_kps(img, keypoints, side: Literal["right", "left"]):
         keypoints[1] = []
     return img, keypoints
 
+def read_kpts(kpts_path):
+    if kpts_path is None or len(kpts_path) == 0:
+        return None
+    kpts = np.load(kpts_path)
+    return kpts
+
 def stay_crop(img, crop_coord):
     if img is not None:
         if crop_coord is None:
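The new read_kpts helper backs the keypoint example picker added further down: each example row carries the path of a .npy keypoint file, and this loader turns that path into the array consumed by ready_sample. A minimal sketch of writing a compatible file, assuming (as prepare_examples.py below suggests) that keypoints are stored as (x, y) pairs in a 256x256 reference frame, 21 rows for one hand or 42 for both:

    import numpy as np

    # Hypothetical file for illustration only; the real examples live in bad_hands/.
    # Assumed layout: rows 0-20 are the right hand, rows 21-41 the left hand.
    kpts = np.zeros((42, 2), dtype=np.float64)
    kpts[0] = [128.0, 200.0]  # e.g. the right-hand wrist
    np.save("bad_hands/example.npy", kpts)

    loaded = np.load("bad_hands/example.npy")
    assert loaded.shape in {(21, 2), (42, 2)}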
@@ -651,36 +657,35 @@ def sample_diff(ref_cond, target_cond, target_keypts, num_gen, seed, cfg):
     return results, results_pose
 
 @spaces_120_fn
-def ready_sample(img_cropped, inpaint_mask, keypts):
-    # img = cv2.resize(img_ori[..., :3], opts.image_size, interpolation=cv2.INTER_AREA)
+def ready_sample(img_cropped, inpaint_mask, keypts, keypts_np):
     img = cv2.resize(img_cropped["background"][..., :3], opts.image_size, interpolation=cv2.INTER_AREA)
     sam_predictor.set_image(img)
-    if len(keypts[0]) == 0:
-        keypts[0] = np.zeros((21, 2))
-    elif len(keypts[0]) == 21:
-        keypts[0] = np.array(keypts[0], dtype=np.float32)
-        # keypts[0][:, 0] = keypts[0][:, 0] + crop_coord[0][0]
-        # keypts[0][:, 1] = keypts[0][:, 1] + crop_coord[0][1]
-    else:
-        gr.Info("Number of right hand keypoints should be either 0 or 21.")
-        return None, None
-
-    if len(keypts[1]) == 0:
-        keypts[1] = np.zeros((21, 2))
-    elif len(keypts[1]) == 21:
-        keypts[1] = np.array(keypts[1], dtype=np.float32)
-        # keypts[1][:, 0] = keypts[1][:, 0] + crop_coord[0][0]
-        # keypts[1][:, 1] = keypts[1][:, 1] + crop_coord[0][1]
+    if keypts is None and keypts_np is not None:
+        keypts = keypts_np
     else:
-        gr.Info("Number of left hand keypoints should be either 0 or 21.")
-        return None, None
-
-    keypts = np.concatenate(keypts, axis=0)
-
+        if len(keypts[0]) == 0:
+            keypts[0] = np.zeros((21, 2))
+        elif len(keypts[0]) == 21:
+            keypts[0] = np.array(keypts[0], dtype=np.float32)
+            # keypts[0][:, 0] = keypts[0][:, 0] + crop_coord[0][0]
+            # keypts[0][:, 1] = keypts[0][:, 1] + crop_coord[0][1]
+        else:
+            gr.Info("Number of right hand keypoints should be either 0 or 21.")
+            return None, None
+        if len(keypts[1]) == 0:
+            keypts[1] = np.zeros((21, 2))
+        elif len(keypts[1]) == 21:
+            keypts[1] = np.array(keypts[1], dtype=np.float32)
+            # keypts[1][:, 0] = keypts[1][:, 0] + crop_coord[0][0]
+            # keypts[1][:, 1] = keypts[1][:, 1] + crop_coord[0][1]
+        else:
+            gr.Info("Number of left hand keypoints should be either 0 or 21.")
+            return None, None
+        keypts = np.concatenate(keypts, axis=0)
+        keypts = scale_keypoint(keypts, (img_cropped["background"].shape[1], img_cropped["background"].shape[0]), opts.image_size)
 
     box_shift_ratio = 0.5
     box_size_factor = 1.2
-
     if keypts[0].sum() != 0 and keypts[21].sum() != 0:
         input_point = np.array(keypts)
         input_box = np.stack([keypts.min(axis=0), keypts.max(axis=0)])
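ready_sample now accepts the precomputed keypts_np (loaded from an example .npy) and skips validation for it; manually clicked keypoints are still checked to be 0 or 21 per hand, concatenated into a single (42, 2) array, and rescaled to opts.image_size, which is why the later guard reads keypts[0] (right-hand wrist) and keypts[21] (left-hand wrist). scale_keypoint is defined elsewhere in app.py; a minimal equivalent, assuming a plain linear mapping between resolutions, might look like:

    import numpy as np

    def scale_keypoint_sketch(keypoints, src_size, dst_size):
        # Rescale (N, 2) keypoints from src (w, h) to dst (w, h).
        # Sketch only; the real scale_keypoint in app.py may differ.
        keypoints = np.asarray(keypoints, dtype=np.float32).copy()
        keypoints[:, 0] *= dst_size[0] / src_size[0]  # x
        keypoints[:, 1] *= dst_size[1] / src_size[1]  # y
        return keypoints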
@@ -808,7 +813,7 @@ def sample_inpaint(
     cfg,
     quality,
 ):
-    if …
+    if inpaint_latent_mask is None:
         return None, None, None
     set_seed(seed)
     N = num_gen
@@ -830,6 +835,7 @@ def sample_inpaint(
         cfg_scale=cfg_scale,
     )
 
+    gr.Info("The process has started successfully. Please wait around 3.5 minutes.", duration=20)
     samples, _ = diffusion.inpaint_p_sample_loop(
         model.forward_with_cfg,
         z.shape,
@@ -977,8 +983,9 @@ def fix_clear_all():
         None,
         None,
         None,
+        None,
+        None,
         1,
-        # (0,0),
         42,
         3.0,
         10,
@@ -1216,6 +1223,20 @@ fix_example_brush = [
     # ["bad_hands/14_mask.jpg"],
     # ["bad_hands/15_mask.jpg"],
 ]
+fix_example_kpts = [
+    ["bad_hands/1_kpts.png"],
+    ["bad_hands/3_kpts.png"],
+    ["bad_hands/4_kpts.png"],
+    ["bad_hands/5_kpts.png"],
+    ["bad_hands/6_kpts.png"],
+    ["bad_hands/7_kpts.png"],
+]
+for i in range(len(fix_example_kpts)):
+    npy_path = fix_example_kpts[i][0].replace("_kpts.png", ".npy")
+    # kpts = np.load(npy_path)
+    # kpts = gr.State(kpts)
+    fix_example_kpts[i].append(npy_path)
+
 custom_css = """
 .gradio-container .examples img {
     width: 240px !important;
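Each row of fix_example_kpts thus becomes ["bad_hands/N_kpts.png", "bad_hands/N.npy"]: the rendered PNG is the visible thumbnail in the examples table, while the .npy path is appended as a second input that the UI wiring below routes into read_kpts.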
@@ -1231,6 +1252,10 @@ custom_css = """
     font-weight: bold !important;
     background-color: #90EE90 !important;
 }
+#kpts_examples table tr th:nth-child(2),
+#kpts_examples table tr td:nth-child(2) {
+    display: none !important;
+}
 """
 # color: black !important;
 
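The two nth-child(2) rules hide the second column of the examples table (matched via elem_id="kpts_examples" below), so the appended .npy path stays functional but invisible to the user.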
@@ -1290,6 +1315,7 @@ with gr.Blocks(css=custom_css, theme="soft") as demo:
     fix_crop_coord = gr.State(value=None)
     fix_img = gr.State(value=None)
     fix_kpts = gr.State(value=None)
+    fix_kpts_path = gr.Textbox(visible=False)
     fix_kpts_np = gr.State(value=None)
     fix_ref_cond = gr.State(value=None)
     fix_target_cond = gr.State(value=None)
@@ -1390,73 +1416,93 @@ with gr.Blocks(css=custom_css, theme="soft") as demo:
             # keypoint selection
             with gr.Column():
                 gr.Markdown(
-                    """<p style="text-align: center; font-size: 18px; font-weight: bold;">3. …</p>"""
+                    """<p style="text-align: center; font-size: 18px; font-weight: bold;">3. Target hand pose</p>"""
                 )
-                gr.Markdown(
-                    """<p style="text-align: center;">① Tell us if this is right, left, or both hands</p>"""
-                )
-                fix_checkbox = gr.CheckboxGroup(
-                    ["Right hand", "Left hand"],
-                    show_label=False,
-                    interactive=False,
-                )
-                fix_kp_r_info = gr.Markdown(
-                    """<p style="text-align: center;">② Click 21 keypoints on the image to provide the target hand pose of <b>right hand</b>. See the \"OpenPose keypoints convention\" for guidance.</p>""",
-                    visible=False
-                )
-                # fix_kp_r_info = gr.Markdown(
-                #     """<p style="text-align: center; font-size: 20px; font-weight: bold; ">Select right only</p>""",
-                #     visible=False,
-                # )
-                fix_kp_right = gr.Image(
-                    type="numpy",
-                    label="Keypoint Selection (right hand)",
-                    show_label=True,
-                    height=LENGTH,
-                    width=LENGTH,
-                    interactive=False,
-                    visible=False,
-                    sources=[],
-                )
-                with gr.Row():
-                    fix_undo_right = gr.Button(
-                        value="Undo", interactive=False, visible=False
-                    )
-                    fix_reset_right = gr.Button(
-                        value="Reset", interactive=False, visible=False
-                    )
-                fix_kp_l_info = gr.Markdown(
-                    """<p style="text-align: center;">② Click 21 keypoints on the image to provide the target hand pose of <b>left hand</b>. See the \"OpenPose keypoints convention\" for guidance.</p>""",
-                    visible=False
-                )
-                fix_kp_left = gr.Image(
-                    type="numpy",
-                    label="Keypoint Selection (left hand)",
-                    show_label=True,
-                    height=LENGTH,
-                    width=LENGTH,
-                    interactive=False,
-                    visible=False,
-                    sources=[],
-                )
-                with gr.Row():
-                    fix_undo_left = gr.Button(
-                        value="Undo", interactive=False, visible=False
-                    )
-                    fix_reset_left = gr.Button(
-                        value="Reset", interactive=False, visible=False
-                    )
-                gr.Markdown(
-                    """<p style="text-align: left; font-weight: bold; ">OpenPose keypoints convention</p>"""
-                )
-                fix_openpose = gr.Image(
-                    value="openpose.png",
-                    type="numpy",
-                    show_label=False,
-                    height=LENGTH // 2,
-                    width=LENGTH // 2,
-                    interactive=False,
-                )
+                fix_kp_all = gr.Image(
+                    type="numpy",
+                    # label="Keypoints",
+                    show_label=False,
+                    height=LENGTH,
+                    width=LENGTH,
+                    interactive=False,
+                    visible=True,
+                    sources=(),
+                    image_mode="RGBA"
+                )
+                with gr.Accordion(open=True):
+                    fix_ex_kpts = gr.Examples(
+                        fix_example_kpts,
+                        inputs=[fix_kp_all, fix_kpts_path],
+                        examples_per_page=20,
+                        postprocess=False,
+                        elem_id="kpts_examples"
+                    )
+                with gr.Accordion("[Custom data] Manually give hand pose", open=False):
+                    gr.Markdown(
+                        """<p style="text-align: center;">① Tell us if this is right, left, or both hands</p>"""
+                    )
+                    fix_checkbox = gr.CheckboxGroup(
+                        ["Right hand", "Left hand"],
+                        show_label=False,
+                        interactive=False,
+                    )
+                    fix_kp_r_info = gr.Markdown(
+                        """<p style="text-align: center;">② Click 21 keypoints on the image to provide the target hand pose of <b>right hand</b>. See the \"OpenPose keypoints convention\" for guidance.</p>""",
+                        visible=False
+                    )
+                    # fix_kp_r_info = gr.Markdown(
+                    #     """<p style="text-align: center; font-size: 20px; font-weight: bold; ">Select right only</p>""",
+                    #     visible=False,
+                    # )
+                    fix_kp_right = gr.Image(
+                        type="numpy",
+                        label="Keypoint Selection (right hand)",
+                        show_label=True,
+                        height=LENGTH,
+                        width=LENGTH,
+                        interactive=False,
+                        visible=False,
+                        sources=[],
+                    )
+                    with gr.Row():
+                        fix_undo_right = gr.Button(
+                            value="Undo", interactive=False, visible=False
+                        )
+                        fix_reset_right = gr.Button(
+                            value="Reset", interactive=False, visible=False
+                        )
+                    fix_kp_l_info = gr.Markdown(
+                        """<p style="text-align: center;">② Click 21 keypoints on the image to provide the target hand pose of <b>left hand</b>. See the \"OpenPose keypoints convention\" for guidance.</p>""",
+                        visible=False
+                    )
+                    fix_kp_left = gr.Image(
+                        type="numpy",
+                        label="Keypoint Selection (left hand)",
+                        show_label=True,
+                        height=LENGTH,
+                        width=LENGTH,
+                        interactive=False,
+                        visible=False,
+                        sources=[],
+                    )
+                    with gr.Row():
+                        fix_undo_left = gr.Button(
+                            value="Undo", interactive=False, visible=False
+                        )
+                        fix_reset_left = gr.Button(
+                            value="Reset", interactive=False, visible=False
+                        )
+                    gr.Markdown(
+                        """<p style="text-align: left; font-weight: bold; ">OpenPose keypoints convention</p>"""
+                    )
+                    fix_openpose = gr.Image(
+                        value="openpose.png",
+                        type="numpy",
+                        show_label=False,
+                        height=LENGTH // 2,
+                        width=LENGTH // 2,
+                        interactive=False,
+                    )
 
             # get latent
             # with gr.Column():
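The manual right/left pickers move into a collapsed "[Custom data]" Accordion, while the new always-visible fix_kp_all image (RGBA, presumably so the pose preview keeps its transparency, with no upload sources) displays the chosen example pose. The core pattern here, an Examples table that fills both a visible image and a hidden path, can be sketched in isolation as follows (component names are hypothetical):

    import gradio as gr
    import numpy as np

    def load_kpts(path):
        # Same role as read_kpts above: hidden textbox -> keypoint array.
        return None if not path else np.load(path)

    with gr.Blocks() as sketch:
        preview = gr.Image(type="numpy", interactive=False, image_mode="RGBA")
        kpts_path = gr.Textbox(visible=False)  # second example column, hidden by CSS
        kpts_state = gr.State(value=None)
        gr.Examples(
            [["bad_hands/1_kpts.png", "bad_hands/1.npy"]],
            inputs=[preview, kpts_path],
            postprocess=False,
        )
        kpts_path.change(load_kpts, kpts_path, kpts_state)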
@@ -1650,9 +1696,10 @@ with gr.Blocks(css=custom_css, theme="soft") as demo:
     # fix_vis_mask32.change(
     #     enable_component, [fix_vis_mask32, fix_vis_mask256], fix_run
     # )
+    fix_kpts_path.change(read_kpts, fix_kpts_path, fix_kpts_np)
     fix_run.click(
         ready_sample,
-        [fix_ref, fix_inpaint_mask, fix_kpts],
+        [fix_ref, fix_inpaint_mask, fix_kpts, fix_kpts_np],
         [
             fix_ref_cond,
             fix_target_cond,
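Selecting an example therefore fires fix_kpts_path.change, which loads the array into fix_kpts_np; ready_sample then falls back to fix_kpts_np whenever no keypoints were clicked manually (keypts is None).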
@@ -1663,7 +1710,7 @@ with gr.Blocks(css=custom_css, theme="soft") as demo:
             fix_vis_mask256,
         ],
     )
-    …
+    fix_inpaint_latent.change(
         sample_inpaint,
         [
             fix_ref_cond,
@@ -1688,6 +1735,7 @@ with gr.Blocks(css=custom_css, theme="soft") as demo:
             fix_crop_coord,
             fix_ref,
             fix_checkbox,
+            fix_kp_all,
             fix_kp_right,
             fix_kp_left,
             fix_result,
@@ -1704,6 +1752,7 @@ with gr.Blocks(css=custom_css, theme="soft") as demo:
             fix_target_cond,
             fix_latent,
             fix_inpaint_latent,
+            fix_kpts_path,
             fix_n_generation,
             fix_seed,
             fix_cfg,
@@ -2272,7 +2321,8 @@ with gr.Blocks(css=custom_css, theme="soft") as demo:
     gr.Markdown("<h1>Acknowledgement</h1>")
     gr.Markdown(_ACK_)
     gr.Markdown("<h1>Troubleshooting</h1>")
-    gr.Markdown("If …")
+    gr.Markdown("If an error persists, please try the following steps:<br>1. Refresh the page and try again.<br>2. The issue might be due to compatibility with HuggingFace or GPU memory limitations. We recommend cloning this repository and trying it with your own GPU if possible.<br>3. Kindly leave a message on our HuggingFace Spaces Community tab (located at the top right), on our GitHub repository's Issues page, or send us an email. We are happy to help you as soon as possible.")
+    gr.Markdown("If the result is not satisfactory:<br>1. Try changing the Classifier-Free Guidance scale, found under \"More Options\".")
     gr.Markdown("<h1>Citation</h1>")
     gr.Markdown(
         """<p style="text-align: left;">If this was useful, please cite us! ❤️</p>"""
bad_hands/1.npy ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65ec39b95455dcb42771a0fa21d087d9065d3cddd24b01d3200dbfb05a4108a9
+size 464

bad_hands/1_kpts.png ADDED
(binary image stored with Git LFS; preview omitted)

bad_hands/3.npy ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:876f8cc25adbd22e2a89f8bdeaa39bb519ff68b98f0831906dcdc5762e4fa0a2
+size 464

bad_hands/3_kpts.png ADDED
(binary image stored with Git LFS; preview omitted)

bad_hands/4.npy ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:57abffa5190455ebc915b16641e69fb491091ae626316a6d00bd3cb156696605
+size 800

bad_hands/4_kpts.png ADDED
(binary image stored with Git LFS; preview omitted)

bad_hands/5.npy ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e9ac0ad6f59679be44091daac9ee4e50e331f005d62845ba78788685de4b4d28
+size 800

bad_hands/5_kpts.png ADDED
(binary image stored with Git LFS; preview omitted)

bad_hands/6.npy ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:950449c557f58ca5fa65ada7fcd0652a4f4781774c4ddd9e53aec1f9acaa1ca2
+size 800

bad_hands/6_kpts.png ADDED
(binary image stored with Git LFS; preview omitted)

bad_hands/7.npy ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1ae9e3f73fdc8908804e383a28817803c3e9bde63355e1327d2b4f1f24083d8b
+size 464

bad_hands/7_kpts.png ADDED
(binary image stored with Git LFS; preview omitted)

debug_keypts.npy ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14b1e5273aded0649d0097c8032dff15900a071a6c0d82d9a775e74565c29cc9
+size 800
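The pointer sizes are consistent with plain float64 .npy keypoint arrays: assuming NumPy's standard 128-byte header, 464 bytes = 128 + 21 × 2 × 8 (one hand) and 800 bytes = 128 + 42 × 2 × 8 (both hands).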
prepare_examples.py ADDED
@@ -0,0 +1,99 @@
+import glob
+import os
+from PIL import Image
+import numpy as np
+from matplotlib import pyplot as plt
+from io import BytesIO
+
+def visualize_hand(all_joints, img, side=["right", "left"], n_avail_joints=21):
+    # Define the connections between joints for drawing lines and their corresponding colors
+    connections = [
+        ((0, 1), "red"),
+        ((1, 2), "green"),
+        ((2, 3), "blue"),
+        ((3, 4), "purple"),
+        ((0, 5), "orange"),
+        ((5, 6), "pink"),
+        ((6, 7), "brown"),
+        ((7, 8), "cyan"),
+        ((0, 9), "yellow"),
+        ((9, 10), "magenta"),
+        ((10, 11), "lime"),
+        ((11, 12), "indigo"),
+        ((0, 13), "olive"),
+        ((13, 14), "teal"),
+        ((14, 15), "navy"),
+        ((15, 16), "gray"),
+        ((0, 17), "lavender"),
+        ((17, 18), "silver"),
+        ((18, 19), "maroon"),
+        ((19, 20), "fuchsia"),
+    ]
+    H, W, C = img.shape
+
+    # Create a figure and axis
+    plt.figure()
+    ax = plt.gca()
+    # Plot joints as points
+    ax.imshow(img)
+    start_is = []
+    if "right" in side:
+        start_is.append(0)
+    if "left" in side:
+        start_is.append(21)
+    for start_i in start_is:
+        joints = all_joints[start_i : start_i + n_avail_joints]
+        if len(joints) == 1:
+            ax.scatter(joints[0][0], joints[0][1], color="red", s=10)
+        else:
+            for connection, color in connections[: len(joints) - 1]:
+                joint1 = joints[connection[0]]
+                joint2 = joints[connection[1]]
+                ax.plot([joint1[0], joint2[0]], [joint1[1], joint2[1]], color=color, linewidth=4)
+
+    ax.set_xlim([0, W])
+    ax.set_ylim([0, H])
+    ax.grid(False)
+    ax.set_axis_off()
+    ax.invert_yaxis()
+    # plt.subplots_adjust(wspace=0.01)
+    # plt.show()
+    buf = BytesIO()
+    plt.savefig(buf, format="png", bbox_inches="tight", pad_inches=0)
+    plt.close()
+
+    # Convert BytesIO object to numpy array
+    buf.seek(0)
+    img_pil = Image.open(buf)
+    img_pil = img_pil.resize((W, H))
+    numpy_img = np.array(img_pil)
+
+    return numpy_img
+
+'''put brush example at alpha channel'''
+# img_dir = "bad_hands"
+# masked_paths = sorted(glob.glob(os.path.join(img_dir, "*_mask.jpg")))
+# for masked_pth in masked_paths:
+#     img_path = masked_pth.replace("_mask.jpg", ".jpg")
+#     assert os.path.exists(img_path), f"Image path {img_path} does not exist."
+#     masked = np.array(Image.open(masked_pth))
+#     mask = (np.all(masked > 245, axis=-1)).astype(np.uint8) * 128 + 64
+#     img = np.array(Image.open(img_path))
+#     composite = np.concatenate((img, mask[..., None]), axis=-1)
+#     composite = Image.fromarray(composite)
+#     composite.save(masked_pth.replace("_mask.jpg", "_composite.png"))
+#     print(f"Saved composite image {masked_pth.replace('_mask.jpg', '_composite.png')}")
+
+'''visualize keypoint example'''
+data_dir = "bad_hands"
+kpts_paths = sorted(glob.glob(os.path.join(data_dir, "*.npy")))
+for kpts_pth in kpts_paths:
+    img_pth = kpts_pth.replace(".npy", ".jpg")
+    kpts = np.load(kpts_pth)
+    img = np.array(Image.open(img_pth))
+    h, w = img.shape[:2]
+    kpts = kpts / np.array([256, 256]) * np.array([w, h])
+    kpts_vis = visualize_hand(kpts, img)
+    save_path = kpts_pth.replace(".npy", "_kpts.png")
+    Image.fromarray(kpts_vis).save(save_path)
+    print(f"Saved {save_path}")
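prepare_examples.py regenerates the *_kpts.png previews: run it from the repository root with matching N.jpg / N.npy pairs in bad_hands/, and it writes N_kpts.png next to each array. A quick, hypothetical smoke test for the visualizer (note that importing the module also runs its glob loop over bad_hands/, which is a no-op only when the directory holds no .npy files):

    import numpy as np
    from prepare_examples import visualize_hand

    img = np.zeros((256, 256, 3), dtype=np.uint8)  # blank canvas
    kpts = np.zeros((42, 2))                       # 21 right + 21 left joints
    vis = visualize_hand(kpts, img, side=["right"])
    print(vis.shape)  # e.g. (256, 256, 4): RGBA after the matplotlib round-trip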
tmp.py DELETED
@@ -1,18 +0,0 @@
-import glob
-import os
-from PIL import Image
-import numpy as np
-
-img_dir = "bad_hands"
-masked_paths = sorted(glob.glob(os.path.join(img_dir, "*_mask.jpg")))
-for masked_pth in masked_paths:
-    img_path = masked_pth.replace("_mask.jpg", ".jpg")
-    assert os.path.exists(img_path), f"Image path {img_path} does not exist."
-    masked = np.array(Image.open(masked_pth))
-    mask = (np.all(masked > 245, axis=-1)).astype(np.uint8) * 128 + 64
-    img = np.array(Image.open(img_path))
-    composite = np.concatenate((img, mask[..., None]), axis=-1)
-    # img.putalpha(Image.fromarray(mask))
-    composite = Image.fromarray(composite)
-    composite.save(masked_pth.replace("_mask.jpg", "_composite.png"))
-    print(f"Saved composite image {masked_pth.replace('_mask.jpg', '_composite.png')}")