import os
import numpy as np
from scipy.spatial.transform import Rotation
from collections import deque
import sys
import argparse

# Custom print function to show only progress
def log_progress(message):
    if message.startswith("PROGRESS:"):
        print(message)

def parse_obj(filename):
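    """Parse an OBJ file and return (vertices, edges).

    Only 'v' (vertex) and 'l' (line/edge) records are read; edge indices are
    converted from OBJ's 1-based indexing to 0-based.
    """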
    vertices = []
    lines = []
    try:
        with open(filename, 'r') as f:
            for line in f:
                if line.startswith('v '):
                    parts = line.split()
                    vertices.append([float(parts[1]), float(parts[2]), float(parts[3])])
                elif line.startswith('l '):
                    parts = line.split()
                    lines.append([int(parts[1]) - 1, int(parts[2]) - 1])
        return np.array(vertices), lines
    except Exception as e:
        raise ValueError(f"Error parsing OBJ file {filename}: {str(e)}")

def build_hierarchy(lines, root=0):
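    """Derive a joint hierarchy from edge connectivity by BFS from `root`.

    Returns (parent, children): parent[j] is j's parent index (-1 for the
    root) and children[j] lists j's children. Raises ValueError if the
    skeleton graph is disconnected.
    """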
    num_joints = max(max(line) for line in lines) + 1
    adj = [[] for _ in range(num_joints)]
    for a, b in lines:
        adj[a].append(b)
        adj[b].append(a)
    parent = [-1] * num_joints
    queue = deque([root])
    visited = [False] * num_joints
    visited[root] = True
    while queue:
        p = queue.popleft()
        for c in adj[p]:
            if not visited[c]:
                parent[c] = p
                queue.append(c)
                visited[c] = True
    if not all(visited):
        raise ValueError("The skeleton has disconnected components.")
    children = [[] for _ in range(num_joints)]
    for c in range(num_joints):
        if parent[c] != -1:
            children[parent[c]].append(c)
    return parent, children

def compute_offsets(vertices_ref, parent):
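    """Compute each joint's offset from its parent in the reference pose,
    scaled by a fixed factor (scale_factor = 0.05); the root keeps a zero
    offset. These vectors become the BVH OFFSET values.
    """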
    num_joints = len(vertices_ref)
    scale_factor = 0.05
    offsets = np.zeros((num_joints, 3))
    for j in range(num_joints):
        if parent[j] != -1:
            offsets[j] = (vertices_ref[j] - vertices_ref[parent[j]])*scale_factor
    return offsets

def compute_R_world(joint, vertices_ref, vertices_cur, children):
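    """Estimate the world-space rotation of `joint` between the reference
    and current poses.

    Leaf joints return the identity. Single-child joints use the axis-angle
    rotation aligning the reference bone direction with the current one.
    Multi-child joints solve a best-fit rotation over all child vectors via
    SVD (Kabsch), with a reflection correction so det(R) = +1.
    """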
    if not children[joint]:
        return np.eye(3)
    elif len(children[joint]) == 1:
        c = children[joint][0]
        V_ref = vertices_ref[c] - vertices_ref[joint]
        V_cur = vertices_cur[c] - vertices_cur[joint]
        norm_ref = np.linalg.norm(V_ref)
        norm_cur = np.linalg.norm(V_cur)
        if norm_ref < 1e-6 or norm_cur < 1e-6:
            return np.eye(3)
        V_ref_norm = V_ref / norm_ref
        V_cur_norm = V_cur / norm_cur
        cos_theta = np.clip(np.dot(V_ref_norm, V_cur_norm), -1.0, 1.0)
        if cos_theta > 0.99999:
            return np.eye(3)
        axis = np.cross(V_ref_norm, V_cur_norm)
        axis_norm = np.linalg.norm(axis)
        if axis_norm < 1e-6:
            return np.eye(3)
        axis = axis / axis_norm
        angle = np.arccos(cos_theta)
        return Rotation.from_rotvec(axis * angle).as_matrix()
    else:
        A = np.column_stack([vertices_ref[c] - vertices_ref[joint] for c in children[joint]])
        B = np.column_stack([vertices_cur[c] - vertices_cur[joint] for c in children[joint]])
        M = B @ A.T
        U, _, Vh = np.linalg.svd(M)
        R = U @ Vh
        if np.linalg.det(R) < 0:
            Vh[-1, :] *= -1
            R = U @ Vh
        return R

def calculate_motion_velocity(rotations):
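    """Return the per-frame angular speed (radians per frame) of a rotation
    sequence, measured as the magnitude of each consecutive relative
    rotation; the first frame copies the second frame's value.
    """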
    n_frames = len(rotations)
    if n_frames <= 1:
        return np.zeros(n_frames)
    
    velocities = np.zeros(n_frames)
    for i in range(1, n_frames):
        prev_quat = rotations[i-1].as_quat()
        curr_quat = rotations[i].as_quat()
        if np.dot(prev_quat, curr_quat) < 0:
            curr_quat = -curr_quat
        diff = Rotation.from_quat(prev_quat).inv() * Rotation.from_quat(curr_quat)
        velocities[i] = np.linalg.norm(diff.as_rotvec())
    velocities[0] = velocities[1]
    return velocities

def adaptive_smooth_rotations(rotations, window_size=7, velocity_threshold=0.03):
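    """Smooth a list of Rotation objects with a velocity-adaptive Gaussian
    window: fast motion gets a more peaked kernel (less smoothing), slow
    motion a flatter one (more smoothing). Window quaternions are
    sign-aligned to the first sample, then averaged and renormalized.
    """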
    n_frames = len(rotations)
    if n_frames <= 1:
        return rotations
    
    velocities = calculate_motion_velocity(rotations)
    if np.max(velocities) > 0:
        velocities = velocities / np.max(velocities)
    
    smoothed = []
    half_window = window_size // 2
    
    for i in range(n_frames):
        start_idx = max(0, i - half_window)
        end_idx = min(n_frames - 1, i + half_window)
        window_rots = rotations[start_idx:end_idx + 1]
        
        velocity_factor = min(1.0, velocities[i] / velocity_threshold)
        sigma = 0.5 + 1.5 * velocity_factor
        dist = np.linspace(-1, 1, len(window_rots))
        weights = np.exp(-sigma * np.square(dist))
        weights = weights / np.sum(weights)
        
        quats = [r.as_quat() for r in window_rots]
        for j in range(1, len(quats)):
            if np.dot(quats[0], quats[j]) < 0:
                quats[j] = -quats[j]
        
        result_quat = np.zeros(4)
        for j in range(len(quats)):
            result_quat += weights[j] * quats[j]
        result_quat = result_quat / np.linalg.norm(result_quat)
        smoothed.append(Rotation.from_quat(result_quat))
    
    return smoothed

def adaptive_smooth_positions(positions, window_size=7, velocity_threshold=0.03):
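    """Smooth an (n_frames, 3) position track with the same velocity-adaptive
    Gaussian weighting as the rotations, so fast segments keep their detail
    while slow segments are smoothed more heavily.
    """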
    n_frames = len(positions)
    if n_frames <= 1:
        return positions
    
    positions = np.array(positions)
    smoothed = np.zeros_like(positions)
    half_window = window_size // 2
    
    velocities = np.zeros(n_frames)
    for i in range(1, n_frames):
        velocities[i] = np.linalg.norm(positions[i] - positions[i-1])
    velocities[0] = velocities[1]
    if np.max(velocities) > 0:
        velocities = velocities / np.max(velocities)
    
    for i in range(n_frames):
        start_idx = max(0, i - half_window)
        end_idx = min(n_frames - 1, i + half_window)
        window_pos = positions[start_idx:end_idx + 1]
        
        velocity_factor = min(1.0, velocities[i] / velocity_threshold)
        sigma = 0.5 + 1.5 * velocity_factor
        dist = np.linspace(-1, 1, len(window_pos))
        weights = np.exp(-sigma * np.square(dist))
        weights = weights / np.sum(weights)
        
        smoothed[i] = np.sum(window_pos * weights[:, np.newaxis], axis=0)
    
    return smoothed

def detect_arm_joints(children, num_joints):
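    """Heuristically flag joints with exactly one child as limb ("arm")
    joints; main() gives these a wider rotation clamp (±180° vs ±150°).
    """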
    return [j for j in range(num_joints) if len(children[j]) == 1]

def main(output_dir, smoothing_window=7, velocity_threshold=0.03, joint_constraint=True):
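    """Convert the OBJ sequence in `<output_dir>/obj_sequence` into a BVH
    file at `<output_dir>/bvh/output.bvh`: build the skeleton from the first
    frame, estimate per-frame joint rotations, apply adaptive smoothing and
    optional rotation clamping, and write the hierarchy and motion data.
    """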
    folder = os.path.join(output_dir, 'obj_sequence')
    
    try:
        obj_files = sorted([f for f in os.listdir(folder) if f.endswith('.obj')])
    except Exception as e:
        sys.exit(f"Error accessing folder {folder}: {e}")

    if not obj_files:
        sys.exit("No OBJ files found.")

    vertices_ref, lines = parse_obj(os.path.join(folder, obj_files[0]))
    num_joints = len(vertices_ref)
    parent, children = build_hierarchy(lines)
    offsets = compute_offsets(vertices_ref, parent)
    root = 0

    hierarchy_order = []
    def dfs(joint):
        hierarchy_order.append(joint)
        for child in children[joint]:
            dfs(child)
    dfs(root)

    arm_joints = detect_arm_joints(children, num_joints)

    all_root_positions = []
    all_positions = [[] for _ in range(num_joints)]
    all_rotations = [[] for _ in range(num_joints)]
    
    total_files = len(obj_files)
    for i in range(total_files):
        obj_file = obj_files[i]
        vertices_cur = parse_obj(os.path.join(folder, obj_file))[0]
        R_world = [compute_R_world(j, vertices_ref, vertices_cur, children) for j in range(num_joints)]
        R_local = [R_world[j] if parent[j] == -1 else R_world[parent[j]].T @ R_world[j] for j in range(num_joints)]
        rotations = [Rotation.from_matrix(R) for R in R_local]
        
        all_root_positions.append(vertices_cur[root])
        for j in range(num_joints):
            all_positions[j].append(vertices_cur[j])
            all_rotations[j].append(rotations[j])
        
        # First half of progress (0-50%)
        progress = ((i + 1) / total_files) * 50
        log_progress(f"PROGRESS:{progress:.2f}")
    
    smoothed_root_positions = adaptive_smooth_positions(all_root_positions, smoothing_window, velocity_threshold)
    smoothed_positions = [adaptive_smooth_positions(np.array(pos), smoothing_window, velocity_threshold) for pos in all_positions]
    smoothed_rotations = [adaptive_smooth_rotations(rot, smoothing_window, velocity_threshold) for rot in all_rotations]
    
    # Enforce reference bone lengths on the smoothed positions: each child
    # joint is re-positioned along its current bone direction at the length
    # of its reference offset. Note that these corrected positions are not
    # written to the BVH below; only root translation and joint rotations
    # are exported.
    for i in range(total_files):
        # Start with root position
        smoothed_positions[root][i] = smoothed_root_positions[i]
        # Adjust each child joint to maintain bone length
        # Walk joints in hierarchy (parent-before-child) order so each parent
        # position is already corrected before its children are adjusted.
        for j in hierarchy_order:
            if parent[j] != -1:  # Skip root
                parent_pos = smoothed_positions[parent[j]][i]
                child_pos = smoothed_positions[j][i]
                ref_offset = offsets[j]  # Reference bone length vector
                bone_length = np.linalg.norm(ref_offset)
                if bone_length < 1e-6:
                    continue  # Skip if bone length is near zero
                current_vec = child_pos - parent_pos
                current_length = np.linalg.norm(current_vec)
                if current_length < 1e-6:
                    # If current length is near zero, use reference direction
                    smoothed_positions[j][i] = parent_pos + ref_offset
                else:
                    # Scale the current vector to match reference bone length
                    corrected_vec = (current_vec / current_length) * bone_length
                    smoothed_positions[j][i] = parent_pos + corrected_vec

    motion_data = []
    joints_to_remove = {10, 13, 16, 6, 3}  # NOTE: currently unused in this script
    for i in range(total_files):
        root_pos = smoothed_root_positions[i]
        
        if joint_constraint:
            for j in range(num_joints):
                euler = smoothed_rotations[j][i].as_euler('ZYX', degrees=True)
                if j in arm_joints:
                    euler = np.clip(euler, -180, 180)
                else:
                    euler = np.clip(euler, -150, 150)
                smoothed_rotations[j][i] = Rotation.from_euler('ZYX', euler, degrees=True)
        
        euler_angles = [smoothed_rotations[j][i].as_euler('ZYX', degrees=True) for j in range(num_joints)]
        motion_line = list(root_pos) + list(euler_angles[root])
        for j in hierarchy_order[1:]:
            motion_line.extend(euler_angles[j])
        motion_data.append(motion_line)
        
        # Second half of progress (50-100%)
        progress = 50 + ((i + 1) / total_files) * 50
        log_progress(f"PROGRESS:{progress:.2f}")

    bvh_dir = os.path.join(output_dir, 'bvh')
    os.makedirs(bvh_dir, exist_ok=True)
    bvh_file = os.path.join(bvh_dir, 'output.bvh')

    with open(bvh_file, 'w') as f:
        f.write("HIERARCHY\n")
        def write_hierarchy(joint, parent, f, indent=0):
            if parent == -1:
                f.write("ROOT Joint{}\n".format(joint))
            else:
                f.write("  " * indent + "JOINT Joint{}\n".format(joint))
            f.write("  " * indent + "{\n")
            f.write("  " * (indent + 1) + "OFFSET {:.6f} {:.6f} {:.6f}\n".format(*offsets[joint]))
            if parent == -1:
                f.write("  " * (indent + 1) + "CHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\n")
            else:
                f.write("  " * (indent + 1) + "CHANNELS 3 Zrotation Yrotation Xrotation\n")
            for child in children[joint]:
                write_hierarchy(child, joint, f, indent + 1)
            if not children[joint]:
                f.write("  " * (indent + 1) + "End Site\n")
                f.write("  " * (indent + 1) + "{\n")
                f.write("  " * (indent + 2) + "OFFSET 0.000000 0.000000 0.000000\n")
                f.write("  " * (indent + 1) + "}\n")
            f.write("  " * indent + "}\n")

        write_hierarchy(root, -1, f)

        f.write("MOTION\n")
        f.write("Frames: {}\n".format(len(motion_data)))
        f.write("Frame Time: 0.033333\n")
        for motion_line in motion_data:
            f.write(" ".join("{:.6f}".format(x) for x in motion_line) + "\n")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Convert OBJ sequence to BVH with improved adaptive smoothing.')
    parser.add_argument('--output-dir', type=str, default='../outputs/', help='Output directory containing obj_sequence')
    parser.add_argument('--smoothing-window', type=int, default=7, help='Size of smoothing window')
    parser.add_argument('--velocity-threshold', type=float, default=0.03, help='Velocity threshold for adaptive smoothing')
    parser.add_argument('--disable-joint-constraints', action='store_false', dest='joint_constraint', 
                        help='Disable joint constraints that prevent extreme rotations')
    args = parser.parse_args()
    main(args.output_dir, args.smoothing_window, args.velocity_threshold, args.joint_constraint)
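
# Example invocation (the script name and input layout are illustrative; the
# folder passed via --output-dir is assumed to contain an obj_sequence/
# directory of per-frame OBJ files):
#   python obj_sequence_to_bvh.py --output-dir ../outputs/ --smoothing-window 7 --velocity-threshold 0.03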