File size: 837 Bytes
1b58092
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
import argparse
import os

import torch

def main(argv=None):
    """Strip a training checkpoint down to its bare model weights.

    Loads a checkpoint that wraps the model under a ``'module'`` key
    (DeepSpeed-style wrapper — presumably; confirm against the trainer
    that produced it), extracts that entry, and saves it next to the
    input with a ``_release`` suffix before the extension.

    Args:
        argv: Optional argument list for argparse; defaults to
            ``sys.argv[1:]`` when ``None``.

    Raises:
        SystemExit: if no checkpoint filename is supplied.
        KeyError: if the checkpoint has no ``'module'`` entry.
    """
    parser = argparse.ArgumentParser(description='Hyperparams')
    # nargs='?' kept for CLI compatibility, but fail fast with a clear
    # usage message instead of letting torch.load(None) crash obscurely.
    parser.add_argument('filename', nargs='?', type=str, default=None)
    args = parser.parse_args(argv)
    if args.filename is None:
        parser.error('a checkpoint filename is required')

    # NOTE(review): torch.load unpickles arbitrary objects — only run on
    # trusted checkpoints. weights_only is not forced here because the
    # wrapper dict may contain non-tensor objects.
    model = torch.load(args.filename, map_location=torch.device('cpu'))
    model = model['module']

    # The old str.replace('.pt', '_release.pt') silently no-opped for
    # non-.pt inputs (overwriting the source file!) and could match a
    # mid-string '.pt'; build the output name from the real extension.
    root, ext = os.path.splitext(args.filename)
    torch.save(model, root + '_release' + (ext or '.pt'))


if __name__ == '__main__':
    main()