echarlaix (HF Staff) committed
Commit ca137a9 · Parent: fc6ea0c

add tiny model

config.json ADDED
@@ -0,0 +1,90 @@
+ {
+ "activation_dropout": 0.1,
+ "apply_spec_augment": true,
+ "architectures": [
+ "SpeechT5ForTextToSpeech"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 0,
+ "conv_bias": false,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "decoder_attention_heads": 2,
+ "decoder_ffn_dim": 4,
+ "decoder_layerdrop": 0.1,
+ "decoder_layers": 2,
+ "decoder_start_token_id": 2,
+ "encoder_attention_heads": 2,
+ "encoder_ffn_dim": 4,
+ "encoder_layerdrop": 0.1,
+ "encoder_layers": 2,
+ "encoder_max_relative_position": 160,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_norm": "group",
+ "feat_proj_dropout": 0.0,
+ "guided_attention_loss_num_heads": 2,
+ "guided_attention_loss_scale": 10.0,
+ "guided_attention_loss_sigma": 0.4,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.1,
+ "hidden_size": 24,
+ "initializer_range": 0.02,
+ "is_encoder_decoder": true,
+ "layer_norm_eps": 1e-05,
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.05,
+ "max_speech_positions": 4000,
+ "max_text_positions": 450,
+ "model_type": "speecht5",
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_mel_bins": 80,
+ "pad_token_id": 1,
+ "positional_dropout": 0.1,
+ "reduction_factor": 2,
+ "scale_embedding": false,
+ "speaker_embedding_dim": 512,
+ "speech_decoder_postnet_dropout": 0.5,
+ "speech_decoder_postnet_kernel": 5,
+ "speech_decoder_postnet_layers": 5,
+ "speech_decoder_postnet_units": 256,
+ "speech_decoder_prenet_dropout": 0.5,
+ "speech_decoder_prenet_layers": 2,
+ "speech_decoder_prenet_units": 256,
+ "torch_dtype": "float32",
+ "transformers_version": "4.51.3",
+ "use_cache": true,
+ "use_guided_attention_loss": true,
+ "vocab_size": 81
+ }
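
These hyperparameters (hidden_size 24, 2 encoder and decoder layers, 2 attention heads, FFN dim 4) mark this as a deliberately tiny SpeechT5 configuration, sized for tests rather than real synthesis. As a quick sanity check — the repo id below is a placeholder, since the commit view does not show the repository path — the config can be read back with transformers:

    from transformers import SpeechT5Config

    # Placeholder repo id; substitute this repository's actual Hub path.
    config = SpeechT5Config.from_pretrained("echarlaix/tiny-random-speecht5")
    print(config.hidden_size)     # 24 (the full microsoft/speecht5_tts model uses 768)
    print(config.encoder_layers)  # 2
    print(config.vocab_size)      # 81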
generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "decoder_start_token_id": 2,
+ "eos_token_id": 2,
+ "pad_token_id": 1,
+ "transformers_version": "4.51.3"
+ }
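
The special-token ids mirror config.json (bos 0, pad 1, eos and decoder-start 2). A minimal sketch of reading them back, again with a placeholder repo id:

    from transformers import GenerationConfig

    gen = GenerationConfig.from_pretrained("echarlaix/tiny-random-speecht5")  # placeholder id
    print(gen.bos_token_id, gen.pad_token_id, gen.eos_token_id)  # 0 1 2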
openvino_decoder_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01d80c098e8c96514cca6460d8edcce3c13dd1c6c8dffbf9f9f6baa287a662aa
+ size 858260
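
The three lines above are the Git LFS pointer that is stored in the repository; the 858,260-byte weight blob itself lives in LFS storage, addressed by its SHA-256. A downloaded copy can be verified against the pointer's oid:

    import hashlib

    # Hash the resolved blob and compare with the oid recorded in the pointer.
    with open("openvino_decoder_model.bin", "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()

    print(digest == "01d80c098e8c96514cca6460d8edcce3c13dd1c6c8dffbf9f9f6baa287a662aa")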
openvino_decoder_model.xml ADDED
The diff for this file is too large to render. See raw diff
 
openvino_encoder_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1226ad68e656691db77541187b4a26592c78b274de5d258c35dc6974df68f09a
+ size 86584
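
The encoder weights use the same pointer scheme (an 86,584-byte blob). Clients that resolve pointers to blobs, such as hf_hub_download, fetch the actual weights; a sketch, again with a placeholder repo id:

    from huggingface_hub import hf_hub_download

    # Returns a local path to the resolved 86,584-byte blob, not the 3-line pointer.
    path = hf_hub_download("echarlaix/tiny-random-speecht5",  # placeholder id
                           filename="openvino_encoder_model.bin")
    print(path)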
openvino_encoder_model.xml ADDED
@@ -0,0 +1,3223 @@
+ <?xml version="1.0"?>
+ <net name="Model0" version="11">
+ <layers>
+ <layer id="0" name="input_ids" type="Parameter" version="opset1">
+ <data shape="1,?" element_type="i64" />
+ <output>
+ <port id="0" precision="I64" names="input_ids">
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="1" name="aten::ones_like/Constant" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="0" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="2" name="__module.prenet.embed_tokens/aten::embedding/Convert" type="Convert" version="opset1">
+ <data destination_type="i32" />
+ <input>
+ <port id="0" precision="I64">
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="I32">
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="3" name="ShapeOf_4170" type="ShapeOf" version="opset3">
+ <data output_type="i64" />
+ <input>
+ <port id="0" precision="I32">
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="4" name="Convert_4457" type="Convert" version="opset1">
+ <data destination_type="i32" />
+ <input>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="I32">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="5" name="aten::ones_like/Broadcast" type="Broadcast" version="opset3">
+ <data mode="numpy" />
+ <input>
+ <port id="0" precision="FP32" />
+ <port id="1" precision="I32">
+ <dim>2</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="6" name="aten::ones_like/ConvertLike" type="Convert" version="opset1">
+ <data destination_type="i64" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="I64" names="encoder_attention_mask">
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
[Remainder of the 3,223-line OpenVINO IR graph — the prenet embedding and positional-encoding constants, plus the two encoder layers' relative-position attention, feed-forward, and layer-norm nodes — truncated in this rendering.]
1722
+ <input>
1723
+ <port id="0" precision="FP32">
1724
+ <dim>1</dim>
1725
+ <dim>-1</dim>
1726
+ <dim>24</dim>
1727
+ </port>
1728
+ <port id="1" precision="FP32">
1729
+ <dim>24</dim>
1730
+ <dim>24</dim>
1731
+ </port>
1732
+ </input>
1733
+ <output>
1734
+ <port id="2" precision="FP32" names="178,input.9">
1735
+ <dim>1</dim>
1736
+ <dim>-1</dim>
1737
+ <dim>24</dim>
1738
+ </port>
1739
+ </output>
1740
+ </layer>
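+ <!-- layer 0: out_proj output added back to the block input (residual), then LayerNorm lowered to an MVN node -->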
1741
+ <layer id="125" name="__module.wrapped_encoder.layers.0/aten::add/Add" type="Add" version="opset1">
1742
+ <data auto_broadcast="numpy" />
1743
+ <input>
1744
+ <port id="0" precision="FP32">
1745
+ <dim>1</dim>
1746
+ <dim>-1</dim>
1747
+ <dim>24</dim>
1748
+ </port>
1749
+ <port id="1" precision="FP32">
1750
+ <dim>1</dim>
1751
+ <dim>-1</dim>
1752
+ <dim>24</dim>
1753
+ </port>
1754
+ </input>
1755
+ <output>
1756
+ <port id="2" precision="FP32" names="180">
1757
+ <dim>1</dim>
1758
+ <dim>-1</dim>
1759
+ <dim>24</dim>
1760
+ </port>
1761
+ </output>
1762
+ </layer>
1763
+ <layer id="126" name="__module.wrapped_encoder.layers.0.layer_norm/aten::layer_norm/Multiply" type="Const" version="opset1">
1764
+ <data element_type="i32" shape="1" offset="51000" size="4" />
1765
+ <output>
1766
+ <port id="0" precision="I32">
1767
+ <dim>1</dim>
1768
+ </port>
1769
+ </output>
1770
+ </layer>
1771
+ <layer id="127" name="__module.wrapped_encoder.layers.0.layer_norm/aten::layer_norm/MVN" type="MVN" version="opset6">
1772
+ <data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
1773
+ <input>
1774
+ <port id="0" precision="FP32">
1775
+ <dim>1</dim>
1776
+ <dim>-1</dim>
1777
+ <dim>24</dim>
1778
+ </port>
1779
+ <port id="1" precision="I32">
1780
+ <dim>1</dim>
1781
+ </port>
1782
+ </input>
1783
+ <output>
1784
+ <port id="2" precision="FP32" names="184,hidden_states.5">
1785
+ <dim>1</dim>
1786
+ <dim>-1</dim>
1787
+ <dim>24</dim>
1788
+ </port>
1789
+ </output>
1790
+ </layer>
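+ <!-- layer 0 feed-forward: intermediate_dense 24 -> 4, ERF-mode GELU, output_dense 4 -> 24, residual add, final LayerNorm (MVN) -->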
1791
+ <layer id="128" name="self.wrapped_encoder.layers.0.feed_forward.intermediate_dense.weight" type="Const" version="opset1">
1792
+ <data element_type="f32" shape="4, 24" offset="75832" size="384" />
1793
+ <output>
1794
+ <port id="0" precision="FP32" names="self.wrapped_encoder.layers.0.feed_forward.intermediate_dense.weight">
1795
+ <dim>4</dim>
1796
+ <dim>24</dim>
1797
+ </port>
1798
+ </output>
1799
+ </layer>
1800
+ <layer id="129" name="__module.wrapped_encoder.layers.0.feed_forward.intermediate_dense/aten::linear/MatMul" type="MatMul" version="opset1">
1801
+ <data transpose_a="false" transpose_b="true" />
1802
+ <input>
1803
+ <port id="0" precision="FP32">
1804
+ <dim>1</dim>
1805
+ <dim>-1</dim>
1806
+ <dim>24</dim>
1807
+ </port>
1808
+ <port id="1" precision="FP32">
1809
+ <dim>4</dim>
1810
+ <dim>24</dim>
1811
+ </port>
1812
+ </input>
1813
+ <output>
1814
+ <port id="2" precision="FP32" names="189">
1815
+ <dim>1</dim>
1816
+ <dim>-1</dim>
1817
+ <dim>4</dim>
1818
+ </port>
1819
+ </output>
1820
+ </layer>
1821
+ <layer id="130" name="__module.wrapped_encoder.layers.0.feed_forward.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
1822
+ <data approximation_mode="ERF" />
1823
+ <input>
1824
+ <port id="0" precision="FP32">
1825
+ <dim>1</dim>
1826
+ <dim>-1</dim>
1827
+ <dim>4</dim>
1828
+ </port>
1829
+ </input>
1830
+ <output>
1831
+ <port id="1" precision="FP32" names="190,input.11">
1832
+ <dim>1</dim>
1833
+ <dim>-1</dim>
1834
+ <dim>4</dim>
1835
+ </port>
1836
+ </output>
1837
+ </layer>
1838
+ <layer id="131" name="self.wrapped_encoder.layers.0.feed_forward.output_dense.weight" type="Const" version="opset1">
1839
+ <data element_type="f32" shape="24, 4" offset="76216" size="384" />
1840
+ <output>
1841
+ <port id="0" precision="FP32" names="self.wrapped_encoder.layers.0.feed_forward.output_dense.weight">
1842
+ <dim>24</dim>
1843
+ <dim>4</dim>
1844
+ </port>
1845
+ </output>
1846
+ </layer>
1847
+ <layer id="132" name="__module.wrapped_encoder.layers.0.feed_forward.output_dense/aten::linear/MatMul" type="MatMul" version="opset1">
1848
+ <data transpose_a="false" transpose_b="true" />
1849
+ <input>
1850
+ <port id="0" precision="FP32">
1851
+ <dim>1</dim>
1852
+ <dim>-1</dim>
1853
+ <dim>4</dim>
1854
+ </port>
1855
+ <port id="1" precision="FP32">
1856
+ <dim>24</dim>
1857
+ <dim>4</dim>
1858
+ </port>
1859
+ </input>
1860
+ <output>
1861
+ <port id="2" precision="FP32" names="194,input.13">
1862
+ <dim>1</dim>
1863
+ <dim>-1</dim>
1864
+ <dim>24</dim>
1865
+ </port>
1866
+ </output>
1867
+ </layer>
1868
+ <layer id="133" name="__module.wrapped_encoder.layers.0/aten::add/Add_1" type="Add" version="opset1">
1869
+ <data auto_broadcast="numpy" />
1870
+ <input>
1871
+ <port id="0" precision="FP32">
1872
+ <dim>1</dim>
1873
+ <dim>-1</dim>
1874
+ <dim>24</dim>
1875
+ </port>
1876
+ <port id="1" precision="FP32">
1877
+ <dim>1</dim>
1878
+ <dim>-1</dim>
1879
+ <dim>24</dim>
1880
+ </port>
1881
+ </input>
1882
+ <output>
1883
+ <port id="2" precision="FP32" names="196">
1884
+ <dim>1</dim>
1885
+ <dim>-1</dim>
1886
+ <dim>24</dim>
1887
+ </port>
1888
+ </output>
1889
+ </layer>
1890
+ <layer id="134" name="__module.wrapped_encoder.layers.0.final_layer_norm/aten::layer_norm/Multiply" type="Const" version="opset1">
1891
+ <data element_type="i32" shape="1" offset="51000" size="4" />
1892
+ <output>
1893
+ <port id="0" precision="I32">
1894
+ <dim>1</dim>
1895
+ </port>
1896
+ </output>
1897
+ </layer>
1898
+ <layer id="135" name="__module.wrapped_encoder.layers.0.final_layer_norm/aten::layer_norm/MVN" type="MVN" version="opset6">
1899
+ <data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
1900
+ <input>
1901
+ <port id="0" precision="FP32">
1902
+ <dim>1</dim>
1903
+ <dim>-1</dim>
1904
+ <dim>24</dim>
1905
+ </port>
1906
+ <port id="1" precision="I32">
1907
+ <dim>1</dim>
1908
+ </port>
1909
+ </input>
1910
+ <output>
1911
+ <port id="2" precision="FP32" names="200,hidden_states.7">
1912
+ <dim>1</dim>
1913
+ <dim>-1</dim>
1914
+ <dim>24</dim>
1915
+ </port>
1916
+ </output>
1917
+ </layer>
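+ <!-- encoder layer 1: same attention pattern as layer 0; the node named aten::mul/Multiply is lowered to a MatMul, i.e. the query scaling appears folded into the q_proj constant (Constant_4287) -->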
1918
+ <layer id="136" name="Constant_4287" type="Const" version="opset1">
1919
+ <data element_type="f32" shape="24, 24" offset="76600" size="2304" />
1920
+ <output>
1921
+ <port id="0" precision="FP32">
1922
+ <dim>24</dim>
1923
+ <dim>24</dim>
1924
+ </port>
1925
+ </output>
1926
+ </layer>
1927
+ <layer id="137" name="__module.wrapped_encoder.layers.1.attention/aten::mul/Multiply" type="MatMul" version="opset1">
1928
+ <data transpose_a="false" transpose_b="true" />
1929
+ <input>
1930
+ <port id="0" precision="FP32">
1931
+ <dim>1</dim>
1932
+ <dim>-1</dim>
1933
+ <dim>24</dim>
1934
+ </port>
1935
+ <port id="1" precision="FP32">
1936
+ <dim>24</dim>
1937
+ <dim>24</dim>
1938
+ </port>
1939
+ </input>
1940
+ <output>
1941
+ <port id="2" precision="FP32" names="215,tensor">
1942
+ <dim>1</dim>
1943
+ <dim>-1</dim>
1944
+ <dim>24</dim>
1945
+ </port>
1946
+ </output>
1947
+ </layer>
1948
+ <layer id="138" name="Constant_4498" type="Const" version="opset1">
1949
+ <data element_type="i64" shape="4" offset="53308" size="32" />
1950
+ <output>
1951
+ <port id="0" precision="I64">
1952
+ <dim>4</dim>
1953
+ </port>
1954
+ </output>
1955
+ </layer>
1956
+ <layer id="139" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape" type="Reshape" version="opset1">
1957
+ <data special_zero="true" />
1958
+ <input>
1959
+ <port id="0" precision="FP32">
1960
+ <dim>1</dim>
1961
+ <dim>-1</dim>
1962
+ <dim>24</dim>
1963
+ </port>
1964
+ <port id="1" precision="I64">
1965
+ <dim>4</dim>
1966
+ </port>
1967
+ </input>
1968
+ <output>
1969
+ <port id="2" precision="FP32" names="235">
1970
+ <dim>1</dim>
1971
+ <dim>-1</dim>
1972
+ <dim>2</dim>
1973
+ <dim>12</dim>
1974
+ </port>
1975
+ </output>
1976
+ </layer>
1977
+ <layer id="140" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Constant" type="Const" version="opset1">
1978
+ <data element_type="i32" shape="4" offset="53340" size="16" />
1979
+ <output>
1980
+ <port id="0" precision="I32">
1981
+ <dim>4</dim>
1982
+ </port>
1983
+ </output>
1984
+ </layer>
1985
+ <layer id="141" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Transpose" type="Transpose" version="opset1">
1986
+ <input>
1987
+ <port id="0" precision="FP32">
1988
+ <dim>1</dim>
1989
+ <dim>-1</dim>
1990
+ <dim>2</dim>
1991
+ <dim>12</dim>
1992
+ </port>
1993
+ <port id="1" precision="I32">
1994
+ <dim>4</dim>
1995
+ </port>
1996
+ </input>
1997
+ <output>
1998
+ <port id="2" precision="FP32" names="236">
1999
+ <dim>1</dim>
2000
+ <dim>2</dim>
2001
+ <dim>-1</dim>
2002
+ <dim>12</dim>
2003
+ </port>
2004
+ </output>
2005
+ </layer>
2006
+ <layer id="142" name="Constant_1054" type="Const" version="opset1">
2007
+ <data element_type="i64" shape="3" offset="53356" size="24" />
2008
+ <output>
2009
+ <port id="0" precision="I64">
2010
+ <dim>3</dim>
2011
+ </port>
2012
+ </output>
2013
+ </layer>
2014
+ <layer id="143" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_1" type="Reshape" version="opset1">
2015
+ <data special_zero="false" />
2016
+ <input>
2017
+ <port id="0" precision="FP32">
2018
+ <dim>1</dim>
2019
+ <dim>2</dim>
2020
+ <dim>-1</dim>
2021
+ <dim>12</dim>
2022
+ </port>
2023
+ <port id="1" precision="I64">
2024
+ <dim>3</dim>
2025
+ </port>
2026
+ </input>
2027
+ <output>
2028
+ <port id="2" precision="FP32" names="239,query_states.5">
2029
+ <dim>2</dim>
2030
+ <dim>-1</dim>
2031
+ <dim>12</dim>
2032
+ </port>
2033
+ </output>
2034
+ </layer>
2035
+ <layer id="144" name="self.wrapped_encoder.layers.1.attention.k_proj.weight" type="Const" version="opset1">
2036
+ <data element_type="f32" shape="24, 24" offset="78904" size="2304" />
2037
+ <output>
2038
+ <port id="0" precision="FP32" names="self.wrapped_encoder.layers.1.attention.k_proj.weight">
2039
+ <dim>24</dim>
2040
+ <dim>24</dim>
2041
+ </port>
2042
+ </output>
2043
+ </layer>
2044
+ <layer id="145" name="__module.wrapped_encoder.layers.1.attention.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
2045
+ <data transpose_a="false" transpose_b="true" />
2046
+ <input>
2047
+ <port id="0" precision="FP32">
2048
+ <dim>1</dim>
2049
+ <dim>-1</dim>
2050
+ <dim>24</dim>
2051
+ </port>
2052
+ <port id="1" precision="FP32">
2053
+ <dim>24</dim>
2054
+ <dim>24</dim>
2055
+ </port>
2056
+ </input>
2057
+ <output>
2058
+ <port id="2" precision="FP32" names="218,tensor.7">
2059
+ <dim>1</dim>
2060
+ <dim>-1</dim>
2061
+ <dim>24</dim>
2062
+ </port>
2063
+ </output>
2064
+ </layer>
2065
+ <layer id="146" name="Constant_903" type="Const" version="opset1">
2066
+ <data element_type="i64" shape="4" offset="55684" size="32" />
2067
+ <output>
2068
+ <port id="0" precision="I64">
2069
+ <dim>4</dim>
2070
+ </port>
2071
+ </output>
2072
+ </layer>
2073
+ <layer id="147" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_2" type="Reshape" version="opset1">
2074
+ <data special_zero="false" />
2075
+ <input>
2076
+ <port id="0" precision="FP32">
2077
+ <dim>1</dim>
2078
+ <dim>-1</dim>
2079
+ <dim>24</dim>
2080
+ </port>
2081
+ <port id="1" precision="I64">
2082
+ <dim>4</dim>
2083
+ </port>
2084
+ </input>
2085
+ <output>
2086
+ <port id="2" precision="FP32" names="220">
2087
+ <dim>1</dim>
2088
+ <dim>-1</dim>
2089
+ <dim>2</dim>
2090
+ <dim>12</dim>
2091
+ </port>
2092
+ </output>
2093
+ </layer>
2094
+ <layer id="148" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Constant_1" type="Const" version="opset1">
2095
+ <data element_type="i32" shape="4" offset="53340" size="16" />
2096
+ <output>
2097
+ <port id="0" precision="I32">
2098
+ <dim>4</dim>
2099
+ </port>
2100
+ </output>
2101
+ </layer>
2102
+ <layer id="149" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Transpose_1" type="Transpose" version="opset1">
2103
+ <input>
2104
+ <port id="0" precision="FP32">
2105
+ <dim>1</dim>
2106
+ <dim>-1</dim>
2107
+ <dim>2</dim>
2108
+ <dim>12</dim>
2109
+ </port>
2110
+ <port id="1" precision="I32">
2111
+ <dim>4</dim>
2112
+ </port>
2113
+ </input>
2114
+ <output>
2115
+ <port id="2" precision="FP32" names="221">
2116
+ <dim>1</dim>
2117
+ <dim>2</dim>
2118
+ <dim>-1</dim>
2119
+ <dim>12</dim>
2120
+ </port>
2121
+ </output>
2122
+ </layer>
2123
+ <layer id="150" name="Constant_1070" type="Const" version="opset1">
2124
+ <data element_type="i64" shape="3" offset="53356" size="24" />
2125
+ <output>
2126
+ <port id="0" precision="I64">
2127
+ <dim>3</dim>
2128
+ </port>
2129
+ </output>
2130
+ </layer>
2131
+ <layer id="151" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_3" type="Reshape" version="opset1">
2132
+ <data special_zero="false" />
2133
+ <input>
2134
+ <port id="0" precision="FP32">
2135
+ <dim>1</dim>
2136
+ <dim>2</dim>
2137
+ <dim>-1</dim>
2138
+ <dim>12</dim>
2139
+ </port>
2140
+ <port id="1" precision="I64">
2141
+ <dim>3</dim>
2142
+ </port>
2143
+ </input>
2144
+ <output>
2145
+ <port id="2" precision="FP32" names="241,key_states">
2146
+ <dim>2</dim>
2147
+ <dim>-1</dim>
2148
+ <dim>12</dim>
2149
+ </port>
2150
+ </output>
2151
+ </layer>
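+ <!-- layer 1 attention scores: bmm of query and key heads (q.k^T); a learned relative position bias is computed below and added in place (aten::add_) before masking and softmax -->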
2152
+ <layer id="152" name="__module.wrapped_encoder.layers.1.attention/aten::bmm/MatMul" type="MatMul" version="opset1">
2153
+ <data transpose_a="false" transpose_b="true" />
2154
+ <input>
2155
+ <port id="0" precision="FP32">
2156
+ <dim>2</dim>
2157
+ <dim>-1</dim>
2158
+ <dim>12</dim>
2159
+ </port>
2160
+ <port id="1" precision="FP32">
2161
+ <dim>2</dim>
2162
+ <dim>-1</dim>
2163
+ <dim>12</dim>
2164
+ </port>
2165
+ </input>
2166
+ <output>
2167
+ <port id="2" precision="FP32" names="246_1">
2168
+ <dim>2</dim>
2169
+ <dim>-1</dim>
2170
+ <dim>-1</dim>
2171
+ </port>
2172
+ </output>
2173
+ </layer>
2174
+ <layer id="153" name="Constant_1143" type="Const" version="opset1">
2175
+ <data element_type="i64" shape="3" offset="53356" size="24" />
2176
+ <output>
2177
+ <port id="0" precision="I64">
2178
+ <dim>3</dim>
2179
+ </port>
2180
+ </output>
2181
+ </layer>
2182
+ <layer id="154" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_4" type="Reshape" version="opset1">
2183
+ <data special_zero="false" />
2184
+ <input>
2185
+ <port id="0" precision="FP32">
2186
+ <dim>2</dim>
2187
+ <dim>-1</dim>
2188
+ <dim>12</dim>
2189
+ </port>
2190
+ <port id="1" precision="I64">
2191
+ <dim>3</dim>
2192
+ </port>
2193
+ </input>
2194
+ <output>
2195
+ <port id="2" precision="FP32" names="251">
2196
+ <dim>2</dim>
2197
+ <dim>-1</dim>
2198
+ <dim>12</dim>
2199
+ </port>
2200
+ </output>
2201
+ </layer>
2202
+ <layer id="155" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Constant_3" type="Const" version="opset1">
2203
+ <data element_type="i32" shape="3" offset="55716" size="12" />
2204
+ <output>
2205
+ <port id="0" precision="I32">
2206
+ <dim>3</dim>
2207
+ </port>
2208
+ </output>
2209
+ </layer>
2210
+ <layer id="156" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Transpose_3" type="Transpose" version="opset1">
2211
+ <input>
2212
+ <port id="0" precision="FP32">
2213
+ <dim>2</dim>
2214
+ <dim>-1</dim>
2215
+ <dim>12</dim>
2216
+ </port>
2217
+ <port id="1" precision="I32">
2218
+ <dim>3</dim>
2219
+ </port>
2220
+ </input>
2221
+ <output>
2222
+ <port id="2" precision="FP32" names="252">
2223
+ <dim>-1</dim>
2224
+ <dim>2</dim>
2225
+ <dim>12</dim>
2226
+ </port>
2227
+ </output>
2228
+ </layer>
2229
+ <layer id="157" name="__module.wrapped_encoder.layers.1.attention/aten::matmul/MatMul" type="MatMul" version="opset1">
2230
+ <data transpose_a="false" transpose_b="true" />
2231
+ <input>
2232
+ <port id="0" precision="FP32">
2233
+ <dim>-1</dim>
2234
+ <dim>2</dim>
2235
+ <dim>12</dim>
2236
+ </port>
2237
+ <port id="1" precision="FP32">
2238
+ <dim>-1</dim>
2239
+ <dim>-1</dim>
2240
+ <dim>12</dim>
2241
+ </port>
2242
+ </input>
2243
+ <output>
2244
+ <port id="2" precision="FP32" names="254,rel_pos_bias.5">
2245
+ <dim>-1</dim>
2246
+ <dim>2</dim>
2247
+ <dim>-1</dim>
2248
+ </port>
2249
+ </output>
2250
+ </layer>
2251
+ <layer id="158" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Constant_5" type="Const" version="opset1">
2252
+ <data element_type="i32" shape="3" offset="55716" size="12" />
2253
+ <output>
2254
+ <port id="0" precision="I32">
2255
+ <dim>3</dim>
2256
+ </port>
2257
+ </output>
2258
+ </layer>
2259
+ <layer id="159" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Transpose_5" type="Transpose" version="opset1">
2260
+ <input>
2261
+ <port id="0" precision="FP32">
2262
+ <dim>-1</dim>
2263
+ <dim>2</dim>
2264
+ <dim>-1</dim>
2265
+ </port>
2266
+ <port id="1" precision="I32">
2267
+ <dim>3</dim>
2268
+ </port>
2269
+ </input>
2270
+ <output>
2271
+ <port id="2" precision="FP32" names="255">
2272
+ <dim>2</dim>
2273
+ <dim>-1</dim>
2274
+ <dim>-1</dim>
2275
+ </port>
2276
+ </output>
2277
+ </layer>
2278
+ <layer id="160" name="Constant_4499" type="Const" version="opset1">
2279
+ <data element_type="i64" shape="3" offset="71116" size="24" />
2280
+ <output>
2281
+ <port id="0" precision="I64">
2282
+ <dim>3</dim>
2283
+ </port>
2284
+ </output>
2285
+ </layer>
2286
+ <layer id="161" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_5" type="Reshape" version="opset1">
2287
+ <data special_zero="true" />
2288
+ <input>
2289
+ <port id="0" precision="FP32">
2290
+ <dim>2</dim>
2291
+ <dim>-1</dim>
2292
+ <dim>-1</dim>
2293
+ </port>
2294
+ <port id="1" precision="I64">
2295
+ <dim>3</dim>
2296
+ </port>
2297
+ </input>
2298
+ <output>
2299
+ <port id="2" precision="FP32" names="261,rel_pos_bias">
2300
+ <dim>2</dim>
2301
+ <dim>-1</dim>
2302
+ <dim>-1</dim>
2303
+ </port>
2304
+ </output>
2305
+ </layer>
2306
+ <layer id="162" name="__module.wrapped_encoder.layers.1.attention/aten::add_/Add" type="Add" version="opset1">
2307
+ <data auto_broadcast="numpy" />
2308
+ <input>
2309
+ <port id="0" precision="FP32">
2310
+ <dim>2</dim>
2311
+ <dim>-1</dim>
2312
+ <dim>-1</dim>
2313
+ </port>
2314
+ <port id="1" precision="FP32">
2315
+ <dim>2</dim>
2316
+ <dim>-1</dim>
2317
+ <dim>-1</dim>
2318
+ </port>
2319
+ </input>
2320
+ <output>
2321
+ <port id="2" precision="FP32" names="246,attn_weights.7">
2322
+ <dim>2</dim>
2323
+ <dim>-1</dim>
2324
+ <dim>-1</dim>
2325
+ </port>
2326
+ </output>
2327
+ </layer>
2328
+ <layer id="163" name="Constant_1318" type="Const" version="opset1">
2329
+ <data element_type="i64" shape="1" offset="50992" size="8" />
2330
+ <output>
2331
+ <port id="0" precision="I64">
2332
+ <dim>1</dim>
2333
+ </port>
2334
+ </output>
2335
+ </layer>
2336
+ <layer id="164" name="Constant_1319" type="Const" version="opset1">
2337
+ <data element_type="i64" shape="1" offset="71140" size="8" />
2338
+ <output>
2339
+ <port id="0" precision="I64">
2340
+ <dim>1</dim>
2341
+ </port>
2342
+ </output>
2343
+ </layer>
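+ <!-- dynamic sequence length: ShapeOf/Gather pull the runtime length so the reshape targets can be assembled with Concat -->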
2344
+ <layer id="165" name="ShapeOf_4478" type="ShapeOf" version="opset3">
2345
+ <data output_type="i64" />
2346
+ <input>
2347
+ <port id="0" precision="FP32">
2348
+ <dim>2</dim>
2349
+ <dim>-1</dim>
2350
+ <dim>12</dim>
2351
+ </port>
2352
+ </input>
2353
+ <output>
2354
+ <port id="1" precision="I64">
2355
+ <dim>3</dim>
2356
+ </port>
2357
+ </output>
2358
+ </layer>
2359
+ <layer id="166" name="Constant_4479" type="Const" version="opset1">
2360
+ <data element_type="i64" shape="1" offset="50992" size="8" />
2361
+ <output>
2362
+ <port id="0" precision="I64">
2363
+ <dim>1</dim>
2364
+ </port>
2365
+ </output>
2366
+ </layer>
2367
+ <layer id="167" name="Constant_4480" type="Const" version="opset1">
2368
+ <data element_type="i64" shape="" offset="50984" size="8" />
2369
+ <output>
2370
+ <port id="0" precision="I64" />
2371
+ </output>
2372
+ </layer>
2373
+ <layer id="168" name="Gather_4481" type="Gather" version="opset8">
2374
+ <data batch_dims="0" />
2375
+ <input>
2376
+ <port id="0" precision="I64">
2377
+ <dim>3</dim>
2378
+ </port>
2379
+ <port id="1" precision="I64">
2380
+ <dim>1</dim>
2381
+ </port>
2382
+ <port id="2" precision="I64" />
2383
+ </input>
2384
+ <output>
2385
+ <port id="3" precision="I64" names="244">
2386
+ <dim>1</dim>
2387
+ </port>
2388
+ </output>
2389
+ </layer>
2390
+ <layer id="169" name="__module.wrapped_encoder.layers.1.attention/prim::ListConstruct_2" type="Concat" version="opset1">
2391
+ <data axis="0" />
2392
+ <input>
2393
+ <port id="0" precision="I64">
2394
+ <dim>1</dim>
2395
+ </port>
2396
+ <port id="1" precision="I64">
2397
+ <dim>1</dim>
2398
+ </port>
2399
+ <port id="2" precision="I64">
2400
+ <dim>1</dim>
2401
+ </port>
2402
+ <port id="3" precision="I64">
2403
+ <dim>1</dim>
2404
+ </port>
2405
+ </input>
2406
+ <output>
2407
+ <port id="4" precision="I64">
2408
+ <dim>4</dim>
2409
+ </port>
2410
+ </output>
2411
+ </layer>
2412
+ <layer id="170" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_6" type="Reshape" version="opset1">
2413
+ <data special_zero="false" />
2414
+ <input>
2415
+ <port id="0" precision="FP32">
2416
+ <dim>2</dim>
2417
+ <dim>-1</dim>
2418
+ <dim>-1</dim>
2419
+ </port>
2420
+ <port id="1" precision="I64">
2421
+ <dim>4</dim>
2422
+ </port>
2423
+ </input>
2424
+ <output>
2425
+ <port id="2" precision="FP32" names="264">
2426
+ <dim>1</dim>
2427
+ <dim>2</dim>
2428
+ <dim>-1</dim>
2429
+ <dim>-1</dim>
2430
+ </port>
2431
+ </output>
2432
+ </layer>
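+ <!-- the encoder attention mask (broadcast over heads) is added to the biased scores before softmax -->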
2433
+ <layer id="171" name="__module.wrapped_encoder.layers.1.attention/aten::add/Add" type="Add" version="opset1">
2434
+ <data auto_broadcast="numpy" />
2435
+ <input>
2436
+ <port id="0" precision="FP32">
2437
+ <dim>1</dim>
2438
+ <dim>2</dim>
2439
+ <dim>-1</dim>
2440
+ <dim>-1</dim>
2441
+ </port>
2442
+ <port id="1" precision="FP32">
2443
+ <dim>1</dim>
2444
+ <dim>1</dim>
2445
+ <dim>-1</dim>
2446
+ <dim>-1</dim>
2447
+ </port>
2448
+ </input>
2449
+ <output>
2450
+ <port id="2" precision="FP32" names="265,attn_weights">
2451
+ <dim>1</dim>
2452
+ <dim>2</dim>
2453
+ <dim>-1</dim>
2454
+ <dim>-1</dim>
2455
+ </port>
2456
+ </output>
2457
+ </layer>
2458
+ <layer id="172" name="Constant_1343" type="Const" version="opset1">
2459
+ <data element_type="i64" shape="1" offset="71140" size="8" />
2460
+ <output>
2461
+ <port id="0" precision="I64">
2462
+ <dim>1</dim>
2463
+ </port>
2464
+ </output>
2465
+ </layer>
2466
+ <layer id="173" name="__module.wrapped_encoder.layers.1.attention/prim::ListConstruct_3" type="Concat" version="opset1">
2467
+ <data axis="0" />
2468
+ <input>
2469
+ <port id="0" precision="I64">
2470
+ <dim>1</dim>
2471
+ </port>
2472
+ <port id="1" precision="I64">
2473
+ <dim>1</dim>
2474
+ </port>
2475
+ <port id="2" precision="I64">
2476
+ <dim>1</dim>
2477
+ </port>
2478
+ </input>
2479
+ <output>
2480
+ <port id="3" precision="I64">
2481
+ <dim>3</dim>
2482
+ </port>
2483
+ </output>
2484
+ </layer>
2485
+ <layer id="174" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_7" type="Reshape" version="opset1">
2486
+ <data special_zero="false" />
2487
+ <input>
2488
+ <port id="0" precision="FP32">
2489
+ <dim>1</dim>
2490
+ <dim>2</dim>
2491
+ <dim>-1</dim>
2492
+ <dim>-1</dim>
2493
+ </port>
2494
+ <port id="1" precision="I64">
2495
+ <dim>3</dim>
2496
+ </port>
2497
+ </input>
2498
+ <output>
2499
+ <port id="2" precision="FP32" names="269,input.15">
2500
+ <dim>2</dim>
2501
+ <dim>-1</dim>
2502
+ <dim>-1</dim>
2503
+ </port>
2504
+ </output>
2505
+ </layer>
2506
+ <layer id="175" name="__module.wrapped_encoder.layers.1.attention/aten::softmax/Softmax" type="SoftMax" version="opset8">
2507
+ <data axis="-1" />
2508
+ <input>
2509
+ <port id="0" precision="FP32">
2510
+ <dim>2</dim>
2511
+ <dim>-1</dim>
2512
+ <dim>-1</dim>
2513
+ </port>
2514
+ </input>
2515
+ <output>
2516
+ <port id="1" precision="FP32" names="270,input.17">
2517
+ <dim>2</dim>
2518
+ <dim>-1</dim>
2519
+ <dim>-1</dim>
2520
+ </port>
2521
+ </output>
2522
+ </layer>
2523
+ <layer id="176" name="self.wrapped_encoder.layers.1.attention.v_proj.weight" type="Const" version="opset1">
2524
+ <data element_type="f32" shape="24, 24" offset="81208" size="2304" />
2525
+ <output>
2526
+ <port id="0" precision="FP32" names="self.wrapped_encoder.layers.1.attention.v_proj.weight">
2527
+ <dim>24</dim>
2528
+ <dim>24</dim>
2529
+ </port>
2530
+ </output>
2531
+ </layer>
2532
+ <layer id="177" name="__module.wrapped_encoder.layers.1.attention.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
2533
+ <data transpose_a="false" transpose_b="true" />
2534
+ <input>
2535
+ <port id="0" precision="FP32">
2536
+ <dim>1</dim>
2537
+ <dim>-1</dim>
2538
+ <dim>24</dim>
2539
+ </port>
2540
+ <port id="1" precision="FP32">
2541
+ <dim>24</dim>
2542
+ <dim>24</dim>
2543
+ </port>
2544
+ </input>
2545
+ <output>
2546
+ <port id="2" precision="FP32" names="225,tensor.9">
2547
+ <dim>1</dim>
2548
+ <dim>-1</dim>
2549
+ <dim>24</dim>
2550
+ </port>
2551
+ </output>
2552
+ </layer>
2553
+ <layer id="178" name="Constant_955" type="Const" version="opset1">
2554
+ <data element_type="i64" shape="4" offset="55684" size="32" />
2555
+ <output>
2556
+ <port id="0" precision="I64">
2557
+ <dim>4</dim>
2558
+ </port>
2559
+ </output>
2560
+ </layer>
2561
+ <layer id="179" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_8" type="Reshape" version="opset1">
2562
+ <data special_zero="false" />
2563
+ <input>
2564
+ <port id="0" precision="FP32">
2565
+ <dim>1</dim>
2566
+ <dim>-1</dim>
2567
+ <dim>24</dim>
2568
+ </port>
2569
+ <port id="1" precision="I64">
2570
+ <dim>4</dim>
2571
+ </port>
2572
+ </input>
2573
+ <output>
2574
+ <port id="2" precision="FP32" names="227">
2575
+ <dim>1</dim>
2576
+ <dim>-1</dim>
2577
+ <dim>2</dim>
2578
+ <dim>12</dim>
2579
+ </port>
2580
+ </output>
2581
+ </layer>
2582
+ <layer id="180" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Constant_6" type="Const" version="opset1">
2583
+ <data element_type="i32" shape="4" offset="53340" size="16" />
2584
+ <output>
2585
+ <port id="0" precision="I32">
2586
+ <dim>4</dim>
2587
+ </port>
2588
+ </output>
2589
+ </layer>
2590
+ <layer id="181" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Transpose_6" type="Transpose" version="opset1">
2591
+ <input>
2592
+ <port id="0" precision="FP32">
2593
+ <dim>1</dim>
2594
+ <dim>-1</dim>
2595
+ <dim>2</dim>
2596
+ <dim>12</dim>
2597
+ </port>
2598
+ <port id="1" precision="I32">
2599
+ <dim>4</dim>
2600
+ </port>
2601
+ </input>
2602
+ <output>
2603
+ <port id="2" precision="FP32" names="228">
2604
+ <dim>1</dim>
2605
+ <dim>2</dim>
2606
+ <dim>-1</dim>
2607
+ <dim>12</dim>
2608
+ </port>
2609
+ </output>
2610
+ </layer>
2611
+ <layer id="182" name="Constant_1086" type="Const" version="opset1">
2612
+ <data element_type="i64" shape="3" offset="53356" size="24" />
2613
+ <output>
2614
+ <port id="0" precision="I64">
2615
+ <dim>3</dim>
2616
+ </port>
2617
+ </output>
2618
+ </layer>
2619
+ <layer id="183" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_9" type="Reshape" version="opset1">
2620
+ <data special_zero="false" />
2621
+ <input>
2622
+ <port id="0" precision="FP32">
2623
+ <dim>1</dim>
2624
+ <dim>2</dim>
2625
+ <dim>-1</dim>
2626
+ <dim>12</dim>
2627
+ </port>
2628
+ <port id="1" precision="I64">
2629
+ <dim>3</dim>
2630
+ </port>
2631
+ </input>
2632
+ <output>
2633
+ <port id="2" precision="FP32" names="243">
2634
+ <dim>2</dim>
2635
+ <dim>-1</dim>
2636
+ <dim>12</dim>
2637
+ </port>
2638
+ </output>
2639
+ </layer>
2640
+ <layer id="184" name="__module.wrapped_encoder.layers.1.attention/aten::bmm/MatMul_1" type="MatMul" version="opset1">
2641
+ <data transpose_a="false" transpose_b="false" />
2642
+ <input>
2643
+ <port id="0" precision="FP32">
2644
+ <dim>2</dim>
2645
+ <dim>-1</dim>
2646
+ <dim>-1</dim>
2647
+ </port>
2648
+ <port id="1" precision="FP32">
2649
+ <dim>2</dim>
2650
+ <dim>-1</dim>
2651
+ <dim>12</dim>
2652
+ </port>
2653
+ </input>
2654
+ <output>
2655
+ <port id="2" precision="FP32" names="272,attn_output.7">
2656
+ <dim>2</dim>
2657
+ <dim>-1</dim>
2658
+ <dim>12</dim>
2659
+ </port>
2660
+ </output>
2661
+ </layer>
2662
+ <layer id="185" name="Constant_4500" type="Const" version="opset1">
2663
+ <data element_type="i64" shape="4" offset="73472" size="32" />
2664
+ <output>
2665
+ <port id="0" precision="I64">
2666
+ <dim>4</dim>
2667
+ </port>
2668
+ </output>
2669
+ </layer>
2670
+ <layer id="186" name="__module.wrapped_encoder.layers.1.attention/aten::view/Reshape_10" type="Reshape" version="opset1">
2671
+ <data special_zero="true" />
2672
+ <input>
2673
+ <port id="0" precision="FP32">
2674
+ <dim>2</dim>
2675
+ <dim>-1</dim>
2676
+ <dim>12</dim>
2677
+ </port>
2678
+ <port id="1" precision="I64">
2679
+ <dim>4</dim>
2680
+ </port>
2681
+ </input>
2682
+ <output>
2683
+ <port id="2" precision="FP32" names="274,attn_output.9">
2684
+ <dim>1</dim>
2685
+ <dim>2</dim>
2686
+ <dim>-1</dim>
2687
+ <dim>12</dim>
2688
+ </port>
2689
+ </output>
2690
+ </layer>
2691
+ <layer id="187" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Constant_7" type="Const" version="opset1">
2692
+ <data element_type="i32" shape="4" offset="53340" size="16" />
2693
+ <output>
2694
+ <port id="0" precision="I32">
2695
+ <dim>4</dim>
2696
+ </port>
2697
+ </output>
2698
+ </layer>
2699
+ <layer id="188" name="__module.wrapped_encoder.layers.1.attention/aten::transpose/Transpose_7" type="Transpose" version="opset1">
2700
+ <input>
2701
+ <port id="0" precision="FP32">
2702
+ <dim>1</dim>
2703
+ <dim>2</dim>
2704
+ <dim>-1</dim>
2705
+ <dim>12</dim>
2706
+ </port>
2707
+ <port id="1" precision="I32">
2708
+ <dim>4</dim>
2709
+ </port>
2710
+ </input>
2711
+ <output>
2712
+ <port id="2" precision="FP32" names="275,attn_output">
2713
+ <dim>1</dim>
2714
+ <dim>-1</dim>
2715
+ <dim>2</dim>
2716
+ <dim>12</dim>
2717
+ </port>
2718
+ </output>
2719
+ </layer>
2720
+ <layer id="189" name="Constant_4501" type="Const" version="opset1">
2721
+ <data element_type="i64" shape="3" offset="73504" size="24" />
2722
+ <output>
2723
+ <port id="0" precision="I64">
2724
+ <dim>3</dim>
2725
+ </port>
2726
+ </output>
2727
+ </layer>
2728
+ <layer id="190" name="__module.wrapped_encoder.layers.1.attention/aten::reshape/Reshape" type="Reshape" version="opset1">
2729
+ <data special_zero="true" />
2730
+ <input>
2731
+ <port id="0" precision="FP32">
2732
+ <dim>1</dim>
2733
+ <dim>-1</dim>
2734
+ <dim>2</dim>
2735
+ <dim>12</dim>
2736
+ </port>
2737
+ <port id="1" precision="I64">
2738
+ <dim>3</dim>
2739
+ </port>
2740
+ </input>
2741
+ <output>
2742
+ <port id="2" precision="FP32" names="277">
2743
+ <dim>1</dim>
2744
+ <dim>-1</dim>
2745
+ <dim>24</dim>
2746
+ </port>
2747
+ </output>
2748
+ </layer>
2749
+ <layer id="191" name="self.wrapped_encoder.layers.1.attention.out_proj.weight" type="Const" version="opset1">
2750
+ <data element_type="f32" shape="24, 24" offset="83512" size="2304" />
2751
+ <output>
2752
+ <port id="0" precision="FP32" names="self.wrapped_encoder.layers.1.attention.out_proj.weight">
2753
+ <dim>24</dim>
2754
+ <dim>24</dim>
2755
+ </port>
2756
+ </output>
2757
+ </layer>
2758
+ <layer id="192" name="__module.wrapped_encoder.layers.1.attention.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
2759
+ <data transpose_a="false" transpose_b="true" />
2760
+ <input>
2761
+ <port id="0" precision="FP32">
2762
+ <dim>1</dim>
2763
+ <dim>-1</dim>
2764
+ <dim>24</dim>
2765
+ </port>
2766
+ <port id="1" precision="FP32">
2767
+ <dim>24</dim>
2768
+ <dim>24</dim>
2769
+ </port>
2770
+ </input>
2771
+ <output>
2772
+ <port id="2" precision="FP32" names="280,input.19">
2773
+ <dim>1</dim>
2774
+ <dim>-1</dim>
2775
+ <dim>24</dim>
2776
+ </port>
2777
+ </output>
2778
+ </layer>
2779
+ <layer id="193" name="__module.wrapped_encoder.layers.1/aten::add/Add" type="Add" version="opset1">
2780
+ <data auto_broadcast="numpy" />
2781
+ <input>
2782
+ <port id="0" precision="FP32">
2783
+ <dim>1</dim>
2784
+ <dim>-1</dim>
2785
+ <dim>24</dim>
2786
+ </port>
2787
+ <port id="1" precision="FP32">
2788
+ <dim>1</dim>
2789
+ <dim>-1</dim>
2790
+ <dim>24</dim>
2791
+ </port>
2792
+ </input>
2793
+ <output>
2794
+ <port id="2" precision="FP32" names="282">
2795
+ <dim>1</dim>
2796
+ <dim>-1</dim>
2797
+ <dim>24</dim>
2798
+ </port>
2799
+ </output>
2800
+ </layer>
2801
+ <layer id="194" name="__module.wrapped_encoder.layers.1.layer_norm/aten::layer_norm/Multiply" type="Const" version="opset1">
2802
+ <data element_type="i32" shape="1" offset="51000" size="4" />
2803
+ <output>
2804
+ <port id="0" precision="I32">
2805
+ <dim>1</dim>
2806
+ </port>
2807
+ </output>
2808
+ </layer>
2809
+ <layer id="195" name="__module.wrapped_encoder.layers.1.layer_norm/aten::layer_norm/MVN" type="MVN" version="opset6">
2810
+ <data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
2811
+ <input>
2812
+ <port id="0" precision="FP32">
2813
+ <dim>1</dim>
2814
+ <dim>-1</dim>
2815
+ <dim>24</dim>
2816
+ </port>
2817
+ <port id="1" precision="I32">
2818
+ <dim>1</dim>
2819
+ </port>
2820
+ </input>
2821
+ <output>
2822
+ <port id="2" precision="FP32" names="286,hidden_states">
2823
+ <dim>1</dim>
2824
+ <dim>-1</dim>
2825
+ <dim>24</dim>
2826
+ </port>
2827
+ </output>
2828
+ </layer>
2829
+ <layer id="196" name="self.wrapped_encoder.layers.1.feed_forward.intermediate_dense.weight" type="Const" version="opset1">
2830
+ <data element_type="f32" shape="4, 24" offset="85816" size="384" />
2831
+ <output>
2832
+ <port id="0" precision="FP32" names="self.wrapped_encoder.layers.1.feed_forward.intermediate_dense.weight">
2833
+ <dim>4</dim>
2834
+ <dim>24</dim>
2835
+ </port>
2836
+ </output>
2837
+ </layer>
2838
+ <layer id="197" name="__module.wrapped_encoder.layers.1.feed_forward.intermediate_dense/aten::linear/MatMul" type="MatMul" version="opset1">
2839
+ <data transpose_a="false" transpose_b="true" />
2840
+ <input>
2841
+ <port id="0" precision="FP32">
2842
+ <dim>1</dim>
2843
+ <dim>-1</dim>
2844
+ <dim>24</dim>
2845
+ </port>
2846
+ <port id="1" precision="FP32">
2847
+ <dim>4</dim>
2848
+ <dim>24</dim>
2849
+ </port>
2850
+ </input>
2851
+ <output>
2852
+ <port id="2" precision="FP32" names="291">
2853
+ <dim>1</dim>
2854
+ <dim>-1</dim>
2855
+ <dim>4</dim>
2856
+ </port>
2857
+ </output>
2858
+ </layer>
2859
+ <layer id="198" name="__module.wrapped_encoder.layers.1.feed_forward.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
2860
+ <data approximation_mode="ERF" />
2861
+ <input>
2862
+ <port id="0" precision="FP32">
2863
+ <dim>1</dim>
2864
+ <dim>-1</dim>
2865
+ <dim>4</dim>
2866
+ </port>
2867
+ </input>
2868
+ <output>
2869
+ <port id="1" precision="FP32" names="292,input.21">
2870
+ <dim>1</dim>
2871
+ <dim>-1</dim>
2872
+ <dim>4</dim>
2873
+ </port>
2874
+ </output>
2875
+ </layer>
2876
+ <layer id="199" name="self.wrapped_encoder.layers.1.feed_forward.output_dense.weight" type="Const" version="opset1">
2877
+ <data element_type="f32" shape="24, 4" offset="86200" size="384" />
2878
+ <output>
2879
+ <port id="0" precision="FP32" names="self.wrapped_encoder.layers.1.feed_forward.output_dense.weight">
2880
+ <dim>24</dim>
2881
+ <dim>4</dim>
2882
+ </port>
2883
+ </output>
2884
+ </layer>
2885
+ <layer id="200" name="__module.wrapped_encoder.layers.1.feed_forward.output_dense/aten::linear/MatMul" type="MatMul" version="opset1">
2886
+ <data transpose_a="false" transpose_b="true" />
2887
+ <input>
2888
+ <port id="0" precision="FP32">
2889
+ <dim>1</dim>
2890
+ <dim>-1</dim>
2891
+ <dim>4</dim>
2892
+ </port>
2893
+ <port id="1" precision="FP32">
2894
+ <dim>24</dim>
2895
+ <dim>4</dim>
2896
+ </port>
2897
+ </input>
2898
+ <output>
2899
+ <port id="2" precision="FP32" names="296,input">
2900
+ <dim>1</dim>
2901
+ <dim>-1</dim>
2902
+ <dim>24</dim>
2903
+ </port>
2904
+ </output>
2905
+ </layer>
2906
+ <layer id="201" name="__module.wrapped_encoder.layers.1/aten::add/Add_1" type="Add" version="opset1">
2907
+ <data auto_broadcast="numpy" />
2908
+ <input>
2909
+ <port id="0" precision="FP32">
2910
+ <dim>1</dim>
2911
+ <dim>-1</dim>
2912
+ <dim>24</dim>
2913
+ </port>
2914
+ <port id="1" precision="FP32">
2915
+ <dim>1</dim>
2916
+ <dim>-1</dim>
2917
+ <dim>24</dim>
2918
+ </port>
2919
+ </input>
2920
+ <output>
2921
+ <port id="2" precision="FP32" names="298">
2922
+ <dim>1</dim>
2923
+ <dim>-1</dim>
2924
+ <dim>24</dim>
2925
+ </port>
2926
+ </output>
2927
+ </layer>
2928
+ <layer id="202" name="__module.wrapped_encoder.layers.1.final_layer_norm/aten::layer_norm/Multiply" type="Const" version="opset1">
2929
+ <data element_type="i32" shape="1" offset="51000" size="4" />
2930
+ <output>
2931
+ <port id="0" precision="I32">
2932
+ <dim>1</dim>
2933
+ </port>
2934
+ </output>
2935
+ </layer>
2936
+ <layer id="203" name="__module.wrapped_encoder.layers.1.final_layer_norm/aten::layer_norm/Add" type="MVN" version="opset6">
2937
+ <data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
2938
+ <input>
2939
+ <port id="0" precision="FP32">
2940
+ <dim>1</dim>
2941
+ <dim>-1</dim>
2942
+ <dim>24</dim>
2943
+ </port>
2944
+ <port id="1" precision="I32">
2945
+ <dim>1</dim>
2946
+ </port>
2947
+ </input>
2948
+ <output>
2949
+ <port id="2" precision="FP32" names="last_hidden_state">
2950
+ <dim>1</dim>
2951
+ <dim>-1</dim>
2952
+ <dim>24</dim>
2953
+ </port>
2954
+ </output>
2955
+ </layer>
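+ <!-- network outputs: last_hidden_state (1, T, 24) and the pass-through encoder_attention_mask -->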
2956
+ <layer id="204" name="Result_1518" type="Result" version="opset1" output_names="last_hidden_state">
2957
+ <input>
2958
+ <port id="0" precision="FP32">
2959
+ <dim>1</dim>
2960
+ <dim>-1</dim>
2961
+ <dim>24</dim>
2962
+ </port>
2963
+ </input>
2964
+ </layer>
2965
+ <layer id="7" name="Result_1517" type="Result" version="opset1" output_names="encoder_attention_mask">
2966
+ <input>
2967
+ <port id="0" precision="I64">
2968
+ <dim>1</dim>
2969
+ <dim>-1</dim>
2970
+ </port>
2971
+ </input>
2972
+ </layer>
2973
+ </layers>
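+ <!-- edges wire each (from-layer, from-port) output to a (to-layer, to-port) input -->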
2974
+ <edges>
2975
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
2976
+ <edge from-layer="1" from-port="0" to-layer="5" to-port="0" />
2977
+ <edge from-layer="2" from-port="1" to-layer="3" to-port="0" />
2978
+ <edge from-layer="2" from-port="1" to-layer="10" to-port="1" />
2979
+ <edge from-layer="3" from-port="1" to-layer="4" to-port="0" />
2980
+ <edge from-layer="3" from-port="1" to-layer="15" to-port="0" />
2981
+ <edge from-layer="3" from-port="1" to-layer="93" to-port="0" />
2982
+ <edge from-layer="4" from-port="1" to-layer="5" to-port="1" />
2983
+ <edge from-layer="5" from-port="2" to-layer="6" to-port="0" />
2984
+ <edge from-layer="6" from-port="1" to-layer="7" to-port="0" />
2985
+ <edge from-layer="6" from-port="1" to-layer="86" to-port="0" />
2986
+ <edge from-layer="8" from-port="0" to-layer="10" to-port="0" />
2987
+ <edge from-layer="9" from-port="0" to-layer="10" to-port="2" />
2988
+ <edge from-layer="10" from-port="3" to-layer="19" to-port="0" />
2989
+ <edge from-layer="11" from-port="0" to-layer="18" to-port="0" />
2990
+ <edge from-layer="12" from-port="0" to-layer="18" to-port="1" />
2991
+ <edge from-layer="13" from-port="0" to-layer="15" to-port="1" />
2992
+ <edge from-layer="14" from-port="0" to-layer="15" to-port="2" />
2993
+ <edge from-layer="15" from-port="3" to-layer="18" to-port="2" />
2994
+ <edge from-layer="16" from-port="0" to-layer="18" to-port="3" />
2995
+ <edge from-layer="17" from-port="0" to-layer="18" to-port="4" />
2996
+ <edge from-layer="18" from-port="5" to-layer="19" to-port="1" />
2997
+ <edge from-layer="19" from-port="2" to-layer="21" to-port="0" />
2998
+ <edge from-layer="20" from-port="0" to-layer="21" to-port="1" />
2999
+ <edge from-layer="21" from-port="2" to-layer="109" to-port="0" />
3000
+ <edge from-layer="21" from-port="2" to-layer="125" to-port="0" />
3001
+ <edge from-layer="21" from-port="2" to-layer="45" to-port="0" />
3002
+ <edge from-layer="21" from-port="2" to-layer="31" to-port="0" />
3003
+ <edge from-layer="21" from-port="2" to-layer="23" to-port="0" />
3004
+ <edge from-layer="22" from-port="0" to-layer="23" to-port="1" />
3005
+ <edge from-layer="23" from-port="2" to-layer="25" to-port="0" />
3006
+ <edge from-layer="24" from-port="0" to-layer="25" to-port="1" />
3007
+ <edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
3008
+ <edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
3009
+ <edge from-layer="27" from-port="2" to-layer="29" to-port="0" />
3010
+ <edge from-layer="28" from-port="0" to-layer="29" to-port="1" />
3011
+ <edge from-layer="29" from-port="2" to-layer="38" to-port="0" />
3012
+ <edge from-layer="29" from-port="2" to-layer="40" to-port="0" />
3013
+ <edge from-layer="30" from-port="0" to-layer="31" to-port="1" />
3014
+ <edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
3015
+ <edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
3016
+ <edge from-layer="33" from-port="2" to-layer="35" to-port="0" />
3017
+ <edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
3018
+ <edge from-layer="35" from-port="2" to-layer="37" to-port="0" />
3019
+ <edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
3020
+ <edge from-layer="37" from-port="2" to-layer="38" to-port="1" />
3021
+ <edge from-layer="37" from-port="2" to-layer="79" to-port="0" />
3022
+ <edge from-layer="38" from-port="2" to-layer="74" to-port="0" />
3023
+ <edge from-layer="39" from-port="0" to-layer="40" to-port="1" />
3024
+ <edge from-layer="40" from-port="2" to-layer="42" to-port="0" />
3025
+ <edge from-layer="41" from-port="0" to-layer="42" to-port="1" />
3026
+ <edge from-layer="42" from-port="2" to-layer="69" to-port="0" />
3027
+ <edge from-layer="43" from-port="0" to-layer="68" to-port="0" />
3028
+ <edge from-layer="44" from-port="0" to-layer="50" to-port="0" />
3029
+ <edge from-layer="44" from-port="0" to-layer="54" to-port="1" />
3030
+ <edge from-layer="45" from-port="1" to-layer="48" to-port="0" />
3031
+ <edge from-layer="46" from-port="0" to-layer="48" to-port="1" />
3032
+ <edge from-layer="47" from-port="0" to-layer="48" to-port="2" />
3033
+ <edge from-layer="48" from-port="3" to-layer="50" to-port="1" />
3034
+ <edge from-layer="48" from-port="3" to-layer="78" to-port="0" />
3035
+ <edge from-layer="49" from-port="0" to-layer="50" to-port="2" />
3036
+ <edge from-layer="50" from-port="3" to-layer="51" to-port="0" />
3037
+ <edge from-layer="51" from-port="1" to-layer="54" to-port="0" />
3038
+ <edge from-layer="51" from-port="1" to-layer="53" to-port="0" />
3039
+ <edge from-layer="52" from-port="0" to-layer="86" to-port="1" />
3040
+ <edge from-layer="52" from-port="0" to-layer="53" to-port="1" />
3041
+ <edge from-layer="53" from-port="2" to-layer="55" to-port="0" />
3042
+ <edge from-layer="54" from-port="2" to-layer="55" to-port="1" />
3043
+ <edge from-layer="55" from-port="2" to-layer="59" to-port="2" />
3044
+ <edge from-layer="55" from-port="2" to-layer="57" to-port="0" />
3045
+ <edge from-layer="56" from-port="0" to-layer="57" to-port="1" />
3046
+ <edge from-layer="57" from-port="2" to-layer="59" to-port="0" />
3047
+ <edge from-layer="58" from-port="0" to-layer="59" to-port="1" />
3048
+ <edge from-layer="59" from-port="3" to-layer="63" to-port="2" />
3049
+ <edge from-layer="59" from-port="3" to-layer="61" to-port="0" />
3050
+ <edge from-layer="60" from-port="0" to-layer="61" to-port="1" />
3051
+ <edge from-layer="61" from-port="2" to-layer="63" to-port="0" />
3052
+ <edge from-layer="62" from-port="0" to-layer="63" to-port="1" />
3053
+ <edge from-layer="63" from-port="3" to-layer="65" to-port="0" />
3054
+ <edge from-layer="64" from-port="0" to-layer="65" to-port="1" />
3055
+ <edge from-layer="65" from-port="2" to-layer="66" to-port="0" />
3056
+ <edge from-layer="66" from-port="1" to-layer="68" to-port="1" />
3057
+ <edge from-layer="67" from-port="0" to-layer="68" to-port="2" />
3058
+ <edge from-layer="68" from-port="3" to-layer="69" to-port="1" />
3059
+ <edge from-layer="68" from-port="3" to-layer="157" to-port="1" />
3060
+ <edge from-layer="69" from-port="2" to-layer="71" to-port="0" />
3061
+ <edge from-layer="70" from-port="0" to-layer="71" to-port="1" />
3062
+ <edge from-layer="71" from-port="2" to-layer="73" to-port="0" />
3063
+ <edge from-layer="72" from-port="0" to-layer="73" to-port="1" />
3064
+ <edge from-layer="73" from-port="2" to-layer="74" to-port="1" />
3065
+ <edge from-layer="74" from-port="2" to-layer="84" to-port="0" />
3066
+ <edge from-layer="75" from-port="0" to-layer="83" to-port="0" />
3067
+ <edge from-layer="76" from-port="0" to-layer="83" to-port="1" />
3068
+ <edge from-layer="77" from-port="0" to-layer="78" to-port="1" />
3069
+ <edge from-layer="78" from-port="2" to-layer="105" to-port="1" />
3070
+ <edge from-layer="78" from-port="2" to-layer="83" to-port="2" />
3071
+ <edge from-layer="78" from-port="2" to-layer="169" to-port="2" />
3072
+ <edge from-layer="78" from-port="2" to-layer="173" to-port="1" />
3073
+ <edge from-layer="79" from-port="1" to-layer="82" to-port="0" />
3074
+ <edge from-layer="80" from-port="0" to-layer="82" to-port="1" />
3075
+ <edge from-layer="81" from-port="0" to-layer="82" to-port="2" />
3076
+ <edge from-layer="82" from-port="3" to-layer="105" to-port="2" />
3077
+ <edge from-layer="82" from-port="3" to-layer="83" to-port="3" />
3078
+ <edge from-layer="83" from-port="4" to-layer="84" to-port="1" />
3079
+ <edge from-layer="84" from-port="2" to-layer="103" to-port="0" />
3080
+ <edge from-layer="85" from-port="0" to-layer="99" to-port="0" />
3081
+ <edge from-layer="86" from-port="2" to-layer="88" to-port="0" />
3082
+ <edge from-layer="87" from-port="0" to-layer="88" to-port="1" />
3083
+ <edge from-layer="88" from-port="2" to-layer="95" to-port="0" />
3084
+ <edge from-layer="89" from-port="0" to-layer="94" to-port="0" />
3085
+ <edge from-layer="90" from-port="0" to-layer="94" to-port="1" />
3086
+ <edge from-layer="91" from-port="0" to-layer="93" to-port="1" />
3087
+ <edge from-layer="92" from-port="0" to-layer="93" to-port="2" />
3088
+ <edge from-layer="93" from-port="3" to-layer="94" to-port="2" />
3089
+ <edge from-layer="94" from-port="3" to-layer="95" to-port="1" />
3090
+ <edge from-layer="95" from-port="2" to-layer="96" to-port="0" />
3091
+ <edge from-layer="96" from-port="1" to-layer="98" to-port="0" />
3092
+ <edge from-layer="97" from-port="0" to-layer="98" to-port="1" />
3093
+ <edge from-layer="98" from-port="2" to-layer="99" to-port="1" />
3094
+ <edge from-layer="99" from-port="2" to-layer="102" to-port="2" />
3095
+ <edge from-layer="99" from-port="2" to-layer="100" to-port="0" />
3096
+ <edge from-layer="100" from-port="1" to-layer="102" to-port="0" />
3097
+ <edge from-layer="101" from-port="0" to-layer="102" to-port="1" />
3098
+ <edge from-layer="102" from-port="3" to-layer="103" to-port="1" />
3099
+ <edge from-layer="102" from-port="3" to-layer="171" to-port="1" />
3100
+ <edge from-layer="103" from-port="2" to-layer="106" to-port="0" />
3101
+ <edge from-layer="104" from-port="0" to-layer="105" to-port="0" />
3102
+ <edge from-layer="105" from-port="3" to-layer="106" to-port="1" />
3103
+ <edge from-layer="106" from-port="2" to-layer="107" to-port="0" />
3104
+ <edge from-layer="107" from-port="1" to-layer="116" to-port="0" />
3105
+ <edge from-layer="108" from-port="0" to-layer="109" to-port="1" />
3106
+ <edge from-layer="109" from-port="2" to-layer="111" to-port="0" />
3107
+ <edge from-layer="110" from-port="0" to-layer="111" to-port="1" />
3108
+ <edge from-layer="111" from-port="2" to-layer="113" to-port="0" />
3109
+ <edge from-layer="112" from-port="0" to-layer="113" to-port="1" />
3110
+ <edge from-layer="113" from-port="2" to-layer="115" to-port="0" />
3111
+ <edge from-layer="114" from-port="0" to-layer="115" to-port="1" />
3112
+ <edge from-layer="115" from-port="2" to-layer="116" to-port="1" />
3113
+ <edge from-layer="116" from-port="2" to-layer="118" to-port="0" />
3114
+ <edge from-layer="117" from-port="0" to-layer="118" to-port="1" />
3115
+ <edge from-layer="118" from-port="2" to-layer="120" to-port="0" />
3116
+ <edge from-layer="119" from-port="0" to-layer="120" to-port="1" />
3117
+ <edge from-layer="120" from-port="2" to-layer="122" to-port="0" />
3118
+ <edge from-layer="121" from-port="0" to-layer="122" to-port="1" />
3119
+ <edge from-layer="122" from-port="2" to-layer="124" to-port="0" />
3120
+ <edge from-layer="123" from-port="0" to-layer="124" to-port="1" />
3121
+ <edge from-layer="124" from-port="2" to-layer="125" to-port="1" />
3122
+ <edge from-layer="125" from-port="2" to-layer="127" to-port="0" />
3123
+ <edge from-layer="126" from-port="0" to-layer="127" to-port="1" />
3124
+ <edge from-layer="127" from-port="2" to-layer="129" to-port="0" />
3125
+ <edge from-layer="127" from-port="2" to-layer="133" to-port="0" />
3126
+ <edge from-layer="128" from-port="0" to-layer="129" to-port="1" />
3127
+ <edge from-layer="129" from-port="2" to-layer="130" to-port="0" />
3128
+ <edge from-layer="130" from-port="1" to-layer="132" to-port="0" />
3129
+ <edge from-layer="131" from-port="0" to-layer="132" to-port="1" />
3130
+ <edge from-layer="132" from-port="2" to-layer="133" to-port="1" />
3131
+ <edge from-layer="133" from-port="2" to-layer="135" to-port="0" />
3132
+ <edge from-layer="134" from-port="0" to-layer="135" to-port="1" />
3133
+ <edge from-layer="135" from-port="2" to-layer="193" to-port="0" />
3134
+ <edge from-layer="135" from-port="2" to-layer="177" to-port="0" />
3135
+ <edge from-layer="135" from-port="2" to-layer="137" to-port="0" />
3136
+ <edge from-layer="135" from-port="2" to-layer="145" to-port="0" />
3137
+ <edge from-layer="136" from-port="0" to-layer="137" to-port="1" />
3138
+ <edge from-layer="137" from-port="2" to-layer="139" to-port="0" />
3139
+ <edge from-layer="138" from-port="0" to-layer="139" to-port="1" />
3140
+ <edge from-layer="139" from-port="2" to-layer="141" to-port="0" />
3141
+ <edge from-layer="140" from-port="0" to-layer="141" to-port="1" />
3142
+ <edge from-layer="141" from-port="2" to-layer="143" to-port="0" />
3143
+ <edge from-layer="142" from-port="0" to-layer="143" to-port="1" />
3144
+ <edge from-layer="143" from-port="2" to-layer="152" to-port="0" />
3145
+ <edge from-layer="143" from-port="2" to-layer="154" to-port="0" />
3146
+ <edge from-layer="144" from-port="0" to-layer="145" to-port="1" />
3147
+ <edge from-layer="145" from-port="2" to-layer="147" to-port="0" />
3148
+ <edge from-layer="146" from-port="0" to-layer="147" to-port="1" />
3149
+ <edge from-layer="147" from-port="2" to-layer="149" to-port="0" />
3150
+ <edge from-layer="148" from-port="0" to-layer="149" to-port="1" />
3151
+ <edge from-layer="149" from-port="2" to-layer="151" to-port="0" />
3152
+ <edge from-layer="150" from-port="0" to-layer="151" to-port="1" />
3153
+ <edge from-layer="151" from-port="2" to-layer="152" to-port="1" />
3154
+ <edge from-layer="151" from-port="2" to-layer="165" to-port="0" />
3155
+ <edge from-layer="152" from-port="2" to-layer="162" to-port="0" />
3156
+ <edge from-layer="153" from-port="0" to-layer="154" to-port="1" />
3157
+ <edge from-layer="154" from-port="2" to-layer="156" to-port="0" />
3158
+ <edge from-layer="155" from-port="0" to-layer="156" to-port="1" />
3159
+ <edge from-layer="156" from-port="2" to-layer="157" to-port="0" />
3160
+ <edge from-layer="157" from-port="2" to-layer="159" to-port="0" />
3161
+ <edge from-layer="158" from-port="0" to-layer="159" to-port="1" />
3162
+ <edge from-layer="159" from-port="2" to-layer="161" to-port="0" />
3163
+ <edge from-layer="160" from-port="0" to-layer="161" to-port="1" />
3164
+ <edge from-layer="161" from-port="2" to-layer="162" to-port="1" />
3165
+ <edge from-layer="162" from-port="2" to-layer="170" to-port="0" />
3166
+ <edge from-layer="163" from-port="0" to-layer="169" to-port="0" />
3167
+ <edge from-layer="164" from-port="0" to-layer="169" to-port="1" />
3168
+ <edge from-layer="165" from-port="1" to-layer="168" to-port="0" />
3169
+ <edge from-layer="166" from-port="0" to-layer="168" to-port="1" />
3170
+ <edge from-layer="167" from-port="0" to-layer="168" to-port="2" />
3171
+ <edge from-layer="168" from-port="3" to-layer="173" to-port="2" />
3172
+ <edge from-layer="168" from-port="3" to-layer="169" to-port="3" />
3173
+ <edge from-layer="169" from-port="4" to-layer="170" to-port="1" />
3174
+ <edge from-layer="170" from-port="2" to-layer="171" to-port="0" />
3175
+ <edge from-layer="171" from-port="2" to-layer="174" to-port="0" />
3176
+ <edge from-layer="172" from-port="0" to-layer="173" to-port="0" />
3177
+ <edge from-layer="173" from-port="3" to-layer="174" to-port="1" />
3178
+ <edge from-layer="174" from-port="2" to-layer="175" to-port="0" />
3179
+ <edge from-layer="175" from-port="1" to-layer="184" to-port="0" />
3180
+ <edge from-layer="176" from-port="0" to-layer="177" to-port="1" />
3181
+ <edge from-layer="177" from-port="2" to-layer="179" to-port="0" />
3182
+ <edge from-layer="178" from-port="0" to-layer="179" to-port="1" />
3183
+ <edge from-layer="179" from-port="2" to-layer="181" to-port="0" />
3184
+ <edge from-layer="180" from-port="0" to-layer="181" to-port="1" />
3185
+ <edge from-layer="181" from-port="2" to-layer="183" to-port="0" />
3186
+ <edge from-layer="182" from-port="0" to-layer="183" to-port="1" />
3187
+ <edge from-layer="183" from-port="2" to-layer="184" to-port="1" />
3188
+ <edge from-layer="184" from-port="2" to-layer="186" to-port="0" />
3189
+ <edge from-layer="185" from-port="0" to-layer="186" to-port="1" />
3190
+ <edge from-layer="186" from-port="2" to-layer="188" to-port="0" />
3191
+ <edge from-layer="187" from-port="0" to-layer="188" to-port="1" />
3192
+ <edge from-layer="188" from-port="2" to-layer="190" to-port="0" />
3193
+ <edge from-layer="189" from-port="0" to-layer="190" to-port="1" />
3194
+ <edge from-layer="190" from-port="2" to-layer="192" to-port="0" />
3195
+ <edge from-layer="191" from-port="0" to-layer="192" to-port="1" />
3196
+ <edge from-layer="192" from-port="2" to-layer="193" to-port="1" />
3197
+ <edge from-layer="193" from-port="2" to-layer="195" to-port="0" />
3198
+ <edge from-layer="194" from-port="0" to-layer="195" to-port="1" />
3199
+ <edge from-layer="195" from-port="2" to-layer="197" to-port="0" />
3200
+ <edge from-layer="195" from-port="2" to-layer="201" to-port="0" />
3201
+ <edge from-layer="196" from-port="0" to-layer="197" to-port="1" />
3202
+ <edge from-layer="197" from-port="2" to-layer="198" to-port="0" />
3203
+ <edge from-layer="198" from-port="1" to-layer="200" to-port="0" />
3204
+ <edge from-layer="199" from-port="0" to-layer="200" to-port="1" />
3205
+ <edge from-layer="200" from-port="2" to-layer="201" to-port="1" />
3206
+ <edge from-layer="201" from-port="2" to-layer="203" to-port="0" />
3207
+ <edge from-layer="202" from-port="0" to-layer="203" to-port="1" />
3208
+ <edge from-layer="203" from-port="2" to-layer="204" to-port="0" />
3209
+ </edges>
3210
+ <rt_info>
3211
+ <Runtime_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
3212
+ <conversion_parameters>
3213
+ <framework value="pytorch" />
3214
+ <is_python_object value="True" />
3215
+ </conversion_parameters>
3216
+ <optimum>
3217
+ <optimum_intel_version value="1.23.0.dev0+81089b7" />
3218
+ <optimum_version value="1.25.0.dev0" />
3219
+ <pytorch_version value="2.5.1+cpu" />
3220
+ <transformers_version value="4.51.3" />
3221
+ </optimum>
3222
+ </rt_info>
3223
+ </net>
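Each OpenVINO component added in this commit is an .xml topology (like the encoder graph closed above) paired with a .bin weights blob that shares its basename. A minimal sketch of loading one such pair directly with the OpenVINO runtime, assuming the files sit in the working directory; the API below is the standard openvino Python bindings, not something shipped in this repo:

import openvino as ov

core = ov.Core()
# read_model() picks up openvino_encoder_model.bin automatically
# because it shares the .xml file's basename.
model = core.read_model("openvino_encoder_model.xml")
compiled = core.compile_model(model, "CPU")
print([inp.get_any_name() for inp in compiled.inputs])
print([out.get_any_name() for out in compiled.outputs])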
openvino_postnet.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00a42e63093d04609f52eaba7d94f9f52040ff15bcaa2a7c9cba8e09ba5f768c
+ size 4751412
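The *.bin files land in the repo as Git LFS pointer files like the three lines above: a spec version, the sha256 of the real payload, and its byte size. A small stdlib-only sketch that reads those fields back; the parse_lfs_pointer helper is illustrative, not part of any tooling in this commit:

from pathlib import Path

def parse_lfs_pointer(path):
    # Each pointer line is "key value"; split on the first space only.
    fields = dict(line.split(" ", 1) for line in Path(path).read_text().splitlines() if line)
    return fields["oid"].removeprefix("sha256:"), int(fields["size"])

oid, size = parse_lfs_pointer("openvino_postnet.bin")
print(oid, size)  # expects 00a42e63... and 4751412 for the pointer above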
openvino_postnet.xml ADDED
@@ -0,0 +1,504 @@
+ <?xml version="1.0"?>
+ <net name="Model6" version="11">
+ <layers>
+ <layer id="0" name="raw_spectrogram" type="Parameter" version="opset1">
+ <data shape="?,?,2,80" element_type="f32" />
+ <output>
+ <port id="0" precision="FP32" names="raw_spectrogram">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>2</dim>
+ <dim>80</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="1" name="aten::transpose/Constant" type="Const" version="opset1">
+ <data element_type="i32" shape="4" offset="0" size="16" />
+ <output>
+ <port id="0" precision="I32">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="2" name="aten::transpose/Transpose" type="Transpose" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>2</dim>
+ <dim>80</dim>
+ </port>
+ <port id="1" precision="I32">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="19">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>2</dim>
+ <dim>80</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="3" name="Constant_22093" type="Const" version="opset1">
+ <data element_type="i32" shape="1" offset="16" size="4" />
+ <output>
+ <port id="0" precision="I32">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="4" name="aten::flatten/Constant_2" type="Const" version="opset1">
+ <data element_type="i32" shape="1" offset="20" size="4" />
+ <output>
+ <port id="0" precision="I32">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="5" name="ShapeOf_21977" type="ShapeOf" version="opset3">
+ <data output_type="i32" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>2</dim>
+ <dim>80</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="I32">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="6" name="Constant_21984" type="Const" version="opset1">
+ <data element_type="i64" shape="1" offset="24" size="8" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="7" name="Constant_21985" type="Const" version="opset1">
+ <data element_type="i64" shape="" offset="32" size="8" />
+ <output>
+ <port id="0" precision="I64" />
+ </output>
+ </layer>
+ <layer id="8" name="Gather_21986" type="Gather" version="opset8">
+ <data batch_dims="0" />
+ <input>
+ <port id="0" precision="I32">
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>1</dim>
+ </port>
+ <port id="2" precision="I64" />
+ </input>
+ <output>
+ <port id="3" precision="I32">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="9" name="aten::flatten/Concat" type="Concat" version="opset1">
+ <data axis="0" />
+ <input>
+ <port id="0" precision="I32">
+ <dim>1</dim>
+ </port>
+ <port id="1" precision="I32">
+ <dim>1</dim>
+ </port>
+ <port id="2" precision="I32">
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="3" precision="I32">
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="10" name="aten::flatten/Reshape" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>2</dim>
+ <dim>80</dim>
+ </port>
+ <port id="1" precision="I32">
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="22,hidden_states">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>80</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="11" name="aten::transpose/Constant_1" type="Const" version="opset1">
+ <data element_type="i32" shape="3" offset="40" size="12" />
+ <output>
+ <port id="0" precision="I32">
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="12" name="aten::transpose/Transpose_1" type="Transpose" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>80</dim>
+ </port>
+ <port id="1" precision="I32">
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="25">
+ <dim>-1</dim>
+ <dim>80</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="13" name="Multiply_21914" type="Const" version="opset1">
+ <data element_type="f32" shape="256, 80, 5" offset="52" size="409600" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>256</dim>
+ <dim>80</dim>
+ <dim>5</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="14" name="Multiply_21877" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="2" pads_end="2" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>80</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>256</dim>
+ <dim>80</dim>
+ <dim>5</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="60">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="15" name="__module.speech_decoder_postnet.layers.0.activation/aten::tanh/Tanh" type="Tanh" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="61,input.1">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="16" name="Multiply_21918" type="Const" version="opset1">
+ <data element_type="f32" shape="256, 256, 5" offset="409652" size="1310720" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>5</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="17" name="Multiply_21884" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="2" pads_end="2" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>5</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="86">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="18" name="__module.speech_decoder_postnet.layers.1.activation/aten::tanh/Tanh" type="Tanh" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="87,input.3">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="19" name="Multiply_21922" type="Const" version="opset1">
+ <data element_type="f32" shape="256, 256, 5" offset="1720372" size="1310720" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>5</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="20" name="Multiply_21891" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="2" pads_end="2" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>5</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="112">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="21" name="__module.speech_decoder_postnet.layers.2.activation/aten::tanh/Tanh" type="Tanh" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="113,input.5">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="22" name="Multiply_21926" type="Const" version="opset1">
+ <data element_type="f32" shape="256, 256, 5" offset="3031092" size="1310720" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>5</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="23" name="Multiply_21898" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="2" pads_end="2" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>5</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="138">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="24" name="__module.speech_decoder_postnet.layers.3.activation/aten::tanh/Tanh" type="Tanh" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="139,input.7">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="25" name="Multiply_21930" type="Const" version="opset1">
+ <data element_type="f32" shape="80, 256, 5" offset="4341812" size="409600" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>80</dim>
+ <dim>256</dim>
+ <dim>5</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="26" name="Multiply_21905" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="2" pads_end="2" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>256</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>80</dim>
+ <dim>256</dim>
+ <dim>5</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="163,input">
+ <dim>-1</dim>
+ <dim>80</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="27" name="aten::transpose/Constant_2" type="Const" version="opset1">
+ <data element_type="i32" shape="3" offset="40" size="12" />
+ <output>
+ <port id="0" precision="I32">
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="28" name="aten::transpose/Transpose_2" type="Transpose" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>80</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="I32">
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="33">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>80</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="29" name="aten::add/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>80</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>80</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="postnet_spectrogram">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>80</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="30" name="Result_19725" type="Result" version="opset1" output_names="postnet_spectrogram">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>80</dim>
+ </port>
+ </input>
+ </layer>
+ </layers>
+ <edges>
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
+ <edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
+ <edge from-layer="2" from-port="2" to-layer="10" to-port="0" />
+ <edge from-layer="2" from-port="2" to-layer="5" to-port="0" />
+ <edge from-layer="3" from-port="0" to-layer="9" to-port="0" />
+ <edge from-layer="4" from-port="0" to-layer="9" to-port="1" />
+ <edge from-layer="5" from-port="1" to-layer="8" to-port="0" />
+ <edge from-layer="6" from-port="0" to-layer="8" to-port="1" />
+ <edge from-layer="7" from-port="0" to-layer="8" to-port="2" />
+ <edge from-layer="8" from-port="3" to-layer="9" to-port="2" />
+ <edge from-layer="9" from-port="3" to-layer="10" to-port="1" />
+ <edge from-layer="10" from-port="2" to-layer="12" to-port="0" />
+ <edge from-layer="10" from-port="2" to-layer="29" to-port="0" />
+ <edge from-layer="11" from-port="0" to-layer="12" to-port="1" />
+ <edge from-layer="12" from-port="2" to-layer="14" to-port="0" />
+ <edge from-layer="13" from-port="0" to-layer="14" to-port="1" />
+ <edge from-layer="14" from-port="2" to-layer="15" to-port="0" />
+ <edge from-layer="15" from-port="1" to-layer="17" to-port="0" />
+ <edge from-layer="16" from-port="0" to-layer="17" to-port="1" />
+ <edge from-layer="17" from-port="2" to-layer="18" to-port="0" />
+ <edge from-layer="18" from-port="1" to-layer="20" to-port="0" />
+ <edge from-layer="19" from-port="0" to-layer="20" to-port="1" />
+ <edge from-layer="20" from-port="2" to-layer="21" to-port="0" />
+ <edge from-layer="21" from-port="1" to-layer="23" to-port="0" />
+ <edge from-layer="22" from-port="0" to-layer="23" to-port="1" />
+ <edge from-layer="23" from-port="2" to-layer="24" to-port="0" />
+ <edge from-layer="24" from-port="1" to-layer="26" to-port="0" />
+ <edge from-layer="25" from-port="0" to-layer="26" to-port="1" />
+ <edge from-layer="26" from-port="2" to-layer="28" to-port="0" />
+ <edge from-layer="27" from-port="0" to-layer="28" to-port="1" />
+ <edge from-layer="28" from-port="2" to-layer="29" to-port="1" />
+ <edge from-layer="29" from-port="2" to-layer="30" to-port="0" />
+ </edges>
+ <rt_info>
+ <Runtime_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
+ <conversion_parameters>
+ <framework value="pytorch" />
+ <is_python_object value="True" />
+ </conversion_parameters>
+ <optimum>
+ <optimum_intel_version value="1.23.0.dev0+81089b7" />
+ <optimum_version value="1.25.0.dev0" />
+ <pytorch_version value="2.5.1+cpu" />
+ <transformers_version value="4.51.3" />
+ </optimum>
+ </rt_info>
+ </net>
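The postnet graph above states its whole contract in the Parameter and Result layers: raw_spectrogram comes in as [batch, steps, 2, 80] (the 2 is reduction_factor and 80 is num_mel_bins from config.json), is flattened to [batch, steps*2, 80], refined by five kernel-5 1-D convolutions (four Tanh-activated 256-channel layers plus an 80-channel projection), and added back to the flattened input to give postnet_spectrogram. A hedged smoke test against the exported IR; the shapes come from the XML, while the calling convention is the standard openvino Python API and is assumed, not shown in this commit:

import numpy as np
import openvino as ov

core = ov.Core()
postnet = core.compile_model("openvino_postnet.xml", "CPU")

# [batch, decoder steps, reduction_factor, num_mel_bins] per the Parameter layer
raw = np.random.randn(1, 10, 2, 80).astype(np.float32)
out = postnet({"raw_spectrogram": raw})["postnet_spectrogram"]
print(out.shape)  # expected (1, 20, 80): 10 decoder steps unfold into 20 mel frames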
openvino_vocoder.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:060e32bf00a0c80e7656dfbca9feef9cc3cf851d11d71d9c7f2c0175ea11f133
+ size 3196512
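Taken together, the four IR pairs in this commit (encoder, decoder, postnet, vocoder) are the component layout optimum-intel exports for SpeechT5 text-to-speech. A hedged end-to-end sketch, assuming a recent optimum-intel build (the rt_info blocks record 1.23.0.dev0) exposing an OVModelForTextToSpeechSeq2Seq class that wires these four files together; the class name, the generate() signature, and the random inputs are assumptions for illustration, not taken from this commit:

import torch
from optimum.intel import OVModelForTextToSpeechSeq2Seq  # assumed class name

model = OVModelForTextToSpeechSeq2Seq.from_pretrained(".")  # the files added here
input_ids = torch.randint(0, 81, (1, 12))  # vocab_size is 81 in config.json
speaker_embeddings = torch.randn(1, 512)   # speaker_embedding_dim is 512
speech = model.generate(input_ids=input_ids, speaker_embeddings=speaker_embeddings)
print(speech.shape)  # waveform samples produced by the vocoder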
openvino_vocoder.xml ADDED
@@ -0,0 +1,2886 @@
1
+ <?xml version="1.0"?>
2
+ <net name="Model9" version="11">
3
+ <layers>
4
+ <layer id="0" name="spectrogram" type="Parameter" version="opset1">
5
+ <data shape="?,?,80" element_type="f32" />
6
+ <output>
7
+ <port id="0" precision="FP32" names="spectrogram">
8
+ <dim>-1</dim>
9
+ <dim>-1</dim>
10
+ <dim>80</dim>
11
+ </port>
12
+ </output>
13
+ </layer>
14
+ <layer id="1" name="Constant_27237" type="Const" version="opset1">
15
+ <data element_type="f32" shape="1, 1, 80" offset="0" size="320" />
16
+ <output>
17
+ <port id="0" precision="FP32">
18
+ <dim>1</dim>
19
+ <dim>1</dim>
20
+ <dim>80</dim>
21
+ </port>
22
+ </output>
23
+ </layer>
24
+ <layer id="2" name="aten::sub/Subtract" type="Add" version="opset1">
25
+ <data auto_broadcast="numpy" />
26
+ <input>
27
+ <port id="0" precision="FP32">
28
+ <dim>-1</dim>
29
+ <dim>-1</dim>
30
+ <dim>80</dim>
31
+ </port>
32
+ <port id="1" precision="FP32">
33
+ <dim>1</dim>
34
+ <dim>1</dim>
35
+ <dim>80</dim>
36
+ </port>
37
+ </input>
38
+ <output>
39
+ <port id="2" precision="FP32" names="23,24,spectrogram_1">
40
+ <dim>-1</dim>
41
+ <dim>-1</dim>
42
+ <dim>80</dim>
43
+ </port>
44
+ </output>
45
+ </layer>
46
+ <layer id="3" name="aten::transpose/Constant" type="Const" version="opset1">
47
+ <data element_type="i32" shape="3" offset="320" size="12" />
48
+ <output>
49
+ <port id="0" precision="I32">
50
+ <dim>3</dim>
51
+ </port>
52
+ </output>
53
+ </layer>
54
+ <layer id="4" name="aten::transpose/Transpose" type="Transpose" version="opset1">
55
+ <input>
56
+ <port id="0" precision="FP32">
57
+ <dim>-1</dim>
58
+ <dim>-1</dim>
59
+ <dim>80</dim>
60
+ </port>
61
+ <port id="1" precision="I32">
62
+ <dim>3</dim>
63
+ </port>
64
+ </input>
65
+ <output>
66
+ <port id="2" precision="FP32" names="27">
67
+ <dim>-1</dim>
68
+ <dim>80</dim>
69
+ <dim>-1</dim>
70
+ </port>
71
+ </output>
72
+ </layer>
73
+ <layer id="5" name="self.conv_pre.weight" type="Const" version="opset1">
74
+ <data element_type="f32" shape="128, 80, 7" offset="332" size="286720" />
75
+ <output>
76
+ <port id="0" precision="FP32" names="self.conv_pre.weight">
77
+ <dim>128</dim>
78
+ <dim>80</dim>
79
+ <dim>7</dim>
80
+ </port>
81
+ </output>
82
+ </layer>
83
+ <layer id="6" name="__module.conv_pre/aten::_convolution/Convolution" type="Convolution" version="opset1">
84
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
85
+ <input>
86
+ <port id="0" precision="FP32">
87
+ <dim>-1</dim>
88
+ <dim>80</dim>
89
+ <dim>-1</dim>
90
+ </port>
91
+ <port id="1" precision="FP32">
92
+ <dim>128</dim>
93
+ <dim>80</dim>
94
+ <dim>7</dim>
95
+ </port>
96
+ </input>
97
+ <output>
98
+ <port id="2" precision="FP32" names="70,input.1">
99
+ <dim>-1</dim>
100
+ <dim>128</dim>
101
+ <dim>-1</dim>
102
+ </port>
103
+ </output>
104
+ </layer>
105
+ <layer id="7" name="29" type="Const" version="opset1">
106
+ <data element_type="f32" shape="" offset="287052" size="4" />
107
+ <output>
108
+ <port id="0" precision="FP32" names="29" />
109
+ </output>
110
+ </layer>
111
+ <layer id="8" name="aten::leaky_relu/PRelu" type="PReLU" version="opset1">
112
+ <input>
113
+ <port id="0" precision="FP32">
114
+ <dim>-1</dim>
115
+ <dim>128</dim>
116
+ <dim>-1</dim>
117
+ </port>
118
+ <port id="1" precision="FP32" />
119
+ </input>
120
+ <output>
121
+ <port id="2" precision="FP32" names="30">
122
+ <dim>-1</dim>
123
+ <dim>128</dim>
124
+ <dim>-1</dim>
125
+ </port>
126
+ </output>
127
+ </layer>
128
+ <layer id="9" name="self.upsampler.0.weight" type="Const" version="opset1">
129
+ <data element_type="f32" shape="128, 64, 8" offset="287056" size="262144" />
130
+ <output>
131
+ <port id="0" precision="FP32" names="self.upsampler.0.weight">
132
+ <dim>128</dim>
133
+ <dim>64</dim>
134
+ <dim>8</dim>
135
+ </port>
136
+ </output>
137
+ </layer>
138
+ <layer id="10" name="__module.upsampler.0/aten::_convolution/ConvolutionBackpropData" type="ConvolutionBackpropData" version="opset1">
139
+ <data strides="4" dilations="1" pads_begin="2" pads_end="2" auto_pad="explicit" output_padding="0" />
140
+ <input>
141
+ <port id="0" precision="FP32">
142
+ <dim>-1</dim>
143
+ <dim>128</dim>
144
+ <dim>-1</dim>
145
+ </port>
146
+ <port id="1" precision="FP32">
147
+ <dim>128</dim>
148
+ <dim>64</dim>
149
+ <dim>8</dim>
150
+ </port>
151
+ </input>
152
+ <output>
153
+ <port id="2" precision="FP32">
154
+ <dim>-1</dim>
155
+ <dim>64</dim>
156
+ <dim>-1</dim>
157
+ </port>
158
+ </output>
159
+ </layer>
160
+ <layer id="11" name="__module.upsampler.0/aten::_convolution/Reshape" type="Const" version="opset1">
161
+ <data element_type="f32" shape="1, 64, 1" offset="549200" size="256" />
162
+ <output>
163
+ <port id="0" precision="FP32">
164
+ <dim>1</dim>
165
+ <dim>64</dim>
166
+ <dim>1</dim>
167
+ </port>
168
+ </output>
169
+ </layer>
170
+ <layer id="12" name="__module.upsampler.0/aten::_convolution/Add" type="Add" version="opset1">
171
+ <data auto_broadcast="numpy" />
172
+ <input>
173
+ <port id="0" precision="FP32">
174
+ <dim>-1</dim>
175
+ <dim>64</dim>
176
+ <dim>-1</dim>
177
+ </port>
178
+ <port id="1" precision="FP32">
179
+ <dim>1</dim>
180
+ <dim>64</dim>
181
+ <dim>1</dim>
182
+ </port>
183
+ </input>
184
+ <output>
185
+ <port id="2" precision="FP32" names="83,input.3">
186
+ <dim>-1</dim>
187
+ <dim>64</dim>
188
+ <dim>-1</dim>
189
+ </port>
190
+ </output>
191
+ </layer>
192
+ <layer id="13" name="90" type="Const" version="opset1">
193
+ <data element_type="f32" shape="" offset="287052" size="4" />
194
+ <output>
195
+ <port id="0" precision="FP32" names="90" />
196
+ </output>
197
+ </layer>
198
+ <layer id="14" name="__module.resblocks.0/aten::leaky_relu/PRelu" type="PReLU" version="opset1">
199
+ <input>
200
+ <port id="0" precision="FP32">
201
+ <dim>-1</dim>
202
+ <dim>64</dim>
203
+ <dim>-1</dim>
204
+ </port>
205
+ <port id="1" precision="FP32" />
206
+ </input>
207
+ <output>
208
+ <port id="2" precision="FP32" names="103,175,247">
209
+ <dim>-1</dim>
210
+ <dim>64</dim>
211
+ <dim>-1</dim>
212
+ </port>
213
+ </output>
214
+ </layer>
215
+ <layer id="15" name="self.resblocks.0.convs1.0.weight" type="Const" version="opset1">
216
+ <data element_type="f32" shape="64, 64, 3" offset="549456" size="49152" />
217
+ <output>
218
+ <port id="0" precision="FP32" names="self.resblocks.0.convs1.0.weight">
219
+ <dim>64</dim>
220
+ <dim>64</dim>
221
+ <dim>3</dim>
222
+ </port>
223
+ </output>
224
+ </layer>
225
+ <layer id="16" name="__module.resblocks.0.convs1.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
226
+ <data strides="1" dilations="1" pads_begin="1" pads_end="1" auto_pad="explicit" />
227
+ <input>
228
+ <port id="0" precision="FP32">
229
+ <dim>-1</dim>
230
+ <dim>64</dim>
231
+ <dim>-1</dim>
232
+ </port>
233
+ <port id="1" precision="FP32">
234
+ <dim>64</dim>
235
+ <dim>64</dim>
236
+ <dim>3</dim>
237
+ </port>
238
+ </input>
239
+ <output>
240
+ <port id="2" precision="FP32" names="110,input.5">
241
+ <dim>-1</dim>
242
+ <dim>64</dim>
243
+ <dim>-1</dim>
244
+ </port>
245
+ </output>
246
+ </layer>
247
+ <layer id="17" name="__module.resblocks.0/aten::leaky_relu/PRelu_1" type="PReLU" version="opset1">
248
+ <input>
249
+ <port id="0" precision="FP32">
250
+ <dim>-1</dim>
251
+ <dim>64</dim>
252
+ <dim>-1</dim>
253
+ </port>
254
+ <port id="1" precision="FP32" />
255
+ </input>
256
+ <output>
257
+ <port id="2" precision="FP32" names="111">
258
+ <dim>-1</dim>
259
+ <dim>64</dim>
260
+ <dim>-1</dim>
261
+ </port>
262
+ </output>
263
+ </layer>
264
+ <layer id="18" name="self.resblocks.0.convs2.0.weight" type="Const" version="opset1">
265
+ <data element_type="f32" shape="64, 64, 3" offset="598608" size="49152" />
266
+ <output>
267
+ <port id="0" precision="FP32" names="self.resblocks.0.convs2.0.weight">
268
+ <dim>64</dim>
269
+ <dim>64</dim>
270
+ <dim>3</dim>
271
+ </port>
272
+ </output>
273
+ </layer>
274
+ <layer id="19" name="__module.resblocks.0.convs2.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
275
+ <data strides="1" dilations="1" pads_begin="1" pads_end="1" auto_pad="explicit" />
276
+ <input>
277
+ <port id="0" precision="FP32">
278
+ <dim>-1</dim>
279
+ <dim>64</dim>
280
+ <dim>-1</dim>
281
+ </port>
282
+ <port id="1" precision="FP32">
283
+ <dim>64</dim>
284
+ <dim>64</dim>
285
+ <dim>3</dim>
286
+ </port>
287
+ </input>
288
+ <output>
289
+ <port id="2" precision="FP32" names="118,hidden_states.1">
290
+ <dim>-1</dim>
291
+ <dim>64</dim>
292
+ <dim>-1</dim>
293
+ </port>
294
+ </output>
295
+ </layer>
296
+ <layer id="20" name="__module.resblocks.0/aten::add/Add" type="Add" version="opset1">
297
+ <data auto_broadcast="numpy" />
298
+ <input>
299
+ <port id="0" precision="FP32">
300
+ <dim>-1</dim>
301
+ <dim>64</dim>
302
+ <dim>-1</dim>
303
+ </port>
304
+ <port id="1" precision="FP32">
305
+ <dim>-1</dim>
306
+ <dim>64</dim>
307
+ <dim>-1</dim>
308
+ </port>
309
+ </input>
310
+ <output>
311
+ <port id="2" precision="FP32" names="119,input.7">
312
+ <dim>-1</dim>
313
+ <dim>64</dim>
314
+ <dim>-1</dim>
315
+ </port>
316
+ </output>
317
+ </layer>
318
+ <layer id="21" name="__module.resblocks.0/aten::leaky_relu/PRelu_2" type="PReLU" version="opset1">
319
+ <input>
320
+ <port id="0" precision="FP32">
321
+ <dim>-1</dim>
322
+ <dim>64</dim>
323
+ <dim>-1</dim>
324
+ </port>
325
+ <port id="1" precision="FP32" />
326
+ </input>
327
+ <output>
328
+ <port id="2" precision="FP32" names="120">
329
+ <dim>-1</dim>
330
+ <dim>64</dim>
331
+ <dim>-1</dim>
332
+ </port>
333
+ </output>
334
+ </layer>
335
+ <layer id="22" name="self.resblocks.0.convs1.1.weight" type="Const" version="opset1">
336
+ <data element_type="f32" shape="64, 64, 3" offset="647760" size="49152" />
337
+ <output>
338
+ <port id="0" precision="FP32" names="self.resblocks.0.convs1.1.weight">
339
+ <dim>64</dim>
340
+ <dim>64</dim>
341
+ <dim>3</dim>
342
+ </port>
343
+ </output>
344
+ </layer>
345
+ <layer id="23" name="__module.resblocks.0.convs1.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
346
+ <data strides="1" dilations="3" pads_begin="3" pads_end="3" auto_pad="explicit" />
347
+ <input>
348
+ <port id="0" precision="FP32">
349
+ <dim>-1</dim>
350
+ <dim>64</dim>
351
+ <dim>-1</dim>
352
+ </port>
353
+ <port id="1" precision="FP32">
354
+ <dim>64</dim>
355
+ <dim>64</dim>
356
+ <dim>3</dim>
357
+ </port>
358
+ </input>
359
+ <output>
360
+ <port id="2" precision="FP32" names="127,input.9">
361
+ <dim>-1</dim>
362
+ <dim>64</dim>
363
+ <dim>-1</dim>
364
+ </port>
365
+ </output>
366
+ </layer>
367
+ <layer id="24" name="__module.resblocks.0/aten::leaky_relu/PRelu_3" type="PReLU" version="opset1">
368
+ <input>
369
+ <port id="0" precision="FP32">
370
+ <dim>-1</dim>
371
+ <dim>64</dim>
372
+ <dim>-1</dim>
373
+ </port>
374
+ <port id="1" precision="FP32" />
375
+ </input>
376
+ <output>
377
+ <port id="2" precision="FP32" names="128">
378
+ <dim>-1</dim>
379
+ <dim>64</dim>
380
+ <dim>-1</dim>
381
+ </port>
382
+ </output>
383
+ </layer>
384
+ <layer id="25" name="self.resblocks.0.convs2.1.weight" type="Const" version="opset1">
385
+ <data element_type="f32" shape="64, 64, 3" offset="696912" size="49152" />
386
+ <output>
387
+ <port id="0" precision="FP32" names="self.resblocks.0.convs2.1.weight">
388
+ <dim>64</dim>
389
+ <dim>64</dim>
390
+ <dim>3</dim>
391
+ </port>
392
+ </output>
393
+ </layer>
394
+ <layer id="26" name="__module.resblocks.0.convs2.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
395
+ <data strides="1" dilations="1" pads_begin="1" pads_end="1" auto_pad="explicit" />
396
+ <input>
397
+ <port id="0" precision="FP32">
398
+ <dim>-1</dim>
399
+ <dim>64</dim>
400
+ <dim>-1</dim>
401
+ </port>
402
+ <port id="1" precision="FP32">
403
+ <dim>64</dim>
404
+ <dim>64</dim>
405
+ <dim>3</dim>
406
+ </port>
407
+ </input>
408
+ <output>
409
+ <port id="2" precision="FP32" names="135,hidden_states.3">
410
+ <dim>-1</dim>
411
+ <dim>64</dim>
412
+ <dim>-1</dim>
413
+ </port>
414
+ </output>
415
+ </layer>
416
+ <layer id="27" name="__module.resblocks.0/aten::add/Add_1" type="Add" version="opset1">
417
+ <data auto_broadcast="numpy" />
418
+ <input>
419
+ <port id="0" precision="FP32">
420
+ <dim>-1</dim>
421
+ <dim>64</dim>
422
+ <dim>-1</dim>
423
+ </port>
424
+ <port id="1" precision="FP32">
425
+ <dim>-1</dim>
426
+ <dim>64</dim>
427
+ <dim>-1</dim>
428
+ </port>
429
+ </input>
430
+ <output>
431
+ <port id="2" precision="FP32" names="136,input.11">
432
+ <dim>-1</dim>
433
+ <dim>64</dim>
434
+ <dim>-1</dim>
435
+ </port>
436
+ </output>
437
+ </layer>
438
+ <layer id="28" name="__module.resblocks.0/aten::leaky_relu/PRelu_4" type="PReLU" version="opset1">
439
+ <input>
440
+ <port id="0" precision="FP32">
441
+ <dim>-1</dim>
442
+ <dim>64</dim>
443
+ <dim>-1</dim>
444
+ </port>
445
+ <port id="1" precision="FP32" />
446
+ </input>
447
+ <output>
448
+ <port id="2" precision="FP32" names="137">
449
+ <dim>-1</dim>
450
+ <dim>64</dim>
451
+ <dim>-1</dim>
452
+ </port>
453
+ </output>
454
+ </layer>
455
+ <layer id="29" name="self.resblocks.0.convs1.2.weight" type="Const" version="opset1">
456
+ <data element_type="f32" shape="64, 64, 3" offset="746064" size="49152" />
457
+ <output>
458
+ <port id="0" precision="FP32" names="self.resblocks.0.convs1.2.weight">
459
+ <dim>64</dim>
460
+ <dim>64</dim>
461
+ <dim>3</dim>
462
+ </port>
463
+ </output>
464
+ </layer>
465
+ <layer id="30" name="__module.resblocks.0.convs1.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
466
+ <data strides="1" dilations="5" pads_begin="5" pads_end="5" auto_pad="explicit" />
467
+ <input>
468
+ <port id="0" precision="FP32">
469
+ <dim>-1</dim>
470
+ <dim>64</dim>
471
+ <dim>-1</dim>
472
+ </port>
473
+ <port id="1" precision="FP32">
474
+ <dim>64</dim>
475
+ <dim>64</dim>
476
+ <dim>3</dim>
477
+ </port>
478
+ </input>
479
+ <output>
480
+ <port id="2" precision="FP32" names="144,input.13">
481
+ <dim>-1</dim>
482
+ <dim>64</dim>
483
+ <dim>-1</dim>
484
+ </port>
485
+ </output>
486
+ </layer>
487
+ <layer id="31" name="__module.resblocks.0/aten::leaky_relu/PRelu_5" type="PReLU" version="opset1">
488
+ <input>
489
+ <port id="0" precision="FP32">
490
+ <dim>-1</dim>
491
+ <dim>64</dim>
492
+ <dim>-1</dim>
493
+ </port>
494
+ <port id="1" precision="FP32" />
495
+ </input>
496
+ <output>
497
+ <port id="2" precision="FP32" names="145">
498
+ <dim>-1</dim>
499
+ <dim>64</dim>
500
+ <dim>-1</dim>
501
+ </port>
502
+ </output>
503
+ </layer>
504
+ <layer id="32" name="self.resblocks.0.convs2.2.weight" type="Const" version="opset1">
505
+ <data element_type="f32" shape="64, 64, 3" offset="795216" size="49152" />
506
+ <output>
507
+ <port id="0" precision="FP32" names="self.resblocks.0.convs2.2.weight">
508
+ <dim>64</dim>
509
+ <dim>64</dim>
510
+ <dim>3</dim>
511
+ </port>
512
+ </output>
513
+ </layer>
514
+ <layer id="33" name="__module.resblocks.0.convs2.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
515
+ <data strides="1" dilations="1" pads_begin="1" pads_end="1" auto_pad="explicit" />
516
+ <input>
517
+ <port id="0" precision="FP32">
518
+ <dim>-1</dim>
519
+ <dim>64</dim>
520
+ <dim>-1</dim>
521
+ </port>
522
+ <port id="1" precision="FP32">
523
+ <dim>64</dim>
524
+ <dim>64</dim>
525
+ <dim>3</dim>
526
+ </port>
527
+ </input>
528
+ <output>
529
+ <port id="2" precision="FP32" names="152,hidden_states.5">
530
+ <dim>-1</dim>
531
+ <dim>64</dim>
532
+ <dim>-1</dim>
533
+ </port>
534
+ </output>
535
+ </layer>
536
+ <layer id="34" name="__module.resblocks.0/aten::add/Add_2" type="Add" version="opset1">
537
+ <data auto_broadcast="numpy" />
538
+ <input>
539
+ <port id="0" precision="FP32">
540
+ <dim>-1</dim>
541
+ <dim>64</dim>
542
+ <dim>-1</dim>
543
+ </port>
544
+ <port id="1" precision="FP32">
545
+ <dim>-1</dim>
546
+ <dim>64</dim>
547
+ <dim>-1</dim>
548
+ </port>
549
+ </input>
550
+ <output>
551
+ <port id="2" precision="FP32" names="153_1">
552
+ <dim>-1</dim>
553
+ <dim>64</dim>
554
+ <dim>-1</dim>
555
+ </port>
556
+ </output>
557
+ </layer>
558
+ <layer id="35" name="self.resblocks.1.convs1.0.weight" type="Const" version="opset1">
559
+ <data element_type="f32" shape="64, 64, 7" offset="844368" size="114688" />
560
+ <output>
561
+ <port id="0" precision="FP32" names="self.resblocks.1.convs1.0.weight">
562
+ <dim>64</dim>
563
+ <dim>64</dim>
564
+ <dim>7</dim>
565
+ </port>
566
+ </output>
567
+ </layer>
568
+ <layer id="36" name="__module.resblocks.1.convs1.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
569
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
570
+ <input>
571
+ <port id="0" precision="FP32">
572
+ <dim>-1</dim>
573
+ <dim>64</dim>
574
+ <dim>-1</dim>
575
+ </port>
576
+ <port id="1" precision="FP32">
577
+ <dim>64</dim>
578
+ <dim>64</dim>
579
+ <dim>7</dim>
580
+ </port>
581
+ </input>
582
+ <output>
583
+ <port id="2" precision="FP32" names="182,input.15">
584
+ <dim>-1</dim>
585
+ <dim>64</dim>
586
+ <dim>-1</dim>
587
+ </port>
588
+ </output>
589
+ </layer>
590
+ <layer id="37" name="162" type="Const" version="opset1">
591
+ <data element_type="f32" shape="" offset="287052" size="4" />
592
+ <output>
593
+ <port id="0" precision="FP32" names="162" />
594
+ </output>
595
+ </layer>
596
+ <layer id="38" name="__module.resblocks.1/aten::leaky_relu/PRelu_1" type="PReLU" version="opset1">
597
+ <input>
598
+ <port id="0" precision="FP32">
599
+ <dim>-1</dim>
600
+ <dim>64</dim>
601
+ <dim>-1</dim>
602
+ </port>
603
+ <port id="1" precision="FP32" />
604
+ </input>
605
+ <output>
606
+ <port id="2" precision="FP32" names="183">
607
+ <dim>-1</dim>
608
+ <dim>64</dim>
609
+ <dim>-1</dim>
610
+ </port>
611
+ </output>
612
+ </layer>
613
+ <layer id="39" name="self.resblocks.1.convs2.0.weight" type="Const" version="opset1">
614
+ <data element_type="f32" shape="64, 64, 7" offset="959056" size="114688" />
615
+ <output>
616
+ <port id="0" precision="FP32" names="self.resblocks.1.convs2.0.weight">
617
+ <dim>64</dim>
618
+ <dim>64</dim>
619
+ <dim>7</dim>
620
+ </port>
621
+ </output>
622
+ </layer>
623
+ <layer id="40" name="__module.resblocks.1.convs2.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
624
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
625
+ <input>
626
+ <port id="0" precision="FP32">
627
+ <dim>-1</dim>
628
+ <dim>64</dim>
629
+ <dim>-1</dim>
630
+ </port>
631
+ <port id="1" precision="FP32">
632
+ <dim>64</dim>
633
+ <dim>64</dim>
634
+ <dim>7</dim>
635
+ </port>
636
+ </input>
637
+ <output>
638
+ <port id="2" precision="FP32" names="190,hidden_states.7">
639
+ <dim>-1</dim>
640
+ <dim>64</dim>
641
+ <dim>-1</dim>
642
+ </port>
643
+ </output>
644
+ </layer>
645
+ <layer id="41" name="__module.resblocks.1/aten::add/Add" type="Add" version="opset1">
646
+ <data auto_broadcast="numpy" />
647
+ <input>
648
+ <port id="0" precision="FP32">
649
+ <dim>-1</dim>
650
+ <dim>64</dim>
651
+ <dim>-1</dim>
652
+ </port>
653
+ <port id="1" precision="FP32">
654
+ <dim>-1</dim>
655
+ <dim>64</dim>
656
+ <dim>-1</dim>
657
+ </port>
658
+ </input>
659
+ <output>
660
+ <port id="2" precision="FP32" names="191,input.17">
661
+ <dim>-1</dim>
662
+ <dim>64</dim>
663
+ <dim>-1</dim>
664
+ </port>
665
+ </output>
666
+ </layer>
667
+ <layer id="42" name="__module.resblocks.1/aten::leaky_relu/PRelu_2" type="PReLU" version="opset1">
668
+ <input>
669
+ <port id="0" precision="FP32">
670
+ <dim>-1</dim>
671
+ <dim>64</dim>
672
+ <dim>-1</dim>
673
+ </port>
674
+ <port id="1" precision="FP32" />
675
+ </input>
676
+ <output>
677
+ <port id="2" precision="FP32" names="192">
678
+ <dim>-1</dim>
679
+ <dim>64</dim>
680
+ <dim>-1</dim>
681
+ </port>
682
+ </output>
683
+ </layer>
684
+ <layer id="43" name="self.resblocks.1.convs1.1.weight" type="Const" version="opset1">
685
+ <data element_type="f32" shape="64, 64, 7" offset="1073744" size="114688" />
686
+ <output>
687
+ <port id="0" precision="FP32" names="self.resblocks.1.convs1.1.weight">
688
+ <dim>64</dim>
689
+ <dim>64</dim>
690
+ <dim>7</dim>
691
+ </port>
692
+ </output>
693
+ </layer>
694
+ <layer id="44" name="__module.resblocks.1.convs1.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
695
+ <data strides="1" dilations="3" pads_begin="9" pads_end="9" auto_pad="explicit" />
696
+ <input>
697
+ <port id="0" precision="FP32">
698
+ <dim>-1</dim>
699
+ <dim>64</dim>
700
+ <dim>-1</dim>
701
+ </port>
702
+ <port id="1" precision="FP32">
703
+ <dim>64</dim>
704
+ <dim>64</dim>
705
+ <dim>7</dim>
706
+ </port>
707
+ </input>
708
+ <output>
709
+ <port id="2" precision="FP32" names="199,input.19">
710
+ <dim>-1</dim>
711
+ <dim>64</dim>
712
+ <dim>-1</dim>
713
+ </port>
714
+ </output>
715
+ </layer>
716
+ <layer id="45" name="__module.resblocks.1/aten::leaky_relu/PRelu_3" type="PReLU" version="opset1">
717
+ <input>
718
+ <port id="0" precision="FP32">
719
+ <dim>-1</dim>
720
+ <dim>64</dim>
721
+ <dim>-1</dim>
722
+ </port>
723
+ <port id="1" precision="FP32" />
724
+ </input>
725
+ <output>
726
+ <port id="2" precision="FP32" names="200">
727
+ <dim>-1</dim>
728
+ <dim>64</dim>
729
+ <dim>-1</dim>
730
+ </port>
731
+ </output>
732
+ </layer>
733
+ <layer id="46" name="self.resblocks.1.convs2.1.weight" type="Const" version="opset1">
734
+ <data element_type="f32" shape="64, 64, 7" offset="1188432" size="114688" />
735
+ <output>
736
+ <port id="0" precision="FP32" names="self.resblocks.1.convs2.1.weight">
737
+ <dim>64</dim>
738
+ <dim>64</dim>
739
+ <dim>7</dim>
740
+ </port>
741
+ </output>
742
+ </layer>
743
+ <layer id="47" name="__module.resblocks.1.convs2.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
744
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
745
+ <input>
746
+ <port id="0" precision="FP32">
747
+ <dim>-1</dim>
748
+ <dim>64</dim>
749
+ <dim>-1</dim>
750
+ </port>
751
+ <port id="1" precision="FP32">
752
+ <dim>64</dim>
753
+ <dim>64</dim>
754
+ <dim>7</dim>
755
+ </port>
756
+ </input>
757
+ <output>
758
+ <port id="2" precision="FP32" names="207,hidden_states.9">
759
+ <dim>-1</dim>
760
+ <dim>64</dim>
761
+ <dim>-1</dim>
762
+ </port>
763
+ </output>
764
+ </layer>
765
+ <layer id="48" name="__module.resblocks.1/aten::add/Add_1" type="Add" version="opset1">
766
+ <data auto_broadcast="numpy" />
767
+ <input>
768
+ <port id="0" precision="FP32">
769
+ <dim>-1</dim>
770
+ <dim>64</dim>
771
+ <dim>-1</dim>
772
+ </port>
773
+ <port id="1" precision="FP32">
774
+ <dim>-1</dim>
775
+ <dim>64</dim>
776
+ <dim>-1</dim>
777
+ </port>
778
+ </input>
779
+ <output>
780
+ <port id="2" precision="FP32" names="208,input.21">
781
+ <dim>-1</dim>
782
+ <dim>64</dim>
783
+ <dim>-1</dim>
784
+ </port>
785
+ </output>
786
+ </layer>
787
+ <layer id="49" name="__module.resblocks.1/aten::leaky_relu/PRelu_4" type="PReLU" version="opset1">
788
+ <input>
789
+ <port id="0" precision="FP32">
790
+ <dim>-1</dim>
791
+ <dim>64</dim>
792
+ <dim>-1</dim>
793
+ </port>
794
+ <port id="1" precision="FP32" />
795
+ </input>
796
+ <output>
797
+ <port id="2" precision="FP32" names="209">
798
+ <dim>-1</dim>
799
+ <dim>64</dim>
800
+ <dim>-1</dim>
801
+ </port>
802
+ </output>
803
+ </layer>
804
+ <layer id="50" name="self.resblocks.1.convs1.2.weight" type="Const" version="opset1">
805
+ <data element_type="f32" shape="64, 64, 7" offset="1303120" size="114688" />
806
+ <output>
807
+ <port id="0" precision="FP32" names="self.resblocks.1.convs1.2.weight">
808
+ <dim>64</dim>
809
+ <dim>64</dim>
810
+ <dim>7</dim>
811
+ </port>
812
+ </output>
813
+ </layer>
814
+ <layer id="51" name="__module.resblocks.1.convs1.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
815
+ <data strides="1" dilations="5" pads_begin="15" pads_end="15" auto_pad="explicit" />
816
+ <input>
817
+ <port id="0" precision="FP32">
818
+ <dim>-1</dim>
819
+ <dim>64</dim>
820
+ <dim>-1</dim>
821
+ </port>
822
+ <port id="1" precision="FP32">
823
+ <dim>64</dim>
824
+ <dim>64</dim>
825
+ <dim>7</dim>
826
+ </port>
827
+ </input>
828
+ <output>
829
+ <port id="2" precision="FP32" names="216,input.23">
830
+ <dim>-1</dim>
831
+ <dim>64</dim>
832
+ <dim>-1</dim>
833
+ </port>
834
+ </output>
835
+ </layer>
836
+ <layer id="52" name="__module.resblocks.1/aten::leaky_relu/PRelu_5" type="PReLU" version="opset1">
837
+ <input>
838
+ <port id="0" precision="FP32">
839
+ <dim>-1</dim>
840
+ <dim>64</dim>
841
+ <dim>-1</dim>
842
+ </port>
843
+ <port id="1" precision="FP32" />
844
+ </input>
845
+ <output>
846
+ <port id="2" precision="FP32" names="217">
847
+ <dim>-1</dim>
848
+ <dim>64</dim>
849
+ <dim>-1</dim>
850
+ </port>
851
+ </output>
852
+ </layer>
853
+ <layer id="53" name="self.resblocks.1.convs2.2.weight" type="Const" version="opset1">
854
+ <data element_type="f32" shape="64, 64, 7" offset="1417808" size="114688" />
855
+ <output>
856
+ <port id="0" precision="FP32" names="self.resblocks.1.convs2.2.weight">
857
+ <dim>64</dim>
858
+ <dim>64</dim>
859
+ <dim>7</dim>
860
+ </port>
861
+ </output>
862
+ </layer>
863
+ <layer id="54" name="__module.resblocks.1.convs2.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
864
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
865
+ <input>
866
+ <port id="0" precision="FP32">
867
+ <dim>-1</dim>
868
+ <dim>64</dim>
869
+ <dim>-1</dim>
870
+ </port>
871
+ <port id="1" precision="FP32">
872
+ <dim>64</dim>
873
+ <dim>64</dim>
874
+ <dim>7</dim>
875
+ </port>
876
+ </input>
877
+ <output>
878
+ <port id="2" precision="FP32" names="224,hidden_states.11">
879
+ <dim>-1</dim>
880
+ <dim>64</dim>
881
+ <dim>-1</dim>
882
+ </port>
883
+ </output>
884
+ </layer>
885
+ <layer id="55" name="__module.resblocks.1/aten::add/Add_2" type="Add" version="opset1">
886
+ <data auto_broadcast="numpy" />
887
+ <input>
888
+ <port id="0" precision="FP32">
889
+ <dim>-1</dim>
890
+ <dim>64</dim>
891
+ <dim>-1</dim>
892
+ </port>
893
+ <port id="1" precision="FP32">
894
+ <dim>-1</dim>
895
+ <dim>64</dim>
896
+ <dim>-1</dim>
897
+ </port>
898
+ </input>
899
+ <output>
900
+ <port id="2" precision="FP32" names="225">
901
+ <dim>-1</dim>
902
+ <dim>64</dim>
903
+ <dim>-1</dim>
904
+ </port>
905
+ </output>
906
+ </layer>
907
+ <layer id="56" name="aten::add_/Add" type="Add" version="opset1">
908
+ <data auto_broadcast="numpy" />
909
+ <input>
910
+ <port id="0" precision="FP32">
911
+ <dim>-1</dim>
912
+ <dim>64</dim>
913
+ <dim>-1</dim>
914
+ </port>
915
+ <port id="1" precision="FP32">
916
+ <dim>-1</dim>
917
+ <dim>64</dim>
918
+ <dim>-1</dim>
919
+ </port>
920
+ </input>
921
+ <output>
922
+ <port id="2" precision="FP32" names="153_2">
923
+ <dim>-1</dim>
924
+ <dim>64</dim>
925
+ <dim>-1</dim>
926
+ </port>
927
+ </output>
928
+ </layer>
929
+ <layer id="57" name="self.resblocks.2.convs1.0.weight" type="Const" version="opset1">
930
+ <data element_type="f32" shape="64, 64, 11" offset="1532496" size="180224" />
931
+ <output>
932
+ <port id="0" precision="FP32" names="self.resblocks.2.convs1.0.weight">
933
+ <dim>64</dim>
934
+ <dim>64</dim>
935
+ <dim>11</dim>
936
+ </port>
937
+ </output>
938
+ </layer>
939
+ <layer id="58" name="__module.resblocks.2.convs1.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
940
+ <data strides="1" dilations="1" pads_begin="5" pads_end="5" auto_pad="explicit" />
941
+ <input>
942
+ <port id="0" precision="FP32">
943
+ <dim>-1</dim>
944
+ <dim>64</dim>
945
+ <dim>-1</dim>
946
+ </port>
947
+ <port id="1" precision="FP32">
948
+ <dim>64</dim>
949
+ <dim>64</dim>
950
+ <dim>11</dim>
951
+ </port>
952
+ </input>
953
+ <output>
954
+ <port id="2" precision="FP32" names="254,input.25">
955
+ <dim>-1</dim>
956
+ <dim>64</dim>
957
+ <dim>-1</dim>
958
+ </port>
959
+ </output>
960
+ </layer>
961
+ <layer id="59" name="234" type="Const" version="opset1">
962
+ <data element_type="f32" shape="" offset="287052" size="4" />
963
+ <output>
964
+ <port id="0" precision="FP32" names="234" />
965
+ </output>
966
+ </layer>
967
+ <layer id="60" name="__module.resblocks.2/aten::leaky_relu/PRelu_1" type="PReLU" version="opset1">
968
+ <input>
969
+ <port id="0" precision="FP32">
970
+ <dim>-1</dim>
971
+ <dim>64</dim>
972
+ <dim>-1</dim>
973
+ </port>
974
+ <port id="1" precision="FP32" />
975
+ </input>
976
+ <output>
977
+ <port id="2" precision="FP32" names="255">
978
+ <dim>-1</dim>
979
+ <dim>64</dim>
980
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="61" name="self.resblocks.2.convs2.0.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 64, 11" offset="1712720" size="180224" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.2.convs2.0.weight">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="62" name="__module.resblocks.2.convs2.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="5" pads_end="5" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="262,hidden_states.13">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="63" name="__module.resblocks.2/aten::add/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="263,input.27">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="64" name="__module.resblocks.2/aten::leaky_relu/PRelu_2" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="264">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="65" name="self.resblocks.2.convs1.1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 64, 11" offset="1892944" size="180224" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.2.convs1.1.weight">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="66" name="__module.resblocks.2.convs1.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="3" pads_begin="15" pads_end="15" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="271,input.29">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="67" name="__module.resblocks.2/aten::leaky_relu/PRelu_3" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="272">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="68" name="self.resblocks.2.convs2.1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 64, 11" offset="2073168" size="180224" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.2.convs2.1.weight">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="69" name="__module.resblocks.2.convs2.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="5" pads_end="5" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="279,hidden_states.15">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="70" name="__module.resblocks.2/aten::add/Add_1" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="280,input.31">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="71" name="__module.resblocks.2/aten::leaky_relu/PRelu_4" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="281">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="72" name="self.resblocks.2.convs1.2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 64, 11" offset="2253392" size="180224" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.2.convs1.2.weight">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="73" name="__module.resblocks.2.convs1.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="5" pads_begin="25" pads_end="25" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="288,input.33">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="74" name="__module.resblocks.2/aten::leaky_relu/PRelu_5" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="289">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="75" name="self.resblocks.2.convs2.2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 64, 11" offset="2433616" size="180224" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.2.convs2.2.weight">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="76" name="__module.resblocks.2.convs2.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="5" pads_end="5" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="296,hidden_states.17">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="77" name="__module.resblocks.2/aten::add/Add_2" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="297">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="78" name="aten::add_/Add_1" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="35,res_state.3">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="79" name="Constant_27238" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 1, 1" offset="2613840" size="4" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="80" name="aten::div/Divide_1" type="Divide" version="opset1">
+ <data auto_broadcast="numpy" m_pythondiv="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="40,input.35">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="81" name="41" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="287052" size="4" />
+ <output>
+ <port id="0" precision="FP32" names="41" />
+ </output>
+ </layer>
+ <layer id="82" name="aten::leaky_relu/PRelu_1" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="42">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="83" name="self.upsampler.1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="64, 32, 8" offset="2613844" size="65536" />
+ <output>
+ <port id="0" precision="FP32" names="self.upsampler.1.weight">
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>8</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="84" name="__module.upsampler.1/aten::_convolution/ConvolutionBackpropData" type="ConvolutionBackpropData" version="opset1">
+ <data strides="4" dilations="1" pads_begin="2" pads_end="2" auto_pad="explicit" output_padding="0" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>64</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>32</dim>
+ <dim>8</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="85" name="__module.upsampler.1/aten::_convolution/Reshape" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 32, 1" offset="2679380" size="128" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="86" name="__module.upsampler.1/aten::_convolution/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="310,input.37">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="87" name="317" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="287052" size="4" />
+ <output>
+ <port id="0" precision="FP32" names="317" />
+ </output>
+ </layer>
+ <layer id="88" name="__module.resblocks.3/aten::leaky_relu/PRelu" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="330,402,474">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="89" name="self.resblocks.3.convs1.0.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 3" offset="2679508" size="12288" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.3.convs1.0.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="90" name="__module.resblocks.3.convs1.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="1" pads_end="1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="337,input.39">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="91" name="__module.resblocks.3/aten::leaky_relu/PRelu_1" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="338">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="92" name="self.resblocks.3.convs2.0.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 3" offset="2691796" size="12288" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.3.convs2.0.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="93" name="__module.resblocks.3.convs2.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="1" pads_end="1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="345,hidden_states.19">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="94" name="__module.resblocks.3/aten::add/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="346,input.41">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="95" name="__module.resblocks.3/aten::leaky_relu/PRelu_2" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="347">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="96" name="self.resblocks.3.convs1.1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 3" offset="2704084" size="12288" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.3.convs1.1.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="97" name="__module.resblocks.3.convs1.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="3" pads_begin="3" pads_end="3" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="354,input.43">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="98" name="__module.resblocks.3/aten::leaky_relu/PRelu_3" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="355">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="99" name="self.resblocks.3.convs2.1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 3" offset="2716372" size="12288" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.3.convs2.1.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="100" name="__module.resblocks.3.convs2.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="1" pads_end="1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="362,hidden_states.21">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="101" name="__module.resblocks.3/aten::add/Add_1" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="363,input.45">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="102" name="__module.resblocks.3/aten::leaky_relu/PRelu_4" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="364">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="103" name="self.resblocks.3.convs1.2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 3" offset="2728660" size="12288" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.3.convs1.2.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="104" name="__module.resblocks.3.convs1.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="5" pads_begin="5" pads_end="5" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="371,input.47">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="105" name="__module.resblocks.3/aten::leaky_relu/PRelu_5" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="372">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="106" name="self.resblocks.3.convs2.2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 3" offset="2740948" size="12288" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.3.convs2.2.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="107" name="__module.resblocks.3.convs2.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="1" pads_end="1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="379,hidden_states.23">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="108" name="__module.resblocks.3/aten::add/Add_2" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="380_1">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="109" name="self.resblocks.4.convs1.0.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 7" offset="2753236" size="28672" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.4.convs1.0.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="110" name="__module.resblocks.4.convs1.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="409,input.49">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="111" name="389" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="287052" size="4" />
+ <output>
+ <port id="0" precision="FP32" names="389" />
+ </output>
+ </layer>
+ <layer id="112" name="__module.resblocks.4/aten::leaky_relu/PRelu_1" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="410">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="113" name="self.resblocks.4.convs2.0.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 7" offset="2781908" size="28672" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.4.convs2.0.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="114" name="__module.resblocks.4.convs2.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="417,hidden_states.25">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="115" name="__module.resblocks.4/aten::add/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="418,input.51">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="116" name="__module.resblocks.4/aten::leaky_relu/PRelu_2" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="419">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="117" name="self.resblocks.4.convs1.1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 7" offset="2810580" size="28672" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.4.convs1.1.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="118" name="__module.resblocks.4.convs1.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="3" pads_begin="9" pads_end="9" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="426,input.53">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="119" name="__module.resblocks.4/aten::leaky_relu/PRelu_3" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="427">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="120" name="self.resblocks.4.convs2.1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 7" offset="2839252" size="28672" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.4.convs2.1.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="121" name="__module.resblocks.4.convs2.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="434,hidden_states.27">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="122" name="__module.resblocks.4/aten::add/Add_1" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="435,input.55">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="123" name="__module.resblocks.4/aten::leaky_relu/PRelu_4" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="436">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="124" name="self.resblocks.4.convs1.2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 7" offset="2867924" size="28672" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.4.convs1.2.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="125" name="__module.resblocks.4.convs1.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="5" pads_begin="15" pads_end="15" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="443,input.57">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="126" name="__module.resblocks.4/aten::leaky_relu/PRelu_5" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="444">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="127" name="self.resblocks.4.convs2.2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 7" offset="2896596" size="28672" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.4.convs2.2.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="128" name="__module.resblocks.4.convs2.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="451,hidden_states.29">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="129" name="__module.resblocks.4/aten::add/Add_2" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="452">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="130" name="aten::add_/Add_2" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="380_2">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="131" name="self.resblocks.5.convs1.0.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 11" offset="2925268" size="45056" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.5.convs1.0.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="132" name="__module.resblocks.5.convs1.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="5" pads_end="5" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="481,input.59">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="133" name="461" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="287052" size="4" />
+ <output>
+ <port id="0" precision="FP32" names="461" />
+ </output>
+ </layer>
+ <layer id="134" name="__module.resblocks.5/aten::leaky_relu/PRelu_1" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="482">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="135" name="self.resblocks.5.convs2.0.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 11" offset="2970324" size="45056" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.5.convs2.0.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="136" name="__module.resblocks.5.convs2.0/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="5" pads_end="5" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="489,hidden_states.31">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="137" name="__module.resblocks.5/aten::add/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="490,input.61">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="138" name="__module.resblocks.5/aten::leaky_relu/PRelu_2" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="491">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="139" name="self.resblocks.5.convs1.1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 11" offset="3015380" size="45056" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.5.convs1.1.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="140" name="__module.resblocks.5.convs1.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="3" pads_begin="15" pads_end="15" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="498,input.63">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="141" name="__module.resblocks.5/aten::leaky_relu/PRelu_3" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="499">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="142" name="self.resblocks.5.convs2.1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 11" offset="3060436" size="45056" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.5.convs2.1.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="143" name="__module.resblocks.5.convs2.1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="5" pads_end="5" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="506,hidden_states.33">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="144" name="__module.resblocks.5/aten::add/Add_1" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="507,input.65">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="145" name="__module.resblocks.5/aten::leaky_relu/PRelu_4" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="508">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="146" name="self.resblocks.5.convs1.2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 11" offset="3105492" size="45056" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.5.convs1.2.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="147" name="__module.resblocks.5.convs1.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="5" pads_begin="25" pads_end="25" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="515,input.67">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="148" name="__module.resblocks.5/aten::leaky_relu/PRelu_5" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="516">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="149" name="self.resblocks.5.convs2.2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="32, 32, 11" offset="3150548" size="45056" />
+ <output>
+ <port id="0" precision="FP32" names="self.resblocks.5.convs2.2.weight">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="150" name="__module.resblocks.5.convs2.2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="5" pads_end="5" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>32</dim>
+ <dim>32</dim>
+ <dim>11</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="523,hidden_states.35">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="151" name="__module.resblocks.5/aten::add/Add_2" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="524">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="152" name="aten::add_/Add_3" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="47,res_state.9">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="153" name="Constant_27239" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 1, 1" offset="2613840" size="4" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="154" name="aten::div/Divide_2" type="Divide" version="opset1">
+ <data auto_broadcast="numpy" m_pythondiv="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="52,input">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="155" name="53" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="3195604" size="4" />
+ <output>
+ <port id="0" precision="FP32" names="53" />
+ </output>
+ </layer>
+ <layer id="156" name="aten::leaky_relu/PRelu_2" type="PReLU" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="54">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="157" name="self.conv_post.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 32, 7" offset="3195608" size="896" />
+ <output>
+ <port id="0" precision="FP32" names="self.conv_post.weight">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="158" name="__module.conv_post/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1" dilations="1" pads_begin="3" pads_end="3" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>32</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>32</dim>
+ <dim>7</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="536">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="159" name="aten::tanh/Tanh" type="Tanh" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="56,hidden_states">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="160" name="57" type="Const" version="opset1">
+ <data element_type="i64" shape="" offset="3196504" size="8" />
+ <output>
+ <port id="0" precision="I64" names="57" />
+ </output>
+ </layer>
+ <layer id="161" name="aten::squeeze/Squeeze" type="Squeeze" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="I64" />
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="waveform">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="162" name="Result_24118" type="Result" version="opset1" output_names="waveform">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ </layer>
+ </layers>
+ <edges>
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
+ <edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
+ <edge from-layer="2" from-port="2" to-layer="4" to-port="0" />
+ <edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
+ <edge from-layer="4" from-port="2" to-layer="6" to-port="0" />
+ <edge from-layer="5" from-port="0" to-layer="6" to-port="1" />
+ <edge from-layer="6" from-port="2" to-layer="8" to-port="0" />
+ <edge from-layer="7" from-port="0" to-layer="8" to-port="1" />
+ <edge from-layer="8" from-port="2" to-layer="10" to-port="0" />
+ <edge from-layer="9" from-port="0" to-layer="10" to-port="1" />
+ <edge from-layer="10" from-port="2" to-layer="12" to-port="0" />
+ <edge from-layer="11" from-port="0" to-layer="12" to-port="1" />
+ <edge from-layer="12" from-port="2" to-layer="20" to-port="1" />
+ <edge from-layer="12" from-port="2" to-layer="41" to-port="1" />
+ <edge from-layer="12" from-port="2" to-layer="63" to-port="1" />
+ <edge from-layer="12" from-port="2" to-layer="14" to-port="0" />
+ <edge from-layer="13" from-port="0" to-layer="17" to-port="1" />
+ <edge from-layer="13" from-port="0" to-layer="31" to-port="1" />
+ <edge from-layer="13" from-port="0" to-layer="14" to-port="1" />
+ <edge from-layer="13" from-port="0" to-layer="21" to-port="1" />
+ <edge from-layer="13" from-port="0" to-layer="28" to-port="1" />
+ <edge from-layer="13" from-port="0" to-layer="24" to-port="1" />
+ <edge from-layer="14" from-port="2" to-layer="36" to-port="0" />
+ <edge from-layer="14" from-port="2" to-layer="16" to-port="0" />
+ <edge from-layer="14" from-port="2" to-layer="58" to-port="0" />
+ <edge from-layer="15" from-port="0" to-layer="16" to-port="1" />
+ <edge from-layer="16" from-port="2" to-layer="17" to-port="0" />
+ <edge from-layer="17" from-port="2" to-layer="19" to-port="0" />
+ <edge from-layer="18" from-port="0" to-layer="19" to-port="1" />
+ <edge from-layer="19" from-port="2" to-layer="20" to-port="0" />
+ <edge from-layer="20" from-port="2" to-layer="27" to-port="1" />
+ <edge from-layer="20" from-port="2" to-layer="21" to-port="0" />
+ <edge from-layer="21" from-port="2" to-layer="23" to-port="0" />
+ <edge from-layer="22" from-port="0" to-layer="23" to-port="1" />
+ <edge from-layer="23" from-port="2" to-layer="24" to-port="0" />
+ <edge from-layer="24" from-port="2" to-layer="26" to-port="0" />
+ <edge from-layer="25" from-port="0" to-layer="26" to-port="1" />
+ <edge from-layer="26" from-port="2" to-layer="27" to-port="0" />
+ <edge from-layer="27" from-port="2" to-layer="28" to-port="0" />
+ <edge from-layer="27" from-port="2" to-layer="34" to-port="1" />
+ <edge from-layer="28" from-port="2" to-layer="30" to-port="0" />
+ <edge from-layer="29" from-port="0" to-layer="30" to-port="1" />
+ <edge from-layer="30" from-port="2" to-layer="31" to-port="0" />
+ <edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
+ <edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
+ <edge from-layer="33" from-port="2" to-layer="34" to-port="0" />
+ <edge from-layer="34" from-port="2" to-layer="56" to-port="0" />
+ <edge from-layer="35" from-port="0" to-layer="36" to-port="1" />
+ <edge from-layer="36" from-port="2" to-layer="38" to-port="0" />
+ <edge from-layer="37" from-port="0" to-layer="49" to-port="1" />
+ <edge from-layer="37" from-port="0" to-layer="52" to-port="1" />
+ <edge from-layer="37" from-port="0" to-layer="45" to-port="1" />
+ <edge from-layer="37" from-port="0" to-layer="42" to-port="1" />
+ <edge from-layer="37" from-port="0" to-layer="38" to-port="1" />
+ <edge from-layer="38" from-port="2" to-layer="40" to-port="0" />
+ <edge from-layer="39" from-port="0" to-layer="40" to-port="1" />
+ <edge from-layer="40" from-port="2" to-layer="41" to-port="0" />
+ <edge from-layer="41" from-port="2" to-layer="42" to-port="0" />
+ <edge from-layer="41" from-port="2" to-layer="48" to-port="1" />
+ <edge from-layer="42" from-port="2" to-layer="44" to-port="0" />
+ <edge from-layer="43" from-port="0" to-layer="44" to-port="1" />
+ <edge from-layer="44" from-port="2" to-layer="45" to-port="0" />
+ <edge from-layer="45" from-port="2" to-layer="47" to-port="0" />
+ <edge from-layer="46" from-port="0" to-layer="47" to-port="1" />
+ <edge from-layer="47" from-port="2" to-layer="48" to-port="0" />
+ <edge from-layer="48" from-port="2" to-layer="49" to-port="0" />
+ <edge from-layer="48" from-port="2" to-layer="55" to-port="1" />
+ <edge from-layer="49" from-port="2" to-layer="51" to-port="0" />
+ <edge from-layer="50" from-port="0" to-layer="51" to-port="1" />
+ <edge from-layer="51" from-port="2" to-layer="52" to-port="0" />
+ <edge from-layer="52" from-port="2" to-layer="54" to-port="0" />
+ <edge from-layer="53" from-port="0" to-layer="54" to-port="1" />
+ <edge from-layer="54" from-port="2" to-layer="55" to-port="0" />
+ <edge from-layer="55" from-port="2" to-layer="56" to-port="1" />
+ <edge from-layer="56" from-port="2" to-layer="78" to-port="0" />
+ <edge from-layer="57" from-port="0" to-layer="58" to-port="1" />
+ <edge from-layer="58" from-port="2" to-layer="60" to-port="0" />
+ <edge from-layer="59" from-port="0" to-layer="60" to-port="1" />
+ <edge from-layer="59" from-port="0" to-layer="71" to-port="1" />
+ <edge from-layer="59" from-port="0" to-layer="74" to-port="1" />
+ <edge from-layer="59" from-port="0" to-layer="67" to-port="1" />
+ <edge from-layer="59" from-port="0" to-layer="64" to-port="1" />
+ <edge from-layer="60" from-port="2" to-layer="62" to-port="0" />
+ <edge from-layer="61" from-port="0" to-layer="62" to-port="1" />
+ <edge from-layer="62" from-port="2" to-layer="63" to-port="0" />
+ <edge from-layer="63" from-port="2" to-layer="64" to-port="0" />
+ <edge from-layer="63" from-port="2" to-layer="70" to-port="1" />
+ <edge from-layer="64" from-port="2" to-layer="66" to-port="0" />
+ <edge from-layer="65" from-port="0" to-layer="66" to-port="1" />
+ <edge from-layer="66" from-port="2" to-layer="67" to-port="0" />
+ <edge from-layer="67" from-port="2" to-layer="69" to-port="0" />
+ <edge from-layer="68" from-port="0" to-layer="69" to-port="1" />
+ <edge from-layer="69" from-port="2" to-layer="70" to-port="0" />
+ <edge from-layer="70" from-port="2" to-layer="71" to-port="0" />
+ <edge from-layer="70" from-port="2" to-layer="77" to-port="1" />
+ <edge from-layer="71" from-port="2" to-layer="73" to-port="0" />
+ <edge from-layer="72" from-port="0" to-layer="73" to-port="1" />
+ <edge from-layer="73" from-port="2" to-layer="74" to-port="0" />
+ <edge from-layer="74" from-port="2" to-layer="76" to-port="0" />
+ <edge from-layer="75" from-port="0" to-layer="76" to-port="1" />
+ <edge from-layer="76" from-port="2" to-layer="77" to-port="0" />
+ <edge from-layer="77" from-port="2" to-layer="78" to-port="1" />
+ <edge from-layer="78" from-port="2" to-layer="80" to-port="0" />
+ <edge from-layer="79" from-port="0" to-layer="80" to-port="1" />
+ <edge from-layer="80" from-port="2" to-layer="82" to-port="0" />
+ <edge from-layer="81" from-port="0" to-layer="82" to-port="1" />
+ <edge from-layer="82" from-port="2" to-layer="84" to-port="0" />
+ <edge from-layer="83" from-port="0" to-layer="84" to-port="1" />
+ <edge from-layer="84" from-port="2" to-layer="86" to-port="0" />
+ <edge from-layer="85" from-port="0" to-layer="86" to-port="1" />
+ <edge from-layer="86" from-port="2" to-layer="88" to-port="0" />
+ <edge from-layer="86" from-port="2" to-layer="137" to-port="1" />
+ <edge from-layer="86" from-port="2" to-layer="115" to-port="1" />
+ <edge from-layer="86" from-port="2" to-layer="94" to-port="1" />
+ <edge from-layer="87" from-port="0" to-layer="105" to-port="1" />
+ <edge from-layer="87" from-port="0" to-layer="102" to-port="1" />
+ <edge from-layer="87" from-port="0" to-layer="98" to-port="1" />
+ <edge from-layer="87" from-port="0" to-layer="95" to-port="1" />
+ <edge from-layer="87" from-port="0" to-layer="88" to-port="1" />
2781
+ <edge from-layer="87" from-port="0" to-layer="91" to-port="1" />
2782
+ <edge from-layer="88" from-port="2" to-layer="90" to-port="0" />
2783
+ <edge from-layer="88" from-port="2" to-layer="132" to-port="0" />
2784
+ <edge from-layer="88" from-port="2" to-layer="110" to-port="0" />
2785
+ <edge from-layer="89" from-port="0" to-layer="90" to-port="1" />
2786
+ <edge from-layer="90" from-port="2" to-layer="91" to-port="0" />
2787
+ <edge from-layer="91" from-port="2" to-layer="93" to-port="0" />
2788
+ <edge from-layer="92" from-port="0" to-layer="93" to-port="1" />
2789
+ <edge from-layer="93" from-port="2" to-layer="94" to-port="0" />
2790
+ <edge from-layer="94" from-port="2" to-layer="101" to-port="1" />
2791
+ <edge from-layer="94" from-port="2" to-layer="95" to-port="0" />
2792
+ <edge from-layer="95" from-port="2" to-layer="97" to-port="0" />
2793
+ <edge from-layer="96" from-port="0" to-layer="97" to-port="1" />
2794
+ <edge from-layer="97" from-port="2" to-layer="98" to-port="0" />
2795
+ <edge from-layer="98" from-port="2" to-layer="100" to-port="0" />
2796
+ <edge from-layer="99" from-port="0" to-layer="100" to-port="1" />
2797
+ <edge from-layer="100" from-port="2" to-layer="101" to-port="0" />
2798
+ <edge from-layer="101" from-port="2" to-layer="102" to-port="0" />
2799
+ <edge from-layer="101" from-port="2" to-layer="108" to-port="1" />
2800
+ <edge from-layer="102" from-port="2" to-layer="104" to-port="0" />
2801
+ <edge from-layer="103" from-port="0" to-layer="104" to-port="1" />
2802
+ <edge from-layer="104" from-port="2" to-layer="105" to-port="0" />
2803
+ <edge from-layer="105" from-port="2" to-layer="107" to-port="0" />
2804
+ <edge from-layer="106" from-port="0" to-layer="107" to-port="1" />
2805
+ <edge from-layer="107" from-port="2" to-layer="108" to-port="0" />
2806
+ <edge from-layer="108" from-port="2" to-layer="130" to-port="0" />
2807
+ <edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
2808
+ <edge from-layer="110" from-port="2" to-layer="112" to-port="0" />
2809
+ <edge from-layer="111" from-port="0" to-layer="116" to-port="1" />
2810
+ <edge from-layer="111" from-port="0" to-layer="119" to-port="1" />
2811
+ <edge from-layer="111" from-port="0" to-layer="126" to-port="1" />
2812
+ <edge from-layer="111" from-port="0" to-layer="123" to-port="1" />
2813
+ <edge from-layer="111" from-port="0" to-layer="112" to-port="1" />
2814
+ <edge from-layer="112" from-port="2" to-layer="114" to-port="0" />
2815
+ <edge from-layer="113" from-port="0" to-layer="114" to-port="1" />
2816
+ <edge from-layer="114" from-port="2" to-layer="115" to-port="0" />
2817
+ <edge from-layer="115" from-port="2" to-layer="116" to-port="0" />
2818
+ <edge from-layer="115" from-port="2" to-layer="122" to-port="1" />
2819
+ <edge from-layer="116" from-port="2" to-layer="118" to-port="0" />
2820
+ <edge from-layer="117" from-port="0" to-layer="118" to-port="1" />
2821
+ <edge from-layer="118" from-port="2" to-layer="119" to-port="0" />
2822
+ <edge from-layer="119" from-port="2" to-layer="121" to-port="0" />
2823
+ <edge from-layer="120" from-port="0" to-layer="121" to-port="1" />
2824
+ <edge from-layer="121" from-port="2" to-layer="122" to-port="0" />
2825
+ <edge from-layer="122" from-port="2" to-layer="123" to-port="0" />
2826
+ <edge from-layer="122" from-port="2" to-layer="129" to-port="1" />
2827
+ <edge from-layer="123" from-port="2" to-layer="125" to-port="0" />
2828
+ <edge from-layer="124" from-port="0" to-layer="125" to-port="1" />
2829
+ <edge from-layer="125" from-port="2" to-layer="126" to-port="0" />
2830
+ <edge from-layer="126" from-port="2" to-layer="128" to-port="0" />
2831
+ <edge from-layer="127" from-port="0" to-layer="128" to-port="1" />
2832
+ <edge from-layer="128" from-port="2" to-layer="129" to-port="0" />
2833
+ <edge from-layer="129" from-port="2" to-layer="130" to-port="1" />
2834
+ <edge from-layer="130" from-port="2" to-layer="152" to-port="0" />
2835
+ <edge from-layer="131" from-port="0" to-layer="132" to-port="1" />
2836
+ <edge from-layer="132" from-port="2" to-layer="134" to-port="0" />
2837
+ <edge from-layer="133" from-port="0" to-layer="141" to-port="1" />
2838
+ <edge from-layer="133" from-port="0" to-layer="145" to-port="1" />
2839
+ <edge from-layer="133" from-port="0" to-layer="148" to-port="1" />
2840
+ <edge from-layer="133" from-port="0" to-layer="138" to-port="1" />
2841
+ <edge from-layer="133" from-port="0" to-layer="134" to-port="1" />
2842
+ <edge from-layer="134" from-port="2" to-layer="136" to-port="0" />
2843
+ <edge from-layer="135" from-port="0" to-layer="136" to-port="1" />
2844
+ <edge from-layer="136" from-port="2" to-layer="137" to-port="0" />
2845
+ <edge from-layer="137" from-port="2" to-layer="144" to-port="1" />
2846
+ <edge from-layer="137" from-port="2" to-layer="138" to-port="0" />
2847
+ <edge from-layer="138" from-port="2" to-layer="140" to-port="0" />
2848
+ <edge from-layer="139" from-port="0" to-layer="140" to-port="1" />
2849
+ <edge from-layer="140" from-port="2" to-layer="141" to-port="0" />
2850
+ <edge from-layer="141" from-port="2" to-layer="143" to-port="0" />
2851
+ <edge from-layer="142" from-port="0" to-layer="143" to-port="1" />
2852
+ <edge from-layer="143" from-port="2" to-layer="144" to-port="0" />
2853
+ <edge from-layer="144" from-port="2" to-layer="151" to-port="1" />
2854
+ <edge from-layer="144" from-port="2" to-layer="145" to-port="0" />
2855
+ <edge from-layer="145" from-port="2" to-layer="147" to-port="0" />
2856
+ <edge from-layer="146" from-port="0" to-layer="147" to-port="1" />
2857
+ <edge from-layer="147" from-port="2" to-layer="148" to-port="0" />
2858
+ <edge from-layer="148" from-port="2" to-layer="150" to-port="0" />
2859
+ <edge from-layer="149" from-port="0" to-layer="150" to-port="1" />
2860
+ <edge from-layer="150" from-port="2" to-layer="151" to-port="0" />
2861
+ <edge from-layer="151" from-port="2" to-layer="152" to-port="1" />
2862
+ <edge from-layer="152" from-port="2" to-layer="154" to-port="0" />
2863
+ <edge from-layer="153" from-port="0" to-layer="154" to-port="1" />
2864
+ <edge from-layer="154" from-port="2" to-layer="156" to-port="0" />
2865
+ <edge from-layer="155" from-port="0" to-layer="156" to-port="1" />
2866
+ <edge from-layer="156" from-port="2" to-layer="158" to-port="0" />
2867
+ <edge from-layer="157" from-port="0" to-layer="158" to-port="1" />
2868
+ <edge from-layer="158" from-port="2" to-layer="159" to-port="0" />
2869
+ <edge from-layer="159" from-port="1" to-layer="161" to-port="0" />
2870
+ <edge from-layer="160" from-port="0" to-layer="161" to-port="1" />
2871
+ <edge from-layer="161" from-port="2" to-layer="162" to-port="0" />
2872
+ </edges>
+ <rt_info>
+ <Runtime_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
+ <conversion_parameters>
+ <framework value="pytorch" />
+ <is_python_object value="True" />
+ </conversion_parameters>
+ <optimum>
+ <optimum_intel_version value="1.23.0.dev0+81089b7" />
+ <optimum_version value="1.25.0.dev0" />
+ <pytorch_version value="2.5.1+cpu" />
+ <transformers_version value="4.51.3" />
+ </optimum>
+ </rt_info>
+ </net>
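
The rt_info block closing the encoder IR records the toolchain that produced it: an optimum-intel development build on top of PyTorch 2.5.1 and transformers 4.51.3, targeting the OpenVINO 2025.1 runtime. As a quick sanity check of the committed files, the encoder IR can be read and compiled directly with the OpenVINO runtime. A minimal sketch, assuming pip install openvino (2025.1 or a compatible release) and that openvino_encoder_model.xml and its .bin have been downloaded into the working directory:

import openvino as ov

# Read the IR; the weights are resolved from the .bin file with the same stem.
core = ov.Core()
model = core.read_model("openvino_encoder_model.xml")

# Print the graph's inputs and outputs to confirm the export looks sane.
for inp in model.inputs:
    print("input:", inp.any_name, inp.partial_shape, inp.element_type)
for out in model.outputs:
    print("output:", out.any_name, out.partial_shape, out.element_type)

# Compiling for CPU will fail fast if the IR is malformed.
compiled = core.compile_model(model, "CPU")

For end-to-end synthesis, the intended path is loading the whole repository through optimum-intel (which generated these files, per the optimum entry above) rather than driving the encoder and decoder IRs by hand.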