Kari committed on
Commit cd91465 · 1 Parent(s): 14e6deb

add more models

test-ml-trained/config.json DELETED
@@ -1,64 +0,0 @@
- {
- "_name_or_path": "Helsinki-NLP/opus-mt-en-zh",
- "activation_dropout": 0.0,
- "activation_function": "swish",
- "add_bias_logits": false,
- "add_final_layer_norm": false,
- "architectures": [
- "MarianMTModel"
- ],
- "attention_dropout": 0.0,
- "bad_words_ids": [
- [
- 65000
- ]
- ],
- "bos_token_id": 0,
- "classif_dropout": 0.0,
- "classifier_dropout": 0.0,
- "d_model": 512,
- "decoder_attention_heads": 8,
- "decoder_ffn_dim": 2048,
- "decoder_layerdrop": 0.0,
- "decoder_layers": 6,
- "decoder_start_token_id": 65000,
- "decoder_vocab_size": 65001,
- "do_blenderbot_90_layernorm": false,
- "dropout": 0.1,
- "encoder_attention_heads": 8,
- "encoder_ffn_dim": 2048,
- "encoder_layerdrop": 0.0,
- "encoder_layers": 6,
- "eos_token_id": 0,
- "extra_pos_embeddings": 0,
- "force_bos_token_to_be_generated": false,
- "forced_eos_token_id": 0,
- "gradient_checkpointing": false,
- "id2label": {
- "0": "LABEL_0",
- "1": "LABEL_1",
- "2": "LABEL_2"
- },
- "init_std": 0.02,
- "is_encoder_decoder": true,
- "label2id": {
- "LABEL_0": 0,
- "LABEL_1": 1,
- "LABEL_2": 2
- },
- "max_length": 512,
- "max_position_embeddings": 512,
- "model_type": "marian",
- "normalize_before": false,
- "normalize_embedding": false,
- "num_beams": 4,
- "num_hidden_layers": 6,
- "pad_token_id": 65000,
- "scale_embedding": true,
- "share_encoder_decoder_embeddings": true,
- "static_position_embeddings": true,
- "torch_dtype": "float32",
- "transformers_version": "4.24.0",
- "use_cache": true,
- "vocab_size": 65001
- }
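The deleted config describes a MarianMTModel fine-tuned from Helsinki-NLP/opus-mt-en-zh (6 encoder and 6 decoder layers, d_model 512, SentencePiece vocabulary of 65001). A minimal sketch of loading such a checkpoint directory for inference, assuming the transformers library is installed and the test-ml-trained/ directory still exists locally with these files:

# Minimal sketch: load the deleted en->zh Marian checkpoint for inference.
# Assumes `transformers` is installed and the checkpoint directory exists locally.
from transformers import MarianMTModel, MarianTokenizer

checkpoint_dir = "test-ml-trained"  # hypothetical local path to this (now deleted) directory

tokenizer = MarianTokenizer.from_pretrained(checkpoint_dir)
model = MarianMTModel.from_pretrained(checkpoint_dir)

batch = tokenizer(["Hello, how are you?"], return_tensors="pt", padding=True)
generated = model.generate(**batch)  # decoding defaults (num_beams=4, max_length=512) come from the config above
print(tokenizer.batch_decode(generated, skip_special_tokens=True))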
test-ml-trained/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7177dcf4308939d6a6036fc4ad3d9c9834cbc8abfaa20eaff6c703831b942304
- size 310020485
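pytorch_model.bin is stored through Git LFS, so the diff only shows the pointer file (spec version, sha256 oid, byte size), not the roughly 310 MB of weights. A small sketch, assuming the real file has been pulled locally (e.g. via git lfs pull), that checks the download against the pointer's oid and size:

# Sketch: verify a locally pulled LFS file against the pointer's sha256 oid and size.
# Path and expected values are taken from the pointer shown above; the path is hypothetical.
import hashlib
import os

path = "test-ml-trained/pytorch_model.bin"
expected_oid = "7177dcf4308939d6a6036fc4ad3d9c9834cbc8abfaa20eaff6c703831b942304"
expected_size = 310020485

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match the LFS pointer"
assert sha.hexdigest() == expected_oid, "sha256 does not match the LFS pointer"
print("weights match the LFS pointer")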
test-ml-trained/source.spm DELETED
Binary file (806 kB)
 
test-ml-trained/special_tokens_map.json DELETED
@@ -1,5 +0,0 @@
- {
- "eos_token": "</s>",
- "pad_token": "<pad>",
- "unk_token": "<unk>"
- }
test-ml-trained/target.spm DELETED
Binary file (805 kB)
 
test-ml-trained/tokenizer_config.json DELETED
@@ -1,14 +0,0 @@
- {
- "eos_token": "</s>",
- "model_max_length": 512,
- "name_or_path": "Helsinki-NLP/opus-mt-en-zh",
- "pad_token": "<pad>",
- "return_tensors": "tf",
- "separate_vocabs": false,
- "source_lang": "eng",
- "sp_model_kwargs": {},
- "special_tokens_map_file": null,
- "target_lang": "zho",
- "tokenizer_class": "MarianTokenizer",
- "unk_token": "<unk>"
- }
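tokenizer_config.json pins the MarianTokenizer settings: the eng→zho language pair, a shared vocabulary (separate_vocabs false), and model_max_length 512, with source.spm and target.spm as the SentencePiece models. A minimal sketch of preparing an en→zh training pair with these settings, assuming the directory is available locally and a transformers version that supports the text_target keyword:

# Sketch: prepare an en->zh training pair with the MarianTokenizer described above.
# Assumes the test-ml-trained/ directory (source.spm, target.spm, vocab.json, this config) exists locally.
from transformers import MarianTokenizer

tokenizer = MarianTokenizer.from_pretrained("test-ml-trained")  # hypothetical local path

features = tokenizer(
    "The weather is nice today.",   # source (eng), encoded with source.spm
    text_target="今天天气很好。",     # target (zho), encoded with target.spm
    truncation=True,
    max_length=512,
    return_tensors="pt",
)
print(features["input_ids"].shape, features["labels"].shape)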
test-ml-trained/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:0b6746404952c33e4c0d2f9b49e3034f856735430ca20172c5dd127315fdc271
- size 3567
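training_args.bin is a small (about 3.6 kB) pickled TrainingArguments object saved alongside the checkpoint, also stored through LFS. A sketch of inspecting it, assuming the file has been pulled locally and transformers is installed so the class can be unpickled; on newer torch releases weights_only=False is needed because this is an arbitrary pickled object, not a weights file:

# Sketch: inspect the pickled training arguments saved with the checkpoint.
# Assumes test-ml-trained/training_args.bin has been pulled from LFS and transformers is installed.
import torch

args = torch.load("test-ml-trained/training_args.bin", weights_only=False)
print(type(args).__name__)  # typically TrainingArguments or Seq2SeqTrainingArguments
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)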
test-ml-trained/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
test-ml-trained_3/config.json DELETED
@@ -1,64 +0,0 @@
- {
- "_name_or_path": "Helsinki-NLP/opus-mt-en-zh",
- "activation_dropout": 0.0,
- "activation_function": "swish",
- "add_bias_logits": false,
- "add_final_layer_norm": false,
- "architectures": [
- "MarianMTModel"
- ],
- "attention_dropout": 0.0,
- "bad_words_ids": [
- [
- 65000
- ]
- ],
- "bos_token_id": 0,
- "classif_dropout": 0.0,
- "classifier_dropout": 0.0,
- "d_model": 512,
- "decoder_attention_heads": 8,
- "decoder_ffn_dim": 2048,
- "decoder_layerdrop": 0.0,
- "decoder_layers": 6,
- "decoder_start_token_id": 65000,
- "decoder_vocab_size": 65001,
- "do_blenderbot_90_layernorm": false,
- "dropout": 0.1,
- "encoder_attention_heads": 8,
- "encoder_ffn_dim": 2048,
- "encoder_layerdrop": 0.0,
- "encoder_layers": 6,
- "eos_token_id": 0,
- "extra_pos_embeddings": 0,
- "force_bos_token_to_be_generated": false,
- "forced_eos_token_id": 0,
- "gradient_checkpointing": false,
- "id2label": {
- "0": "LABEL_0",
- "1": "LABEL_1",
- "2": "LABEL_2"
- },
- "init_std": 0.02,
- "is_encoder_decoder": true,
- "label2id": {
- "LABEL_0": 0,
- "LABEL_1": 1,
- "LABEL_2": 2
- },
- "max_length": 512,
- "max_position_embeddings": 512,
- "model_type": "marian",
- "normalize_before": false,
- "normalize_embedding": false,
- "num_beams": 4,
- "num_hidden_layers": 6,
- "pad_token_id": 65000,
- "scale_embedding": true,
- "share_encoder_decoder_embeddings": true,
- "static_position_embeddings": true,
- "torch_dtype": "float32",
- "transformers_version": "4.29.1",
- "use_cache": true,
- "vocab_size": 65001
- }
test-ml-trained_3/generation_config.json DELETED
@@ -1,16 +0,0 @@
- {
- "_from_model_config": true,
- "bad_words_ids": [
- [
- 65000
- ]
- ],
- "bos_token_id": 0,
- "decoder_start_token_id": 65000,
- "eos_token_id": 0,
- "forced_eos_token_id": 0,
- "max_length": 512,
- "num_beams": 4,
- "pad_token_id": 65000,
- "transformers_version": "4.29.1"
- }
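generation_config.json carries the decoding defaults for this checkpoint: beam search with num_beams=4, max_length=512, pad and decoder-start token 65000, and token 65000 banned via bad_words_ids. A sketch of how these defaults are picked up by generate() and how they can be overridden per call, assuming the test-ml-trained_3/ directory is available locally:

# Sketch: decoding defaults come from generation_config.json; explicit arguments override them.
# Assumes the test-ml-trained_3/ checkpoint directory exists locally.
from transformers import MarianMTModel, MarianTokenizer

ckpt = "test-ml-trained_3"  # hypothetical local path
tokenizer = MarianTokenizer.from_pretrained(ckpt)
model = MarianMTModel.from_pretrained(ckpt)

batch = tokenizer(["Machine translation is fun."], return_tensors="pt")
out_default = model.generate(**batch)                              # beam search, num_beams=4, max_length=512
out_greedy = model.generate(**batch, num_beams=1, max_length=64)   # per-call overrides take precedence
print(tokenizer.batch_decode(out_default, skip_special_tokens=True))
print(tokenizer.batch_decode(out_greedy, skip_special_tokens=True))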
test-ml-trained_3/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:c6b33150ccbb6ac7f92a21e294a101052341ed9ecdf64e47faada1946b86fd9d
- size 310020485
test-ml-trained_3/source.spm DELETED
Binary file (806 kB)
 
test-ml-trained_3/special_tokens_map.json DELETED
@@ -1,5 +0,0 @@
- {
- "eos_token": "</s>",
- "pad_token": "<pad>",
- "unk_token": "<unk>"
- }
test-ml-trained_3/target.spm DELETED
Binary file (805 kB)
 
test-ml-trained_3/tokenizer_config.json DELETED
@@ -1,13 +0,0 @@
- {
- "clean_up_tokenization_spaces": true,
- "eos_token": "</s>",
- "model_max_length": 512,
- "pad_token": "<pad>",
- "return_tensors": "tf",
- "separate_vocabs": false,
- "source_lang": "eng",
- "sp_model_kwargs": {},
- "target_lang": "zho",
- "tokenizer_class": "MarianTokenizer",
- "unk_token": "<unk>"
- }
test-ml-trained_3/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:59e123a6b2fdb976af2853bf1026aa89ec0adf370814cc36d24464e4b1335c82
- size 4079
test-ml-trained_3/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
test-ml-trained_4/config.json DELETED
@@ -1,64 +0,0 @@
- {
- "_name_or_path": "Helsinki-NLP/opus-mt-en-zh",
- "activation_dropout": 0.0,
- "activation_function": "swish",
- "add_bias_logits": false,
- "add_final_layer_norm": false,
- "architectures": [
- "MarianMTModel"
- ],
- "attention_dropout": 0.0,
- "bad_words_ids": [
- [
- 65000
- ]
- ],
- "bos_token_id": 0,
- "classif_dropout": 0.0,
- "classifier_dropout": 0.0,
- "d_model": 512,
- "decoder_attention_heads": 8,
- "decoder_ffn_dim": 2048,
- "decoder_layerdrop": 0.0,
- "decoder_layers": 6,
- "decoder_start_token_id": 65000,
- "decoder_vocab_size": 65001,
- "do_blenderbot_90_layernorm": false,
- "dropout": 0.1,
- "encoder_attention_heads": 8,
- "encoder_ffn_dim": 2048,
- "encoder_layerdrop": 0.0,
- "encoder_layers": 6,
- "eos_token_id": 0,
- "extra_pos_embeddings": 0,
- "force_bos_token_to_be_generated": false,
- "forced_eos_token_id": 0,
- "gradient_checkpointing": false,
- "id2label": {
- "0": "LABEL_0",
- "1": "LABEL_1",
- "2": "LABEL_2"
- },
- "init_std": 0.02,
- "is_encoder_decoder": true,
- "label2id": {
- "LABEL_0": 0,
- "LABEL_1": 1,
- "LABEL_2": 2
- },
- "max_length": 512,
- "max_position_embeddings": 512,
- "model_type": "marian",
- "normalize_before": false,
- "normalize_embedding": false,
- "num_beams": 4,
- "num_hidden_layers": 6,
- "pad_token_id": 65000,
- "scale_embedding": true,
- "share_encoder_decoder_embeddings": true,
- "static_position_embeddings": true,
- "torch_dtype": "float32",
- "transformers_version": "4.29.1",
- "use_cache": true,
- "vocab_size": 65001
- }
test-ml-trained_4/generation_config.json DELETED
@@ -1,16 +0,0 @@
- {
- "_from_model_config": true,
- "bad_words_ids": [
- [
- 65000
- ]
- ],
- "bos_token_id": 0,
- "decoder_start_token_id": 65000,
- "eos_token_id": 0,
- "forced_eos_token_id": 0,
- "max_length": 512,
- "num_beams": 4,
- "pad_token_id": 65000,
- "transformers_version": "4.29.1"
- }
test-ml-trained_4/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a74ceefe4252262435e6d6d39402ac7ace005fe45730e6aa6cba757c6e099784
- size 310020485
test-ml-trained_4/source.spm DELETED
Binary file (806 kB)
 
test-ml-trained_4/special_tokens_map.json DELETED
@@ -1,5 +0,0 @@
- {
- "eos_token": "</s>",
- "pad_token": "<pad>",
- "unk_token": "<unk>"
- }
test-ml-trained_4/target.spm DELETED
Binary file (805 kB)
 
test-ml-trained_4/tokenizer_config.json DELETED
@@ -1,13 +0,0 @@
- {
- "clean_up_tokenization_spaces": true,
- "eos_token": "</s>",
- "model_max_length": 512,
- "pad_token": "<pad>",
- "return_tensors": "tf",
- "separate_vocabs": false,
- "source_lang": "eng",
- "sp_model_kwargs": {},
- "target_lang": "zho",
- "tokenizer_class": "MarianTokenizer",
- "unk_token": "<unk>"
- }
test-ml-trained_4/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:81e77664de4c24d5dba2450ed86b13c0991a7b5bff69f39a1673da6a1ae4b139
- size 4079
test-ml-trained_4/vocab.json DELETED
The diff for this file is too large to render. See raw diff