SystemAdmin123 committed on
Commit
4cd7b81
·
verified ·
1 Parent(s): e2311e0

Training in progress, step 100, checkpoint

Browse files
last-checkpoint/added_tokens.json ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "<|assistant|>": 32001,
3
+ "<|endoftext|>": 32000,
4
+ "<|end|>": 32007,
5
+ "<|placeholder1|>": 32002,
6
+ "<|placeholder2|>": 32003,
7
+ "<|placeholder3|>": 32004,
8
+ "<|placeholder4|>": 32005,
9
+ "<|placeholder5|>": 32008,
10
+ "<|placeholder6|>": 32009,
11
+ "<|system|>": 32006,
12
+ "<|user|>": 32010
13
+ }
last-checkpoint/config.json ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "Xenova/tiny-random-Phi3ForCausalLM",
3
+ "architectures": [
4
+ "Phi3ForCausalLM"
5
+ ],
6
+ "attention_dropout": 0.0,
7
+ "bos_token_id": 1,
8
+ "embd_pdrop": 0.0,
9
+ "eos_token_id": 32000,
10
+ "hidden_act": "silu",
11
+ "hidden_size": 32,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 64,
14
+ "max_position_embeddings": 4096,
15
+ "model_type": "phi3",
16
+ "num_attention_heads": 4,
17
+ "num_hidden_layers": 2,
18
+ "num_key_value_heads": 4,
19
+ "original_max_position_embeddings": 4096,
20
+ "pad_token_id": 32000,
21
+ "resid_pdrop": 0.0,
22
+ "rms_norm_eps": 1e-05,
23
+ "rope_scaling": null,
24
+ "rope_theta": 10000.0,
25
+ "sliding_window": 2047,
26
+ "tie_word_embeddings": false,
27
+ "torch_dtype": "bfloat16",
28
+ "transformers_version": "4.48.1",
29
+ "use_cache": false,
30
+ "vocab_size": 32011
31
+ }
last-checkpoint/generation_config.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 1,
4
+ "do_sample": true,
5
+ "eos_token_id": 32000,
6
+ "pad_token_id": 32000,
7
+ "transformers_version": "4.48.1"
8
+ }
last-checkpoint/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c914af7e4717984588deb34667879d37816d8425bf9f40c7f2df62411adc325a
3
+ size 4140280
last-checkpoint/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9f1d4cc7e69b6d4ecabbbf7997c661f2f6b544b7bb70ab8878c1272f23010f6f
3
+ size 4291766
last-checkpoint/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a4651e32e118f1ea1a8e26dfbbe64298593e12e6a71bcd36cb77f04f86d3f86d
3
+ size 15024
last-checkpoint/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f0c1eba909fbb51daca773a25c075f182b4096aff21c9b4ff19dbada2080ac99
3
+ size 15024
last-checkpoint/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:28adb9f06e220aefdc723ea4380a84d42b8bfb87cc53ce65859d55ce1876f51c
3
+ size 15024
last-checkpoint/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:423c49ed521f6986d20d8b29112b383f4b0f3f2e228084ef82c2ad7dcd5d1de8
3
+ size 15024
last-checkpoint/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3a60c7d771c1fd156acee762fba03c724cb41829a3f71df370ecd1d20b134982
3
+ size 1064
last-checkpoint/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "<|endoftext|>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<|endoftext|>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "<unk>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ }
30
+ }
last-checkpoint/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
last-checkpoint/tokenizer.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
3
+ size 499723
last-checkpoint/tokenizer_config.json ADDED
@@ -0,0 +1,133 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
4
+ "add_prefix_space": null,
5
+ "added_tokens_decoder": {
6
+ "0": {
7
+ "content": "<unk>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false,
12
+ "special": true
13
+ },
14
+ "1": {
15
+ "content": "<s>",
16
+ "lstrip": false,
17
+ "normalized": false,
18
+ "rstrip": false,
19
+ "single_word": false,
20
+ "special": true
21
+ },
22
+ "2": {
23
+ "content": "</s>",
24
+ "lstrip": false,
25
+ "normalized": false,
26
+ "rstrip": true,
27
+ "single_word": false,
28
+ "special": false
29
+ },
30
+ "32000": {
31
+ "content": "<|endoftext|>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false,
36
+ "special": true
37
+ },
38
+ "32001": {
39
+ "content": "<|assistant|>",
40
+ "lstrip": false,
41
+ "normalized": false,
42
+ "rstrip": true,
43
+ "single_word": false,
44
+ "special": true
45
+ },
46
+ "32002": {
47
+ "content": "<|placeholder1|>",
48
+ "lstrip": false,
49
+ "normalized": false,
50
+ "rstrip": true,
51
+ "single_word": false,
52
+ "special": true
53
+ },
54
+ "32003": {
55
+ "content": "<|placeholder2|>",
56
+ "lstrip": false,
57
+ "normalized": false,
58
+ "rstrip": true,
59
+ "single_word": false,
60
+ "special": true
61
+ },
62
+ "32004": {
63
+ "content": "<|placeholder3|>",
64
+ "lstrip": false,
65
+ "normalized": false,
66
+ "rstrip": true,
67
+ "single_word": false,
68
+ "special": true
69
+ },
70
+ "32005": {
71
+ "content": "<|placeholder4|>",
72
+ "lstrip": false,
73
+ "normalized": false,
74
+ "rstrip": true,
75
+ "single_word": false,
76
+ "special": true
77
+ },
78
+ "32006": {
79
+ "content": "<|system|>",
80
+ "lstrip": false,
81
+ "normalized": false,
82
+ "rstrip": true,
83
+ "single_word": false,
84
+ "special": true
85
+ },
86
+ "32007": {
87
+ "content": "<|end|>",
88
+ "lstrip": false,
89
+ "normalized": false,
90
+ "rstrip": true,
91
+ "single_word": false,
92
+ "special": true
93
+ },
94
+ "32008": {
95
+ "content": "<|placeholder5|>",
96
+ "lstrip": false,
97
+ "normalized": false,
98
+ "rstrip": true,
99
+ "single_word": false,
100
+ "special": true
101
+ },
102
+ "32009": {
103
+ "content": "<|placeholder6|>",
104
+ "lstrip": false,
105
+ "normalized": false,
106
+ "rstrip": true,
107
+ "single_word": false,
108
+ "special": true
109
+ },
110
+ "32010": {
111
+ "content": "<|user|>",
112
+ "lstrip": false,
113
+ "normalized": false,
114
+ "rstrip": true,
115
+ "single_word": false,
116
+ "special": true
117
+ }
118
+ },
119
+ "bos_token": "<s>",
120
+ "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') %}{{'<|user|>' + '\n' + message['content'] + '<|end|>' + '\n' + '<|assistant|>' + '\n'}}{% elif (message['role'] == 'assistant') %}{{message['content'] + '<|end|>' + '\n'}}{% endif %}{% endfor %}",
121
+ "clean_up_tokenization_spaces": false,
122
+ "eos_token": "<|endoftext|>",
123
+ "extra_special_tokens": {},
124
+ "legacy": true,
125
+ "model_max_length": 4096,
126
+ "pad_token": "<|endoftext|>",
127
+ "padding_side": "left",
128
+ "sp_model_kwargs": {},
129
+ "tokenizer_class": "LlamaTokenizer",
130
+ "unk_token": "<unk>",
131
+ "use_default_system_prompt": false,
132
+ "use_fast": true
133
+ }
last-checkpoint/trainer_state.json ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 16.666666666666668,
5
+ "eval_steps": 200,
6
+ "global_step": 100,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.16666666666666666,
13
+ "eval_loss": 10.377338409423828,
14
+ "eval_runtime": 2.3495,
15
+ "eval_samples_per_second": 638.849,
16
+ "eval_steps_per_second": 5.107,
17
+ "step": 1
18
+ },
19
+ {
20
+ "epoch": 1.6666666666666665,
21
+ "grad_norm": 0.208984375,
22
+ "learning_rate": 0.00019863613034027224,
23
+ "loss": 10.3698,
24
+ "step": 10
25
+ },
26
+ {
27
+ "epoch": 3.3333333333333335,
28
+ "grad_norm": 0.4296875,
29
+ "learning_rate": 0.0001879473751206489,
30
+ "loss": 10.3199,
31
+ "step": 20
32
+ },
33
+ {
34
+ "epoch": 5.0,
35
+ "grad_norm": 0.45703125,
36
+ "learning_rate": 0.00016772815716257412,
37
+ "loss": 10.2229,
38
+ "step": 30
39
+ },
40
+ {
41
+ "epoch": 6.666666666666667,
42
+ "grad_norm": 0.458984375,
43
+ "learning_rate": 0.00014016954246529696,
44
+ "loss": 10.1217,
45
+ "step": 40
46
+ },
47
+ {
48
+ "epoch": 8.333333333333334,
49
+ "grad_norm": 0.4609375,
50
+ "learning_rate": 0.00010825793454723325,
51
+ "loss": 10.0382,
52
+ "step": 50
53
+ },
54
+ {
55
+ "epoch": 10.0,
56
+ "grad_norm": 0.4609375,
57
+ "learning_rate": 7.54514512859201e-05,
58
+ "loss": 9.9768,
59
+ "step": 60
60
+ },
61
+ {
62
+ "epoch": 11.666666666666666,
63
+ "grad_norm": 0.4609375,
64
+ "learning_rate": 4.530518418775733e-05,
65
+ "loss": 9.9373,
66
+ "step": 70
67
+ },
68
+ {
69
+ "epoch": 13.333333333333334,
70
+ "grad_norm": 0.462890625,
71
+ "learning_rate": 2.1085949060360654e-05,
72
+ "loss": 9.9182,
73
+ "step": 80
74
+ },
75
+ {
76
+ "epoch": 15.0,
77
+ "grad_norm": 0.46484375,
78
+ "learning_rate": 5.418275829936537e-06,
79
+ "loss": 9.9122,
80
+ "step": 90
81
+ },
82
+ {
83
+ "epoch": 16.666666666666668,
84
+ "grad_norm": 0.462890625,
85
+ "learning_rate": 0.0,
86
+ "loss": 9.9115,
87
+ "step": 100
88
+ }
89
+ ],
90
+ "logging_steps": 10,
91
+ "max_steps": 100,
92
+ "num_input_tokens_seen": 0,
93
+ "num_train_epochs": 17,
94
+ "save_steps": 200,
95
+ "stateful_callbacks": {
96
+ "TrainerControl": {
97
+ "args": {
98
+ "should_epoch_stop": false,
99
+ "should_evaluate": false,
100
+ "should_log": false,
101
+ "should_save": true,
102
+ "should_training_stop": true
103
+ },
104
+ "attributes": {}
105
+ }
106
+ },
107
+ "total_flos": 164363029708800.0,
108
+ "train_batch_size": 32,
109
+ "trial_name": null,
110
+ "trial_params": null
111
+ }
last-checkpoint/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:767cbb0126f5e19779e9fcc2851f3166d36ca132f2aa13429b17ffa28b07b942
3
+ size 6904