stojchet committed
Commit a216288 · verified · 1 Parent(s): 1275a63

End of training

Files changed (2)
  1. README.md +14 -9
  2. generation_config.json +6 -0
README.md CHANGED
@@ -1,11 +1,11 @@
 ---
+library_name: transformers
 license: other
-library_name: peft
+base_model: deepseek-ai/deepseek-coder-1.3b-base
 tags:
 - trl
 - kto
 - generated_from_trainer
-base_model: deepseek-ai/deepseek-coder-1.3b-base
 model-index:
 - name: kto_test
   results: []
@@ -35,21 +35,26 @@ More information needed
 ### Training hyperparameters
 
 The following hyperparameters were used during training:
-- learning_rate: 1.41e-05
+- learning_rate: 0.1
 - train_batch_size: 8
 - eval_batch_size: 8
 - seed: 42
 - gradient_accumulation_steps: 16
 - total_train_batch_size: 128
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
-- lr_scheduler_type: cosine
+- lr_scheduler_type: linear
 - lr_scheduler_warmup_ratio: 0.1
+- lr_scheduler_warmup_steps: 200
 - num_epochs: 1
+- mixed_precision_training: Native AMP
+
+### Training results
+
+
 
 ### Framework versions
 
-- PEFT 0.10.0
-- Transformers 4.42.0.dev0
-- Pytorch 2.2.2+cu121
-- Datasets 2.19.0
-- Tokenizers 0.19.1
+- Transformers 4.45.0
+- Pytorch 2.5.1+cu124
+- Datasets 2.19.2
+- Tokenizers 0.20.3
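
For reference, below is a minimal sketch of a training setup matching the updated hyperparameters, using TRL's KTOTrainer. Only the hyperparameter values come from the card; the toy dataset, output_dir, and the fp16 flag (standing in for "Native AMP") are assumptions, and argument names follow TRL releases of that era (newer TRL renames `tokenizer=` to `processing_class=`).

```python
# Hypothetical reconstruction of the run described in the card above.
from datasets import Dataset
from transformers import AutoModelForCausalLM, AutoTokenizer
from trl import KTOConfig, KTOTrainer

base = "deepseek-ai/deepseek-coder-1.3b-base"
model = AutoModelForCausalLM.from_pretrained(base)
tokenizer = AutoTokenizer.from_pretrained(base)

# KTO trains on unpaired feedback: prompt/completion pairs with a boolean
# label marking the completion as desirable or not. Toy data for illustration.
train_dataset = Dataset.from_dict({
    "prompt": ["def add(a, b):", "def add(a, b):"],
    "completion": ["\n    return a + b", "\n    return a - b"],
    "label": [True, False],
})

args = KTOConfig(
    output_dir="kto_test",
    learning_rate=0.1,               # as listed in the card (unusually high)
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=16,  # 8 x 16 = total_train_batch_size 128
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    warmup_steps=200,                # transformers prefers steps when both are set
    num_train_epochs=1,
    seed=42,
    fp16=True,                       # mixed_precision_training: Native AMP
)

trainer = KTOTrainer(model=model, args=args,
                     train_dataset=train_dataset, tokenizer=tokenizer)
trainer.train()
```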
 
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 32013,
+  "eos_token_id": 32014,
+  "transformers_version": "4.45.0"
+}
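
The added generation_config.json is picked up automatically by generate() when the model is loaded from the Hub; the sketch below loads it explicitly only to make the DeepSeek Coder special-token ids visible. The repo id "stojchet/kto_test" is inferred from the commit author and model name and may differ.

```python
# Minimal usage sketch against the updated repo (repo id is an assumption).
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

repo = "stojchet/kto_test"
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo)

# Matches the new generation_config.json committed above.
gen_config = GenerationConfig.from_pretrained(repo)
assert gen_config.bos_token_id == 32013 and gen_config.eos_token_id == 32014

inputs = tokenizer("def fibonacci(n):", return_tensors="pt")
output = model.generate(**inputs, generation_config=gen_config, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```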