Xenova (HF Staff) committed
Commit 37e7b81 · verified · 1 parent: 83eb260

Upload folder using huggingface_hub
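
This is the default commit message generated by huggingface_hub's upload_folder helper. A minimal sketch of how a commit like this might be produced; the repo id and local path are hypothetical placeholders, not taken from this page:

# Minimal sketch of producing this commit with huggingface_hub.
# Repo id and folder path are hypothetical placeholders.
from huggingface_hub import upload_folder

upload_folder(
    repo_id="user/model",   # hypothetical target repository
    folder_path="./model",  # local folder containing the updated JSON files
    commit_message="Upload folder using huggingface_hub",  # the library's default message
)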

Files changed (3):
  1. config.json +1 -1
  2. generation_config.json +1 -1
  3. tokenizer_config.json +1 -0
config.json CHANGED
@@ -26,7 +26,7 @@
   "rope_scaling": null,
   "rope_theta": 100000,
   "tie_word_embeddings": true,
-  "transformers_version": "4.46.3",
+  "transformers_version": "4.49.0",
   "use_cache": true,
   "vocab_size": 49154,
   "transformers.js_config": {
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 0,
   "eos_token_id": 0,
-  "transformers_version": "4.46.3"
+  "transformers_version": "4.49.0"
 }
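
The matching version bump in generation_config.json comes from the same kind of re-save, via the GenerationConfig API; again a sketch with a hypothetical repo id:

# Sketch: generation_config.json carries the same version stamp.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("user/model")  # hypothetical repo id
gen_config.save_pretrained("./model")  # rewrites generation_config.json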
tokenizer_config.json CHANGED
@@ -164,6 +164,7 @@
   "chat_template": "{% for message in messages %}{{'<|im_start|>' + '<|' + message['role'] + '|>' + message['content'] + '<|im_end|>'}}{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|endoftext|>",
+  "extra_special_tokens": {},
   "model_max_length": 8192,
   "pad_token": "<|endoftext|>",
   "tokenizer_class": "GPT2Tokenizer",