sirsam01 committed
Commit 530bd01 · verified · 1 Parent(s): 876cf9a

Upload tokenizer

Files changed (2):
  1. tokenizer.json +1 -8
  2. tokenizer_config.json +4 -0
tokenizer.json CHANGED
@@ -6,14 +6,7 @@
     "strategy": "LongestFirst",
     "stride": 0
   },
-  "padding": {
-    "strategy": "BatchLongest",
-    "direction": "Left",
-    "pad_to_multiple_of": null,
-    "pad_id": 0,
-    "pad_type_id": 0,
-    "pad_token": "<|endoftext|>"
-  },
+  "padding": null,
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -160,9 +160,13 @@
   "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|endoftext|>",
+  "max_length": 349,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<|endoftext|>",
+  "stride": 0,
   "tokenizer_class": "GPT2Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<|endoftext|>",
   "vocab_size": 49152
 }
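
For reference, the deleted padding block and the newly added truncation keys can still be applied at runtime with the tokenizers library instead of being serialized into the files. A sketch, assuming the tokenizer.json from this commit is on disk:

# Sketch, not part of the commit: re-creating the removed padding block
# and the new truncation settings at runtime.
from tokenizers import Tokenizer

tk = Tokenizer.from_file("tokenizer.json")

# Equivalent of the deleted "padding" object: left-side, batch-longest
# padding with <|endoftext|> (id 0) as the pad token.
tk.enable_padding(direction="left", pad_id=0, pad_type_id=0,
                  pad_token="<|endoftext|>")

# Mirrors the keys added to tokenizer_config.json:
# max_length=349, stride=0, longest_first truncation, right side.
tk.enable_truncation(max_length=349, stride=0, strategy="longest_first")

print(tk.encode("example input").ids)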