Upload tokenizer
tokenizer_config.json  CHANGED  (+5 -1)
@@ -113,7 +113,11 @@
   ],
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
+  "max_length": 1024,
   "model_max_length": 2048,
   "pad_token": "<|endoftext|>",
-  "
+  "stride": 0,
+  "tokenizer_class": "PreTrainedTokenizerFast",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first"
 }
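The commit adds explicit truncation settings (max_length, stride, truncation_side, truncation_strategy) and pins the tokenizer class to PreTrainedTokenizerFast. A minimal sketch of how these fields surface when the tokenizer is loaded; the repository id is a placeholder, since the diff does not name the model repo:

from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual repository this config belongs to.
tokenizer = AutoTokenizer.from_pretrained("org/model")

# Standard fields from tokenizer_config.json become tokenizer attributes.
print(tokenizer.model_max_length)   # 2048
print(tokenizer.truncation_side)    # "right"
print(tokenizer.eos_token)          # "<|endoftext|>"

# Extra keys such as "max_length", "stride" and "truncation_strategy" are kept
# as init kwargs; passing truncation options explicitly at call time is the
# unambiguous way to enforce them.
encoded = tokenizer("some very long input " * 400,
                    truncation=True, max_length=1024, stride=0)
print(len(encoded["input_ids"]))    # at most 1024

Pinning "tokenizer_class" to PreTrainedTokenizerFast lets AutoTokenizer resolve the class directly from tokenizer_config.json rather than inferring it from the model architecture.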