cointegrated committed on
Commit
a9a6562
1 Parent(s): 1604554

Upload tokenizer

Browse files
Files changed (2) hide show
  1. special_tokens_map.json +7 -1
  2. tokenizer_config.json +8 -2
special_tokens_map.json CHANGED
@@ -1,3 +1,9 @@
1
  {
2
- "pad_token": "<pad>"
 
 
 
 
 
 
3
  }
 
1
  {
2
+ "bos_token": "<s>",
3
+ "cls_token": "<s>",
4
+ "eos_token": "</s>",
5
+ "mask_token": "<mask>",
6
+ "pad_token": "<pad>",
7
+ "sep_token": "</s>",
8
+ "unk_token": "<unk>"
9
  }
tokenizer_config.json CHANGED
@@ -1657,8 +1657,14 @@
1657
  "special": true
1658
  }
1659
  },
 
1660
  "clean_up_tokenization_spaces": true,
1661
- "model_max_length": 1000000000000000019884624838656,
 
 
 
1662
  "pad_token": "<pad>",
1663
- "tokenizer_class": "PreTrainedTokenizerFast"
 
 
1664
  }
 
1657
  "special": true
1658
  }
1659
  },
1660
+ "bos_token": "<s>",
1661
  "clean_up_tokenization_spaces": true,
1662
+ "cls_token": "<s>",
1663
+ "eos_token": "</s>",
1664
+ "mask_token": "<mask>",
1665
+ "model_max_length": 1024,
1666
  "pad_token": "<pad>",
1667
+ "sep_token": "</s>",
1668
+ "tokenizer_class": "PreTrainedTokenizerFast",
1669
+ "unk_token": "<unk>"
1670
  }