saattrupdan committed
Commit b3c52cb
1 Parent(s): 933a6e7

Update tokenizer_config.json


The [NeuralBeagle](https://huggingface.co/mlabonne/NeuralBeagle14-7B) model [recently updated its tokenization config to fix bugs in it](https://huggingface.co/mlabonne/NeuralBeagle14-7B/commit/7d8fe89cdddfc9c28dbee535f5f53d38b5440d74), and this commit simply copies that updated config over here.
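Since the change only touches `tokenizer_config.json`, downstream users pick up the fix simply by re-loading the tokenizer from the Hub. A minimal sketch for checking the copied settings, assuming the standard `transformers` `AutoTokenizer` API; the model ID below is a placeholder for this repository and not part of the commit:

```python
from transformers import AutoTokenizer

# Placeholder model ID: substitute the repository this commit belongs to.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

# Settings copied over from the updated NeuralBeagle config (see the diff below).
print(tokenizer.model_max_length)  # 8192 instead of the previous huge sentinel value
print(tokenizer.pad_token)         # "</s>" instead of None
print(tokenizer.padding_side)      # "left"
```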

Files changed (1)
  1. tokenizer_config.json +12 -5
tokenizer_config.json CHANGED
@@ -27,16 +27,23 @@
       "special": true
     }
   },
-  "additional_special_tokens": [],
+  "additional_special_tokens": [
+    "<unk>",
+    "<s>",
+    "</s>"
+  ],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": null,
+  "model_max_length": 8192,
+  "pad_token": "</s>",
+  "padding_side": "left",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
+  "split_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
-  "use_default_system_prompt": false
-}
+  "chat_template": "{% for message in messages %}{{bos_token + message['role'] + '\n' + message['content'] + eos_token + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ bos_token + 'assistant\n' }}{% endif %}",
+  "use_default_system_prompt": true
+}
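For reference, the added `chat_template` wraps every message as `bos_token + role + '\n' + content + eos_token + '\n'` and, when a generation prompt is requested, appends `bos_token + 'assistant\n'`. A short sketch of how the rendered prompt looks once this config is in place, again with a placeholder model ID:

```python
from transformers import AutoTokenizer

# Placeholder model ID: substitute the repository this commit belongs to.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hi there!"},
]

# Render the prompt as text (no tokenization) using the new chat template.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# Expected output given the template above:
# <s>system
# You are a helpful assistant.</s>
# <s>user
# Hi there!</s>
# <s>assistant
```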