LeroyDyer committed on
Commit 4575e3b
1 Parent(s): 43b0c57

Delete tokenizer_config.json

Files changed (1)
  1. tokenizer_config.json +0 -61
tokenizer_config.json DELETED
@@ -1,61 +0,0 @@
- {
-   "add_bos_token": true,
-   "add_eos_token": false,
-   "added_tokens_decoder": {
-     "0": {
-       "content": "<unk>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "1": {
-       "content": "<s>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "2": {
-       "content": "</s>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "32000": {
-       "content": "<|endthought|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "32001": {
-       "content": "<|startthought|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "additional_special_tokens": [
-     "<|endthought|>",
-     "<|startthought|>"
-   ],
-   "bos_token": "<s>",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "</s>",
-   "legacy": true,
-   "model_max_length": 1000000000000000019884624838656,
-   "pad_token": "</s>",
-   "sp_model_kwargs": {},
-   "spaces_between_special_tokens": false,
-   "tokenizer_class": "LlamaTokenizer",
-   "unk_token": "<unk>",
-   "use_default_system_prompt": false
- }
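For reference, a minimal sketch of what the deleted config implied when a tokenizer was loaded with Hugging Face `transformers` (the path below is a placeholder, and assumes a repo or local directory that still ships the matching tokenizer files):

```python
from transformers import AutoTokenizer

# Placeholder path; assumes a directory or repo id containing the SentencePiece
# model plus a tokenizer_config.json with the settings shown in the diff above.
tokenizer = AutoTokenizer.from_pretrained("path/to/tokenizer")

# The config registered two extra special tokens on top of the standard
# Llama <unk>/<s>/</s> tokens, at ids 32000 and 32001.
print(tokenizer.convert_tokens_to_ids("<|endthought|>"))    # expected: 32000
print(tokenizer.convert_tokens_to_ids("<|startthought|>"))  # expected: 32001

# add_bos_token = true and add_eos_token = false mean plain-text encoding
# prepends <s> (id 1) but does not append </s>.
print(tokenizer("hello").input_ids)
```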