indiejoseph committed
Commit 7890584
1 Parent(s): 941c288

Upload tokenizer

Files changed (3):
  1. special_tokens_map.json +14 -0
  2. tokenizer.json +0 -0
  3. tokenizer_config.json +19 -1
special_tokens_map.json CHANGED
@@ -1,5 +1,12 @@
 {
   "additional_special_tokens": [
+    {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
     {
       "content": "<|im_end|>",
       "lstrip": false,
@@ -20,6 +27,13 @@
       "normalized": false,
       "rstrip": false,
       "single_word": false
+    },
+    {
+      "content": "<|System|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
     }
   ],
   "bos_token": {
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -43,6 +43,22 @@
       "single_word": false,
       "special": true
     },
+    "5": {
+      "content": "<|System|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "6": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
     "7": {
       "content": "<|im_end|>",
       "lstrip": false,
@@ -53,9 +69,11 @@
     }
   },
   "additional_special_tokens": [
+    "<|im_start|>",
     "<|im_end|>",
     "<|Human|>",
-    "<|Assistant|>"
+    "<|Assistant|>",
+    "<|System|>"
   ],
   "bos_token": "<|startoftext|>",
   "clean_up_tokenization_spaces": false,