colerobertson committed
Commit 92181c4
1 Parent(s): c4271d8

Upload tokenizer

Files changed (3)
  1. special_tokens_map.json +16 -0
  2. tokenizer_config.json +2 -37
  3. vocab.json +36 -36
special_tokens_map.json CHANGED
@@ -1,4 +1,20 @@
 {
+  "additional_special_tokens": [
+    {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
   "bos_token": "<s>",
   "eos_token": "</s>",
   "pad_token": "[PAD]",
tokenizer_config.json CHANGED
@@ -1,46 +1,11 @@
 {
-  "added_tokens_decoder": {
-    "35": {
-      "content": "[UNK]",
-      "lstrip": true,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": false
-    },
-    "36": {
-      "content": "[PAD]",
-      "lstrip": true,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": false
-    },
-    "37": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "38": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    }
-  },
   "bos_token": "<s>",
-  "clean_up_tokenization_spaces": true,
   "do_lower_case": false,
   "eos_token": "</s>",
-  "model_max_length": 1000000000000000019884624838656,
+  "name_or_path": "./",
   "pad_token": "[PAD]",
   "replace_word_delimiter_char": " ",
-  "target_lang": null,
+  "special_tokens_map_file": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
   "unk_token": "[UNK]",
   "word_delimiter_token": "|"
vocab.json CHANGED
@@ -1,39 +1,39 @@
 {
-  "'": 25,
-  "0": 23,
+  "'": 35,
+  "0": 10,
   "1": 14,
-  "2": 30,
-  "3": 31,
-  "5": 0,
-  "9": 17,
-  "[PAD]": 36,
-  "[UNK]": 35,
-  "a": 20,
-  "b": 11,
-  "c": 10,
-  "d": 19,
-  "e": 7,
-  "f": 8,
-  "g": 26,
-  "h": 1,
-  "i": 32,
-  "j": 13,
-  "k": 2,
-  "l": 12,
-  "m": 28,
-  "n": 9,
-  "o": 21,
-  "p": 18,
-  "q": 15,
-  "r": 27,
-  "s": 16,
-  "t": 22,
-  "u": 24,
-  "v": 29,
-  "w": 4,
-  "x": 34,
-  "y": 5,
-  "z": 3,
-  "|": 6,
-  "£": 33
+  "2": 32,
+  "3": 19,
+  "5": 34,
+  "9": 9,
+  "[PAD]": 0,
+  "[UNK]": 36,
+  "a": 17,
+  "b": 24,
+  "c": 30,
+  "d": 6,
+  "e": 16,
+  "f": 18,
+  "g": 4,
+  "h": 27,
+  "i": 2,
+  "j": 5,
+  "k": 11,
+  "l": 1,
+  "m": 7,
+  "n": 25,
+  "o": 28,
+  "p": 22,
+  "q": 20,
+  "r": 8,
+  "s": 3,
+  "t": 26,
+  "u": 12,
+  "v": 23,
+  "w": 15,
+  "x": 31,
+  "y": 13,
+  "z": 33,
+  "|": 29,
+  "£": 21
 }
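
The vocabulary keeps the same 37 symbols but re-indexes almost all of them (only "1": 14 is unchanged); in particular [PAD] moves to id 0, which the Wav2Vec2 CTC head in transformers typically uses as the blank token. A small sketch that spot-checks the new mapping (placeholder "./" path again):

    from transformers import Wav2Vec2CTCTokenizer

    tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./")  # placeholder path
    vocab = tokenizer.get_vocab()
    print(vocab["[PAD]"], vocab["[UNK]"], vocab["|"])  # 0 36 29 under the new mapping
    print(tokenizer("cat").input_ids)                  # [30, 17, 26] -> c, a, t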