EzraWilliam committed on
Commit
d6dd10f
1 Parent(s): 0031855

Upload tokenizer

Files changed (2)
  1. tokenizer_config.json +3 -0
  2. vocab.json +28 -28
tokenizer_config.json CHANGED
@@ -1,9 +1,12 @@
 {
   "bos_token": "<s>",
+  "clean_up_tokenization_spaces": true,
   "do_lower_case": false,
   "eos_token": "</s>",
+  "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",
   "replace_word_delimiter_char": " ",
+  "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
   "unk_token": "[UNK]",
   "word_delimiter_token": "|"
vocab.json CHANGED
@@ -1,32 +1,32 @@
 {
-  "'": 6,
+  "'": 1,
   "[PAD]": 29,
   "[UNK]": 28,
-  "a": 5,
-  "b": 10,
-  "c": 25,
-  "d": 19,
-  "e": 13,
-  "f": 8,
-  "g": 4,
-  "h": 23,
-  "i": 9,
-  "j": 27,
-  "k": 24,
-  "l": 16,
-  "m": 0,
-  "n": 22,
-  "o": 1,
-  "p": 12,
-  "q": 18,
-  "r": 3,
-  "s": 20,
-  "t": 26,
-  "u": 14,
-  "v": 17,
-  "w": 15,
-  "x": 2,
-  "y": 11,
-  "z": 21,
-  "|": 7
+  "a": 12,
+  "b": 3,
+  "c": 16,
+  "d": 11,
+  "e": 15,
+  "f": 27,
+  "g": 6,
+  "h": 25,
+  "i": 5,
+  "j": 23,
+  "k": 14,
+  "l": 21,
+  "m": 10,
+  "n": 7,
+  "o": 26,
+  "p": 2,
+  "q": 8,
+  "r": 22,
+  "s": 17,
+  "t": 4,
+  "u": 18,
+  "v": 13,
+  "w": 20,
+  "x": 24,
+  "y": 19,
+  "z": 0,
+  "|": 9
 }
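
For context, the added keys ("clean_up_tokenization_spaces", "model_max_length", "target_lang") and the remapped character ids could be verified by reloading the uploaded tokenizer with transformers. A minimal sketch follows; the repo id "EzraWilliam/<repo>" is a placeholder, since the repository name is not shown in this commit view.

    # Minimal sketch, assuming a hypothetical repo id for the repository this commit belongs to.
    from transformers import Wav2Vec2CTCTokenizer

    # Load the tokenizer files uploaded by this commit (repo id is a placeholder).
    tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("EzraWilliam/<repo>")

    # Inspect the remapped character-to-id table from vocab.json.
    print(tokenizer.get_vocab())

    # The special tokens from tokenizer_config.json should resolve as configured.
    print(tokenizer.pad_token, tokenizer.unk_token, tokenizer.word_delimiter_token)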