URGENT: tokenizer error
#10
opened by allanjie
>>> from transformers import AutoTokenizer
>>> tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen25-7b-multi", trust_remote_code=True)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/miniconda/envs/py310/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 801, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/miniconda/envs/py310/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2029, in from_pretrained
return cls._from_pretrained(
File "/miniconda/envs/py310/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2261, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "/root/.cache/huggingface/modules/transformers_modules/Salesforce/codegen25-7b-multi/d85e7275a8b5534b210ba3941e10e12713fcf445/tokenization_codegen25.py", line 136, in __init__
super().__init__(
File "/miniconda/envs/py310/lib/python3.10/site-packages/transformers/tokenization_utils.py", line 367, in __init__
self._add_tokens(
File "/miniconda/envs/py310/lib/python3.10/site-packages/transformers/tokenization_utils.py", line 467, in _add_tokens
current_vocab = self.get_vocab().copy()
File "/root/.cache/huggingface/modules/transformers_modules/Salesforce/codegen25-7b-multi/d85e7275a8b5534b210ba3941e10e12713fcf445/tokenization_codegen25.py", line 153, in get_vocab
vocab = {self._convert_id_to_token(i): i for i in range(self.vocab_size)}
File "/root/.cache/huggingface/modules/transformers_modules/Salesforce/codegen25-7b-multi/d85e7275a8b5534b210ba3941e10e12713fcf445/tokenization_codegen25.py", line 149, in vocab_size
return self.encoder.n_vocab
AttributeError: 'CodeGen25Tokenizer' object has no attribute 'encoder'. Did you mean: 'encode'?
>>>
You probably updated your transformers version, but the tokenizer code pushed to this repo predates 4.34, so it is not compatible with the latest changes!
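For what it's worth, here is a minimal, self-contained sketch of what the traceback suggests is going on (illustrative class names only, not the actual repo code): since transformers 4.34, `PreTrainedTokenizer.__init__` calls `get_vocab()` via `_add_tokens()`, so any attribute that `get_vocab()` relies on, here `self.encoder`, must be set *before* `super().__init__()` runs.

```python
# Illustrative sketch of the failure mode (hypothetical names, not the repo code).

class Base:
    def __init__(self):
        # Mirrors PreTrainedTokenizer.__init__ touching the vocab during init
        # (see _add_tokens -> self.get_vocab() in the traceback above).
        self.get_vocab()

class BrokenTokenizer(Base):
    def __init__(self):
        super().__init__()           # get_vocab() runs here -> AttributeError
        self.encoder = {"a": 0}      # set too late

    def get_vocab(self):
        return dict(self.encoder)

class FixedTokenizer(Base):
    def __init__(self):
        self.encoder = {"a": 0}      # set first, before the parent constructor
        super().__init__()           # now get_vocab() succeeds

    def get_vocab(self):
        return dict(self.encoder)

try:
    BrokenTokenizer()
except AttributeError as e:
    print("broken:", e)              # ... object has no attribute 'encoder'

print("fixed:", FixedTokenizer().get_vocab())
```

Until the repo's `tokenization_codegen25.py` is updated accordingly, pinning transformers below 4.34 (`pip install "transformers<4.34"`) should avoid the error.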