Bump up transformers to 4.43.1

#859
opened by alozowski (HF staff)
pyproject.toml CHANGED
@@ -40,7 +40,7 @@ plotly = "5.14.1"
40
  python-dateutil = "2.8.2"
41
  sentencepiece = "^0.2.0"
42
  tqdm = "4.65.0"
43
- transformers = "4.41.1"
44
  tokenizers = ">=0.15.0"
45
  gradio-space-ci = {git = "https://huggingface.co/spaces/Wauplin/gradio-space-ci", rev = "0.2.3"}
46
  isort = "^5.13.2"
 
40
  python-dateutil = "2.8.2"
41
  sentencepiece = "^0.2.0"
42
  tqdm = "4.65.0"
43
+ transformers = "4.43.1"
44
  tokenizers = ">=0.15.0"
45
  gradio-space-ci = {git = "https://huggingface.co/spaces/Wauplin/gradio-space-ci", rev = "0.2.3"}
46
  isort = "^5.13.2"
requirements.txt CHANGED
@@ -10,7 +10,7 @@ plotly==5.14.1
10
  python-dateutil==2.8.2
11
  sentencepiece
12
  tqdm==4.65.0
13
- transformers==4.41.1
14
  tokenizers>=0.15.0
15
  gradio-space-ci @ git+https://huggingface.co/spaces/Wauplin/[email protected] # CI !!!
16
  isort
 
10
  python-dateutil==2.8.2
11
  sentencepiece
12
  tqdm==4.65.0
13
+ transformers==4.43.1
14
  tokenizers>=0.15.0
15
  gradio-space-ci @ git+https://huggingface.co/spaces/Wauplin/[email protected] # CI !!!
16
  isort
src/submission/check_validity.py CHANGED
@@ -46,8 +46,7 @@ def is_model_on_hub(
46
  ) -> tuple[bool, str, AutoConfig]:
47
  try:
48
  config = AutoConfig.from_pretrained(
49
- model_name, revision=revision, trust_remote_code=trust_remote_code, token=token
50
- ) # , force_download=True)
51
  if test_tokenizer:
52
  try:
53
  tk = AutoTokenizer.from_pretrained(
 
46
  ) -> tuple[bool, str, AutoConfig]:
47
  try:
48
  config = AutoConfig.from_pretrained(
49
+ model_name, revision=revision, trust_remote_code=trust_remote_code, token=token, force_download=True)
 
50
  if test_tokenizer:
51
  try:
52
  tk = AutoTokenizer.from_pretrained(