Felix Marty committed
Commit c43dedc • 1 Parent(s): 1433ab1

Files changed (2):
  1. app.py +10 -3
  2. onnx_export.py +17 -8
app.py CHANGED
@@ -21,7 +21,7 @@ if HF_TOKEN:
     repo = Repository(local_dir=DATADIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
 
 
-def onnx_export(token: str, model_id: str, task: str) -> str:
+def onnx_export(token: str, model_id: str, task: str, opset: int) -> str:
     if token == "" or model_id == "":
         return """
 ### Invalid input 🐞
@@ -31,7 +31,7 @@ def onnx_export(token: str, model_id: str, task: str) -> str:
     try:
         api = HfApi(token=token)
 
-        error, commit_info = convert(api=api, model_id=model_id, task=task)
+        error, commit_info = convert(api=api, model_id=model_id, task=task, opset=opset)
         if error != "0":
             return error
 
@@ -129,12 +129,19 @@ with gr.Blocks() as demo:
         max_lines=1,
         label='Task (can be left to "auto", will be automatically inferred)',
     )
+    onnx_opset = gr.Textbox(
+        placeholder="for example 14, can be left blank",
+        max_lines=1,
+        label="ONNX opset (optional, can be left blank)",
+    )
 
     btn = gr.Button("Convert to ONNX")
     output = gr.Markdown(label="Output")
 
     btn.click(
-        fn=onnx_export, inputs=[input_token, input_model, input_task], outputs=output
+        fn=onnx_export,
+        inputs=[input_token, input_model, input_task, onnx_opset],
+        outputs=output,
     )
 
     """
onnx_export.py CHANGED
@@ -5,11 +5,14 @@ from pathlib import Path
 from tempfile import TemporaryDirectory
 from typing import List, Optional, Tuple
 
-from huggingface_hub import (CommitOperationAdd, HfApi, get_repo_discussions,
-                             hf_hub_download)
+from huggingface_hub import (
+    CommitOperationAdd,
+    HfApi,
+    get_repo_discussions,
+    hf_hub_download,
+)
 from huggingface_hub.file_download import repo_folder_name
-from optimum.exporters.onnx import (OnnxConfigWithPast, export,
-                                    validate_model_outputs)
+from optimum.exporters.onnx import OnnxConfigWithPast, export, validate_model_outputs
 from optimum.exporters.tasks import TasksManager
 from transformers import AutoConfig, AutoTokenizer, is_torch_available
 
@@ -30,7 +33,7 @@ def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discussion"]:
     return discussion
 
 
-def convert_onnx(model_id: str, task: str, folder: str) -> List:
+def convert_onnx(model_id: str, task: str, folder: str, opset: int) -> List:
 
     # Allocate the model
     model = TasksManager.get_model_from_task(task, model_id, framework="pt")
@@ -58,8 +61,10 @@ def convert_onnx(model_id: str, task: str, folder: str) -> List:
            "Could not infer the pad token id, which is needed in this case, please provide it with the --pad_token_id argument"
         )
 
+    print("opset:", opset)
     # Ensure the requested opset is sufficient
-    opset = onnx_config.DEFAULT_ONNX_OPSET
+    if opset == None:
+        opset = onnx_config.DEFAULT_ONNX_OPSET
 
     output = Path(folder).joinpath("model.onnx")
     onnx_inputs, onnx_outputs = export(
@@ -107,7 +112,11 @@ def convert_onnx(model_id: str, task: str, folder: str) -> List:
 
 
 def convert(
-    api: "HfApi", model_id: str, task: str, force: bool = False
+    api: "HfApi",
+    model_id: str,
+    task: str,
+    force: bool = False,
+    opset: int = None,
 ) -> Tuple[int, "CommitInfo"]:
     pr_title = "Adding ONNX file of this model"
     info = api.model_info(model_id)
@@ -139,7 +148,7 @@ def convert(
                f"Model {model_id} already has an open PR check out [{url}]({url})"
             )
         else:
-            operations = convert_onnx(model_id, task, folder)
+            operations = convert_onnx(model_id, task, folder, opset)
 
             commit_description = f"""
 Beep boop I am the [ONNX export bot 🤖🏎️]({SPACES_URL}). On behalf of [{requesting_user}](https://huggingface.co/{requesting_user}), I would like to add to this repository the model converted to ONNX.
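With this change the opset is threaded from `convert` into `convert_onnx`, which only falls back to `onnx_config.DEFAULT_ONNX_OPSET` when no value is supplied. A minimal sketch of driving the updated `convert` outside the Gradio UI, assuming a valid write token, an example model id, and an example task name (all placeholders, not part of this commit):

```python
from huggingface_hub import HfApi

from onnx_export import convert  # the module modified in this commit

# Placeholder values for illustration only.
api = HfApi(token="hf_xxx")               # token of the account that will open the PR
model_id = "distilbert-base-uncased"      # any PyTorch model repo on the Hub
task = "text-classification"              # example task name; adjust to the model / optimum version

# Explicit opset: forwarded to convert_onnx() and used as-is by export().
error, commit_info = convert(api=api, model_id=model_id, task=task, opset=14)

# Opset left unset: convert_onnx() receives None and falls back to the
# model's onnx_config.DEFAULT_ONNX_OPSET before exporting.
error, commit_info = convert(api=api, model_id=model_id, task=task)
```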