Felix Marty committed
Commit: 93aa084
1 Parent(s): 210777d

better with two cols?
app.py
CHANGED
@@ -92,7 +92,9 @@ TITLE = """
 # for some reason https://huggingface.co/settings/tokens is not showing as a link by default?
 DESCRIPTION = """
 This Space allows to automatically convert to ONNX 🤗 transformers PyTorch models hosted on the Hugging Face Hub. It opens a PR on the target model, and it is up to the owner of the original model
-to merge the PR to allow people to leverage the ONNX standard to share and use the model on a wide range of devices!
+to merge the PR to allow people to leverage the ONNX standard to share and use the model on a wide range of devices!
+
+Once converted, the model can for example be used in the [🤗 Optimum](https://huggingface.co/docs/optimum/) library following closely the transormers API.
 Check out [this guide](https://huggingface.co/docs/optimum/main/en/onnxruntime/usage_guides/models) to see how!
 
 The steps are the following:
@@ -100,12 +102,15 @@ The steps are the following:
 - Input a model id from the Hub (for example: [textattack/distilbert-base-cased-CoLA](https://huggingface.co/textattack/distilbert-base-cased-CoLA))
 - Click "Convert to ONNX"
 - That's it! You'll get feedback if it works or not, and if it worked, you'll get the URL of the opened PR!
+
+Note: in case the model to convert is larger than 2 GB, it will be saved in a subfolder called `onnx/`. To load it from Optimum, the argument `subfolder="onnx"` should be provided.
 """
 
 with gr.Blocks() as demo:
-    gr.HTML(TTILE_IMAGE)
-    gr.HTML(TITLE)
-    gr.Markdown(DESCRIPTION)
+    with gr.Column():
+        gr.HTML(TTILE_IMAGE)
+        gr.HTML(TITLE)
+        gr.Markdown(DESCRIPTION)
 
     with gr.Column():
         input_token = gr.Textbox(max_lines=1, label="Hugging Face token")
@@ -123,10 +128,6 @@ with gr.Blocks() as demo:
         btn = gr.Button("Convert to ONNX")
         output = gr.Markdown(label="Output")
 
-    gr.Markdown("""
-    Note: in case the model to convert is larger than 2 GB, it will be saved in a subfolder called `onnx/`. To load it from Optimum, the argument `subfolder="onnx"` should be provided.
-    """)
-
     btn.click(
         fn=onnx_export, inputs=[input_token, input_model, input_task], outputs=output
     )
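
For reference, the layout this commit moves to ("better with two cols?") puts the title/description block in its own gr.Column() next to the column holding the inputs. A stripped-down, self-contained sketch of that structure; the placeholder title/description strings, the guessed labels for the model and task inputs, and the onnx_export stub are illustrative and not part of the commit:

# Stripped-down sketch of the two-column Blocks layout introduced by this commit.
# Placeholder strings, guessed input labels, and the onnx_export stub are illustrative only.
import gradio as gr

TITLE = "<h1>Convert transformers models to ONNX</h1>"  # placeholder
DESCRIPTION = "Enter a Hub model id to open an ONNX conversion PR."  # placeholder


def onnx_export(token: str, model_id: str, task: str) -> str:
    # Stub standing in for the Space's real conversion logic.
    return f"Would convert `{model_id}` (task: {task or 'auto'}) and open a PR."


with gr.Blocks() as demo:
    # First column: header and description.
    with gr.Column():
        gr.HTML(TITLE)
        gr.Markdown(DESCRIPTION)

    # Second column: user inputs and the conversion trigger.
    with gr.Column():
        input_token = gr.Textbox(max_lines=1, label="Hugging Face token")
        input_model = gr.Textbox(max_lines=1, label="Model id from the Hub")
        input_task = gr.Textbox(max_lines=1, label="Task (leave empty to auto-detect)")

        btn = gr.Button("Convert to ONNX")
        output = gr.Markdown(label="Output")

    btn.click(
        fn=onnx_export, inputs=[input_token, input_model, input_task], outputs=output
    )

demo.launch()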