Felix Marty committed on
Commit bf7786d • 1 Parent(s): 1569942

hopefully better style

Files changed (2)
  1. app.py +21 -3
  2. clean_hf_onnx.png +0 -0
app.py CHANGED
@@ -60,7 +60,11 @@ DESCRIPTION = """
  <img src="https://huggingface.co/spaces/optimum/exporters/resolve/main/clean_hf_onnx.png"/>
  </p>
 
- This Space allows you to automatically convert to ONNX transformers models hosted on the Hugging Face Hub. It opens a PR on the target model, and it is up to the owner of the original model
+ <p align="center">
+ # Convert any PyTorch model to ONNX with 🤗 Optimum exporters 🏎️
+ </p>
+
+ This Space allows you to automatically convert to ONNX 🤗 transformers models hosted on the Hugging Face Hub. It opens a PR on the target model, and it is up to the owner of the original model
  to merge the PR to allow people to leverage the ONNX standard to share and use the model on a wide range of devices!
 
  Once converted, the model can for example be used in the [🤗 Optimum](https://huggingface.co/docs/optimum/) library, closely following the transformers API.
@@ -76,11 +80,24 @@ The steps are the following:
  Note: in case the model to convert is larger than 2 GB, it will be saved in a subfolder called `onnx/`. To load it from Optimum, the argument `subfolder="onnx"` should be provided.
  """
 
+ with gr.Blocks() as demo:
+     gr.Markdown(DESCRIPTION)
+
+     with gr.Row():
+         input_token = gr.Textbox(max_lines=1, label="Hugging Face token")
+         input_model = gr.Textbox(max_lines=1, label="Model name", placeholder="textattack/distilbert-base-cased-CoLA")
+         input_task = gr.Textbox(value="auto", max_lines=1, label="Task (can be left blank, will be automatically inferred)")
+     output = gr.Markdown(label="Output")
+
+     btn = gr.Button("Convert to ONNX")
+     btn.click(fn=onnx_export, inputs=[input_token, input_model, input_task], outputs=output)
+
+ """
  demo = gr.Interface(
-     title="Convert any PyTorch model to ONNX with 🤗 Optimum Exporters 🏎️",
+     title="",
      description=DESCRIPTION,
      allow_flagging="never",
-     article="Check out the [Optimum repo on GitHub](https://github.com/huggingface/optimum)",
+     article="Check out the [🤗 Optimum repository on GitHub](https://github.com/huggingface/optimum) as well!",
      inputs=[
          gr.Text(max_lines=1, label="Hugging Face token"),
          gr.Text(max_lines=1, label="Model name", placeholder="textattack/distilbert-base-cased-CoLA"),
@@ -89,5 +106,6 @@ demo = gr.Interface(
      outputs=[gr.Markdown(label="output")],
      fn=onnx_export,
  )
+ """
 
  demo.launch()
clean_hf_onnx.png CHANGED
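
The `subfolder="onnx"` note in the Space's description above corresponds to how a converted model is later loaded with 🤗 Optimum's ONNX Runtime classes. The following is a minimal sketch, not part of this commit: the model id is the placeholder used by the Space itself, and it assumes the ONNX export PR has already been merged and that the export landed in an `onnx/` subfolder (which only happens for models larger than 2 GB).

from optimum.onnxruntime import ORTModelForSequenceClassification
from transformers import AutoTokenizer

# Placeholder repository id (the Space's own example); assumes its ONNX export PR was merged.
model_id = "textattack/distilbert-base-cased-CoLA"

tokenizer = AutoTokenizer.from_pretrained(model_id)
# subfolder="onnx" is only needed when the export was saved under onnx/ (models > 2 GB);
# for smaller models the export sits at the repository root and the argument can be dropped.
model = ORTModelForSequenceClassification.from_pretrained(model_id, subfolder="onnx")

inputs = tokenizer("This Space exports models to ONNX.", return_tensors="pt")
logits = model(**inputs).logits
print(logits)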