add sign in with hf button

Files changed:
- app.py (+21 -25)
- onnx_export.py (+3 -1)
- requirements.txt (+2 -0)
app.py
CHANGED

```diff
@@ -7,7 +7,7 @@ import gradio as gr
 from huggingface_hub import HfApi, Repository
 
 from onnx_export import convert
-
+from gradio_huggingfacehub_search import HuggingfaceHubSearch
 from apscheduler.schedulers.background import BackgroundScheduler
 
 DATASET_REPO_URL = "https://huggingface.co/datasets/optimum/exporters"
@@ -23,20 +23,22 @@ repo: Optional[Repository] = None
 # repo = Repository(local_dir=DATADIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
 
 
-def onnx_export(token: str, model_id: str, task: str, opset: Union[int, str]) -> str:
-    if token == "" or model_id == "":
-        return """
-        ### Invalid input 🐞
+def onnx_export(
+    model_id: str, task: str, opset: Union[int, str], oauth_token: gr.OAuthToken
+) -> str:
+    if oauth_token.token is None:
+        return "You must be logged in to use this space"
+
+    if not model_id:
+        return f"### Invalid input 🐞 Please specify a model name, got {model_id}"
 
-        Please fill a token and model name.
-        """
     try:
         if opset == "":
             opset = None
         else:
             opset = int(opset)
 
-        api = HfApi(token=token)
+        api = HfApi(token=oauth_token.token)
 
         error, commit_info = convert(api=api, model_id=model_id, task=task, opset=opset)
         if error != "0":
@@ -100,22 +102,19 @@ TITLE = """
 
 # for some reason https://huggingface.co/settings/tokens is not showing as a link by default?
 DESCRIPTION = """
-This Space allows you to automatically export 🤗 transformers PyTorch models hosted on the Hugging Face Hub to [ONNX](https://onnx.ai/). It opens a PR on the target model, and it is up to the owner of the original model
+This Space allows you to automatically export 🤗 transformers, diffusers, timm and sentence-transformers PyTorch models hosted on the Hugging Face Hub to [ONNX](https://onnx.ai/). It opens a PR on the target model, and it is up to the owner of the original model
 to merge the PR to allow people to leverage the ONNX standard to share and use the model on a wide range of devices!
 
 Once exported, the model can, for example, be used in the [🤗 Optimum](https://huggingface.co/docs/optimum/) library closely following the transformers API.
 Check out [this guide](https://huggingface.co/docs/optimum/main/en/onnxruntime/usage_guides/models) to see how!
 
-The steps are as following:
-- Paste a read-access token from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens). Read access is enough given that we will open a PR against the source repo.
-- Input a model id from the Hub (for example: [textattack/distilbert-base-cased-CoLA](https://huggingface.co/textattack/distilbert-base-cased-CoLA))
-- Click "Export to ONNX"
-- That's it! You'll get feedback on if the export was successful or not, and if it was, you'll get the URL of the opened PR!
-
 Note: in case the model to export is larger than 2 GB, it will be saved in a subfolder called `onnx/`. To load it from Optimum, the argument `subfolder="onnx"` should be provided.
 """
 
 with gr.Blocks() as demo:
+    gr.Markdown("You must be logged to use this space")
+    gr.LoginButton(min_width=250)
+
     gr.HTML(TTILE_IMAGE)
     gr.HTML(TITLE)
 
@@ -124,14 +123,10 @@ with gr.Blocks() as demo:
            gr.Markdown(DESCRIPTION)
 
        with gr.Column(scale=50):
-            input_token = gr.Textbox(
-                max_lines=1,
-                label="Hugging Face token",
-            )
-            input_model = gr.Textbox(
-                max_lines=1,
-                label="Model name",
-                placeholder="textattack/distilbert-base-cased-CoLA",
+            input_model = HuggingfaceHubSearch(
+                label="Hub model ID",
+                placeholder="Search for model ID on the hub",
+                search_type="model",
             )
             input_task = gr.Textbox(
                 value="auto",
@@ -143,19 +138,20 @@ with gr.Blocks() as demo:
                 max_lines=1,
                 label="ONNX opset (optional, can be left blank)",
             )
-
            btn = gr.Button("Export to ONNX")
            output = gr.Markdown(label="Output")
 
            btn.click(
                fn=onnx_export,
-                inputs=[input_token, input_model, input_task, onnx_opset],
+                inputs=[input_model, input_task, onnx_opset],
                outputs=output,
            )
 
+
 def restart_space():
     HfApi().restart_space(repo_id="onnx/export", token=HF_TOKEN, factory_reboot=True)
 
+
 scheduler = BackgroundScheduler()
 scheduler.add_job(restart_space, "interval", seconds=21600)
 scheduler.start()
```
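The heart of this change is Gradio's built-in Hugging Face OAuth: a `gr.LoginButton` in the UI, plus a parameter annotated with `gr.OAuthToken` that Gradio fills in from the user's session rather than from `inputs=`. Below is a minimal standalone sketch of that pattern; the function and widget names are illustrative, and running it as a Space requires `hf_oauth: true` in the Space's README metadata. (Note that Gradio's documented behavior is to pass `None` for the whole parameter when the user is logged out, so the sketch checks the parameter itself rather than its `.token` attribute.)

```python
import gradio as gr

def whoami(oauth_token: gr.OAuthToken | None) -> str:
    # Gradio injects the session's token here; it is not part of `inputs`.
    if oauth_token is None:
        return "Please sign in with the Hugging Face button first."
    # The raw access token (e.g. for HfApi) lives on the .token attribute.
    return f"Logged in; token starts with {oauth_token.token[:7]}"

with gr.Blocks() as demo:
    gr.LoginButton(min_width=250)
    btn = gr.Button("Check login")
    out = gr.Markdown()
    # No inputs: the OAuthToken argument is resolved from the session.
    btn.click(fn=whoami, inputs=None, outputs=out)

demo.launch()
```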
onnx_export.py
CHANGED

```diff
@@ -82,7 +82,9 @@ def convert(
     try:
         pr = previous_pr(api, model_id, pr_title)
         if "model.onnx" in filenames and not force:
-            raise Exception(f"Model {model_id} is already converted, skipping the export.")
+            raise Exception(
+                f"Model {model_id} is already converted, skipping the export."
+            )
         elif pr is not None and not force:
             url = f"https://huggingface.co/{model_id}/discussions/{pr.num}"
             new_pr = pr
```
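For context, `filenames` holds the files already present on the target repo, and the guard refuses to re-export a model that already ships `model.onnx`. A rough sketch of the same check using `huggingface_hub` directly, assuming `list_repo_files` is how `filenames` gets populated (this hunk does not show that part):

```python
from huggingface_hub import HfApi

api = HfApi()
model_id = "textattack/distilbert-base-cased-CoLA"  # illustrative model id

# List all files in the model repo on the Hub.
filenames = set(api.list_repo_files(model_id))

if "model.onnx" in filenames:
    # Same early exit as convert(): the repo already has an ONNX export.
    raise Exception(f"Model {model_id} is already converted, skipping the export.")
```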
requirements.txt
CHANGED

```diff
@@ -5,3 +5,5 @@ diffusers
 sentence-transformers
 timm
 APScheduler
+gradio[oauth]>=4.37.2
+gradio_huggingfacehub_search==0.0.6
```
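As the Space's description notes, once the export PR is merged the model can be loaded through 🤗 Optimum with an API that mirrors transformers, and exports larger than 2 GB land in an `onnx/` subfolder. A hedged sketch of what that looks like for a sequence-classification checkpoint; the model id is the example from the description and assumes its ONNX PR was actually merged:

```python
from optimum.onnxruntime import ORTModelForSequenceClassification
from transformers import AutoTokenizer

# Example model id from the Space's description; assumes the repo
# contains merged ONNX weights.
model_id = "textattack/distilbert-base-cased-CoLA"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = ORTModelForSequenceClassification.from_pretrained(model_id)
# For exports larger than 2 GB the weights live under onnx/ instead:
# model = ORTModelForSequenceClassification.from_pretrained(model_id, subfolder="onnx")

inputs = tokenizer("This sentence is grammatical.", return_tensors="pt")
logits = model(**inputs).logits
```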