Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
add onnx tag
Browse files- app.py +1 -1
- onnx_export.py +17 -2
app.py
CHANGED
@@ -40,7 +40,7 @@ def onnx_export(
|
|
40 |
|
41 |
api = HfApi(token=oauth_token.token)
|
42 |
|
43 |
-
error, commit_info = convert(api=api, model_id=model_id, task=task, opset=opset)
|
44 |
if error != "0":
|
45 |
return error
|
46 |
|
|
|
40 |
|
41 |
api = HfApi(token=oauth_token.token)
|
42 |
|
43 |
+
error, commit_info = convert(api=api, model_id=model_id, task=task, opset=opset, token=oauth_token.token)
|
44 |
if error != "0":
|
45 |
return error
|
46 |
|
onnx_export.py
CHANGED
@@ -6,6 +6,9 @@ from typing import List, Optional, Tuple
|
|
6 |
from huggingface_hub import (
|
7 |
CommitOperationAdd,
|
8 |
HfApi,
|
|
|
|
|
|
|
9 |
)
|
10 |
from huggingface_hub.file_download import repo_folder_name
|
11 |
from optimum.exporters.onnx import main_export
|
@@ -27,12 +30,13 @@ def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discuss
|
|
27 |
return discussion
|
28 |
|
29 |
|
30 |
-
def export_and_git_add(model_id: str, task: str, folder: str, opset: int) -> List:
|
31 |
main_export(
|
32 |
model_name_or_path=model_id,
|
33 |
output=folder,
|
34 |
task=task,
|
35 |
opset=opset,
|
|
|
36 |
)
|
37 |
|
38 |
n_files = len(
|
@@ -59,6 +63,16 @@ def export_and_git_add(model_id: str, task: str, folder: str, opset: int) -> Lis
|
|
59 |
for file_name in os.listdir(folder)
|
60 |
]
|
61 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
62 |
return operations
|
63 |
|
64 |
|
@@ -68,6 +82,7 @@ def convert(
|
|
68 |
task: str,
|
69 |
force: bool = False,
|
70 |
opset: int = None,
|
|
|
71 |
) -> Tuple[int, "CommitInfo"]:
|
72 |
pr_title = "Adding ONNX file of this model"
|
73 |
info = api.model_info(model_id)
|
@@ -92,7 +107,7 @@ def convert(
|
|
92 |
f"Model {model_id} already has an open PR check out [{url}]({url})"
|
93 |
)
|
94 |
else:
|
95 |
-
operations = export_and_git_add(model_id, task, folder, opset)
|
96 |
|
97 |
commit_description = f"""
|
98 |
Beep boop I am the [ONNX export bot 🤖🏎️]({SPACES_URL}). On behalf of [{requesting_user}](https://huggingface.co/{requesting_user}), I would like to add to this repository the model converted to ONNX.
|
|
|
6 |
from huggingface_hub import (
|
7 |
CommitOperationAdd,
|
8 |
HfApi,
|
9 |
+
ModelCard,
|
10 |
+
Discussion,
|
11 |
+
CommitInfo,
|
12 |
)
|
13 |
from huggingface_hub.file_download import repo_folder_name
|
14 |
from optimum.exporters.onnx import main_export
|
|
|
30 |
return discussion
|
31 |
|
32 |
|
33 |
+
def export_and_git_add(model_id: str, task: str, folder: str, opset: int, token: str) -> List:
|
34 |
main_export(
|
35 |
model_name_or_path=model_id,
|
36 |
output=folder,
|
37 |
task=task,
|
38 |
opset=opset,
|
39 |
+
token=token,
|
40 |
)
|
41 |
|
42 |
n_files = len(
|
|
|
63 |
for file_name in os.listdir(folder)
|
64 |
]
|
65 |
|
66 |
+
try:
|
67 |
+
card = ModelCard.load(model_id, token=token)
|
68 |
+
if card.data.tags is None:
|
69 |
+
card.data.tags = []
|
70 |
+
card.data.tags.append("onnx")
|
71 |
+
card.save(os.path.join(folder, "README.md"))
|
72 |
+
operations.append(CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=os.path.join(folder, "README.md")))
|
73 |
+
except:
|
74 |
+
pass
|
75 |
+
|
76 |
return operations
|
77 |
|
78 |
|
|
|
82 |
task: str,
|
83 |
force: bool = False,
|
84 |
opset: int = None,
|
85 |
+
token: str = None,
|
86 |
) -> Tuple[int, "CommitInfo"]:
|
87 |
pr_title = "Adding ONNX file of this model"
|
88 |
info = api.model_info(model_id)
|
|
|
107 |
f"Model {model_id} already has an open PR check out [{url}]({url})"
|
108 |
)
|
109 |
else:
|
110 |
+
operations = export_and_git_add(model_id, task, folder, opset, token)
|
111 |
|
112 |
commit_description = f"""
|
113 |
Beep boop I am the [ONNX export bot 🤖🏎️]({SPACES_URL}). On behalf of [{requesting_user}](https://huggingface.co/{requesting_user}), I would like to add to this repository the model converted to ONNX.
|