Create optimum_neuron_export.py
optimum_neuron_export.py  ADDED  (+185 -0)
@@ -0,0 +1,185 @@
import os
import shutil
from tempfile import TemporaryDirectory
from typing import List, Optional, Tuple, Dict, Any
from huggingface_hub import (
    CommitOperationAdd,
    HfApi,
    ModelCard,
    Discussion,
    CommitInfo,
)
from huggingface_hub.file_download import repo_folder_name
from optimum.exporters.neuron import main_export
from optimum.exporters.tasks import TasksManager

SPACES_URL = "https://huggingface.co/spaces/optimum/neuron-export"

def get_default_compiler_kwargs() -> Dict[str, Any]:
    """
    Get the default compiler kwargs for a Neuron export.
    Based on the infer_compiler_kwargs function from the original code.
    """
    return {
        "auto_cast": None,  # Default to None (equivalent to "none")
        "auto_cast_type": None,
        # Add other compiler-specific kwargs if needed:
        # "disable_fast_relayout": False,  # Only available for certain compilers
        # "disable_fallback": False,  # Only available for certain compilers
    }

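These defaults leave the compiler's casting behavior untouched. For illustration, a minimal sketch of overriding them before an export; the "matmul"/"bf16" values follow optimum-neuron's documented auto-cast options, but verify them against your installed version:

# Sketch (not part of this file): cast matmul operations to bf16
compiler_kwargs = get_default_compiler_kwargs()
compiler_kwargs.update({"auto_cast": "matmul", "auto_cast_type": "bf16"})
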
def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discussion"]:
    try:
        discussions = api.get_repo_discussions(repo_id=model_id)
    except Exception:
        return None
    for discussion in discussions:
        if (
            discussion.status == "open"
            and discussion.is_pull_request
            and discussion.title == pr_title
        ):
            return discussion
    return None

def export_and_git_add(model_id: str, task: str, folder: str, token: str) -> List[CommitOperationAdd]:
    # Get the default compiler kwargs
    compiler_kwargs = get_default_compiler_kwargs()

    # Infer the task if it was left as "auto"
    if task == "auto":
        try:
            task = TasksManager.infer_task_from_model(model_id)
        except Exception as e:
            raise Exception(f"Could not infer task for model {model_id}: {e}")

    print(f"Exporting model {model_id} with task: {task}")

    try:
        # Call main_export with all required parameters
        main_export(
            model_name_or_path=model_id,
            output=folder,
            compiler_kwargs=compiler_kwargs,
            task=task,
            token=token,
            # Commonly needed parameters with sensible defaults
            torch_dtype=None,  # Let it use the model's default
            dynamic_batch_size=False,
            do_validation=False,  # Disable validation for now to avoid issues
            trust_remote_code=False,
            force_download=False,
            local_files_only=False,
            # Default optimization level (O2 is the default from the original code)
            optlevel="2",
            # Other defaults
            tensor_parallel_size=1,
            disable_neuron_cache=False,
            inline_weights_to_neff=True,
            output_attentions=False,
            output_hidden_states=False,
            # Static input shapes: Neuron compilation traces the model with
            # fixed shapes, so these get baked into the compiled artifact
            batch_size=1,
            sequence_length=128,
        )
        print("Export completed successfully")
    except Exception as e:
        print(f"Export failed with error: {e}")
        raise

    operations = [
        CommitOperationAdd(
            path_in_repo=os.path.join("neuron", file_name),
            path_or_fileobj=os.path.join(folder, file_name),
        )
        for file_name in os.listdir(folder)
        if os.path.isfile(os.path.join(folder, file_name))  # Only add files, not directories
    ]

    try:
        card = ModelCard.load(model_id, token=token)
        if card.data.tags is None:
            card.data.tags = []
        if "neuron" not in card.data.tags:
            card.data.tags.append("neuron")
        card.save(os.path.join(folder, "README.md"))
        operations.append(
            CommitOperationAdd(
                path_in_repo="README.md",
                path_or_fileobj=os.path.join(folder, "README.md"),
            )
        )
    except Exception as e:
        # A missing or malformed model card should not abort the export
        print(f"Warning: Could not update model card: {e}")

    return operations

def convert(
    api: "HfApi",
    model_id: str,
    task: str,
    force: bool = False,
    token: Optional[str] = None,
) -> Tuple[str, Any]:
    # Returns ("0", CommitInfo) on success, ("1", error message) on failure
    pr_title = "Adding Neuron-optimized model files"
    info = api.model_info(model_id, token=token)
    filenames = set(s.rfilename for s in info.siblings)
    requesting_user = api.whoami(token=token)["name"]

    with TemporaryDirectory() as d:
        folder = os.path.join(d, repo_folder_name(repo_id=model_id, repo_type="models"))
        os.makedirs(folder, exist_ok=True)
        new_pr = None

        try:
            pr = previous_pr(api, model_id, pr_title)

            if any(fname.startswith("neuron/") for fname in filenames) and not force:
                raise Exception(
                    f"Model {model_id} already has Neuron files, skipping export."
                )
            elif pr is not None and not force:
                url = f"https://huggingface.co/{model_id}/discussions/{pr.num}"
                new_pr = pr
                raise Exception(
                    f"Model {model_id} already has an open PR: [{url}]({url})"
                )
            else:
                operations = export_and_git_add(model_id, task, folder, token=token)

                if not operations:
                    raise Exception("No files were generated during export")

                commit_description = f"""
🤖 Neuron Export Bot: On behalf of [{requesting_user}](https://huggingface.co/{requesting_user}), adding AWS Neuron-optimized model files.

Neuron-optimized models can achieve high-performance inference on AWS Inferentia and Trainium chips. Learn more:
- [AWS Neuron Documentation](https://awsdocs-neuron.readthedocs-hosted.com)
- [🤗 Optimum Neuron Guide](https://huggingface.co/docs/optimum-neuron/index)
"""

                new_pr = api.create_commit(
                    repo_id=model_id,
                    operations=operations,
                    commit_message=pr_title,
                    commit_description=commit_description,
                    create_pr=True,
                    token=token,
                )

        except Exception as e:
            print(f"Conversion failed with error: {e}")  # Print the actual error
            return "1", str(e)  # Return error code and message

        finally:
            # Ensure cleanup on every path; the finally block also covers the
            # error path, and TemporaryDirectory removes the rest on exit
            if os.path.exists(folder):
                shutil.rmtree(folder, ignore_errors=True)

    return "0", new_pr
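For context, a minimal sketch of driving this module end to end, e.g. from a Space's app code; the entry point, token handling, and model id are assumptions, not part of this file:

# Sketch (not part of this file): calling convert from a driver script
import os
from huggingface_hub import HfApi
from optimum_neuron_export import convert

token = os.environ["HF_TOKEN"]  # hypothetical: a write token for the bot account
api = HfApi(token=token)
status, result = convert(api, "distilbert-base-uncased", task="auto", token=token)
if status == "0":
    print(f"PR created: {result.pr_url}")  # success: result is the CommitInfo
else:
    print(f"Export failed: {result}")  # failure: result is the error message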