import csv
import os
from datetime import datetime
from typing import Optional
import gradio as gr
from huggingface_hub import HfApi, Repository
from optimum_neuron_export import convert
from gradio_huggingfacehub_search import HuggingfaceHubSearch
from apscheduler.schedulers.background import BackgroundScheduler
DATASET_REPO_URL = "https://huggingface.co/datasets/optimum/neuron-exports"
DATA_FILENAME = "exports.csv"
DATA_FILE = os.path.join("data", DATA_FILENAME)
HF_TOKEN = os.environ.get("HF_WRITE_TOKEN")
DATADIR = "neuron_exports_data"
repo: Optional[Repository] = None
# Uncomment if you want to push to the dataset repo with a token:
# if HF_TOKEN:
#     repo = Repository(local_dir=DATADIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
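# `convert` (imported from optimum_neuron_export) is assumed here to return a tuple of
# (error, commit_info): an error string that is "0" on success, and a commit-info object
# exposing `pr_url` and `pr_revision`, as used in neuron_export() below.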
def neuron_export(model_id: str, task: str) -> str:
    """Export `model_id` to the AWS Neuron format and return a Markdown status message."""
    if not model_id:
        return f"### Invalid input 🐞 Please specify a model name, got {model_id}"

    try:
        api = HfApi(token=HF_TOKEN)  # Use HF_TOKEN if available, else anonymous
        token = HF_TOKEN  # Pass token to convert only if available

        error, commit_info = convert(api=api, model_id=model_id, task=task, token=token)
        if error != "0":
            return error

        print("[commit_info]", commit_info)

        # Save in a private dataset if the repo was initialized
        if repo is not None:
            repo.git_pull(rebase=True)
            with open(os.path.join(DATADIR, DATA_FILE), "a") as csvfile:
                writer = csv.DictWriter(
                    csvfile, fieldnames=["model_id", "pr_url", "time"]
                )
                writer.writerow(
                    {
                        "model_id": model_id,
                        "pr_url": commit_info.pr_url,
                        "time": str(datetime.now()),
                    }
                )
            commit_url = repo.push_to_hub()
            print("[dataset]", commit_url)

        pr_revision = commit_info.pr_revision.replace("/", "%2F")
        return (
            f"#### Success 🔥 This model was successfully exported and a PR was opened: "
            f"[{commit_info.pr_url}]({commit_info.pr_url}). To use the model before the PR is approved, "
            f"go to https://huggingface.co/{model_id}/tree/{pr_revision}"
        )
    except Exception as e:
        return f"#### Error: {e}"
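# Illustrative sketch only (the helper name `read_export_log` is hypothetical and is not
# used by the Space): reads back the rows appended above. Since the writer never emits a
# header row, the field names are passed to DictReader explicitly.
def read_export_log(path: str = os.path.join(DATADIR, DATA_FILE)) -> list:
    if not os.path.exists(path):
        return []
    with open(path, newline="") as csvfile:
        reader = csv.DictReader(csvfile, fieldnames=["model_id", "pr_url", "time"])
        return list(reader)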
TITLE_IMAGE = """
"""
TITLE = """
<div style="text-align: center;">
    <h1>🤗 Optimum Neuron Model Exporter (WIP)</h1>
</div>
"""
DESCRIPTION = """
Export 🤗 Transformers models hosted on the Hugging Face Hub to the AWS Neuron-optimized format for Inferentia/Trainium acceleration.

**Features:**
- Automatically opens a PR with the Neuron-optimized model
- Preserves the original model weights
- Adds proper tags to the model card

**Note:**
- PR creation requires the Space owner to set a valid write token in the `HF_WRITE_TOKEN` environment variable
"""
# Custom CSS to fix dark mode compatibility and transparency issues
CUSTOM_CSS = """
/* Fix for HuggingfaceHubSearch component visibility in both light and dark modes */
.gradio-container .gr-form {
    background: var(--background-fill-primary) !important;
    border: 1px solid var(--border-color-primary) !important;
}

/* Ensure text is visible in both modes */
.gradio-container input[type="text"],
.gradio-container textarea,
.gradio-container .gr-textbox input {
    color: var(--body-text-color) !important;
    background: var(--input-background-fill) !important;
    border: 1px solid var(--border-color-primary) !important;
}

/* Fix dropdown/search results visibility */
.gradio-container .gr-dropdown,
.gradio-container .gr-dropdown .gr-box,
.gradio-container [data-testid="textbox"] {
    background: var(--background-fill-primary) !important;
    color: var(--body-text-color) !important;
    border: 1px solid var(--border-color-primary) !important;
}

/* Fix for search component specifically */
.gradio-container .gr-form > div,
.gradio-container .gr-form input {
    background: var(--input-background-fill) !important;
    color: var(--body-text-color) !important;
}

/* Ensure proper contrast for placeholder text */
.gradio-container input::placeholder {
    color: var(--body-text-color-subdued) !important;
    opacity: 0.7;
}

/* Fix any remaining transparent backgrounds */
.gradio-container .gr-box,
.gradio-container .gr-panel {
    background: var(--background-fill-primary) !important;
}

/* Make sure search results are visible */
.gradio-container .gr-dropdown-item {
    color: var(--body-text-color) !important;
    background: var(--background-fill-primary) !important;
}

.gradio-container .gr-dropdown-item:hover {
    background: var(--background-fill-secondary) !important;
}
"""
with gr.Blocks(css=CUSTOM_CSS) as demo:
    # Centered title and image
    gr.HTML(TITLE_IMAGE)
    gr.HTML(TITLE)

    # Full-width description
    gr.Markdown(DESCRIPTION)

    # Input controls in a row at the bottom
    with gr.Row():
        input_model = HuggingfaceHubSearch(
            label="Hub model ID",
            placeholder="Search for model ID on the hub",
            search_type="model",
        )
        input_task = gr.Textbox(
            value="auto",
            max_lines=1,
            label='Task (can be left to "auto", will be automatically inferred)',
        )

    # Export button below the inputs
    btn = gr.Button("Export to Neuron", size="lg")

    # Output section
    output = gr.Markdown(label="Output")

    btn.click(
        fn=neuron_export,
        inputs=[input_model, input_task],
        outputs=output,
    )
if __name__ == "__main__":

    def restart_space():
        if HF_TOKEN:
            HfApi().restart_space(
                repo_id="optimum/neuron-export", token=HF_TOKEN, factory_reboot=True
            )

    # Periodically restart the Space (every 21600 seconds = 6 hours)
    scheduler = BackgroundScheduler()
    scheduler.add_job(restart_space, "interval", seconds=21600)
    scheduler.start()

    demo.launch()
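# Local smoke test (sketch, assumes HF_WRITE_TOKEN is set and the Hub is reachable;
# the model id below is only an example):
#   print(neuron_export("distilbert-base-uncased", "auto"))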