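"""Gradio Space that exports Hugging Face Hub models to the AWS Neuron format.

It opens a pull request on the source model repository with the Neuron-optimized
artifacts and, when a dataset repo is configured, logs each export to a CSV file.
"""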
import csv
import os
from datetime import datetime
from typing import Optional

import gradio as gr
from huggingface_hub import HfApi, Repository

from optimum_neuron_export import convert
from gradio_huggingfacehub_search import HuggingfaceHubSearch
from apscheduler.schedulers.background import BackgroundScheduler

DATASET_REPO_URL = "https://huggingface.co/datasets/optimum/neuron-exports"
DATA_FILENAME = "exports.csv"
DATA_FILE = os.path.join("data", DATA_FILENAME)

HF_TOKEN = os.environ.get("HF_WRITE_TOKEN")

DATADIR = "neuron_exports_data"

repo: Optional[Repository] = None
# Uncomment if you want to push to dataset repo with token
# if HF_TOKEN:
#     repo = Repository(local_dir=DATADIR, clone_from=DATASET_REPO_URL, token=HF_TOKEN)
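# When enabled, each successful export is appended to DATA_FILE and pushed back to the dataset repo.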


def neuron_export(model_id: str, task: str, oauth_token: Optional[gr.OAuthToken] = None) -> str:
    if oauth_token is None or oauth_token.token is None:
        return "You must be logged in to use this space"

    if not model_id:
        return f"### Invalid input 🐞 Please specify a model name, got {model_id}"

    try:
        api = HfApi(token=oauth_token.token)

        error, commit_info = convert(api=api, model_id=model_id, task=task, token=oauth_token.token)
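        # convert() returns an error message ("0" means success) plus the commit info of the PR it opens.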
        if error != "0":
            return error

        print("[commit_info]", commit_info)

        # Save in a private dataset if repo initialized
        if repo is not None:
            repo.git_pull(rebase=True)
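            # Append the export record; the CSV header is assumed to already exist in the dataset repo.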
            with open(os.path.join(DATADIR, DATA_FILE), "a") as csvfile:
                writer = csv.DictWriter(
                    csvfile, fieldnames=["model_id", "pr_url", "time"]
                )
                writer.writerow(
                    {
                        "model_id": model_id,
                        "pr_url": commit_info.pr_url,
                        "time": str(datetime.now()),
                    }
                )
            commit_url = repo.push_to_hub()
            print("[dataset]", commit_url)

        # PR revisions contain slashes (e.g. "refs/pr/<n>"), so URL-encode them for the tree link below.
        pr_revision = commit_info.pr_revision.replace("/", "%2F")
        return f"#### Success 🔥 Yay! This model was successfully exported and a PR was opened using your token: [{commit_info.pr_url}]({commit_info.pr_url}). If you would like to use the exported model without waiting for the PR to be approved, head to https://huggingface.co/{model_id}/tree/{pr_revision}"

    except Exception as e:
        return f"#### Error: {e}"


TITLE_IMAGE = """
<div style="display: block; margin-left: auto; margin-right: auto; width: 50%;">
<img src="https://huggingface.co/spaces/optimum/neuron-export/resolve/main/huggingfaceXneuron.png"/>
</div>
"""

TITLE = """
<div style="text-align: center; max-width: 1400px; margin: 0 auto;">
<h1 style="font-weight: 900; margin-bottom: 10px; margin-top: 10px; font-size: 2.2rem;">
    🤗 Optimum Neuron Model Exporter 🏎️ (WIP)
</h1>
</div>
"""

DESCRIPTION = """
This Space automatically exports 🤗 transformers models hosted on the Hugging Face Hub to the AWS Neuron-optimized format for Inferentia/Trainium acceleration. It opens a PR on the target model; it is then up to the model owner to merge it so that everyone can benefit from the Neuron optimization!

**Features:**
- Automatically opens PR with Neuron-optimized model
- Preserves original model weights
- Adds proper tags to model card

**Requirements:**
- Model must be compatible with [Optimum Neuron](https://huggingface.co/docs/optimum-neuron)
- You must be logged in with a write token
"""

# Custom CSS to fix dark mode compatibility and transparency issues
CUSTOM_CSS = """
/* Fix for HuggingfaceHubSearch component visibility in both light and dark modes */
.gradio-container .gr-form {
    background: var(--background-fill-primary) !important;
    border: 1px solid var(--border-color-primary) !important;
}

/* Ensure text is visible in both modes */
.gradio-container input[type="text"], 
.gradio-container textarea,
.gradio-container .gr-textbox input {
    color: var(--body-text-color) !important;
    background: var(--input-background-fill) !important;
    border: 1px solid var(--border-color-primary) !important;
}

/* Fix dropdown/search results visibility */
.gradio-container .gr-dropdown,
.gradio-container .gr-dropdown .gr-box,
.gradio-container [data-testid="textbox"] {
    background: var(--background-fill-primary) !important;
    color: var(--body-text-color) !important;
    border: 1px solid var(--border-color-primary) !important;
}

/* Fix for search component specifically */
.gradio-container .gr-form > div,
.gradio-container .gr-form input {
    background: var(--input-background-fill) !important;
    color: var(--body-text-color) !important;
}

/* Ensure proper contrast for placeholder text */
.gradio-container input::placeholder {
    color: var(--body-text-color-subdued) !important;
    opacity: 0.7;
}

/* Fix any remaining transparent backgrounds */
.gradio-container .gr-box,
.gradio-container .gr-panel {
    background: var(--background-fill-primary) !important;
}

/* Make sure search results are visible */
.gradio-container .gr-dropdown-item {
    color: var(--body-text-color) !important;
    background: var(--background-fill-primary) !important;
}

.gradio-container .gr-dropdown-item:hover {
    background: var(--background-fill-secondary) !important;
}
"""

with gr.Blocks(css=CUSTOM_CSS) as demo:
    # Login requirement notice and button
    gr.Markdown("**You must be logged in to use this space**")
    gr.LoginButton(min_width=250)
    
    # Centered title and image
    gr.HTML(TITLE_IMAGE)
    gr.HTML(TITLE)
    
    # Full-width description
    gr.Markdown(DESCRIPTION)
    
    # Input controls in a row at the bottom
    with gr.Row():
        input_model = HuggingfaceHubSearch(
            label="Hub model ID",
            placeholder="Search for model ID on the hub",
            search_type="model",
        )
        input_task = gr.Textbox(
            value="auto",
            max_lines=1,
            label='Task (can be left as "auto"; it will be inferred automatically)',
        )
    
    # Export button below the inputs
    btn = gr.Button("Export to Neuron", size="lg")
    
    # Output section
    output = gr.Markdown(label="Output")

    btn.click(
        fn=neuron_export,
        inputs=[input_model, input_task],
        outputs=output,
    )


if __name__ == "__main__":
    def restart_space():
        if HF_TOKEN:
            HfApi().restart_space(repo_id="optimum/neuron-export", token=HF_TOKEN, factory_reboot=True)

    scheduler = BackgroundScheduler()
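    # 21600 seconds = restart every 6 hours.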
    scheduler.add_job(restart_space, "interval", seconds=21600)
    scheduler.start()

    demo.launch()