File size: 3,395 Bytes
867cb11
 
 
 
 
 
 
 
4545835
 
867cb11
 
4545835
 
 
867cb11
4545835
 
 
 
 
867cb11
 
 
 
 
 
 
 
 
 
4545835
 
 
867cb11
4545835
867cb11
 
4545835
867cb11
4545835
867cb11
 
 
4545835
 
 
 
 
 
867cb11
4545835
 
867cb11
 
4545835
867cb11
 
 
4545835
867cb11
4545835
867cb11
 
 
4545835
867cb11
4545835
867cb11
 
 
 
4545835
 
 
 
 
 
867cb11
 
 
 
 
4545835
867cb11
 
4545835
 
 
867cb11
 
 
4545835
867cb11
 
 
4545835
867cb11
4545835
 
 
867cb11
 
4545835
 
867cb11
 
 
4545835
867cb11
4545835
 
 
 
 
 
867cb11
 
 
 
 
 
 
 
4545835
867cb11
4545835
 
 
867cb11
 
 
4545835
867cb11
 
 
 
4545835
 
867cb11
 
 
 
 
4545835
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "marimo-labs==0.1.0",
#     "marimo",
# ]
# ///

import marimo

__generated_with = "0.11.5"  # marimo version that generated this file
app = marimo.App(width="medium")  # notebook app object; cells below register themselves on it


@app.cell
def _():
    # Expose marimo to downstream cells under its conventional alias.
    import marimo as mo

    return (mo,)


@app.cell(hide_code=True)
def _(mo):
    # Form asking for a HuggingFace model repo; pre-filled from the
    # ?model= query parameter when one is present in the URL.
    _params = mo.query_params()
    default_model = _params.get("model") or ""
    _text = mo.ui.text(
        label="Model repo",
        full_width=True,
        value=default_model,
        placeholder="e.g. gpt2, bert-base-uncased",
    )
    model_repo = _text.form(bordered=False)
    model_repo
    return default_model, model_repo


@app.cell(hide_code=True)
def _(mo):
    # Intro blurb with a few starter model suggestions.
    _intro = mo.md(
        r"""
        Interactively try out AI models hosted on HuggingFace!

        Some popular models to try:

        - **Feature Extraction**: `julien-c/distilbert-feature-extraction`
        - **Text to Speech**: `julien-c/ljspeech_tts_train_tacotron2_raw_phn_tacotron_g2p_en_no_space_train`
        - **Text to Image**: `runwayml/stable-diffusion-v1-5`
        """
    )
    _intro
    return


@app.cell(hide_code=True)
def _(mo):
    # Collapsible list of additional example repos, grouped by modality.
    _more_models = mo.md("""
    _Audio Models_

    - **Audio Classification**: `ehcalabres/wav2vec2-lg-xlsr-en-speech-emotion-recognition`
    - **Audio to Audio**: `facebook/xm_transformer_sm_all-en`
    - **Speech Recognition**: `facebook/wav2vec2-base-960h`

    _Image Models_

    - **Image Classification**: `google/vit-base-patch16-224`
    - **Image to Text**: `Salesforce/blip-image-captioning-base`
    - **Object Detection**: `microsoft/table-transformer-detection`

    _Text Models_

    - **Fill Mask**: `distilbert/distilbert-base-uncased`
    - **Zero-shot Classification**: `facebook/bart-large-mnli`
    - **Sentence Similarity**: `sentence-transformers/all-MiniLM-L6-v2`
        """)
    mo.accordion({"View more": _more_models})
    return


@app.cell(hide_code=True)
def _(mo):
    # Optional HF access token, collected via a password field and tucked
    # inside an accordion so it stays out of the way.
    _field = mo.ui.text(
        label="HuggingFace token (optional)",
        kind="password",
    )
    token = _field.form(bordered=False)

    mo.accordion({"Enter your HuggingFace token (optional)": token})
    return (token,)


@app.cell(hide_code=True)
def _(mo, model_repo, molabs, token):
    # Load the selected model from HuggingFace via marimo-labs.
    #
    # mo.stop's signature is (predicate, output): halt this cell and show
    # `output` when `predicate` is truthy. The previous code passed the
    # message string as the predicate, so the cell stopped but the message
    # was never displayed.
    mo.stop(not model_repo.value, mo.md("Please enter a model name"))

    # Normalize to the "models/<repo>" path marimo-labs expects. Use
    # removeprefix (not str.replace) so a repo id that merely *contains*
    # "models/" somewhere in the middle is left intact.
    model_path = "models/" + model_repo.value.removeprefix("models/")
    # Empty token string -> None so unauthenticated requests stay anonymous.
    model = molabs.huggingface.load(
        model_path, hf_token=token.value or None
    )
    return model, model_path


@app.cell(hide_code=True)
def _(mo, model):
    # Nothing to render until a model has been loaded successfully.
    mo.stop(not model)

    _examples = mo.md(
        f"""
        ### Example inputs
        Try these sample inputs with the model:

        {mo.as_html(model.examples)}
        """
    )
    _examples
    return


@app.cell(hide_code=True)
def _(mo, model):
    # The model's auto-generated input form, shown with brief instructions.
    inputs = model.inputs
    _panel = mo.vstack(
        [
            mo.md("### Model Inputs"),
            mo.md("_Submit inputs below to run inference_ 👇"),
            inputs,
        ],
    )
    _panel
    return (inputs,)


@app.cell(hide_code=True)
def _(inputs, mo, model):
    # Run inference on the submitted form values and display the results.
    #
    # mo.stop's signature is (predicate, output): halt this cell and show
    # `output` when `predicate` is truthy. The previous code passed the
    # message string as the predicate, so the cell stopped but the message
    # was never displayed.
    mo.stop(inputs.value is None, mo.md("Please provide input values"))

    with mo.status.spinner("Running inference..."):
        output = model.inference_function(inputs.value)
        mo.output.replace(mo.vstack([mo.md("### Results"), output]))
    return (output,)


@app.cell
def _():
    # Late imports shared with the loading cell: marimo_labs supplies the
    # HuggingFace model loader used above.
    import sys

    import marimo_labs as molabs

    return molabs, sys


# Allow running the notebook as a plain script (e.g. `python thisfile.py`).
if __name__ == "__main__":
    app.run()