""" | |
Adapted from https://huggingface.co/spaces/stabilityai/stable-diffusion | |
""" | |
from tensorflow import keras | |
import time | |
import gradio as gr | |
import keras_cv | |
from constants import css, examples, img_height, img_width, num_images_to_gen | |
from share_btn import community_icon_html, loading_icon_html, share_js | |
from huggingface_hub import from_pretrained_keras | |
import requests | |
# MODEL_CKPT = "chansung/textual-inversion-pipeline@v1673026791"
# MODEL = from_pretrained_keras(MODEL_CKPT)

# model = keras_cv.models.StableDiffusion(
#     img_width=img_width, img_height=img_height, jit_compile=True
# )
# model._text_encoder = MODEL
# model._text_encoder.compile(jit_compile=True)

# # Warm-up the model.
# _ = model.text_to_image("Teddy bear", batch_size=num_images_to_gen)
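
# The block above (loading the fine-tuned text encoder) is commented out, so
# `model` is never defined and `generate_image_fn` below would raise a NameError.
# Minimal sketch, assuming the stock KerasCV weights are acceptable for the
# "Try-out" tab: construct the base pipeline so the app can run, and swap in the
# fine-tuned text encoder by uncommenting the block above when desired.
model = keras_cv.models.StableDiffusion(
    img_width=img_width, img_height=img_height, jit_compile=True
)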


def generate_image_fn(prompt: str, unconditional_guidance_scale: int) -> list:
    start_time = time.time()
    # `images` is an `np.ndarray`. So we convert it to a list of ndarrays.
    # Each ndarray represents a generated image.
    # Reference: https://gradio.app/docs/#gallery
    images = model.text_to_image(
        prompt,
        batch_size=num_images_to_gen,
        unconditional_guidance_scale=unconditional_guidance_scale,
    )
    end_time = time.time()
    print(f"Time taken: {end_time - start_time} seconds.")
    return [image for image in images]


demoInterface = gr.Interface(
    generate_image_fn,
    inputs=[
        gr.Textbox(
            label="Enter your prompt",
            max_lines=1,
            # placeholder="cute Sundar Pichai creature",
        ),
        gr.Slider(
            label="Unconditional guidance scale",
            value=40, minimum=8, maximum=50, step=1,
        ),
    ],
    outputs=gr.Gallery().style(grid=[2], height="auto"),
    # examples=[["cute Sundar Pichai creature", 8], ["Hello kitty", 8]],
    allow_flagging="never",
)
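
# NOTE: `welcome` is not referenced anywhere else in this app; it appears to be
# leftover from the Gradio quickstart.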
def welcome(name):
    return f"Welcome to Gradio, {name}!"


def available_providers():
    providers = []

    headers = {
        "Content-Type": "application/json",
    }
    endpoint_url = "https://api.endpoints.huggingface.cloud/provider"
    response = requests.get(endpoint_url, headers=headers)

    # Keep only providers whose status is reported as available.
    for provider in response.json()['items']:
        if provider['status'] == 'available':
            providers.append(provider['vendor'])

    return providers
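
# The provider endpoint's response shape, as consumed by `available_providers`
# above (inferred from usage, not an official schema):
#   {"items": [{"vendor": ..., "status": ..., ...}, ...]}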


with gr.Blocks() as demo:
    gr.Markdown("# Your own Stable Diffusion on Google Cloud Platform")

    with gr.Row():
        gcp_project_id = gr.Textbox(
            label="GCP project ID",
        )
        gcp_region = gr.Dropdown(
            ["us-central1", "asia-east1", "asia-northeast1"],
            value="us-central1",
            interactive=True,
            label="GCP Region",
        )

    gr.Markdown("Configurations for scalability")

    with gr.Row():
        min_nodes = gr.Slider(
            label="minimum number of nodes",
            minimum=1,
            maximum=10,
        )
        max_nodes = gr.Slider(
            label="maximum number of nodes",
            minimum=1,
            maximum=10,
        )

    btn = gr.Button(value="Ready to Deploy!")
    # btn.click(mirror, inputs=[im], outputs=[im_2])
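    # The deployment handler is not wired up yet: the commented `btn.click(...)`
    # above references `mirror`, `im`, and `im_2`, which are not defined in this
    # app (leftover example wiring). A real handler would take the project ID,
    # region, and node counts as inputs and trigger the GCP deployment.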


def get_available_regions(provider):
    available_regions = []

    headers = {
        "Content-Type": "application/json",
    }
    endpoint_url = f"https://api.endpoints.huggingface.cloud/provider/{provider}/region"
    response = requests.get(endpoint_url, headers=headers)

    # Keep only regions whose status is reported as available.
    for region in response.json()['items']:
        if region['status'] == 'available':
            available_regions.append(f"{region['region']}/{region['label']}")

    print(available_regions)
    # Returning a Dropdown update (rather than a bare list) is what repopulates
    # the region selector's choices in Gradio 3.x.
    return gr.Dropdown.update(choices=available_regions)


with gr.Blocks() as demo2:
    gr.Markdown("# Your own Stable Diffusion on Hugging Face 🤗 Endpoint")

    providers = available_providers()

    with gr.Row():
        provider_selector = gr.Dropdown(
            label="select cloud provider",
            interactive=True,
            choices=providers,
        )
        region_selector = gr.Dropdown(
            [],
            value="",
            interactive=True,
            label="select a region",
        )

    # Repopulate the region dropdown whenever the provider selection changes.
    provider_selector.change(
        get_available_regions,
        inputs=provider_selector,
        outputs=region_selector,
    )


gr.TabbedInterface(
    [demoInterface, demo, demo2], ["Try-out", "🚀 Deploy on GCP", "Deploy on 🤗 Endpoint"]
).launch(enable_queue=True)