update
README.md CHANGED

@@ -1,5 +1,5 @@
 ---
-title: FLUX.1 Dev with fal.ai
+title: FLUX.1 Dev with fal.ai
 emoji: ⚡
 colorFrom: yellow
 colorTo: pink
@@ -8,6 +8,9 @@ sdk_version: 5.31.0
 app_file: app.py
 pinned: false
 short_description: FLUX.1-dev with fal.ai ⚡
+hf_oauth: true
+hf_oauth_scopes:
+  - inference-api
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
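These front-matter additions turn on "Sign in with Hugging Face" OAuth for the Space and request the `inference-api` scope, so the token issued to a signed-in visitor can be used for Inference Providers calls. A minimal sketch of how such a token reaches a Gradio event handler is below; the handler name and wiring are illustrative, not part of this commit.

import gradio as gr

# Sketch (not from this repo): with hf_oauth enabled in the Space config,
# Gradio injects the signed-in user's token into any event handler that
# type-hints a gr.OAuthToken parameter; it is None when nobody is signed in.
def check_login(oauth_token: gr.OAuthToken | None = None) -> str:
    if oauth_token is None:
        return "Not signed in"
    return "Signed in - the OAuth token can authenticate Inference Providers calls"

with gr.Blocks() as demo:
    gr.LoginButton("Sign in")
    status = gr.Textbox(label="Status")
    gr.Button("Check").click(check_login, inputs=None, outputs=status)

if __name__ == "__main__":
    demo.launch()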
app.py CHANGED

@@ -8,8 +8,6 @@ from huggingface_hub import InferenceClient, login
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 2048
 
-hf_token = os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_API_KEY")
-login(token=hf_token)
 
 def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
     client = InferenceClient(provider="fal-ai")
@@ -37,7 +35,10 @@ css="""
 """
 
 with gr.Blocks(css=css) as demo:
-
+    with gr.Sidebar():
+        gr.Markdown("# Inference Provider")
+        gr.Markdown("This Space showcases the black-forest-labs/FLUX.1-dev model, served by the nebius API. Sign in with your Hugging Face account to use this API.")
+        button = gr.LoginButton("Sign in")
     with gr.Column(elem_id="col-container"):
         gr.Markdown(f"""# FLUX.1 [schnell] with fal-ai through HF Inference Providers ⚡
         learn more about HF Inference Providers [here](https://huggingface.co/docs/inference-providers/index)""")
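With the hard-coded `login()` call removed and a `gr.LoginButton` added in the sidebar, the per-user OAuth token can be forwarded to the fal-ai provider instead of a Space-wide secret. The diff only shows the first line of `infer`, so the following is a minimal sketch of how that wiring could look; the `oauth_token` parameter and the `text_to_image` body are assumptions, not part of this commit.

import numpy as np
import gradio as gr
from huggingface_hub import InferenceClient

MAX_SEED = np.iinfo(np.int32).max

def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024,
          num_inference_steps=4, oauth_token: gr.OAuthToken | None = None,
          progress=gr.Progress(track_tqdm=True)):
    if randomize_seed:
        seed = int(np.random.randint(0, MAX_SEED))
    # Use the signed-in user's OAuth token when available; otherwise the client
    # falls back to whatever credentials the environment provides (e.g. HF_TOKEN).
    client = InferenceClient(
        provider="fal-ai",
        api_key=oauth_token.token if oauth_token else None,
    )
    image = client.text_to_image(
        prompt,
        model="black-forest-labs/FLUX.1-dev",
        width=width,
        height=height,
        num_inference_steps=num_inference_steps,
        seed=seed,
    )
    return image, seed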