Nymbo committed
Commit 764029a · verified · 1 Parent(s): a5d225b

Update app.py

Files changed (1)
  1. app.py +42 -38
app.py CHANGED
@@ -1,18 +1,13 @@
 import gradio as gr
-import requests
-import io
 import random
 import os
-import time
 from PIL import Image
-import json
 from typing import Optional
+from huggingface_hub import InferenceClient
 
 # Project by Nymbo
 
-API_URL = "https://api-inference.huggingface.co/models/black-forest-labs/FLUX.1-Krea-dev"
 API_TOKEN = os.getenv("HF_READ_TOKEN")
-headers = {"Authorization": f"Bearer {API_TOKEN}"}
 timeout = 100
 
 # Function to query the API and return the generated image
@@ -53,44 +48,53 @@ def flux_krea_generate(
 
     key = random.randint(0, 999)
 
-    API_TOKEN = random.choice([os.getenv("HF_READ_TOKEN")])
-    headers = {"Authorization": f"Bearer {API_TOKEN}"}
-
     # Add some extra flair to the prompt
     enhanced_prompt = f"{prompt} | ultra detail, ultra elaboration, ultra quality, perfect."
     print(f'\033[1mGeneration {key}:\033[0m {enhanced_prompt}')
 
-    # Prepare the payload for the API call, including width and height
-    payload = {
-        "inputs": enhanced_prompt,
-        "is_negative": False,
-        "steps": steps,
-        "cfg_scale": cfg_scale,
-        "seed": seed if seed != -1 else random.randint(1, 1000000000),
-        "strength": strength,
-        "parameters": {
-            "width": width,  # Pass the width to the API
-            "height": height  # Pass the height to the API
-        }
-    }
-
-    # Send the request to the API and handle the response
-    response = requests.post(API_URL, headers=headers, json=payload, timeout=timeout)
-    if response.status_code != 200:
-        print(f"Error: Failed to get image. Response status: {response.status_code}")
-        print(f"Response content: {response.text}")
-        if response.status_code == 503:
-            raise gr.Error(f"{response.status_code} : The model is being loaded")
-        raise gr.Error(f"{response.status_code}")
-
     try:
-        # Convert the response content into an image
-        image_bytes = response.content
-        image = Image.open(io.BytesIO(image_bytes))
-        print(f'\033[1mGeneration {key} completed!\033[0m ({enhanced_prompt})')
-        return image
+        # Initialize the Hugging Face Inference Client
+        # Try different providers in order of preference
+        providers = ["auto", "replicate", "fal-ai"]
+
+        for provider in providers:
+            try:
+                client = InferenceClient(
+                    api_key=API_TOKEN,
+                    provider=provider
+                )
+
+                # Generate the image using the proper client
+                image = client.text_to_image(
+                    prompt=enhanced_prompt,
+                    negative_prompt=negative_prompt,
+                    model="black-forest-labs/FLUX.1-Krea-dev",
+                    width=width,
+                    height=height,
+                    num_inference_steps=steps,
+                    guidance_scale=cfg_scale,
+                    seed=seed if seed != -1 else random.randint(1, 1000000000)
+                )
+
+                print(f'\033[1mGeneration {key} completed with {provider}!\033[0m ({enhanced_prompt})')
+                return image
+
+            except Exception as provider_error:
+                print(f"Provider {provider} failed: {provider_error}")
+                if provider == providers[-1]:  # Last provider
+                    raise provider_error
+                continue
+
     except Exception as e:
-        print(f"Error when trying to open the image: {e}")
+        print(f"Error during image generation: {e}")
+        if "404" in str(e):
+            raise gr.Error("Model not found. Please ensure the FLUX.1-Krea-dev model is accessible with your API token.")
+        elif "503" in str(e):
+            raise gr.Error("The model is currently being loaded. Please try again in a moment.")
+        elif "401" in str(e) or "403" in str(e):
+            raise gr.Error("Authentication failed. Please check your HF_READ_TOKEN environment variable.")
+        else:
+            raise gr.Error(f"Image generation failed: {str(e)}")
         return None
 
 # CSS to style the app
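For reference, a minimal standalone sketch of the call path this commit switches to, kept outside the Gradio app. It assumes a recent huggingface_hub release (one that accepts the api_key and provider arguments), an HF_READ_TOKEN environment variable, and that at least one of the listed providers currently serves black-forest-labs/FLUX.1-Krea-dev; the generate() helper, its defaults, and the example prompt are illustrative and not part of the commit.

import os
import random

from huggingface_hub import InferenceClient

# Illustrative helper (not part of the commit): try providers in the same
# order as the fallback loop added in app.py and return the first image.
def generate(prompt, negative_prompt=None, width=1024, height=1024,
             steps=28, cfg_scale=3.5, seed=-1):
    providers = ["auto", "replicate", "fal-ai"]  # order taken from the commit
    last_error = None
    for provider in providers:
        try:
            client = InferenceClient(api_key=os.getenv("HF_READ_TOKEN"), provider=provider)
            return client.text_to_image(
                prompt=prompt,
                negative_prompt=negative_prompt,
                model="black-forest-labs/FLUX.1-Krea-dev",
                width=width,
                height=height,
                num_inference_steps=steps,
                guidance_scale=cfg_scale,
                seed=seed if seed != -1 else random.randint(1, 1000000000),
            )
        except Exception as err:
            print(f"Provider {provider} failed: {err}")
            last_error = err
    raise last_error  # every provider failed

if __name__ == "__main__":
    image = generate("a sunlit forest clearing, 35mm film grain")  # PIL.Image.Image
    image.save("flux_krea_sample.png")

Compared with the removed requests call against api-inference.huggingface.co, the client handles request serialization and provider routing itself and returns a PIL image directly, so the io.BytesIO decoding step from the old code is no longer needed.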