Gemini899 committed on
Commit
2ab86fb
·
verified ·
1 Parent(s): c035c39

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -19
app.py CHANGED
@@ -3,7 +3,7 @@
3
  import os
4
  import sys
5
 
6
- # --- Install Dependencies (ensure diffusers, gradio_imageslider, huggingface-hub are present) ---
7
  print("Installing required packages: diffusers, gradio_imageslider, huggingface-hub…")
8
  os.system("pip install --no-input diffusers gradio_imageslider huggingface-hub")
9
 
@@ -22,7 +22,7 @@ from diffusers import FluxControlNetModel
22
  from diffusers.pipelines import FluxControlNetPipeline
23
  from gradio_imageslider import ImageSlider
24
  from PIL import Image, ImageOps
25
- from huggingface_hub import snapshot_download, HFValidationError
26
 
27
  # --- Logging & Device Setup ---
28
  logging.basicConfig(level=logging.INFO)
@@ -68,14 +68,16 @@ try:
68
  repo_id=flux_model_id,
69
  repo_type="model",
70
  local_dir=local_model_dir,
71
- use_auth_token=huggingface_token, # ← pass token here
72
  ignore_patterns=["*.md", "*.gitattributes"],
73
  )
74
  logging.info(f"Downloaded base model to: {model_path}")
75
 
76
  logging.info(f"Loading ControlNet: {controlnet_model_id}")
77
  controlnet = FluxControlNetModel.from_pretrained(
78
- controlnet_model_id, torch_dtype=torch_dtype, use_auth_token=huggingface_token
 
 
79
  ).to(device)
80
  logging.info("ControlNet loaded.")
81
 
@@ -88,14 +90,13 @@ try:
88
  ).to(device)
89
  logging.info("Pipeline ready.")
90
 
91
- except HFValidationError as hf_err:
92
- logging.error(f"Gated‑model access error: {hf_err}", exc_info=True)
93
- print(f"FATAL: Cannot access gated repo {flux_model_id}. "
94
- "Have you accepted its license and set a valid HUGGINGFACE_TOKEN?")
95
- sys.exit(1)
96
  except Exception as e:
97
  logging.error(f"Error loading models: {e}", exc_info=True)
98
- print(f"FATAL: model load failed: {e}")
 
 
 
 
99
  sys.exit(1)
100
 
101
  # --- Constants & Helpers ---
@@ -110,7 +111,8 @@ def process_input(input_image):
110
  if img.mode != "RGB":
111
  img = img.convert("RGB")
112
  w, h = img.size
113
- # enforce intermediate budget
 
114
  target_px = (w*INTERNAL_PROCESSING_FACTOR)*(h*INTERNAL_PROCESSING_FACTOR)
115
  if target_px > MAX_PIXEL_BUDGET:
116
  max_in = MAX_PIXEL_BUDGET / (INTERNAL_PROCESSING_FACTOR**2)
@@ -120,11 +122,13 @@ def process_input(input_image):
120
  was_resized = True
121
  else:
122
  was_resized = False
123
- # round to multiple of 8
 
124
  w2, h2 = img.size
125
  w2 -= w2 % 8; h2 -= h2 % 8
126
  if img.size != (w2,h2):
127
  img = img.resize((w2,h2), Image.Resampling.LANCZOS)
 
128
  return img, w, h, was_resized
129
 
130
  @spaces.GPU(duration=75)
@@ -149,7 +153,7 @@ def infer(
149
  processed, w0, h0, resized_flag = process_input(input_image)
150
  w_proc, h_proc = processed.size
151
 
152
- # make control image
153
  cw, ch = w_proc*INTERNAL_PROCESSING_FACTOR, h_proc*INTERNAL_PROCESSING_FACTOR
154
  control_img = processed.resize((cw, ch), Image.Resampling.LANCZOS)
155
 
@@ -165,7 +169,7 @@ def infer(
165
  generator=gen
166
  ).images[0]
167
 
168
- # final resize
169
  if resized_flag:
170
  fw, fh = w_proc*final_upscale_factor, h_proc*final_upscale_factor
171
  else:
@@ -189,17 +193,17 @@ with gr.Blocks(css=css, theme=gr.themes.Soft(), title="Flux Upscaler Demo") as d
189
  with gr.Column(scale=2):
190
  inp = gr.Image(label="Input Image", type="pil", sources=["upload","clipboard"], height=350)
191
  with gr.Column(scale=1):
192
- upf = gr.Slider("Final Upscale Factor", 1, INTERNAL_PROCESSING_FACTOR, step=1, value=2)
193
  steps = gr.Slider("Inference Steps", 4, 50, step=1, value=15)
194
- cscale = gr.Slider("ControlNet Scale", 0.0, 1.5, step=0.05, value=0.6)
195
  with gr.Row():
196
  sld = gr.Slider("Seed", 0, MAX_SEED, step=1, value=42)
197
  rnd = gr.Checkbox("Randomize", value=True, scale=0, min_width=80)
198
  btn = gr.Button("⚡ Upscale Image", variant="primary")
199
 
200
  slider = ImageSlider("Input / Output", type="pil", interactive=False, show_label=True, position=0.5)
201
- out_seed = gr.Textbox("Seed Used", interactive=False, visible=True)
202
- out_b64 = gr.Textbox("API Base64 Output", interactive=False, visible=False)
203
 
204
  btn.click(
205
  fn=infer,
@@ -208,5 +212,5 @@ with gr.Blocks(css=css, theme=gr.themes.Soft(), title="Flux Upscaler Demo") as d
208
  api_name="upscale"
209
  )
210
 
211
- # expose JSON API at /run/upscale
212
  demo.queue(max_size=10).launch(share=False, show_api=True)
 
3
  import os
4
  import sys
5
 
6
+ # --- Install Dependencies ---
7
  print("Installing required packages: diffusers, gradio_imageslider, huggingface-hub…")
8
  os.system("pip install --no-input diffusers gradio_imageslider huggingface-hub")
9
 
 
22
  from diffusers.pipelines import FluxControlNetPipeline
23
  from gradio_imageslider import ImageSlider
24
  from PIL import Image, ImageOps
25
+ from huggingface_hub import snapshot_download
26
 
27
  # --- Logging & Device Setup ---
28
  logging.basicConfig(level=logging.INFO)
 
68
  repo_id=flux_model_id,
69
  repo_type="model",
70
  local_dir=local_model_dir,
71
+ use_auth_token=huggingface_token,
72
  ignore_patterns=["*.md", "*.gitattributes"],
73
  )
74
  logging.info(f"Downloaded base model to: {model_path}")
75
 
76
  logging.info(f"Loading ControlNet: {controlnet_model_id}")
77
  controlnet = FluxControlNetModel.from_pretrained(
78
+ controlnet_model_id,
79
+ torch_dtype=torch_dtype,
80
+ use_auth_token=huggingface_token
81
  ).to(device)
82
  logging.info("ControlNet loaded.")
83
 
 
90
  ).to(device)
91
  logging.info("Pipeline ready.")
92
 
 
 
 
 
 
93
  except Exception as e:
94
  logging.error(f"Error loading models: {e}", exc_info=True)
95
+ # Detect gated‐repo 401 by checking for “401” in the message
96
+ if "401" in str(e):
97
+ print(f"FATAL: Cannot access gated model {flux_model_id}. Have you accepted its license and set a valid HUGGINGFACE_TOKEN?")
98
+ else:
99
+ print(f"FATAL: model load failed: {e}")
100
  sys.exit(1)
101
 
102
  # --- Constants & Helpers ---
 
111
  if img.mode != "RGB":
112
  img = img.convert("RGB")
113
  w, h = img.size
114
+
115
+ # enforce intermediate‐scale budget
116
  target_px = (w*INTERNAL_PROCESSING_FACTOR)*(h*INTERNAL_PROCESSING_FACTOR)
117
  if target_px > MAX_PIXEL_BUDGET:
118
  max_in = MAX_PIXEL_BUDGET / (INTERNAL_PROCESSING_FACTOR**2)
 
122
  was_resized = True
123
  else:
124
  was_resized = False
125
+
126
+ # round dimensions to multiples of 8
127
  w2, h2 = img.size
128
  w2 -= w2 % 8; h2 -= h2 % 8
129
  if img.size != (w2,h2):
130
  img = img.resize((w2,h2), Image.Resampling.LANCZOS)
131
+
132
  return img, w, h, was_resized
133
 
134
  @spaces.GPU(duration=75)
 
153
  processed, w0, h0, resized_flag = process_input(input_image)
154
  w_proc, h_proc = processed.size
155
 
156
+ # prepare control image at INTERNAL scale
157
  cw, ch = w_proc*INTERNAL_PROCESSING_FACTOR, h_proc*INTERNAL_PROCESSING_FACTOR
158
  control_img = processed.resize((cw, ch), Image.Resampling.LANCZOS)
159
 
 
169
  generator=gen
170
  ).images[0]
171
 
172
+ # final resize to user factor
173
  if resized_flag:
174
  fw, fh = w_proc*final_upscale_factor, h_proc*final_upscale_factor
175
  else:
 
193
  with gr.Column(scale=2):
194
  inp = gr.Image(label="Input Image", type="pil", sources=["upload","clipboard"], height=350)
195
  with gr.Column(scale=1):
196
+ upf = gr.Slider("Final Upscale Factor", 1, INTERNAL_PROCESSING_FACTOR, step=1, value=2)
197
  steps = gr.Slider("Inference Steps", 4, 50, step=1, value=15)
198
+ cscale= gr.Slider("ControlNet Scale", 0.0, 1.5, step=0.05, value=0.6)
199
  with gr.Row():
200
  sld = gr.Slider("Seed", 0, MAX_SEED, step=1, value=42)
201
  rnd = gr.Checkbox("Randomize", value=True, scale=0, min_width=80)
202
  btn = gr.Button("⚡ Upscale Image", variant="primary")
203
 
204
  slider = ImageSlider("Input / Output", type="pil", interactive=False, show_label=True, position=0.5)
205
+ out_seed= gr.Textbox("Seed Used", interactive=False, visible=True)
206
+ out_b64 = gr.Textbox("API Base64 Output", interactive=False, visible=False)
207
 
208
  btn.click(
209
  fn=infer,
 
212
  api_name="upscale"
213
  )
214
 
215
+ # Expose JSON API at /run/upscale
216
  demo.queue(max_size=10).launch(share=False, show_api=True)