ginipick commited on
Commit
3da2256
·
verified ·
1 Parent(s): 4e35634

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +127 -19
app.py CHANGED
@@ -8,7 +8,7 @@ import os
8
  from PIL import Image
9
  from diffusers import FluxKontextPipeline
10
  from diffusers.utils import load_image
11
- from huggingface_hub import hf_hub_download, HfFileSystem, ModelCard
12
  from safetensors.torch import load_file
13
  import requests
14
  import re
@@ -37,7 +37,50 @@ try:
37
  print(f"Successfully loaded {len(flux_loras_raw)} LoRAs from JSON")
38
  except Exception as e:
39
  print(f"Error loading flux_loras.json: {e}")
40
- flux_loras_raw = []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
41
  # Global variables for LoRA management
42
  current_lora = None
43
  lora_cache = {}
@@ -45,10 +88,45 @@ lora_cache = {}
45
  def load_lora_weights(repo_id, weights_filename):
46
  """Load LoRA weights from HuggingFace"""
47
  try:
48
- if repo_id not in lora_cache:
 
49
  lora_path = hf_hub_download(repo_id=repo_id, filename=weights_filename)
50
- lora_cache[repo_id] = lora_path
51
- return lora_cache[repo_id]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52
  except Exception as e:
53
  print(f"Error loading LoRA from {repo_id}: {e}")
54
  return None
@@ -74,21 +152,36 @@ def get_huggingface_lora(link):
74
  model_card = ModelCard.load(link)
75
  trigger_word = model_card.data.get("instance_prompt", "")
76
 
77
- fs = HfFileSystem()
78
- list_of_files = fs.ls(link, detail=False)
79
- safetensors_file = None
80
 
81
- for file in list_of_files:
82
- if file.endswith(".safetensors") and "lora" in file.lower():
83
- safetensors_file = file.split("/")[-1]
84
- break
85
-
86
- if not safetensors_file:
87
- safetensors_file = "pytorch_lora_weights.safetensors"
 
 
 
 
 
 
88
 
 
89
  return split_link[1], safetensors_file, trigger_word
 
90
  except Exception as e:
91
- raise Exception(f"Error loading LoRA: {e}")
 
 
 
 
 
 
 
92
  else:
93
  raise Exception("Invalid HuggingFace repository format")
94
 
@@ -130,12 +223,27 @@ def classify_gallery(flux_loras):
130
  try:
131
  sorted_gallery = sorted(flux_loras, key=lambda x: x.get("likes", 0), reverse=True)
132
  gallery_items = []
 
133
  for item in sorted_gallery:
134
  if "image" in item and "title" in item:
135
- gallery_items.append((item["image"], item["title"]))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
136
  return gallery_items, sorted_gallery
137
  except Exception as e:
138
- print(f"Error loading gallery: {e}")
139
  return [], []
140
 
141
  def infer_with_lora_wrapper(input_image, prompt, selected_index, custom_lora, seed=42, randomize_seed=False, guidance_scale=2.5, lora_scale=1.75, flux_loras=None, progress=gr.Progress(track_tqdm=True)):
@@ -477,7 +585,7 @@ with gr.Blocks(css=css) as demo:
477
  custom_model = gr.Textbox(
478
  label="Or enter a custom HuggingFace FLUX LoRA",
479
  placeholder="e.g., username/lora-name",
480
- visible=False
481
  )
482
  custom_model_card = gr.HTML(visible=False)
483
  custom_model_button = gr.Button("Remove custom LoRA", visible=False)
 
8
  from PIL import Image
9
  from diffusers import FluxKontextPipeline
10
  from diffusers.utils import load_image
11
+ from huggingface_hub import hf_hub_download, HfFileSystem, ModelCard, list_repo_files
12
  from safetensors.torch import load_file
13
  import requests
14
  import re
 
37
  print(f"Successfully loaded {len(flux_loras_raw)} LoRAs from JSON")
38
  except Exception as e:
39
  print(f"Error loading flux_loras.json: {e}")
40
+ print("Using sample LoRA data instead...")
41
+ # Sample LoRA data with working repositories
42
+ flux_loras_raw = [
43
+ {
44
+ "image": "https://huggingface.co/alvdansen/flux-koda/resolve/main/images/photo-1586902197503-e71026292412.jpeg",
45
+ "title": "Flux Koda",
46
+ "repo": "alvdansen/flux-koda",
47
+ "trigger_word": "flmft style",
48
+ "weights": "flux_lora.safetensors",
49
+ "likes": 100
50
+ },
51
+ {
52
+ "image": "https://huggingface.co/multimodalart/flux-tarot-v1/resolve/main/images/e5f2761e5a474e52ab11b1c9246c9a30.png",
53
+ "title": "Tarot Cards",
54
+ "repo": "multimodalart/flux-tarot-v1",
55
+ "trigger_word": "in the style of TOK a trtcrd tarot style",
56
+ "weights": "flux_tarot_v1_lora.safetensors",
57
+ "likes": 90
58
+ },
59
+ {
60
+ "image": "https://huggingface.co/Norod78/Flux_1_Dev_LoRA_Paper-Cutout-Style/resolve/main/d13591878de740648a8f29b836e16ff2.jpeg",
61
+ "title": "Paper Cutout",
62
+ "repo": "Norod78/Flux_1_Dev_LoRA_Paper-Cutout-Style",
63
+ "trigger_word": "Paper Cutout Style",
64
+ "weights": "Flux_1_Dev_LoRA_Paper-Cutout-Style.safetensors",
65
+ "likes": 80
66
+ },
67
+ {
68
+ "image": "https://huggingface.co/alvdansen/frosting_lane_flux/resolve/main/images/content%20-%202024-08-11T010011.238.jpeg",
69
+ "title": "Frosting Lane",
70
+ "repo": "alvdansen/frosting_lane_flux",
71
+ "trigger_word": "frstingln illustration",
72
+ "weights": "flux_lora_frosting_lane_flux_000002500.safetensors",
73
+ "likes": 70
74
+ },
75
+ {
76
+ "image": "https://huggingface.co/davisbro/flux-watercolor/resolve/main/images/wc2.png",
77
+ "title": "Watercolor",
78
+ "repo": "davisbro/flux-watercolor",
79
+ "trigger_word": "watercolor style",
80
+ "weights": "flux_watercolor.safetensors",
81
+ "likes": 60
82
+ }
83
+ ]
84
  # Global variables for LoRA management
85
  current_lora = None
86
  lora_cache = {}
 
88
def load_lora_weights(repo_id, weights_filename):
    """Download a LoRA weights file from a HuggingFace repo, with fallback discovery.

    Tries the explicitly requested filename first; when that download fails,
    lists the repository contents and attempts any plausible LoRA weight file
    (files containing 'lora' ending in .safetensors/.bin are preferred, then
    any .safetensors file).

    Args:
        repo_id: HuggingFace repository id, e.g. "user/repo".
        weights_filename: Preferred weights filename inside the repo.

    Returns:
        Local filesystem path (str) to the downloaded weights, or None when
        no usable file could be downloaded.
    """
    # Serve from the module-level cache when this repo was already resolved.
    # (Bug fix: the previous version wrote to lora_cache but never read it,
    # so every call re-downloaded the weights.)
    if repo_id in lora_cache:
        return lora_cache[repo_id]

    # First attempt: the exact filename the caller asked for.
    try:
        lora_path = hf_hub_download(repo_id=repo_id, filename=weights_filename)
        lora_cache[repo_id] = lora_path
        return lora_path
    except Exception as e:
        # Report WHY the preferred file failed before falling back.
        print(f"Failed to load {weights_filename} ({e}), trying to find alternative LoRA files...")

    # Fallback: scan the repo for any plausible LoRA weight file.
    # Local import kept so this function works even if the top-of-file
    # import list does not include list_repo_files.
    from huggingface_hub import list_repo_files
    try:
        files = list_repo_files(repo_id)
    except Exception as list_error:
        print(f"Error listing files in repo {repo_id}: {list_error}")
        return None

    # Prefer files that look like LoRA weights; fall back to any .safetensors.
    candidates = [f for f in files if f.endswith(('.safetensors', '.bin')) and 'lora' in f.lower()]
    if not candidates:
        candidates = [f for f in files if f.endswith('.safetensors')]

    for candidate in candidates:
        try:
            print(f"Trying alternative file: {candidate}")
            lora_path = hf_hub_download(repo_id=repo_id, filename=candidate)
            lora_cache[repo_id] = lora_path
            print(f"Successfully loaded alternative LoRA file: {candidate}")
            return lora_path
        except Exception:
            # Bug fix: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit. A single bad candidate should
            # not abort the scan, so continue to the next one.
            continue

    print(f"No suitable LoRA files found in {repo_id}")
    return None
 
152
  model_card = ModelCard.load(link)
153
  trigger_word = model_card.data.get("instance_prompt", "")
154
 
155
+ # Try to find the correct safetensors file
156
+ files = list_repo_files(link)
157
+ safetensors_files = [f for f in files if f.endswith('.safetensors')]
158
 
159
+ # Prioritize files with 'lora' in the name
160
+ lora_files = [f for f in safetensors_files if 'lora' in f.lower()]
161
+ if lora_files:
162
+ safetensors_file = lora_files[0]
163
+ elif safetensors_files:
164
+ safetensors_file = safetensors_files[0]
165
+ else:
166
+ # Try .bin files as fallback
167
+ bin_files = [f for f in files if f.endswith('.bin') and 'lora' in f.lower()]
168
+ if bin_files:
169
+ safetensors_file = bin_files[0]
170
+ else:
171
+ safetensors_file = "pytorch_lora_weights.safetensors" # Default fallback
172
 
173
+ print(f"Found LoRA file: {safetensors_file} in {link}")
174
  return split_link[1], safetensors_file, trigger_word
175
+
176
  except Exception as e:
177
+ print(f"Error in get_huggingface_lora: {e}")
178
+ # Try basic detection
179
+ try:
180
+ files = list_repo_files(link)
181
+ safetensors_file = next((f for f in files if f.endswith('.safetensors')), "pytorch_lora_weights.safetensors")
182
+ return split_link[1], safetensors_file, ""
183
+ except:
184
+ raise Exception(f"Error loading LoRA: {e}")
185
  else:
186
  raise Exception("Invalid HuggingFace repository format")
187
 
 
223
  try:
224
  sorted_gallery = sorted(flux_loras, key=lambda x: x.get("likes", 0), reverse=True)
225
  gallery_items = []
226
+
227
  for item in sorted_gallery:
228
  if "image" in item and "title" in item:
229
+ image_url = item["image"]
230
+ title = item["title"]
231
+
232
+ # If image is a local file path that might not exist, use a placeholder URL
233
+ if isinstance(image_url, str) and (image_url.startswith("/home/") or image_url.startswith("samples/") or not image_url.startswith("http")):
234
+ print(f"Replacing local/invalid image path: {image_url}")
235
+ # Use a more reliable placeholder
236
+ image_url = f"https://via.placeholder.com/512x512/E0E7FF/818CF8?text={title.replace(' ', '+')}"
237
+
238
+ gallery_items.append((image_url, title))
239
+
240
+ if not gallery_items:
241
+ print("No gallery items found after filtering")
242
+ return [], sorted_gallery
243
+
244
  return gallery_items, sorted_gallery
245
  except Exception as e:
246
+ print(f"Error in classify_gallery: {e}")
247
  return [], []
248
 
249
  def infer_with_lora_wrapper(input_image, prompt, selected_index, custom_lora, seed=42, randomize_seed=False, guidance_scale=2.5, lora_scale=1.75, flux_loras=None, progress=gr.Progress(track_tqdm=True)):
 
585
  custom_model = gr.Textbox(
586
  label="Or enter a custom HuggingFace FLUX LoRA",
587
  placeholder="e.g., username/lora-name",
588
+ visible=True
589
  )
590
  custom_model_card = gr.HTML(visible=False)
591
  custom_model_button = gr.Button("Remove custom LoRA", visible=False)