lionelgarnier committed
Commit 52efc32 · Parent: c943e2c

Add optional model preloading configuration

Files changed (1): app.py (+7 -7)
app.py CHANGED
@@ -13,6 +13,7 @@ login(token=hf_token)
 
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 2048
+PRELOAD_MODELS = False  # Easy switch for preloading
 
 _text_gen_pipeline = None
 _image_gen_pipeline = None
@@ -21,6 +22,7 @@ _image_gen_pipeline = None
 def get_image_gen_pipeline():
     global _image_gen_pipeline
     if _image_gen_pipeline is None:
+        print("Loading image generation model on first use...")  # Optional debug message
         try:
             device = "cuda" if torch.cuda.is_available() else "cpu"
             dtype = torch.bfloat16
@@ -180,16 +182,14 @@ def preload_models():
     return False
 
 def create_interface():
-    # Préchargement des modèles
-    models_loaded = preload_models()
-
-    if not models_loaded:
-        model_status = "⚠️ Erreur lors du chargement des modèles"
+    # Modify the preloading logic
+    if PRELOAD_MODELS:
+        models_loaded = preload_models()
+        model_status = "✅ Modèles chargés avec succès!" if models_loaded else "⚠️ Erreur lors du chargement des modèles"
     else:
-        model_status = "✅ Modèles chargés avec succès!"
+        model_status = "ℹ️ Modèles seront chargés à la demande"
 
     with gr.Blocks(css=css) as demo:
-
         info = gr.Info(model_status)
 
         with gr.Column(elem_id="col-container"):
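
The change boils down to a lazy-initialization pattern gated by a module-level switch: with PRELOAD_MODELS off, the pipeline is loaded and cached on the first call to get_image_gen_pipeline(); with it on, preload_models() warms the cache before the UI starts. The sketch below shows that structure in isolation; _load_pipeline() is a hypothetical stand-in for the real diffusers loading code in app.py, not part of the commit.

PRELOAD_MODELS = False  # Easy switch for preloading

_image_gen_pipeline = None  # cached pipeline, populated at most once


def _load_pipeline():
    # Stand-in for the real work (e.g. DiffusionPipeline.from_pretrained).
    print("Loading image generation model...")
    return object()


def get_image_gen_pipeline():
    # Lazy getter: load on first call, then reuse the cached instance.
    global _image_gen_pipeline
    if _image_gen_pipeline is None:
        print("Loading image generation model on first use...")
        try:
            _image_gen_pipeline = _load_pipeline()
        except Exception as e:
            print(f"Error loading image generation model: {e}")
            return None
    return _image_gen_pipeline


def preload_models():
    # Eager path: warm the cache up front and report success or failure.
    return get_image_gen_pipeline() is not None


if PRELOAD_MODELS:
    status = "models preloaded" if preload_models() else "preload failed"
else:
    status = "models will load on demand"
print(status)

Keeping the switch off trades a slower first request for a faster app start, which is usually the right default on a shared Space where the model may never be invoked in a given session.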