Etrwy committed
Commit cb9676a · verified · parent 04d97b4

Update app.py

Files changed (1): app.py (+41 -7)
app.py CHANGED
@@ -1,3 +1,4 @@
+import spaces
 import gradio as gr
 import numpy as np
 import random
@@ -24,7 +25,7 @@ COMF_PATH = config('COMF_PATH')
 
 import torch
 
-import spaces
+
 
 print(f"Is CUDA available: {torch.cuda.is_available()}")
 print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
@@ -84,7 +85,6 @@ def check_server_ready(port):
 
 
 
-@spaces.GPU(duration=170)
 def generate_image(prompt, image, image2):
     prefix_filename = str(random.randint(0, 999999))
     prompt = prompt.replace('ComfyUI', prefix_filename)
@@ -105,7 +105,7 @@ def generate_image(prompt, image, image2):
         logger.debug(f'Subprocess started with PID: {process.pid}')
 
         # Wait for the server to start
-        for _ in range(30):  # wait at most 20 seconds
+        for _ in range(30):  # wait at most 30 seconds
             if check_server_ready(new_port):
                 break
             time.sleep(1)
@@ -115,7 +115,7 @@ def generate_image(prompt, image, image2):
         start_queue(prompt, new_port)
 
         # Wait for the new image
-        timeout = 240  # maximum wait time in seconds
+        timeout = 360  # maximum wait time in seconds
         start_time = time.time()
         while time.time() - start_time < timeout:
             latest_image = wait_for_image_with_prefix(OUTPUT_DIR, prefix_filename)
@@ -135,7 +135,7 @@ def generate_image(prompt, image, image2):
     except Exception as e:
         logger.error(f"Error in generate_image: {e}")
 
-    finally:
+    finally:
         if process and process.poll() is None:
             process.terminate()
             logger.debug("process.terminate()")
@@ -145,15 +145,49 @@ def generate_image(prompt, image, image2):
             except subprocess.TimeoutExpired:
                 logger.debug("process.kill()")
                 process.kill()
+    return generate_image(prompt, image, image2)
 
+@spaces.GPU(duration=130)
+def generate_image_130(prompt, image, image2):
+    return generate_image(prompt, image, image2)
 
+@spaces.GPU(duration=150)
+def generate_image_150(prompt, image, image2):
+    return generate_image(prompt, image, image2)
+
+@spaces.GPU(duration=170)
+def generate_image_170(prompt, image, image2):
+    return generate_image(prompt, image, image2)
+
+@spaces.GPU(duration=190)
+def generate_image_190(prompt, image, image2):
+    return generate_image(prompt, image, image2)
+
+@spaces.GPU(duration=200)
+def generate_image_200(prompt, image, image2):
+    return generate_image(prompt, image, image2)
+
+def generate_image_wrapper(prompt, image, image2, duration):
+    if duration == "130":
+        return generate_image_130(prompt, image, image2)
+    elif duration == "150":
+        return generate_image_150(prompt, image, image2)
+    elif duration == "170":
+        return generate_image_170(prompt, image, image2)
+    elif duration == "190":
+        return generate_image_190(prompt, image, image2)
+    elif duration == "200":
+        return generate_image_200(prompt, image, image2)
+    else:
+        return generate_image_170(prompt, image, image2)
 
 if __name__ == "__main__":
-    demo = gr.Interface(fn=generate_image,
+    demo = gr.Interface(fn=generate_image_wrapper,
                         inputs=[
                             "text",
                             gr.Image(image_mode='RGBA', type="numpy"),
-                            gr.Image(image_mode='RGBA', type="numpy")
+                            gr.Image(image_mode='RGBA', type="numpy"),
+                            gr.Textbox(label="GPU allocation in secs"),
                         ],
                         outputs=[
                             gr.Image(type="numpy", image_mode='RGBA')
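
For context: with ZeroGPU Spaces, `@spaces.GPU(duration=...)` fixes the requested GPU allocation when the function is decorated, which is presumably why this commit defines one decorated wrapper per duration and dispatches on an extra text input. Below is a minimal sketch of the same dispatch written as a lookup table; `DURATION_HANDLERS` is a name introduced here for illustration, and the decorated wrappers from app.py above are assumed to be in scope.

    # Sketch only: a table-driven version of the duration dispatch added in
    # this commit. Assumes generate_image_130/150/170/190/200 (each decorated
    # with @spaces.GPU) are defined as in app.py above.
    DURATION_HANDLERS = {
        "130": generate_image_130,
        "150": generate_image_150,
        "170": generate_image_170,
        "190": generate_image_190,
        "200": generate_image_200,
    }

    def generate_image_wrapper(prompt, image, image2, duration):
        # Unknown or empty input falls back to the 170-second wrapper,
        # mirroring the else branch in the committed code.
        handler = DURATION_HANDLERS.get(duration, generate_image_170)
        return handler(prompt, image, image2)

A `gr.Dropdown(choices=["130", "150", "170", "190", "200"], label="GPU allocation in secs")` input could constrain the value to the durations that actually have handlers; the committed interface uses a free-form text field instead.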