Potre1qw commited on
Commit
fffc2f0
·
verified ·
1 Parent(s): ba639b2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +115 -35
app.py CHANGED
@@ -1,3 +1,4 @@
 
1
  import gradio as gr
2
  import numpy as np
3
  import random
@@ -16,22 +17,19 @@ from decouple import config
16
  from pathlib import Path
17
  from PIL import Image
18
  import io
 
19
 
20
- URL="http://127.0.0.1"
21
- OUTPUT_DIR = config('OUTPUT_DIR')
22
- INPUT_DIR = config('INPUT_DIR')
23
- COMF_PATH = config('COMF_PATH')
24
 
25
- import torch
26
 
27
- import spaces
 
 
 
28
 
29
- print(f"Is CUDA available: {torch.cuda.is_available()}")
30
- print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
31
- print(torch.version.cuda)
32
- device = torch.cuda.get_device_name(torch.cuda.current_device())
33
- print(device)
34
 
 
 
 
35
 
36
  def wait_for_image_with_prefix(folder, prefix):
37
  def is_file_ready(file_path):
@@ -39,7 +37,6 @@ def wait_for_image_with_prefix(folder, prefix):
39
  time.sleep(1)
40
  return initial_size == os.path.getsize(file_path)
41
 
42
-
43
  files = os.listdir(folder)
44
  image_files = [f for f in files if f.lower().startswith(prefix.lower()) and
45
  f.lower().endswith(('.png', '.jpg', '.jpeg'))]
@@ -54,10 +51,8 @@ def wait_for_image_with_prefix(folder, prefix):
54
  time.sleep(3)
55
  return latest_image
56
 
57
-
58
  return None
59
 
60
-
61
  def delete_image_file(file_path):
62
  try:
63
  if os.path.exists(file_path):
@@ -68,13 +63,11 @@ def delete_image_file(file_path):
68
  except Exception as e:
69
  logger.debug(f"error {file_path}: {str(e)}")
70
 
71
-
72
  def start_queue(prompt_workflow, port):
73
  p = {"prompt": prompt_workflow}
74
  data = json.dumps(p).encode('utf-8')
75
  requests.post(f"{URL}:{port}/prompt", data=data)
76
 
77
-
78
  def check_server_ready(port):
79
  try:
80
  response = requests.get(f"{URL}:{port}/history/123", timeout=5)
@@ -83,12 +76,15 @@ def check_server_ready(port):
83
  return False
84
 
85
 
86
-
87
- @spaces.GPU(duration=190)
88
  def generate_image(prompt, image):
89
  prefix_filename = str(random.randint(0, 999999))
90
- prompt = prompt.replace('ComfyUI', prefix_filename)
91
- prompt = json.loads(prompt)
 
 
 
 
 
92
 
93
  image = Image.fromarray(image)
94
  image.save(INPUT_DIR + '/input.png', format='PNG')
@@ -96,13 +92,11 @@ def generate_image(prompt, image):
96
  process = None
97
  new_port = str(random.randint(8123, 8200))
98
 
99
- try:
100
- # Запускаем скрипт как подпроцесс
101
- process = subprocess.Popen([sys.executable, COMF_PATH, "--listen", "127.0.0.1", "--port", new_port])
102
  logger.debug(f'Subprocess started with PID: {process.pid}')
103
-
104
- # Ожидание запуска сервера
105
- for _ in range(40): # Максимум 20 секунд ожидания
106
  if check_server_ready(new_port):
107
  break
108
  time.sleep(1)
@@ -110,9 +104,8 @@ def generate_image(prompt, image):
110
  raise TimeoutError("Server did not start in time")
111
 
112
  start_queue(prompt, new_port)
113
-
114
- # Ожидание нового изображения
115
- timeout = 240 # Максимальное время ожидания в секундах
116
  start_time = time.time()
117
  while time.time() - start_time < timeout:
118
  latest_image = wait_for_image_with_prefix(OUTPUT_DIR, prefix_filename)
@@ -143,15 +136,102 @@ def generate_image(prompt, image):
143
 
144
 
145
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
146
  if __name__ == "__main__":
147
- demo = gr.Interface(fn=generate_image, inputs=[
 
 
148
  "text",
149
- gr.Image(image_mode='RGBA', type="numpy")
 
150
  ],
151
- outputs=[
152
- gr.Image(type="numpy", image_mode='RGBA')
153
- ])
 
154
  demo.launch(debug=True)
155
  logger.debug('demo.launch()')
156
 
157
- logger.info("Основной скрипт завершил работу.")
 
1
+ import spaces
2
  import gradio as gr
3
  import numpy as np
4
  import random
 
17
  from pathlib import Path
18
  from PIL import Image
19
  import io
20
+ import torch
21
 
 
 
 
 
22
 
 
23
 
24
# Local ComfyUI backend configuration.
URL="http://127.0.0.1"  # host the ComfyUI subprocess listens on
OUTPUT_DIR="ComfyUI/output"  # folder ComfyUI writes generated images into
INPUT_DIR="ComfyUI/input"  # folder where the uploaded image is staged as input.png
BACKEND_PATH="ComfyUI/main.py"  # ComfyUI entry script launched as a subprocess per request
28
 
 
 
 
 
 
29
 
30
def read_prompt_from_file(filename):
    """Load a JSON workflow definition from *filename* and return the parsed object."""
    workflow_path = Path(filename)
    with workflow_path.open(encoding='utf-8') as handle:
        return json.load(handle)
33
 
34
  def wait_for_image_with_prefix(folder, prefix):
35
  def is_file_ready(file_path):
 
37
  time.sleep(1)
38
  return initial_size == os.path.getsize(file_path)
39
 
 
40
  files = os.listdir(folder)
41
  image_files = [f for f in files if f.lower().startswith(prefix.lower()) and
42
  f.lower().endswith(('.png', '.jpg', '.jpeg'))]
 
51
  time.sleep(3)
52
  return latest_image
53
 
 
54
  return None
55
 
 
56
  def delete_image_file(file_path):
57
  try:
58
  if os.path.exists(file_path):
 
63
  except Exception as e:
64
  logger.debug(f"error {file_path}: {str(e)}")
65
 
 
66
def start_queue(prompt_workflow, port):
    """POST *prompt_workflow* to the local ComfyUI server's /prompt endpoint on *port*."""
    payload = json.dumps({"prompt": prompt_workflow}).encode('utf-8')
    requests.post(f"{URL}:{port}/prompt", data=payload)
70
 
 
71
  def check_server_ready(port):
72
  try:
73
  response = requests.get(f"{URL}:{port}/history/123", timeout=5)
 
76
  return False
77
 
78
 
 
 
79
  def generate_image(prompt, image):
80
  prefix_filename = str(random.randint(0, 999999))
81
+ try:
82
+ prompt = prompt.replace('ComfyUI', prefix_filename)
83
+ prompt = json.loads(prompt)
84
+ except:
85
+ prompt = read_prompt_from_file('config.json')
86
+ prompt = json.dumps(prompt, ensure_ascii=False).replace('output_image', prefix_filename)
87
+ prompt = json.loads(prompt)
88
 
89
  image = Image.fromarray(image)
90
  image.save(INPUT_DIR + '/input.png', format='PNG')
 
92
  process = None
93
  new_port = str(random.randint(8123, 8200))
94
 
95
+ try:
96
+ process = subprocess.Popen([sys.executable, BACKEND_PATH, "--listen", "127.0.0.1", "--port", new_port])
 
97
  logger.debug(f'Subprocess started with PID: {process.pid}')
98
+
99
+ for _ in range(40):
 
100
  if check_server_ready(new_port):
101
  break
102
  time.sleep(1)
 
104
  raise TimeoutError("Server did not start in time")
105
 
106
  start_queue(prompt, new_port)
107
+
108
+ timeout = 240
 
109
  start_time = time.time()
110
  while time.time() - start_time < timeout:
111
  latest_image = wait_for_image_with_prefix(OUTPUT_DIR, prefix_filename)
 
136
 
137
 
138
 
139
def _make_gpu_runner(duration):
    """Build a wrapper around generate_image that reserves a ZeroGPU slot.

    Args:
        duration: GPU reservation length in seconds passed to spaces.GPU.

    Returns:
        A function with the generate_image signature, decorated with
        @spaces.GPU(duration=duration).
    """
    @spaces.GPU(duration=duration)
    def _runner(prompt, image):
        return generate_image(prompt, image)
    # Give the dynamic function the same name the explicit defs had,
    # for logging / debugging clarity.
    _runner.__name__ = f"generate_image_{duration}"
    return _runner


# Supported ZeroGPU reservation lengths (seconds).
GPU_DURATIONS = (50, 70, 90, 110, 130, 150, 170, 190, 200, 210, 220, 230, 240)

# Create one module-level wrapper per duration (generate_image_50 ...
# generate_image_240), replacing 13 copy-pasted function definitions.
# The names are kept identical so generate_image_wrapper can still
# dispatch to them unchanged.
for _duration in GPU_DURATIONS:
    globals()[f"generate_image_{_duration}"] = _make_gpu_runner(_duration)
190
+
191
+
192
def generate_image_wrapper(prompt, image, duration):
    """Route a generation request to the GPU wrapper matching *duration*.

    Args:
        prompt: workflow JSON string forwarded to generate_image.
        image: input image as a numpy array.
        duration: requested GPU reservation in seconds, as a string
            (one of "50", "70", ..., "240").

    Returns:
        Whatever generate_image returns for the chosen wrapper.

    Unknown duration values fall back to the 170-second wrapper,
    matching the original if/elif chain's else branch.
    """
    handlers = {
        "50": generate_image_50,
        "70": generate_image_70,
        "90": generate_image_90,
        "110": generate_image_110,
        "130": generate_image_130,
        "150": generate_image_150,
        "170": generate_image_170,
        "190": generate_image_190,
        "200": generate_image_200,
        "210": generate_image_210,
        "220": generate_image_220,
        "230": generate_image_230,
        "240": generate_image_240,
    }
    handler = handlers.get(duration, generate_image_170)
    return handler(prompt, image)
221
+
222
if __name__ == "__main__":
    # Build the Gradio UI: workflow JSON, input image, and the requested
    # GPU duration (as text) in; the upscaled image out.
    ui_inputs = [
        "text",
        gr.Image(image_mode='RGBA', type="numpy"),
        "text",
    ]
    ui_outputs = [gr.Image(type="numpy", image_mode='RGBA')]
    demo = gr.Interface(
        fn=generate_image_wrapper,
        inputs=ui_inputs,
        outputs=ui_outputs,
        title="Image Upscaler",
        description="BEST UPSCALER EVER!!!!!",
    )
    demo.launch(debug=True)
    logger.debug('demo.launch()')

    logger.info("finish")