Update backup.app.py

backup.app.py  CHANGED  (+78 -12)
@@ -2,7 +2,22 @@ import gradio as gr
 import os
 import sys
 from pathlib import Path
+from PIL import Image
+import re
+from PIL import Image
+import numpy as np
 
+# Coder: Create directories if they don't exist
+if not os.path.exists('saved_prompts'):
+    os.makedirs('saved_prompts')
+
+if not os.path.exists('saved_images'):
+    os.makedirs('saved_images')
+
+# Humanities: Elegant function to generate a safe filename 📝
+def generate_safe_filename(text):
+    return re.sub('[^a-zA-Z0-9]', '_', text)
+
 def load_models_from_file(filename):
     with open(filename, 'r') as f:
         return [line.strip() for line in f]
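
Note on this hunk: `from PIL import Image` ends up imported twice, which is harmless but redundant, and the two existence checks can be collapsed with `os.makedirs(..., exist_ok=True)`. A minimal sketch of the same setup, reusing the directory names and the regex from the diff:

import os
import re

# Create the output directories used by the app; exist_ok=True replaces the
# explicit os.path.exists() checks from the diff.
for directory in ("saved_prompts", "saved_images"):
    os.makedirs(directory, exist_ok=True)

def generate_safe_filename(text):
    # Same regex as the diff: anything that is not a letter or digit becomes "_".
    return re.sub(r'[^a-zA-Z0-9]', '_', text)

print(generate_safe_filename("a cat, riding a bike!"))  # a_cat__riding_a_bike_
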
@@ -27,19 +42,50 @@ def set_model(current_model):
     current_model = models[current_model]
     return gr.update(label=(f"{current_model}"))
 
+# Analysis: Function to list saved prompts and images 📊
+def list_saved_prompts_and_images():
+    saved_prompts = os.listdir('saved_prompts')
+    saved_images = os.listdir('saved_images')
+
+    html_str = "<h2>Saved Prompts and Images:</h2><ul>"
+    for prompt_file in saved_prompts:
+        image_file = f"{prompt_file[:-4]}.png"
+        if image_file in saved_images:
+            html_str += f'<li>Prompt: {prompt_file[:-4]} | <a href="saved_images/{image_file}" download>Download Image</a></li>'
+    html_str += "</ul>"
+
+    return html_str
+
+# Coder: Modified function to save the prompt and image 🖼️
 def send_it1(inputs, model_choice):
     proc1 = models2[model_choice]
     output1 = proc1(inputs)
-
-
-
-
-
-    with open(
-        f.write(inputs)
-
+
+    safe_filename = generate_safe_filename(inputs[0])
+    image_path = f"saved_images/{safe_filename}.png"
+    prompt_path = f"saved_prompts/{safe_filename}.txt"
+
+    with open(prompt_path, 'w') as f:
+        f.write(inputs[0])
+
+    # Check the type of output1 before saving
+    if isinstance(output1, np.ndarray): # If it's a numpy array
+        Image.fromarray(np.uint8(output1)).save(image_path)
+    elif isinstance(output1, Image.Image): # If it's already a PIL Image
+        output1.save(image_path)
+    elif isinstance(output1, str): # If it's a string (this should not happen in ideal conditions)
+        print(f"Warning: output1 is a string. Cannot save as image. Value: {output1}")
+    else:
+        print(f"Warning: Unexpected type {type(output1)} for output1.")
+
+    #Image.fromarray(output1).save(image_path)
+
+    saved_output.update(list_saved_prompts_and_images())
+
     return output1
 
+
+
 css=""""""
 
 
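
Two things in `send_it1` are worth flagging. `magic1` is a `gr.Textbox`, so `inputs` arrives as a plain string and `inputs[0]` is only its first character; saving the whole string is almost certainly what was intended. Also, calling `saved_output.update(...)` inside the handler does not, by itself, push new HTML to the browser; in Gradio Blocks the refreshed value has to be returned from the event handler (see the wiring sketch after the last hunk). A hedged sketch of the save step under those assumptions, with `save_prompt_and_image` as a hypothetical helper name:

import numpy as np
from PIL import Image

def save_prompt_and_image(prompt, image):
    # Assumes generate_safe_filename() and the saved_* directories from this commit.
    safe = generate_safe_filename(prompt)
    with open(f"saved_prompts/{safe}.txt", "w") as f:
        f.write(prompt)  # write the full prompt string, not prompt[0]

    if isinstance(image, Image.Image):
        image.save(f"saved_images/{safe}.png")
    elif isinstance(image, np.ndarray):
        Image.fromarray(image.astype(np.uint8)).save(f"saved_images/{safe}.png")
    else:
        print(f"Warning: cannot save output of type {type(image)}")
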
@@ -61,6 +107,11 @@ with gr.Blocks(css=css) as myface:
 </head>
 </html>
 """)
+
+    with gr.Row():
+        with gr.Column(scale=100):
+            saved_output = gr.HTML(label="Saved Prompts and Images")
+
     with gr.Row():
         with gr.Tab("Title"):
             gr.HTML("""<title>Prompt to Generate Image</title><div style="text-align: center; max-width: 1500px; margin: 0 auto;">
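
On the new `saved_output` panel: `gr.HTML` renders its `value`, while `label` is only a caption, so passing the generated listing as the label (as the last hunk does) will not display it. Note also that `saved_output` is assigned twice in this commit (here and again in the last hunk), so the later assignment is the one the name ends up referring to. A small drop-in sketch for the existing `with gr.Blocks(css=css) as myface:` block, assuming the `list_saved_prompts_and_images()` helper from this commit:

    with gr.Row():
        with gr.Column(scale=100):
            saved_output = gr.HTML(
                value=list_saved_prompts_and_images(),  # the listing goes in value
                label="Saved Prompts and Images",       # label is only a caption
            )
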
@@ -98,26 +149,41 @@ with gr.Blocks(css=css) as myface:
         with gr.Column(scale=100):
             magic1=gr.Textbox(lines=4)
             run=gr.Button("Generate Image")
+
     with gr.Row():
         with gr.Column(scale=100):
             model_name1 = gr.Dropdown(label="Select Model", choices=[m for m in models], type="index", value=current_model, interactive=True)
+
     with gr.Row():
         with gr.Column(style="width=800px"):
             output1=gr.Image(label=(f"{current_model}"))
-
+    # Check the type before attempting to save the image
+    if isinstance(output1, Image.Image): # Check if it's a PIL Image object
+        output1.save(image_path)
+    elif isinstance(output1, np.ndarray): # Check if it's a NumPy array
+        Image.fromarray(np.array(output1, dtype=np.uint8)).save(image_path)
+    else:
+        print(f"Warning: Unexpected type {type(output1)} for output1.")
+
     with gr.Row():
         with gr.Column(scale=50):
             input_text=gr.Textbox(label="Prompt Idea",lines=2)
             use_short=gr.Button("Use Short Prompt")
             see_prompts=gr.Button("Extend Idea")
-
+
+    with gr.Row():
+        with gr.Column(scale=100):
+            saved_output = gr.HTML(label=list_saved_prompts_and_images(), live=True)
+
     def short_prompt(inputs):
         return(inputs)
 
-    model_name1.change(set_model,inputs=model_name1,outputs=[output1])
-    run.click(send_it1, inputs=[magic1, model_name1], outputs=[output1])
     use_short.click(short_prompt,inputs=[input_text],outputs=magic1)
     see_prompts.click(text_it1,inputs=[input_text],outputs=magic1)
 
+    # Reasoning: Link functions to Gradio components 🎛️
+    model_name1.change(set_model, inputs=model_name1, outputs=[output1])
+    run.click(send_it1, inputs=[magic1, model_name1], outputs=[output1])
+
 myface.queue(concurrency_count=200)
 myface.launch(inline=True, show_api=False, max_threads=400)
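
The type-checking block added right after `output1=gr.Image(...)` runs while the UI is still being built, so at that point `output1` is the `gr.Image` component itself and the checks simply fall through to the warning branch; the per-request saving already happens inside `send_it1`. Likewise, the saved-items panel is normally refreshed by returning a new HTML string from the click handler rather than by calling `.update()` on the component. Below is a self-contained sketch of that wiring, assuming the directory layout and helpers from this commit; `fake_model`, `generate`, `prompt_box`, and `output_image` are illustrative names standing in for the real `models2[...]` callables and components, not code from the repo.

import os
import re

import gradio as gr
import numpy as np
from PIL import Image

os.makedirs("saved_prompts", exist_ok=True)
os.makedirs("saved_images", exist_ok=True)

def generate_safe_filename(text):
    return re.sub(r"[^a-zA-Z0-9]", "_", text)

def list_saved_prompts_and_images():
    saved_images = set(os.listdir("saved_images"))
    items = []
    for prompt_file in os.listdir("saved_prompts"):
        image_file = f"{prompt_file[:-4]}.png"
        if image_file in saved_images:
            items.append(f"<li>{prompt_file[:-4]}</li>")
    return "<h2>Saved Prompts and Images:</h2><ul>" + "".join(items) + "</ul>"

def fake_model(prompt):
    # Placeholder for models2[model_choice]; returns a blank RGB image.
    return Image.new("RGB", (64, 64), "white")

def generate(prompt):
    image = fake_model(prompt)
    safe = generate_safe_filename(prompt)
    with open(f"saved_prompts/{safe}.txt", "w") as f:
        f.write(prompt)
    image.save(f"saved_images/{safe}.png")
    # Return both outputs; Gradio pushes the second value into the HTML panel.
    return image, list_saved_prompts_and_images()

with gr.Blocks() as demo:
    prompt_box = gr.Textbox(lines=4, label="Prompt")
    run = gr.Button("Generate Image")
    output_image = gr.Image(label="Result")
    saved_output = gr.HTML(value=list_saved_prompts_and_images())
    run.click(generate, inputs=[prompt_box], outputs=[output_image, saved_output])

demo.launch()
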