import gradio as gr
import os
import sys
from pathlib import Path
from PIL import Image
import re
# Coder: Create directories if they don't exist
if not os.path.exists('saved_prompts'):
    os.makedirs('saved_prompts')
if not os.path.exists('saved_images'):
    os.makedirs('saved_images')
# Humanities: Elegant function to generate a safe filename 📝
def generate_safe_filename(text):
    return re.sub('[^a-zA-Z0-9]', '_', text)
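# Example (illustrative): generate_safe_filename("A cat, 4k!") -> "A_cat__4k_",
# giving each prompt a filesystem-safe stem for its saved .txt/.png pair.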
def load_models_from_file(filename):
    with open(filename, 'r') as f:
        # Skip blank lines so they don't turn into empty model names
        return [line.strip() for line in f if line.strip()]
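# models.txt is expected to list one Hugging Face Hub model id per line, e.g. (illustrative):
#   prompthero/openjourney
#   stabilityai/stable-diffusion-2-1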
models = load_models_from_file('models.txt')

if __name__ == "__main__":
    print(models)

#removed to removed.txt
current_model = models[0]
text_gen1=gr.Interface.load("spaces/Omnibus/MagicPrompt-Stable-Diffusion_link")
models2 = [gr.Interface.load(f"models/{model}", live=True, preprocess=False) for model in models]
def text_it1(inputs, text_gen1=text_gen1):
    # Expand a short prompt idea via the loaded MagicPrompt space
    go_t1 = text_gen1(inputs)
    return go_t1

def set_model(current_model):
    # current_model arrives as an index because the Dropdown below uses type="index"
    current_model = models[current_model]
    return gr.update(label=(f"{current_model}"))
# Analysis: Function to list saved prompts and images 📊
def list_saved_prompts_and_images():
    saved_prompts = os.listdir('saved_prompts')
    saved_images = os.listdir('saved_images')
    html_str = "<h2>Saved Prompts and Images:</h2><ul>"
    for prompt_file in saved_prompts:
        # Pair each saved prompt (<stem>.txt) with its image (<stem>.png)
        image_file = f"{prompt_file[:-4]}.png"
        if image_file in saved_images:
            html_str += f'<li>Prompt: {prompt_file[:-4]} | <a href="saved_images/{image_file}" download>Download Image</a></li>'
    html_str += "</ul>"
    return html_str
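# Illustrative result for a saved pair sunset_over_lake.txt / sunset_over_lake.png:
# <h2>Saved Prompts and Images:</h2><ul><li>Prompt: sunset_over_lake | <a href="saved_images/sunset_over_lake.png" download>Download Image</a></li></ul>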
# Coder: Modified function to save the prompt and image 🖼️
def send_it1(inputs, model_choice):
    proc1 = models2[model_choice]
    output1 = proc1(inputs)
    # `inputs` is the prompt string typed into the textbox
    safe_filename = generate_safe_filename(inputs)
    image_path = f"saved_images/{safe_filename}.png"
    prompt_path = f"saved_prompts/{safe_filename}.txt"
    with open(prompt_path, 'w') as f:
        f.write(inputs)
    # The loaded model interface may return a file path or a pixel array; handle both
    if isinstance(output1, str):
        Image.open(output1).save(image_path)
    else:
        Image.fromarray(output1).save(image_path)
    # Also return the refreshed listing so the saved_output HTML component updates
    return output1, list_saved_prompts_and_images()
css=""""""
with gr.Blocks(css=css) as myface:
gr.HTML("""<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="twitter:card" content="player"/>
<meta name="twitter:site" content=""/>
<meta name="twitter:player" content="https://omnibus-maximum-multiplier-places.hf.space"/>
<meta name="twitter:player:stream" content="https://omnibus-maximum-multiplier-places.hf.space"/>
<meta name="twitter:player:width" content="100%"/>
<meta name="twitter:player:height" content="600"/>
<meta property="og:title" content="Embedded Live Viewer"/>
<meta property="og:description" content="Tweet Genie - A Huggingface Space"/>
<meta property="og:image" content="https://cdn.glitch.global/80dbe92e-ce75-44af-84d5-74a2e21e9e55/omnicard.png?v=1676772531627"/>
<!--<meta http-equiv="refresh" content="0; url=https://huggingface.co/spaces/corbt/tweet-genie">-->
</head>
</html>
""")
    with gr.Row():
        with gr.Column(scale=100):
            saved_output = gr.HTML(label="Saved Prompts and Images")
    with gr.Row():
        with gr.Tab("Title"):
            gr.HTML("""<title>Prompt to Generate Image</title><div style="text-align: center; max-width: 1500px; margin: 0 auto;">
            <h1>Enter a Prompt in Textbox then click Generate Image</h1>""")
        with gr.Tab("Tools"):
            with gr.Tab("View"):
                with gr.Row():
                    with gr.Column(style="width=50%, height=70%"):
                        gr.Pil(label="Crop")
                    with gr.Column(style="width=50%, height=70%"):
                        gr.Pil(label="Crop")
            with gr.Tab("Draw"):
                with gr.Column(style="width=50%, height=70%"):
                    gr.Pil(label="Crop")
                with gr.Column(style="width=50%, height=70%"):
                    gr.Pil(label="Draw")
                    gr.ImagePaint(label="Draw")
            with gr.Tab("Text"):
                with gr.Row():
                    with gr.Column(scale=50):
                        gr.Textbox(label="", lines=8, interactive=True)
                    with gr.Column(scale=50):
                        gr.Textbox(label="", lines=8, interactive=True)
            with gr.Tab("Color Picker"):
                with gr.Row():
                    with gr.Column(scale=50):
                        gr.ColorPicker(label="Color", interactive=True)
                    with gr.Column(scale=50):
                        gr.ImagePaint(label="Draw", interactive=True)
    with gr.Row():
        with gr.Column(scale=100):
            magic1 = gr.Textbox(lines=4)
            run = gr.Button("Generate Image")
    with gr.Row():
        with gr.Column(scale=100):
            model_name1 = gr.Dropdown(label="Select Model", choices=[m for m in models], type="index", value=current_model, interactive=True)
    with gr.Row():
        with gr.Column(style="width=800px"):
            output1 = gr.Image(label=(f"{current_model}"))
    with gr.Row():
        with gr.Column(scale=50):
            input_text = gr.Textbox(label="Prompt Idea", lines=2)
            use_short = gr.Button("Use Short Prompt")
            see_prompts = gr.Button("Extend Idea")
    with gr.Row():
        with gr.Column(scale=100):
            # Rebinds saved_output, so the event handlers below update this component;
            # it shows the current listing when the app loads.
            saved_output = gr.HTML(value=list_saved_prompts_and_images(), label="Saved Prompts and Images")
    def short_prompt(inputs):
        return inputs

    use_short.click(short_prompt, inputs=[input_text], outputs=magic1)
    see_prompts.click(text_it1, inputs=[input_text], outputs=magic1)
    # Reasoning: Link functions to Gradio components 🎛️
    model_name1.change(set_model, inputs=model_name1, outputs=[output1])
    # send_it1 returns both the generated image and the refreshed saved-items HTML
    run.click(send_it1, inputs=[magic1, model_name1], outputs=[output1, saved_output])

myface.queue(concurrency_count=200)
myface.launch(inline=True, show_api=False, max_threads=400)