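# Gradio app: generate images from a text prompt using any model listed in models.txt,
# expand short prompt ideas via the MagicPrompt space, and keep a log of saved prompts.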
import gradio as gr
from pathlib import Path
import os
# Load model ids from a text file (skip blank lines)
def load_models_from_file(filename):
    with open(filename, 'r') as f:
        return [line.strip() for line in f if line.strip()]
# Initialize model
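# models.txt is expected to contain one Hugging Face model id per line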
models = load_models_from_file('models.txt')
current_model = models[0]
# Load Interfaces
text_gen1 = gr.Interface.load("spaces/Omnibus/MagicPrompt-Stable-Diffusion_link")
models2 = [gr.Interface.load(f"models/{model}", live=True, preprocess=False) for model in models]
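# A loaded interface can be called like a function (e.g. models2[i](prompt)) to run the model and return its output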
# Function to generate text
def text_it1(inputs):
    go_t1 = text_gen1(inputs)
    return go_t1
# Function to set the current model
def set_model(model_choice):
    global current_model  # update the module-level variable instead of creating a local one
    current_model = models[model_choice]
    return f"{current_model}"
# Function to send the prompt to the selected model and save the result
def send_it1(inputs, model_choice):
    proc1 = models2[model_choice]
    output1 = proc1(inputs)
    # Save image and prompt to file
    os.makedirs("images", exist_ok=True)
    prompt_safe_name = "".join(e for e in inputs if e.isalnum())
    image_path = f"images/{prompt_safe_name}.png"
    output1.save(image_path)  # assumes the loaded model returns a PIL image
    with open('prompts.txt', 'a') as f:
        f.write(f"{inputs}\n{image_path}\n")
    return output1
# Initialize Gradio Interface
with gr.Blocks() as myface:
    with gr.Row():
        magic1 = gr.Textbox(lines=4, label="Prompt")
        model_name1 = gr.Dropdown(label="Select Model", choices=models, type="index", value=models[0], interactive=True)
    with gr.Row():
        run_button = gr.Button("Generate Image")
        output1 = gr.Image(label="Image Output")
    with gr.Row():
        input_text = gr.Textbox(label="Prompt Idea", lines=2)
        short_button = gr.Button("Use Short Prompt")
    run_button.click(send_it1, inputs=[magic1, model_name1], outputs=[output1])
    short_button.click(text_it1, inputs=[input_text], outputs=[magic1])
    # Panel to show saved prompts (the file may not exist on first run)
    saved_prompts = Path('prompts.txt').read_text() if Path('prompts.txt').exists() else ""
    gr.Textbox(value=saved_prompts, lines=20, label="Saved Prompts", interactive=False)
myface.launch()