awacke1 committed
Commit 628051d
1 Parent(s): e4f2a60

Update app.py

Files changed (1)
  1. app.py +45 -36
app.py CHANGED
@@ -1,58 +1,67 @@
 import gradio as gr
 from pathlib import Path
-import os
 
-# Load models from file
 def load_models_from_file(filename):
     with open(filename, 'r') as f:
         return [line.strip() for line in f]
 
-# Initialize model
-models = load_models_from_file('models.txt')
-current_model = models[0]
+if __name__ == "__main__":
+    models = load_models_from_file('models.txt')
+    print(models)
 
-# Load Interfaces
+current_model = models[0]
 text_gen1 = gr.Interface.load("spaces/Omnibus/MagicPrompt-Stable-Diffusion_link")
 models2 = [gr.Interface.load(f"models/{model}", live=True, preprocess=False) for model in models]
 
-# Function to generate text
 def text_it1(inputs):
     go_t1 = text_gen1(inputs)
     return go_t1
 
-# Function to set model
-def set_model(model_choice):
-    current_model = models[model_choice]
-    return f"{current_model}"
+def set_model(current_model):
+    current_model = models[current_model]
+    return gr.update(label=(f"{current_model}"))
 
-# Function to send input
 def send_it1(inputs, model_choice):
     proc1 = models2[model_choice]
     output1 = proc1(inputs)
-
-    # Save image and prompt to file
-    prompt_safe_name = "".join(e for e in inputs if e.isalnum())
-    image_path = f"images/{prompt_safe_name}.png"
-    output1.save(image_path)
-
-    with open('prompts.txt', 'a') as f:
-        f.write(f"{inputs}\n{image_path}\n")
-
     return output1
 
-# Initialize Gradio Interface
-with gr.Blocks() as myface:
+css = ""
+
+with gr.Blocks(css=css) as myface:
+    with gr.Row():
+        with gr.Tab("Tools"):
+            with gr.Tab("View"):
+                with gr.Row():
+                    gr.Pil(label="Crop")
+                    gr.Pil(label="Crop")
+
+            with gr.Tab("Draw"):
+                with gr.Row():
+                    gr.Pil(label="Crop")
+                    gr.ImagePaint(label="Draw")
+                gr.Textbox(label="", lines=8)
+                gr.Textbox(label="", lines=8)
+
+            with gr.Tab("Color Picker"):
+                with gr.Row():
+                    gr.ColorPicker(label="Color")
+                    gr.ImagePaint(label="Draw")
+
     with gr.Row():
-        gr.Textbox(lines=4, label="Prompt").store("magic1")
-        gr.Dropdown(label="Select Model", choices=models, type="index", value=0, interactive=True).store("model_name1")
-        gr.Button("Generate Image").click(send_it1, inputs=["magic1", "model_name1"]).outputs("output1")
-        gr.Image(label="Image Output").store("output1")
-        gr.Textbox(label="Prompt Idea", lines=2).store("input_text")
-        gr.Button("Use Short Prompt").click(text_it1, inputs=["input_text"]).outputs("magic1")
-
-    # Sidebar to show saved prompts
-    with open('prompts.txt', 'r') as f:
-        saved_prompts = f.readlines()
-    gr.Textbox(value=saved_prompts, lines=20, label="Saved Prompts", interactive=False, sidetab=True)
-
-myface.launch()
+        gr.Textbox(lines=4, label="Magic Textbox")
+        gr.Button("Generate Image")
+
+    with gr.Row():
+        gr.Dropdown(label="Select Model", choices=[m for m in models], type="index", value=current_model)
+
+    with gr.Row():
+        gr.Image(label=(f"{current_model}"))
+
+    with gr.Row():
+        gr.Textbox(label="Prompt Idea", lines=2)
+        gr.Button("Use Short Prompt")
+        gr.Button("Extend Idea")
+
+myface.queue(concurrency_count=200)
+myface.launch(inline=True, show_api=False, max_threads=400)
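
Note that the new layout only declares the Textbox, Dropdown, Image, and Button components; nothing is wired to the send_it1 and text_it1 handlers yet (the old version's chained .store()/.outputs() calls were not valid Blocks API either). Below is a minimal sketch of how that wiring could look with Gradio 3.x Blocks event listeners; it assumes the models list and the handlers defined in app.py above, and every other name is illustrative.

# Hypothetical wiring sketch, not part of this commit.
# Assumes Gradio 3.x plus the models list and the send_it1/text_it1
# handlers defined in app.py above; all component variable names are made up.
import gradio as gr

with gr.Blocks() as demo:
    with gr.Row():
        magic1 = gr.Textbox(lines=4, label="Magic Textbox")
        run_btn = gr.Button("Generate Image")
    with gr.Row():
        model_name1 = gr.Dropdown(label="Select Model",
                                  choices=[m for m in models],
                                  type="index", value=models[0])
    with gr.Row():
        output1 = gr.Image(label="Image Output")
    with gr.Row():
        input_text = gr.Textbox(label="Prompt Idea", lines=2)
        use_short = gr.Button("Use Short Prompt")

    # In Blocks, events are attached to component variables rather than
    # string names: the Dropdown (type="index") passes the selected model
    # index straight into send_it1(inputs, model_choice).
    run_btn.click(send_it1, inputs=[magic1, model_name1], outputs=output1)
    use_short.click(text_it1, inputs=[input_text], outputs=magic1)

demo.queue(concurrency_count=200)
demo.launch(show_api=False)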