englissi committed (verified)
Commit 8e3153b · 1 Parent(s): c2854a7

Update app.py

Files changed (1)
app.py  +19 -23
app.py CHANGED
@@ -1,13 +1,11 @@
 import gradio as gr
 import numpy as np
 import random
-
-# import spaces #[uncomment to use ZeroGPU]
 from diffusers import DiffusionPipeline
 import torch
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
-model_repo_id = "stabilityai/sdxl-turbo"  # Replace to the model you would like to use
+model_repo_id = "stabilityai/sdxl-turbo"  # 사용하려는 모델 이름
 
 if torch.cuda.is_available():
     torch_dtype = torch.float16
@@ -20,8 +18,6 @@ pipe = pipe.to(device)
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
 
-
-# @spaces.GPU #[uncomment to use ZeroGPU]
 def infer(
     prompt,
     negative_prompt,
@@ -66,71 +62,71 @@ css = """
 
 with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
-        gr.Markdown(" # Text-to-Image Gradio Template")
+        gr.Markdown(" # 텍스트-이미지 생성 Gradio 템플릿")
 
         with gr.Row():
             prompt = gr.Text(
-                label="Prompt",
+                label="프롬프트",
                 show_label=False,
                 max_lines=1,
-                placeholder="Enter your prompt",
+                placeholder="생성하고 싶은 이미지를 입력하세요",
                 container=False,
             )
 
-            run_button = gr.Button("Run", scale=0, variant="primary")
+            run_button = gr.Button("실행", scale=0, variant="primary")
 
-        result = gr.Image(label="Result", show_label=False)
+        result = gr.Image(label="결과", show_label=False)
 
-        with gr.Accordion("Advanced Settings", open=False):
+        with gr.Accordion("고급 설정", open=False):
             negative_prompt = gr.Text(
-                label="Negative prompt",
+                label="네거티브 프롬프트",
                 max_lines=1,
-                placeholder="Enter a negative prompt",
+                placeholder="포함하지 않을 내용을 입력하세요",
                 visible=False,
             )
 
             seed = gr.Slider(
-                label="Seed",
+                label="시드 값",
                 minimum=0,
                 maximum=MAX_SEED,
                 step=1,
                 value=0,
             )
 
-            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+            randomize_seed = gr.Checkbox(label="시드 랜덤화", value=True)
 
             with gr.Row():
                 width = gr.Slider(
-                    label="Width",
+                    label="너비 (픽셀)",
                     minimum=256,
                     maximum=MAX_IMAGE_SIZE,
                     step=32,
-                    value=1024,  # Replace with defaults that work for your model
+                    value=1024,  # 모델에 적합한 기본값으로 설정
                 )
 
                 height = gr.Slider(
-                    label="Height",
+                    label="높이 (픽셀)",
                     minimum=256,
                     maximum=MAX_IMAGE_SIZE,
                     step=32,
-                    value=1024,  # Replace with defaults that work for your model
+                    value=1024,  # 모델에 적합한 기본값으로 설정
                 )
 
             with gr.Row():
                 guidance_scale = gr.Slider(
-                    label="Guidance scale",
+                    label="가이던스 스케일",
                     minimum=0.0,
                     maximum=10.0,
                     step=0.1,
-                    value=0.0,  # Replace with defaults that work for your model
+                    value=0.0,  # 모델에 적합한 기본값으로 설정
                )
 
                 num_inference_steps = gr.Slider(
-                    label="Number of inference steps",
+                    label="추론 단계 수",
                     minimum=1,
                     maximum=50,
                     step=1,
-                    value=2,  # Replace with defaults that work for your model
+                    value=2,  # 모델에 적합한 기본값으로 설정
                )
 
         gr.Examples(examples=examples, inputs=[prompt])
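
The hunks above only touch comments, UI labels, placeholder strings, and the ZeroGPU placeholders; the English template text is localized into Korean (for example "Prompt" becomes "프롬프트" and "Run" becomes "실행"), while the pipeline construction and the body of infer lie outside the shown context. For reference, a minimal sketch of how this template typically wires those pieces together; the infer body, the generator handling, and the event binding mentioned afterwards are assumptions based on the stock Gradio text-to-image template, not code contained in this commit.

import random

import numpy as np
import torch
from diffusers import DiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

# Same checkpoint as in the diff; any diffusers text-to-image checkpoint would work here.
model_repo_id = "stabilityai/sdxl-turbo"
pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
pipe = pipe.to(device)

MAX_SEED = np.iinfo(np.int32).max


def infer(prompt, negative_prompt, seed, randomize_seed, width, height,
          guidance_scale, num_inference_steps):
    # Draw a fresh seed when the "시드 랜덤화" (randomize seed) checkbox is ticked.
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(seed)

    # Run the text-to-image pipeline with the values collected from the UI controls.
    image = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        generator=generator,
    ).images[0]
    return image, seed

In the stock template the controls are then bound to this function with something like gr.on(triggers=[run_button.click, prompt.submit], fn=infer, inputs=[prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps], outputs=[result, seed]), and demo.launch() starts the app. The defaults kept in the diff (guidance_scale=0.0, num_inference_steps=2) match sdxl-turbo, which is distilled to run without classifier-free guidance in very few steps.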