artificialguybr committed
Commit 42e9500 · 1 Parent(s): ce1fb91

Update app.py

Files changed (1)
  1. app.py +9 -155
app.py CHANGED
@@ -1,159 +1,13 @@
  import gradio as gr
- import requests
- import json
- import PIL.Image
- from io import BytesIO
- import os
- import random
- import datetime
- import urllib3
- urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

- def generate_image(prompt, negative_prompt, use_negative_embedding, scheduler, steps, width, height, cfg, restore_faces, seed):
-     forbidden_words = os.getenv("FORBIDDEN_WORDS").split(", ")
-     # Check if the prompt contains any of the forbidden words
-     for word in forbidden_words:
-         if word in prompt:
-             raise Exception(f"The prompt contains a forbidden word: {word}")
-     request_time = datetime.datetime.now()
-     restore_faces = bool(restore_faces)
-     use_negative_embedding = bool(use_negative_embedding)
-     print(f"restore_faces: {restore_faces}, type: {type(restore_faces)}")
-     print(f"use_negative_embedding: {use_negative_embedding}, type: {type(use_negative_embedding)}")
-     if use_negative_embedding:
-         negative_prompt += ", easynegative, ng_deepnegative_v1_75t"
-     # Define the API endpoint
-     apiUrl = os.getenv("API_URL")
-     # Define the request headers
-     headers = {
-         "Content-Type": "application/json",
-         "token": os.getenv("API_TOKEN")
-     }
+ def show_message():
+     message = """
+     <div style='text-align: center; font-size: 24px;'>
+         <p>HF Spaces Closed Due to Lack of GPU.</p>
+         <p>If you are a GPU Provider and would like to provide GPU for this HF Space in exchange for publicity, please contact us on Twitter <a href='https://twitter.com/artificialguybr'>@artificialguybr</a>.</p>
+     </div>
+     """
+     return message

-     # Define the request body
-     body = {
-         "mode": "url",
-         "model": "LiberteRedmond.ckpt",
-         "tiling": False,
-         "batch_size": 1,
-         "prompt": prompt,
-         "negative_prompt": negative_prompt,
-         "seed":random.randint(0, 999999999),
-         "scheduler": scheduler,
-         "n_iter": 1,
-         "steps": steps,
-         "cfg": cfg,
-         "offset_noise": 0.0,
-         "width": width,
-         "height": height,
-         "clip_skip": 1,
-         "embeddings": [
-             {
-                 "name": "easynegative",
-                 "strength": 1.0 if use_negative_embedding else 0
-             },
-             {
-                 "name": "ng_deepnegative_v1_75t",
-                 "strength": 1.0 if use_negative_embedding else 0
-             }
-         ],
-         "vae": "vae-ft-mse-840000-ema-pruned.ckpt",
-         "restore_faces": restore_faces,
-         "fr_model": "CodeFormer",
-         "codeformer_weight": 0.5,
-         "enable_hr": False,
-         "denoising_strength": 0.75,
-         "hr_scale": 2,
-         "hr_upscale": "None",
-         "img2img_ref_img_type": "piece",
-         "img2img_resize_mode": 0,
-         "img2img_denoising_strength": 0.75,
-     }
-
-     # Send the request
-     response = requests.post(apiUrl, headers=headers, data=json.dumps(body), verify=False)
-     # Print the response body if the status code is not 200
-     if response.status_code != 200:
-         print(response.text)
-
-     # Check the response status
-     if response.status_code == 200:
-
-         # Get the image URL from the response
-         response_json = response.json()
-         if 'results' in response_json and isinstance(response_json['results'], list) and len(response_json['results']) > 0:
-             image_url = response_json['results'][0]
-
-             # Get the image from the URL
-             image_response = requests.get(image_url)
-             image = PIL.Image.open(BytesIO(image_response.content))
-
-             # Log the information together
-             print(f"Request time: {request_time}\n"
-                   f"Prompt: {prompt}\n"
-                   f"Negative Prompt: {negative_prompt}\n"
-                   f"Seed: {seed}\n"
-                   f"Res(width x height): {width} x {height}\n"
-                   f"Image URL: {image_url}")
-
-             return image
-         else:
-             raise Exception("Unexpected API response format")
-     else:
-         raise Exception("API request failed with status code " + str(response.status_code))
-
- # Define the Gradio interface
- iface = gr.Interface(
-     fn=generate_image,
-     inputs=[
-         gr.components.Textbox(label="Prompt"),
-         gr.components.Textbox(value="ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft", label="Negative Prompt"),
-         gr.inputs.Checkbox(label="Use Negative Embedding", default=True),
-         gr.components.Dropdown(choices=[
-             "Euler a",
-             "Euler",
-             "LMS",
-             "Heun",
-             "DPM2",
-             "DPM2 a",
-             "DPM++ 2S a",
-             "DPM++ 2M",
-             "DPM++ SDE",
-             "DPM fast",
-             "DPM adaptive",
-             "LMS Karras",
-             "DPM2 Karras",
-             "DPM2 a Karras",
-             "DPM++ 2S a Karras",
-             "DPM++ 2M Karras",
-             "DPM++ SDE Karras",
-             "DDIM",
-             "PLMS"
-         ], label="Scheduler", value="DPM++ SDE Karras"),
-         gr.components.Slider(minimum=10, maximum=100, step=1.0,value=30, label="Steps"),
-         gr.components.Slider(minimum=512, maximum=1600, value=512, label="Width"),
-         gr.components.Slider(minimum=512, maximum=1600, value=512, label="Height"),
-         gr.components.Slider(minimum=4, maximum=12, step=0.5, value=7.0, label="CFG"),
-         gr.inputs.Checkbox(label="Restore Faces", default=False),
-     ],
-     outputs=gr.components.Image(),
-     title="Liberte.Redmond Demonstration",
-     description = """
-     ## Finetuned model of SD 1.5 produced by [@artificialguybr](https://twitter.com/artificialguybr).
-     ## Resources
-     The weights were released [here](https://civitai.com/models/94123/liberte-generalist-model) with example prompts in CIVITAI and [here in HF](https://huggingface.co/artificialguybr/liberte).
-     ## Demonstration
-     This demonstration is running on the [makeai.run API](https://www.makeai.run/).
-     ## Acknowledgements
-     Thanks to [Redmond.ai](https://redmond.ai/) for providing GPU Time and sponsoring this model.
-
-     ## Test my 2.1 Finetuned Model (Freedom) [here](https://huggingface.co/spaces/artificialguybr/freedom).
-     """,
-     allow_flagging='never'
- )
-
- #Adding queue
- iface.queue(concurrency_count=12)
-
- # Launch the app
+ iface = gr.Interface(fn=show_message, outputs="html", title="GPU Provider Needed")
  iface.launch()