Abdullah committed on
Commit
aac6f76
·
1 Parent(s): fc52d69

Add application file

Browse files
Files changed (1) hide show
  1. app.py +171 -0
app.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import spaces
3
+ from gradio_imageslider import ImageSlider
4
+ from image_gen_aux import UpscaleWithModel
5
+ from image_gen_aux.utils import load_image
6
+ import torch
7
+
8
+ # This uses https://github.com/asomoza/image_gen_aux/blob/main/src/image_gen_aux/upscalers/README.md
9
+ # Also this space has been duplicated from their official huggingface space, https://huggingface.co/spaces/OzzyGT/basic_upscaler
10
 + # They did great work, and I was happy to see them also use my models :) I thought I'd duplicate it and extend it.
11
+ # It basically made me get a pro account so I can make a Zero GPU space and made me upload a lot of my models on huggingface now so I can use them here ;)
12
+
13
+ # My models, alphabetically sorted
14
# My models, alphabetically sorted.
#
# Every repository lives under the "Phips/" namespace on the Hub, so the repo
# id is derived from the model name instead of being written out twice per
# entry. This also removes a whole class of copy-paste defects: the previous
# literal dict contained a mistyped key, "4x4xTextures_GTAV_rgt-s", that
# pointed at the same repo as "4xTextures_GTAV_rgt-s" and showed up as a
# bogus duplicate in the model dropdown — it has been dropped.
_MODEL_NAMES = (
    "1xDeH264_realplksr",
    "1xDeJPG_HAT",
    "1xDeJPG_OmniSR",
    "1xDeJPG_realplksr_otf",
    "1xDeJPG_SRFormer_light",
    "1xDeNoise_realplksr_otf",
    "1xExposureCorrection_compact",
    "1xOverExposureCorrection_compact",
    "1xUnderExposureCorrection_compact",
    "2xAoMR_mosr",
    "2xEvangelion_compact",
    "2xEvangelion_dat2",
    "2xEvangelion_omnisr",
    "2xHFA2k_compact_multijpg",
    "2xHFA2k_LUDVAE_compact",
    "2xHFA2k_LUDVAE_SPAN",
    "2xHFA2kAVCCompact",
    "2xHFA2kAVCOmniSR",
    "2xHFA2kAVCSRFormer_light",
    "2xHFA2kCompact",
    "2xHFA2kOmniSR",
    "2xHFA2kReal-CUGAN",
    "2xHFA2kShallowESRGAN",
    "2xHFA2kSPAN",
    "2xHFA2kSwinIR-S",
    "2xLexicaRRDBNet",
    "2xLexicaRRDBNet_Sharp",
    "2xNomosUni_compact_multijpg",
    "2xNomosUni_compact_multijpg_ldl",
    "2xNomosUni_compact_otf_medium",
    "2xNomosUni_esrgan_multijpg",
    "2xNomosUni_span_multijpg",
    "2xNomosUni_span_multijpg_ldl",
    "2xParimgCompact",
    "4xArtFaces_realplksr_dysample",
    "4xBHI_dat2_multiblur",
    "4xBHI_dat2_multiblurjpg",
    "4xBHI_dat2_otf",
    "4xBHI_dat2_real",
    "4xBHI_realplksr_dysample_multi",
    "4xBHI_realplksr_dysample_multiblur",
    "4xBHI_realplksr_dysample_otf",
    "4xBHI_realplksr_dysample_otf_nn",
    "4xBHI_realplksr_dysample_real",
    "4xFaceUpDAT",
    "4xFaceUpLDAT",
    "4xFaceUpSharpDAT",
    "4xFaceUpSharpLDAT",
    "4xFFHQDAT",
    "4xFFHQLDAT",
    "4xHFA2k",
    "4xHFA2k_ludvae_realplksr_dysample",
    "4xHFA2kLUDVAEGRL_small",
    "4xHFA2kLUDVAESRFormer_light",
    "4xHFA2kLUDVAESwinIR_light",
    "4xLexicaDAT2_otf",
    "4xLSDIRCompact2",
    "4xLSDIRCompact",
    "4xLSDIRCompactC3",
    "4xLSDIRCompactC",
    "4xLSDIRCompactCR3",
    "4xLSDIRCompactN3",
    "4xLSDIRCompactR3",
    "4xLSDIRCompactR",
    "4xLSDIRDAT",
    "4xNature_realplksr_dysample",
    "4xNomos2_hq_atd",
    "4xNomos2_hq_dat2",
    "4xNomos2_hq_drct-l",
    "4xNomos2_hq_mosr",
    "4xNomos2_otf_esrgan",
    "4xNomos2_realplksr_dysample",
    "4xNomos8k_atd_jpg",
    "4xNomos8kDAT",
    "4xNomos8kHAT-L_bokeh_jpg",
    "4xNomos8kHAT-L_otf",
    "4xNomos8kSC",
    "4xNomos8kSCHAT-L",
    "4xNomos8kSCHAT-S",
    "4xNomos8kSCSRFormer",
    "4xNomosUni_rgt_multijpg",
    "4xNomosUni_rgt_s_multijpg",
    "4xNomosUni_span_multijpg",
    "4xNomosUniDAT2_box",
    "4xNomosUniDAT2_multijpg_ldl",
    "4xNomosUniDAT2_multijpg_ldl_sharp",
    "4xNomosUniDAT_bokeh_jpg",
    "4xNomosUniDAT_otf",
    "4xNomosWebPhoto_atd",
    "4xNomosWebPhoto_esrgan",
    "4xNomosWebPhoto_RealPLKSR",
    "4xReal_SSDIR_DAT_GAN",
    "4xRealWebPhoto_v3_atd",
    "4xRealWebPhoto_v4_dat2",
    "4xRealWebPhoto_v4_drct-l",
    "4xSSDIRDAT",
    "4xTextureDAT2_otf",
    "4xTextures_GTAV_rgt-s",
    "4xTextures_GTAV_rgt-s_dither",
)

# Dropdown label -> Hugging Face repo id.
MODELS = {name: f"Phips/{name}" for name in _MODEL_NAMES}
116
+
117
+
118
@spaces.GPU
def upscale_image(image, model_selection):
    """Upscale *image* with the model picked in the dropdown.

    Returns the pair ``(original, upscaled)`` so the before/after slider
    in the UI can compare both versions.

    NOTE(review): the upscaler is loaded from the Hub on every call —
    fine for a demo Space, but a simple cache would speed up repeat runs.
    """
    source = load_image(image)

    # Prefer the GPU when one is attached (ZeroGPU); otherwise run on CPU.
    run_device = "cuda" if torch.cuda.is_available() else "cpu"
    model = UpscaleWithModel.from_pretrained(MODELS[model_selection]).to(run_device)

    # 1024x1024 tiling keeps peak memory bounded on large inputs.
    upscaled = model(source, tiling=True, tile_width=1024, tile_height=1024)

    return source, upscaled
128
+
129
+
130
def clear_result():
    """Blank the output slider so no stale result lingers during a new run."""
    cleared = gr.update(value=None)
    return cleared
132
+
133
+
134
# Static header shown above the app.
title = """<h1 align="center">Image Upscaler</h1>
<div align="center">Use this Space to upscale your images, makes use of the
<a href="https://github.com/asomoza/image_gen_aux">Image Generation Auxiliary Tools</a> library. <br>
This space makes use of <a href="https://github.com/Phhofm/models">my self trained models</a> and tiles at 1024x1024<br>
Here is an <a href="https://huggingface.co/spaces/Phips/Upscaler/resolve/main/input_example1.png">example input image</a> you can use to try it out.</div>
"""

# Layout: input image + model picker on the left, before/after slider on the right.
with gr.Blocks() as demo:
    gr.HTML(title)
    with gr.Row():
        with gr.Column():
            source_image = gr.Image(type="pil", label="Input Image")

            model_choice = gr.Dropdown(
                choices=list(MODELS.keys()),
                value="4xNomosWebPhoto_RealPLKSR",
                label="Model (alphabetically sorted)",
            )

            upscale_btn = gr.Button("Upscale")
        with gr.Column():
            output_slider = ImageSlider(
                interactive=False,
                label="Generated Image",
            )

    # Clear the previous result first, then run the (potentially slow)
    # upscale, so the slider never shows a stale image mid-run.
    upscale_btn.click(
        fn=clear_result,
        inputs=None,
        outputs=output_slider,
    ).then(
        fn=upscale_image,
        inputs=[source_image, model_choice],
        outputs=output_slider,
    )


demo.launch(share=False)