Abhishek Gola committed on
Commit cca075c · 1 Parent(s): 3c4ef91

Added NAFNet quantized model for deblurring DNN sample (#295)


* Adding NAFNet deblurring model

* Added version check and made objects git-lfs

* Re-add images and ONNX under Git LFS

* Removed onnx model from git-lfs

* Added deblurring onnx model

models/deblurring_nafnet/CMakeLists.txt ADDED
@@ -0,0 +1,11 @@
+ cmake_minimum_required(VERSION 3.22.2)
+ project(opencv_zoo_deblurring_nafnet)
+
+ set(OPENCV_VERSION "5.0.0")
+ set(OPENCV_INSTALLATION_PATH "" CACHE PATH "Where to look for OpenCV installation")
+
+ # Find OpenCV
+ find_package(OpenCV ${OPENCV_VERSION} REQUIRED HINTS ${OPENCV_INSTALLATION_PATH})
+
+ add_executable(opencv_zoo_deblurring_nafnet demo.cpp)
+ target_link_libraries(opencv_zoo_deblurring_nafnet ${OpenCV_LIBS})
models/deblurring_nafnet/LICENSE ADDED
@@ -0,0 +1,228 @@
+ MIT License
+
+ Copyright (c) 2022 megvii-model
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
+
+
+ BasicSR
+ Copyright 2018-2020 BasicSR Authors
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2018-2020 BasicSR Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
models/deblurring_nafnet/README.md ADDED
@@ -0,0 +1,54 @@
+ # NAFNet
+
+ NAFNet is a lightweight image deblurring model that eliminates nonlinear activations to achieve state-of-the-art performance with minimal computational cost.
+
+ Notes:
+
+ - Model source: [.pth](https://drive.google.com/file/d/14D4V4raNYIOhETfcuuLI3bGLB-OYIv6X/view).
+ - ONNX model: [ONNX](https://drive.google.com/uc?export=dowload&id=1ZLRhkpCekNruJZggVpBgSoCx3k7bJ-5v)
+
+ ## Requirements
+ Install OpenCV >= 5.0.0 and CMake >= 3.22.2 to get started.
+
+ ## Demo
+
+ ### Python
+
+ Run the following commands to try the demo:
+
+ ```shell
+ # deblur the default input image
+ python demo.py
+ # deblur a user-specified input image
+ python demo.py --input /path/to/image
+
+ # get help regarding various parameters
+ python demo.py --help
+ ```
+
+ ### C++
+
+ ```shell
+ # A typical and default installation path of OpenCV is /usr/local
+ cmake -B build -D OPENCV_INSTALLATION_PATH=/path/to/opencv/installation .
+ cmake --build build
+
+ # deblur the default input image
+ ./build/opencv_zoo_deblurring_nafnet
+ # deblur a user-specified input image
+ ./build/opencv_zoo_deblurring_nafnet --input=/path/to/image
+ # get help messages
+ ./build/opencv_zoo_deblurring_nafnet -h
+ ```
+
+ ### Example outputs
+
+ ![licenseplate_motion](./example_outputs/licenseplate_motion_output.jpg)
+
+ ## License
+
+ All files in this directory are licensed under the [MIT License](./LICENSE).
+
+ ## Reference
+
+ - https://github.com/megvii-research/NAFNet
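
Besides the two demos, the ONNX model added here can be driven directly through the `Nafnet` wrapper introduced in `nafnet.py` (shown further down in this commit). A minimal usage sketch; the file names match the demo defaults, and any other blurry image works the same way:

```python
import cv2 as cv
from nafnet import Nafnet  # wrapper class added in this commit

# Load the ONNX model shipped with this sample
model = Nafnet(modelPath='deblurring_nafnet_2025may.onnx')

blurry = cv.imread('example_outputs/licenseplate_motion.jpg')
deblurred = model.infer(blurry)          # returns an 8-bit BGR image
cv.imwrite('licenseplate_motion_output.jpg', deblurred)
```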
models/deblurring_nafnet/demo.cpp ADDED
@@ -0,0 +1,89 @@
+ #include <opencv2/dnn.hpp>
+ #include <opencv2/imgproc.hpp>
+ #include <opencv2/highgui.hpp>
+ #include <iostream>
+ #include <string>
+ #include <cmath>
+ #include <vector>
+
+ using namespace cv;
+ using namespace cv::dnn;
+ using namespace std;
+
+ class Nafnet {
+ public:
+     Nafnet(const string& modelPath) {
+         loadModel(modelPath);
+     }
+
+     // Build the input blob, run the network and post-process the output
+     void process(const Mat& image, Mat& result) {
+         // Scale pixels to [0, 1] (1/255 ~ 0.00392) and swap BGR -> RGB; keep the original resolution
+         Mat blob = blobFromImage(image, 0.00392, Size(image.cols, image.rows), Scalar(0, 0, 0), true, false, CV_32F);
+         net.setInput(blob);
+         Mat output = net.forward();
+         postProcess(output, result);
+     }
+
+ private:
+     Net net;
+
+     // Load model
+     void loadModel(const string& modelPath) {
+         net = readNetFromONNX(modelPath);
+         net.setPreferableBackend(DNN_BACKEND_DEFAULT);
+         net.setPreferableTarget(DNN_TARGET_CPU);
+     }
+
+     void postProcess(const Mat& output, Mat& result) {
+         // View the (1, 3, H, W) network output as a 3-D Mat, dropping the batch dimension
+         Mat output_transposed(3, &output.size[1], CV_32F, const_cast<void*>(reinterpret_cast<const void*>(output.ptr<float>())));
+
+         // Collect the three CHW planes and merge them into an HWC image
+         vector<Mat> channels;
+         for (int i = 0; i < 3; ++i) {
+             channels.push_back(Mat(output_transposed.size[1], output_transposed.size[2], CV_32F,
+                                    output_transposed.ptr<float>(i)));
+         }
+         merge(channels, result);
+         // Rescale from [0, 1] to 8-bit and convert RGB back to OpenCV's BGR order
+         result.convertTo(result, CV_8UC3, 255.0);
+         cvtColor(result, result, COLOR_RGB2BGR);
+     }
+ };
+
+ int main(int argc, char** argv) {
+     const string about =
+         "This sample demonstrates image deblurring with the NAFNet model.\n\n";
+     const string keys =
+         "{ help h | | Print help message. }"
+         "{ input i | example_outputs/licenseplate_motion.jpg | Path to input image.}"
+         "{ model | deblurring_nafnet_2025may.onnx | Path to the NAFNet deblurring ONNX model file }";
+
+     CommandLineParser parser(argc, argv, keys);
+     parser.about(about);
+     if (parser.has("help"))
+     {
+         parser.printMessage();
+         return 0;
+     }
+
+     string model = parser.get<String>("model");
+
+     Mat image = imread(parser.get<String>("input"));
+     if (image.empty()) {
+         cerr << "Error: Input image could not be loaded." << endl;
+         return -1;
+     }
+
+     // Create an instance of Nafnet
+     Nafnet nafnet(model);
+
+     Mat result;
+     nafnet.process(image, result);
+
+     imshow("Input", image);
+     imshow("Output", result);
+     waitKey(0);
+
+     destroyAllWindows();
+     return 0;
+ }
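
The `postProcess` step is the least obvious part of the C++ sample: it reinterprets the network's `(1, 3, H, W)` float output as three `H x W` planes, merges them into an HWC image, rescales to 8-bit, and converts RGB back to BGR. A NumPy sketch of the same logic, mirroring the Python wrapper in `nafnet.py` below:

```python
import numpy as np
import cv2 as cv

def post_process(output):
    # output: (1, 3, H, W) float32 in [0, 1], RGB channel order
    chw = output[0]                           # drop the batch dimension
    hwc = np.transpose(chw, (1, 2, 0))        # CHW -> HWC
    img = np.clip(hwc * 255.0, 0, 255).astype(np.uint8)
    return cv.cvtColor(img, cv.COLOR_RGB2BGR)  # back to OpenCV's BGR order
```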
models/deblurring_nafnet/demo.py ADDED
@@ -0,0 +1,41 @@
+ import cv2 as cv
+ import argparse
+
+ # Check OpenCV version
+ opencv_python_version = lambda str_version: tuple(map(int, [p.split('-')[0] for p in str_version.split('.')]))
+ assert opencv_python_version(cv.__version__) >= opencv_python_version("5.0.0"), \
+     "Please install the latest opencv-python to run this demo: python3 -m pip install --upgrade opencv-python"
+
+ from nafnet import Nafnet
+
+ def get_args_parser(func_args):
+     parser = argparse.ArgumentParser(add_help=False)
+     parser.add_argument('--input', help='Path to input image.', default='example_outputs/licenseplate_motion.jpg', required=False)
+     parser.add_argument('--model', help='Path to the NAFNet deblurring ONNX model.', default='deblurring_nafnet_2025may.onnx', required=False)
+
+     args, _ = parser.parse_known_args()
+     parser = argparse.ArgumentParser(parents=[parser],
+                                      description='', formatter_class=argparse.RawTextHelpFormatter)
+     return parser.parse_args(func_args)
+
+ def main(func_args=None):
+     args = get_args_parser(func_args)
+
+     nafnet = Nafnet(modelPath=args.model)
+
+     input_image = cv.imread(args.input)
+     assert input_image is not None, "Could not read the input image: {}".format(args.input)
+
+     tm = cv.TickMeter()
+     tm.start()
+     result = nafnet.infer(input_image)
+     tm.stop()
+     label = 'Inference time: {:.2f} ms'.format(tm.getTimeMilli())
+     cv.putText(result, label, (0, 15), cv.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 0))
+
+     cv.imshow("Input image", input_image)
+     cv.imshow("Output image", result)
+     cv.waitKey(0)
+     cv.destroyAllWindows()
+
+ if __name__ == '__main__':
+     main()
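
Since `Nafnet` loads the network once in its constructor, the same instance can be reused across many images. A small batch-processing sketch; the directory names are only examples, not part of the sample:

```python
import glob
import os
import cv2 as cv
from nafnet import Nafnet

nafnet = Nafnet(modelPath='deblurring_nafnet_2025may.onnx')

os.makedirs('deblurred', exist_ok=True)
for path in glob.glob('blurry_images/*.jpg'):   # example input directory
    image = cv.imread(path)
    if image is None:
        continue
    result = nafnet.infer(image)
    cv.imwrite(os.path.join('deblurred', os.path.basename(path)), result)
```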
models/deblurring_nafnet/nafnet.py ADDED
@@ -0,0 +1,36 @@
+ import cv2 as cv
+ import numpy as np
+
+ class Nafnet:
+     def __init__(self, modelPath='deblurring_nafnet_2025may.onnx', backendId=0, targetId=0):
+         self._modelPath = modelPath
+         self._backendId = backendId
+         self._targetId = targetId
+
+         # Load the model
+         self._model = cv.dnn.readNetFromONNX(self._modelPath)
+         self.setBackendAndTarget(self._backendId, self._targetId)
+
+     @property
+     def name(self):
+         return self.__class__.__name__
+
+     def setBackendAndTarget(self, backendId, targetId):
+         self._backendId = backendId
+         self._targetId = targetId
+         self._model.setPreferableBackend(self._backendId)
+         self._model.setPreferableTarget(self._targetId)
+
+     def infer(self, image):
+         # Scale pixels to [0, 1] (1/255 ~ 0.00392) and swap BGR -> RGB; keep the original resolution
+         image_blob = cv.dnn.blobFromImage(image, 0.00392, (image.shape[1], image.shape[0]), (0, 0, 0), True, False)
+
+         self._model.setInput(image_blob)
+         output = self._model.forward()
+
+         # Postprocessing: drop the batch dimension, CHW -> HWC, rescale to 8-bit, RGB -> BGR
+         result = output[0]
+         result = np.transpose(result, (1, 2, 0))
+         result = np.clip(result * 255.0, 0, 255).astype(np.uint8)
+         result = cv.cvtColor(result, cv.COLOR_RGB2BGR)
+
+         return result
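
`setBackendAndTarget` simply forwards to `cv.dnn.Net.setPreferableBackend`/`setPreferableTarget`, so the wrapper can be pointed at other backends where your OpenCV build supports them. A sketch, assuming a CUDA-enabled OpenCV build (keep the defaults otherwise):

```python
import cv2 as cv
from nafnet import Nafnet

nafnet = Nafnet(modelPath='deblurring_nafnet_2025may.onnx')

# The constructor defaults (backendId=0, targetId=0) correspond to
# DNN_BACKEND_DEFAULT / DNN_TARGET_CPU. With a CUDA-enabled build,
# the same model can be asked to run on the GPU instead:
nafnet.setBackendAndTarget(cv.dnn.DNN_BACKEND_CUDA, cv.dnn.DNN_TARGET_CUDA)
```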