├── .gitignore
├── EdgeAndCenterExtractionLayer.py
├── GlobalVarianceLayer.py
├── LICENSE
├── README.md
├── TrainingDataGenerator.py
├── ValidationDataProvider.py
├── VarianceLayer.py
├── classified_image_datatype.py
├── extended_qt_delegate.py
├── generic_list_model.py
├── inference.py
├── inference_gui.py
├── inferencing_list.py
├── model.py
├── queue_manager.py
├── requirements.txt
├── requirements_gpu.txt
├── train.py
├── training_gui.py
├── unsharpDetectorSettings.json
├── unsharpDetectorWeights.hdf5
├── validation_data
│   ├── bad
│ ├── art_blurry.jpg
│ ├── ball_blurry.jpg
│ ├── benchy3d_blurry.jpg
│ ├── carpet_blurry.jpg
│ ├── catview_blurry.jpg
│ ├── chaos_key_blurry.jpg
│ ├── console_blurry.jpg
│ ├── ct_blurry.jpg
│ ├── desk_blurry.jpg
│ ├── dsgvo_blurry.jpg
│ ├── esp32_blurry.jpg
│ ├── fabric_blurry.jpg
│ ├── garden_blurry.jpg
│ ├── headphones_blurry.jpg
│ ├── heise_garden_blurry.jpg
│ ├── keyboard2_blurry.jpg
│ ├── keyboard_blurry.jpg
│ ├── led_blurry.jpg
│ ├── mechanic_blurry.jpg
│ ├── metal_blurry.jpg
│ ├── netzteil_blurry.jpg
│ ├── paper_bag_blurry.jpg
│ ├── pina_blurry.jpg
│ ├── plastic_blurry.jpg
│ ├── printed_lamp_blurry.jpg
│ ├── skin_blurry.jpg
│ ├── squirrel_blurry.jpg
│ ├── star_blurry.jpg
│ ├── switch_blurry.jpg
│ ├── telephone_blurry.jpg
│ ├── tinkerstuff_blurry.jpg
│ ├── trees_and_sky_blurry.jpg
│ ├── vote_blurry.jpg
│ └── wall_blurry.jpg
│   └── good
│ ├── art_sharp.jpg
│ ├── ball_sharp.jpg
│ ├── benchy3d_sharp.jpg
│ ├── carpet_sharp.jpg
│ ├── catview_sharp.jpg
│ ├── circuit_sharp.jpg
│ ├── console_sharp.jpg
│ ├── ct_sharp.jpg
│ ├── desk_sharp.jpg
│ ├── dsgvo_sharp.jpg
│ ├── esp32_sharp.jpg
│ ├── fabric_sharp.jpg
│ ├── garden_sharp.jpg
│ ├── headphones_sharp.jpg
│ ├── heise_garden_sharp.jpg
│ ├── keyboard2_sharp.jpg
│ ├── keyboard_sharp.jpg
│ ├── led_sharp.jpg
│ ├── mechanic_sharp.jpg
│ ├── metal_sharp.jpg
│ ├── netzteil_sharp.jpg
│ ├── paper_bag_sharp.jpg
│ ├── pina_sharp.jpg
│ ├── plastic_sharp.jpg
│ ├── printed_lamp_sharp.jpg
│ ├── skin_sharp.jpg
│ ├── squirrel_sharp.jpg
│ ├── star_sharp.jpg
│ ├── switch_sharp.jpg
│ ├── telephone_sharp.jpg
│ ├── tinkerstuff_sharp.jpg
│ ├── trees_and_sky_sharp.jpg
│ ├── vote_sharp.jpg
│ └── wall_sharp.jpg
└── visualization_helpers.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
103 | # PyCharm
104 | .idea/
105 |
106 | secret_settings.py
107 |
--------------------------------------------------------------------------------
/EdgeAndCenterExtractionLayer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import tensorflow.keras.backend as K
3 | from tensorflow.keras.layers import Layer
4 | from tensorflow.keras.models import Model
5 | from tensorflow.keras.layers import Input
6 | import numpy as np
7 | import unittest
8 |
9 |
class EdgeAndCenterExtractionLayer(Layer):
    """Extract the four corners, the four edge midpoints and the center of an
    image tensor and reassemble them into one compact (4w x 4w) patch grid.

    The input is expected to be a 4D image batch (batch, height, width,
    channels). Nine crops are taken: corner crops of size (w x w), edge-middle
    crops of size (2w x w) or (w x 2w), and a center crop of size (2w x 2w).
    They are tiled into a single (4w x 4w) output per channel, so downstream
    layers see the sharpness-relevant regions without the full image.

    NOTE(review): assumes height >= 4w and width >= 4w so the crops do not
    overlap-wrap — confirm against callers.
    """

    def __init__(self, width, **kwargs):
        # width: half the side length of the center crop; corner crops are
        # width x width. Stored as self.w and serialized via get_config().
        self.w = width
        super(EdgeAndCenterExtractionLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        # No trainable weights; just defer to the base implementation.
        super(EdgeAndCenterExtractionLayer, self).build(input_shape)

    def call(self, x, **kwargs):
        # Dynamic shape lookups so the layer works with variable image sizes.
        batch_size = K.shape(x)[0]
        half_y = K.cast(K.shape(x)[1] / 2, dtype="int32")
        half_x = K.cast(K.shape(x)[2] / 2, dtype="int32")
        channel_count = K.shape(x)[3]
        # e0..e7 walk the border clockwise starting top-left; cn is the center.
        e0 = x[:, 0:self.w, 0:self.w]
        e1 = x[:, half_y - self.w:half_y + self.w, 0:self.w]
        e2 = x[:, -self.w:, 0:self.w]
        e7 = x[:, 0:self.w, half_x - self.w:half_x + self.w]
        cn = x[:, half_y - self.w:half_y + self.w, half_x - self.w:half_x + self.w]
        e3 = x[:, -self.w:, half_x - self.w:half_x + self.w]
        e6 = x[:, 0:self.w, -self.w:]
        e5 = x[:, half_y - self.w:half_y + self.w, -self.w:]
        e4 = x[:, -self.w:, -self.w:]
        # Stack each column of crops vertically (axis=1), then the three
        # columns horizontally (axis=2) -> a (4w x 4w) mosaic per channel.
        l1 = K.concatenate([e0, e1, e2], axis=1)
        l2 = K.concatenate([e7, cn, e3], axis=1)
        l3 = K.concatenate([e6, e5, e4], axis=1)
        return K.reshape(K.concatenate([l1, l2, l3], axis=2), (batch_size, 4 * self.w, 4 * self.w, channel_count))

    def compute_output_shape(self, input_shape):
        # Spatial dims collapse to 4w x 4w; batch and channels pass through.
        # (Removed a leftover debug print that spammed stdout on every
        # model build.)
        return input_shape[0], self.w * 4, self.w * 4, input_shape[3]

    def get_config(self):
        # Merge with the base config (name, trainable, dtype, ...) so the
        # layer round-trips through model save/load; returning only 'width'
        # would drop those attributes.
        config = super(EdgeAndCenterExtractionLayer, self).get_config()
        config['width'] = self.w
        return config
46 |
47 |
class TestEdgeAndCenterExtractionLayer(unittest.TestCase):
    """Check that marked pixels of a 256x256 image land at the expected
    positions of the 64x64 mosaic produced with width=16."""

    def test_extraction(self):
        image = np.zeros((1, 256, 256, 3), dtype=np.float32)
        # Plant distinctive values at corners, edge centers and the middle.
        image[0, 0, 0, 0] = 13
        image[0, 17, 17, 0] = 8
        image[0, 128, 128, 0] = -9
        image[0, 128, 2, 0] = -5
        image[0, 2, 128, 0] = 7
        image[0, 255, 255, 0] = 16
        image[0, 255, 128, 0] = 2
        inp = Input(shape=(256, 256, 3))
        model = Model(inputs=inp, outputs=EdgeAndCenterExtractionLayer(16)(inp))
        mosaic = model.predict(image, batch_size=1)
        # (row, col) in the mosaic -> expected value; (17, 17) was outside
        # every crop and must have been discarded.
        expectations = [
            ((0, 0), 13),
            ((17, 17), 0),
            ((32, 32), -9),
            ((32, 2), -5),
            ((2, 32), 7),
            ((63, 63), 16),
            ((63, 32), 2),
        ]
        for (row, col), expected in expectations:
            self.assertAlmostEqual(mosaic[0, row, col, 0], expected, places=4)
69 |
--------------------------------------------------------------------------------
/GlobalVarianceLayer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from tensorflow.keras import backend as K
3 | from tensorflow.keras.layers import Layer
4 | from tensorflow.keras.models import Model
5 | from tensorflow.keras.layers import Input
6 | import numpy as np
7 | import unittest
8 |
9 |
class GlobalVarianceLayer(Layer):
    """Compute the per-channel variance over the spatial dimensions.

    Input is a 4D batch (batch, height, width, channels); the output is
    (batch, channels), where each entry is the variance of that channel's
    pixel values. The layer has no trainable weights.
    """

    def __init__(self, **kwargs):
        super(GlobalVarianceLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        # No weights to create; just defer to the base implementation.
        super(GlobalVarianceLayer, self).build(input_shape)

    def call(self, x, **kwargs):
        # keepdims=True broadcasts the per-channel mean back over the spatial
        # axes. This replaces the previous repeat_elements() construction,
        # which relied on static x.get_shape()[1]/[2] and therefore failed
        # for inputs with dynamic (None) spatial dimensions.
        mean = K.mean(x, axis=[1, 2], keepdims=True)
        quad_diff = (x - mean) ** 2
        return K.mean(quad_diff, axis=[1, 2])

    def compute_output_shape(self, input_shape):
        # Spatial axes are reduced away: (batch, channels).
        return input_shape[0], input_shape[3]
27 |
class TestGlobalVarianceLayer(unittest.TestCase):
    """Verify the spatial mean and variance against numpy reference values."""

    def test_2d_mean(self):
        sample = np.array([[[[1, 0], [2, 1], [3, -1]],
                            [[0, 1], [1, -2], [2, 1]],
                            [[-2, -1], [-1, -1], [3, 2]]]], dtype=np.float32)
        tensor = K.variable(sample, dtype=K.floatx())
        channel_means = K.eval(K.mean(K.mean(tensor, axis=2), axis=1))
        self.assertAlmostEqual(channel_means[0, 0], 1.0)
        self.assertAlmostEqual(channel_means[0, 1], 0.0)

    def test_variance(self):
        sample = np.array([[[[1, 2], [2, 3], [-1, -2]],
                            [[-1, 3], [2, -5], [0, 1]],
                            [[-2, 7], [0.5, -2], [2, -1]]]], dtype=np.float32)
        inp = Input(shape=(3, 3, 2))
        model = Model(inputs=inp, outputs=GlobalVarianceLayer()(inp))
        variances = model.predict(sample, batch_size=1)
        # numpy's .var() over each channel plane is the reference.
        for channel in range(2):
            expected = sample[0, :, :, channel].var()
            self.assertAlmostEqual(variances[0, channel], expected, places=4)
54 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 |
635 | Copyright (C)
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | Copyright (C)
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/philosophy/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # UnsharpDetector
2 | AI-application to automatically identify and delete blurry photos.
3 |
4 | ## Installation
5 |
6 | Load the 64 bit version of Python from [python.org](https://python.org) and
7 | install it. You can skip this step on Linux because Python is usually included
8 | in your distribution.
9 |
10 | Get the code using the download link or clone this repository with git.
11 | If you are using Windows or macOS get git from
12 | [git-scm.com](https://git-scm.com). Use your package manager on Linux.
13 |
14 | The next step is to create a virtual environment. This makes sure that
15 | the following steps can not interfere with other python programs. Create
16 | the virtualenv on macOS and Linux with the following command (in the
17 | directory where you downloaded the code):
18 |
19 | ```plaintext
20 | python3 -m venv env
21 | ```
22 |
23 | Windows does not find `python.exe` by default. So you may have to specify
24 | the full path:
25 |
26 | ```shell script
27 | ..\..\AppData\Local\Programs\Python\Python36\python.exe -m venv env
28 | ```
29 |
30 | Start the virtualenv on Windows with `env\Scripts\activate.bat`. On
31 | Linux and macOS use `source env/bin/activate`.
32 |
33 | Install all other dependencies with `pip`:
34 |
35 | ```shell script
36 | pip install -r requirements.txt
37 | ```
38 |
39 | If you are using an Nvidia GPU and CUDA you may use `requirements_gpu.txt`.
40 | TensorFlow will use a version which uses your GPU to run the neural
41 | network.
42 |
43 | ## Usage
44 |
45 | To use the program activate the virtualenv first with
46 | `env\Scripts\activate.bat` (Windows) or `source env/bin/activate`
47 | (Linux and macOS).
48 |
49 | Run the graphical Application with:
50 | ```shell script
51 | python inference_gui.py
52 | ```
53 |
54 | The program starts after a couple of seconds (initialization of
55 | TensorFlow). Initially it displays an empty list. You fill the list by
56 | clicking the Button in the upper left corner and selecting a path. The
57 | software will load all the images in this folder which may take a couple
58 | of seconds depending on the number and the size of the images. The
59 | classification starts immediately in the background.
60 |
61 | You may immediately mark images for keeping or removal using the mouse.
62 | The neural network will analyze all the images you do not classify
63 | manually. The dashed line around these images indicates the decision of
64 | the network. Green means definitely sharp. Red means blurry. Brown is
65 | something in between and a good candidate to override the decision.
66 |
67 | If the thumbnail is too small to decide if an image is sharp enough to
68 | keep you may click on the thumbnail. This opens the image in full
69 | resolution in the preview area on the right.
70 |
71 | If you are happy with all the decisions for the images click on the red
72 | button in the upper right corner. This deletes all the images which were
73 | marked for removal (red border) without further questions.
74 |
75 | ## Training
76 |
77 | This repository comes with weights and settings for a pretrained neural
78 | network. If you want to experiment with different network architectures
79 | you can simply change the config and run `train.py`.
80 |
81 | The code uses sacred to keep track of the experiments. To use this magic
82 | create a file named `secret_settings.py` which defines two variables:
83 | 1. `mongo_url`: The url of your mongodb with credentials.
84 | 2. `db_name`: The database name you are using in the mongodb.
85 |
86 | If you are training on a dedicated server you can create queued
87 | experiments with `-q` on your notebook and start `queue_manager.py` on
88 | the server. It will automatically fetch queued experiments from your
89 | database and run them.
90 |
91 | Most network architectures will learn some specifics of the generated
92 | datasets after 2-5 epochs. Training for 50 epochs (my default setting)
93 | leads to something which looks like overfitting. So if you want the best
94 | accuracy on validation data you may want to train for only 2-5 Epochs.
95 | But this also depends on the size of your dataset.
96 |
97 | Also make sure you have no blurry images in your training dataset. This
98 | greatly reduces the accuracy. My intention was to use images from
99 | vacations where I had already manually deleted all blurry images. I
100 | trained with ca. 2000 images.
101 |
102 | ## Instabilities
103 |
104 | I stumbled upon some instabilities of this program. Sometimes it crashes
105 | with a not very informative segmentation fault. This has something to do
106 | with the C-code from Qt or TensorFlow. It happened randomly. If you run
107 | into this problem try doing the same thing again. It may just work at the
108 | second attempt. If you have any idea what triggers these crashes please
109 | create an issue with your idea.
--------------------------------------------------------------------------------
/TrainingDataGenerator.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from os import path, listdir
3 | from tensorflow.keras.utils import Sequence
4 | from random import random, choice, randrange
5 | from skimage.io import imread, imsave
6 | from skimage.transform import resize, rotate
7 | from skimage.filters import gaussian
8 | from scipy.ndimage.filters import convolve
9 | from visualization_helpers import generate_y_image
10 | from ValidationDataProvider import NoUsableData
11 | import numpy as np
12 | import re
13 |
14 |
class UnsharpTrainingDataGenerator(Sequence):
    """Keras Sequence producing labeled training batches for the unsharp detector.

    Each sample is a randomly scaled and positioned crop of a source photo.
    With probability ``blur_rate`` the crop is synthetically degraded
    (gaussian blur and/or simulated camera shake, optionally partially masked
    and noised) and labeled ``[1, 0]``; otherwise it stays sharp and is
    labeled ``[0, 1]``.
    """

    def __init__(self, image_folders=None, batch_size=10, target_size=(256, 256),
                 blur_rate=0.5, mask_rate=0.2, noise_rate=0.2, min_blur=0.5, min_shake=2.5):
        """Scan the given folders for image files and set up the generator.

        :param image_folders: folders scanned (non-recursively) for
            jpg/jpeg/png/bmp files. Defaults to None, meaning no folders.
        :param batch_size: number of samples per batch.
        :param target_size: (height, width) of the generated crops.
        :param blur_rate: probability that a sample is artificially blurred.
        :param mask_rate: probability of blending blurred and sharp versions
            with a soft mask (partially blurred sample).
        :param noise_rate: probability of adding filtered noise to a blurred sample.
        :param min_blur: minimum gaussian sigma for the blur effect.
        :param min_shake: minimum length (pixels) of the shake kernel.
        :raises NoUsableData: if no usable image file was found.
        """
        self.batch_size = batch_size
        self.target_size = target_size
        self.blur_rate = blur_rate
        self.mask_rate = mask_rate
        self.noise_rate = noise_rate
        self.min_blur = min_blur
        self.min_shake = min_shake
        filename_regex = re.compile(r".*\.(jpg|JPG|jpeg|JPEG|png|PNG|bmp|BMP)$")
        self.image_filenames = []
        # BUGFIX: the default used to be a mutable ``[]``, which Python shares
        # between all calls/instances; use None as the "no folders" sentinel.
        for folder in (image_folders if image_folders is not None else []):
            filenames = listdir(path.abspath(folder))
            for filename in filenames:
                if filename_regex.match(filename):
                    self.image_filenames.append(path.join(path.abspath(folder), filename))
        if len(self.image_filenames) < 1:
            raise NoUsableData
        self.indexes = np.arange(len(self.image_filenames))

    def __len__(self):
        """Number of complete batches per epoch (a trailing partial batch is dropped)."""
        return int(np.floor(len(self.image_filenames) / self.batch_size))

    def __getitem__(self, index):
        """Return batch ``index`` as a (batch_x, batch_y) pair of numpy arrays."""
        indexes = self.indexes[index * self.batch_size:(index + 1) * self.batch_size]
        filename_selection = [self.image_filenames[k] for k in indexes]
        batch_x, batch_y = self.__data_generation(filename_selection)
        return batch_x, batch_y

    def __data_generation(self, filename_selection):
        """Build one batch from the selected filenames.

        A crop is only accepted when it differs enough from its blurred
        version (MSE > 0.00017), i.e. it contains enough structure for the
        sharp/blurry distinction to be learnable. After 10 failed attempts
        the last crop is used anyway.
        """
        batch_x = []
        batch_y = []
        for filename in filename_selection:
            img = imread(filename)
            # Replace images that are grayscale or smaller than the crop size
            # with a randomly chosen usable one.
            while len(img.shape) != 3 or img.shape[0] < self.target_size[0] or img.shape[1] < self.target_size[1]:
                print("Error reading this image: " + filename + " | Shape: " + str(img.shape))
                filename = choice(self.image_filenames)
                print("Replacing with: " + filename)
                img = imread(filename)
            # Smallest allowed scale so the scaled image still fits the crop.
            min_scale_factor = max(self.target_size[0] / img.shape[0], self.target_size[1] / img.shape[1])
            acceptable_crop_found = False
            fail_counter = 0
            # one-hot labels: [1, 0] = blurred, [0, 1] = sharp
            if random() >= self.blur_rate:
                one_hot_class = np.array([0, 1], dtype=np.float32)
            else:
                one_hot_class = np.array([1, 0], dtype=np.float32)
            small_img = None
            while not acceptable_crop_found and fail_counter < 10:
                sf = random() * (1 - min_scale_factor) + min_scale_factor
                small_img = resize(img, (int(img.shape[0] * sf), int(img.shape[1] * sf), img.shape[2]), mode='reflect')
                crop_start_x = randrange(0, small_img.shape[1] - self.target_size[1] + 1)
                crop_start_y = randrange(0, small_img.shape[0] - self.target_size[0] + 1)
                small_img = small_img[crop_start_y:crop_start_y + self.target_size[0],
                                      crop_start_x:crop_start_x + self.target_size[1], :].astype(np.float32)
                if one_hot_class[0] > 0.5:
                    blurred_img = self.blur_image(small_img)
                    if np.mean((small_img - blurred_img) ** 2, axis=None) > 0.00017:
                        acceptable_crop_found = True
                        small_img = blurred_img
                    else:
                        fail_counter += 1
                else:
                    # Sharp sample: require that blurring *would* change the
                    # crop, so flat/featureless regions are rejected.
                    if np.mean((small_img - gaussian(small_img, sigma=3.0, multichannel=True)) ** 2,
                               axis=None) > 0.00017:
                        acceptable_crop_found = True
                    else:
                        fail_counter += 1
            batch_x.append(small_img)
            batch_y.append(one_hot_class)
        return np.array(batch_x), np.array(batch_y)

    def blur_image(self, img):
        """Degrade ``img`` with gaussian blur, shake, or both (random choice);
        optionally blend with the sharp original and add noise."""
        mode = choice([["blur"], ["shake"], ["blur", "shake"]])
        blurred_img = img
        if "blur" in mode:
            blurred_img = gaussian(img,
                                   sigma=self.min_blur + max(1.0, (6 - self.min_blur)) * random(),
                                   multichannel=True)
        if "shake" in mode:
            blurred_img = self.add_shake(blurred_img, self.min_shake)
        if random() < self.mask_rate:
            blurred_img = self.add_mask(blurred_img, img)
        if random() < self.noise_rate:
            blurred_img = self.add_noise(blurred_img)
        return blurred_img

    @staticmethod
    def add_shake(img, min_shake=2.5):
        """Simulate camera shake: convolve with a line kernel of random length
        (min_shake..9 px) rotated to a random angle."""
        filter_matrix = np.zeros((9, 9), dtype=img.dtype)
        shake_len = min_shake + random() * (9 - min_shake)
        filter_matrix[4, 4] = 1.0
        for i in range(1, 5):
            # fade the line out toward its ends for fractional lengths
            x = (shake_len - i * 2 + 1) / 2
            filter_matrix[4+i, 4] = x
            filter_matrix[4-i, 4] = x
        filter_matrix = np.clip(filter_matrix, 0, 1)
        filter_matrix = np.repeat(
            filter_matrix.reshape(filter_matrix.shape[0], filter_matrix.shape[1], 1),
            3, axis=2)
        filter_matrix = rotate(filter_matrix, random() * 360, mode='constant', cval=0.0)
        # normalize so the convolution preserves overall brightness
        filter_matrix = filter_matrix / filter_matrix.sum()
        img = convolve(img, filter_matrix, mode='reflect')
        return img

    @staticmethod
    def add_mask(blurred_img, clear_img):
        """Blend blurred and sharp images with a soft, roughly elliptical mask
        so only part of the result appears blurred."""
        mask = np.array([[0, 0, 0, 1, 1, 1, 0, 0, 0],
                         [0, 0, 1, 1, 1, 1, 1, 0, 0],
                         [0, 0, 1, 1, 1, 1, 1, 0, 0],
                         [0, 1, 1, 1, 1, 1, 1, 1, 0],
                         [0, 1, 1, 1, 1, 1, 1, 1, 0],
                         [0, 1, 1, 1, 1, 1, 1, 1, 0],
                         [0, 1, 1, 1, 1, 1, 1, 1, 0],
                         [0, 0, 1, 1, 1, 1, 1, 0, 0],
                         [0, 0, 1, 1, 1, 1, 1, 0, 0],
                         [0, 0, 0, 1, 1, 1, 0, 0, 0]], dtype=blurred_img.dtype)
        # roughen the mask edge with random noise, then smooth by upscaling
        mask = np.clip(mask + np.random.random(mask.shape)*0.5*(0.3+random()), 0, 1)
        mask = np.repeat(mask.reshape(mask.shape[0], mask.shape[1], 1), 3, axis=2)
        mask = resize(mask, (blurred_img.shape[0], blurred_img.shape[1], blurred_img.shape[2]), mode='reflect')
        return mask * blurred_img + (1 - mask) * clear_img

    @staticmethod
    def add_noise(img):
        """Add gaussian-filtered (low-frequency) noise of random strength."""
        noise = np.random.randn(*img.shape)*(0.05+0.1*random())
        noise = gaussian(noise, sigma=0.1+1.1*random(), multichannel=True)
        return np.clip(img+noise, 0, 1)

    def on_epoch_end(self):
        """Shuffle the sample order after every epoch."""
        self.indexes = np.arange(len(self.image_filenames))
        np.random.shuffle(self.indexes)
146 |
147 |
if __name__ == "__main__":
    # Smoke test: build one batch and dump it as a composite image with the
    # label visualization appended below the image row.
    demo_generator = UnsharpTrainingDataGenerator(["../../Bilder/kleine Landschaftsbilder/"], batch_size=7)
    demo_x, demo_y = demo_generator[0]
    print(demo_y)
    image_row = np.concatenate(np.clip(demo_x, 0, 1), axis=1)
    label_strip = generate_y_image(demo_y, dtype=demo_x.dtype)
    imsave("test_data.png", np.concatenate([image_row, label_strip], axis=0))
154 |
--------------------------------------------------------------------------------
/ValidationDataProvider.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from tensorflow.keras.utils import Sequence
3 | from os import path, listdir
4 | from skimage.io import imread
5 | from skimage.transform import resize
6 | import numpy as np
7 | import re
8 |
9 |
class NoUsableData(Exception):
    """Raised when a folder scan yields no readable image files."""
    pass
12 |
13 |
class UnsharpValidationDataProvider(Sequence):
    """Keras Sequence serving hand-labeled validation images.

    ``image_folder`` must contain a ``good`` subfolder (sharp images,
    label [0, 1]) and a ``bad`` subfolder (blurry images, label [1, 0]).
    Images are rescaled so they cover ``target_size`` and then center-cropped.
    """

    def __init__(self, image_folder="", batch_size=10, target_size=(256, 256)):
        """Collect the labeled filenames.

        :raises NoUsableData: if neither subfolder contains an image file.
        """
        self.batch_size = batch_size
        self.target_size = target_size
        pattern = re.compile(r".*\.(jpg|JPG|jpeg|JPEG|png|PNG|bmp|BMP)$")
        base_folder = path.abspath(image_folder)
        self.data = []
        for subfolder, label_value in (("good", 1), ("bad", 0)):
            for name in listdir(path.join(base_folder, subfolder)):
                if pattern.match(name):
                    self.data.append({"filename": path.join(base_folder, subfolder, name),
                                      "label": label_value})
        if not self.data:
            raise NoUsableData
        self.indexes = np.arange(len(self.data))

    def __len__(self):
        """Number of complete batches (a trailing partial batch is dropped)."""
        return len(self.data) // self.batch_size

    def __getitem__(self, index):
        """Return batch ``index`` as a (batch_x, batch_y) pair."""
        start = index * self.batch_size
        chosen = [self.data[k] for k in self.indexes[start:start + self.batch_size]]
        return self.__data_generation(chosen)

    def __data_generation(self, selection):
        """Load, rescale and center-crop the selected images.

        :raises NoUsableData: if an image is not a 3-channel color image.
        """
        batch_images = []
        batch_labels = []
        target_h, target_w = self.target_size
        for entry in selection:
            img = imread(entry["filename"])
            if img.ndim != 3:
                raise NoUsableData
            # Scale so both dimensions reach at least the target size while
            # keeping the aspect ratio (the larger required factor wins).
            new_h = max(target_h, int(np.floor(img.shape[0] * target_w / img.shape[1])))
            new_w = max(target_w, int(np.floor(img.shape[1] * target_h / img.shape[0])))
            img = resize(img, (new_h, new_w, img.shape[2]), mode='reflect')
            # center crop to the exact target size
            offset_y = (img.shape[0] - target_h) // 2
            offset_x = (img.shape[1] - target_w) // 2
            crop = img[offset_y:offset_y + target_h,
                       offset_x:offset_x + target_w, :].astype(np.float32)
            batch_images.append(crop)
            if entry["label"] == 1:
                batch_labels.append(np.array([0, 1], dtype=np.float32))
            else:
                batch_labels.append(np.array([1, 0], dtype=np.float32))
        return np.array(batch_images), np.array(batch_labels)

    def on_epoch_end(self):
        """Shuffle the sample order after every epoch."""
        self.indexes = np.arange(len(self.data))
        np.random.shuffle(self.indexes)
67 |
68 |
if __name__ == "__main__":
    # Smoke test: shuffle once and print the labels of the first batch.
    provider = UnsharpValidationDataProvider("validation_data", batch_size=2)
    provider.on_epoch_end()
    demo_x, demo_y = provider[0]
    print(demo_y)
74 |
--------------------------------------------------------------------------------
/VarianceLayer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from tensorflow.keras import backend as K
3 | from tensorflow.keras.layers import Layer
4 | from tensorflow.keras.models import Model
5 | from tensorflow.keras.layers import Input
6 | import numpy as np
7 | import unittest
8 |
9 |
class VarianceLayer(Layer):
    """Keras layer computing the per-tile variance of its input.

    The input is divided into tiles of ``tile_size``; for each tile and each
    channel the layer outputs mean((x - tile_mean) ** 2), i.e. the variance
    of the values inside that tile.
    """

    def __init__(self, tile_size, **kwargs):
        """:param tile_size: (height, width) of the pooling tiles."""
        self.tile_size = tile_size
        super(VarianceLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        # No trainable weights; only the base-class bookkeeping is needed.
        super(VarianceLayer, self).build(input_shape)

    def call(self, x, **kwargs):
        # Per-tile means, upsampled back to input resolution and cropped to
        # the exact input size so the subtraction below lines up.
        means = K.pool2d(x, self.tile_size, strides=self.tile_size, padding="same",
                         pool_mode="avg", data_format="channels_last")
        mean_matrix = K.resize_images(means, self.tile_size[0], self.tile_size[1],
                                      data_format="channels_last")[:,
                      0:K.shape(x)[1], 0:K.shape(x)[2], :]
        quad_diff = (x - mean_matrix) ** 2
        return K.pool2d(quad_diff, self.tile_size, strides=self.tile_size, padding="same", pool_mode="avg")

    def compute_output_shape(self, input_shape):
        # BUGFIX: "same" pooling yields ceil(dim / stride) tiles, not floor;
        # the previous floor division reported a too-small shape for inputs
        # whose size is not a multiple of the tile size.  Unknown (None) dims
        # now pass through instead of raising a TypeError.
        def ceil_div(dim, tile):
            return None if dim is None else -(-dim // tile)
        return (input_shape[0],
                ceil_div(input_shape[1], self.tile_size[0]),
                ceil_div(input_shape[2], self.tile_size[1]),
                input_shape[3])

    def get_config(self):
        # BUGFIX: include the base-layer config (name, dtype, ...) as the
        # Keras serialization contract requires; returning only the custom
        # key breaks model round-trips through get_config/from_config.
        config = super(VarianceLayer, self).get_config()
        config['tile_size'] = self.tile_size
        return config
35 |
36 |
class TestVarianceLayer(unittest.TestCase):
    """Unit tests for VarianceLayer and the pooling primitive it builds on."""

    def test_pool_mean(self):
        # Sanity-check the backend average pooling on a 1x4x3x2 tensor.
        raw = np.array([[[[1, 0], [2, 1], [3, -1]],
                         [[0, 1], [1, -2], [2, 1]],
                         [[-2, -1], [-1, -1], [3, 2]],
                         [[-2, -1], [-1, -1], [3, 2]]]], dtype=np.float32)
        tensor = K.variable(raw, dtype=K.floatx())
        pooled = K.eval(K.pool2d(tensor, (2, 2), strides=(2, 2), padding="valid", pool_mode="avg"))
        expectations = [((0, 0, 0, 0), 1.0),
                        ((0, 0, 0, 1), 0.0),
                        ((0, 1, 0, 0), -1.5),
                        ((0, 1, 0, 1), -1.0)]
        for position, expected in expectations:
            self.assertAlmostEqual(pooled[position], expected)

    def test_variance(self):
        # Run the layer inside a minimal model and compare against
        # hand-computed per-tile variances.
        raw = np.array([[[[1, 2], [2, 3], [-1, -2]],
                         [[-1, 3], [2, -5], [0, 1]],
                         [[-2, 2], [0.5, -2], [2, -1]],
                         [[2, -4], [-0.5, -1], [3, 2]]]], dtype=np.float32)
        input_tensor = Input(shape=(4, 3, 2))
        output_tensor = VarianceLayer((2, 2))(input_tensor)
        net = Model(inputs=input_tensor, outputs=output_tensor)
        predicted = net.predict(raw, batch_size=1)
        expectations = [((0, 0, 0, 0), 1.5),
                        ((0, 0, 1, 0), 0.25),
                        ((0, 1, 0, 0), 2.125),
                        ((0, 1, 1, 0), 0.25),
                        ((0, 0, 0, 1), 11.1875),
                        ((0, 0, 1, 1), 2.25),
                        ((0, 1, 0, 1), 4.6875),
                        ((0, 1, 1, 1), 2.25)]
        for position, expected in expectations:
            self.assertAlmostEqual(predicted[position], expected, places=4)
67 |
--------------------------------------------------------------------------------
/classified_image_datatype.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import division, print_function, unicode_literals
3 | from PyQt5.QtCore import Qt, QObject, QPropertyAnimation, QSequentialAnimationGroup, QEasingCurve
4 | from PyQt5.QtCore import pyqtSignal, pyqtProperty
5 | from PyQt5.QtGui import QImage, QColor
6 | from visualization_helpers import convert_image
7 |
8 |
class ClassifiedImageBundle(QObject):
    """Container for one image: pixel data, thumbnail and classification state.

    Emits ``data_changed`` whenever a visible property changes (so views can
    repaint) and ``selected`` when the user picks this image.
    """

    def get_animation_progress(self):
        return self._animation_progress

    def set_animation_progress(self, val):
        self._animation_progress = val
        self.data_changed.emit(self)

    # Lifecycle states: not classified yet / classified by the network /
    # decided manually by the user / classification currently running.
    UNDECIDED, CLASSIFIED, MANUAL, PROGRESS = range(4)
    data_changed = pyqtSignal(QObject)
    selected = pyqtSignal(QObject)
    animation_progress = pyqtProperty(float, get_animation_progress, set_animation_progress)

    def __init__(self, *args):
        super().__init__(*args)
        self.img = None           # full-size QImage
        self.thumb = None         # scaled-down QImage for the list view
        self.filename = None      # source path, if the image came from disk
        self.np_array = None      # raw numpy pixel data, if available
        self._img_buffer = None   # keeps the QImage backing store alive
        self.status = ClassifiedImageBundle.UNDECIDED
        self.color = None         # border color reflecting the decision
        self.keep = None          # True = keep, False = delete, None = undecided
        self.show_buttons = False
        self._animation_progress = 1.0
        self.ani = QSequentialAnimationGroup()
        self.init_animation()

    def init_animation(self):
        """Builds the endlessly looping two-phase border animation."""
        ani1 = QPropertyAnimation(self, b"animation_progress")
        ani1.setDuration(3700)
        ani1.setEasingCurve(QEasingCurve.InOutQuad)
        # Negative progress values encode the first phase for the delegate's
        # painting code (see ImageableStyledItemDelegate.paint).
        ani1.setStartValue(-0.001)
        ani1.setEndValue(-1.0)
        self.ani.addAnimation(ani1)
        ani2 = QPropertyAnimation(self, b"animation_progress")
        ani2.setDuration(2300)
        ani2.setEasingCurve(QEasingCurve.InOutQuad)
        ani2.setStartValue(0.0)
        ani2.setEndValue(1.0)
        self.ani.addAnimation(ani2)
        self.ani.setLoopCount(-1)  # loop until explicitly stopped

    def set_np_image(self, np_array, thumb_width=128):
        """Sets the image from a numpy array and creates a thumbnail."""
        self.np_array = np_array
        # Bug fix: QImage does not copy the buffer it is handed, so a
        # reference must be kept or the data may be garbage collected while
        # the QImage is still in use.
        self._img_buffer = convert_image(np_array)
        self.img = QImage(self._img_buffer, np_array.shape[1], np_array.shape[0], QImage.Format_RGB32)
        self.create_thumb(thumb_width)

    def set_filename(self, filename):
        self.filename = filename

    def set_image_from_filename(self, filename, thumb_width=128):
        """Loads the image from disk and creates a thumbnail."""
        self.filename = filename
        self.img = QImage(filename)
        self.create_thumb(thumb_width)  # was duplicated scaling code

    def create_thumb(self, thumb_width=128):
        self.thumb = self.img.scaledToWidth(thumb_width, mode=Qt.SmoothTransformation)

    def set_progress(self):
        """Marks the bundle as 'classification running' and starts the border animation."""
        self.status = ClassifiedImageBundle.PROGRESS
        self.color = QColor(148, 148, 255)
        self.ani.start()
        self.data_changed.emit(self)

    def set_manual(self, decision):
        """Records a user decision (True = keep, False = delete)."""
        self.keep = decision
        self.status = ClassifiedImageBundle.MANUAL
        if self.keep:
            self.color = QColor(0, 255, 0)
        else:
            self.color = QColor(255, 0, 0)
        self.ani.stop()
        self._animation_progress = 1.0
        self.data_changed.emit(self)

    def set_classification(self, result):
        """Applies a network prediction unless the user already decided.

        result is a two-element sequence of class scores; keep iff
        result[1] > result[0] (presumably (p_bad, p_good) -- TODO confirm
        against the training label order).  The border color blends
        red/green accordingly.
        """
        if self.status != ClassifiedImageBundle.MANUAL:
            self.keep = result[1] > result[0]
            self.status = ClassifiedImageBundle.CLASSIFIED
            self.color = QColor(int(result[0] * 255), int(result[1] * 255), 0)
        # The progress animation stops regardless of who decided.
        self.ani.stop()
        self._animation_progress = 1.0
        self.data_changed.emit(self)

    def set_show_buttons(self, button_state=False):
        self.show_buttons = button_state
        self.data_changed.emit(self)

    def get_show_buttons(self):
        return self.show_buttons

    def get_thumb(self):
        return self.thumb

    def get_image(self):
        return self.img

    def get_np_array(self):
        return self.np_array

    def is_decided(self):
        """True once a classification exists, is running, or was made manually."""
        return self.status in [ClassifiedImageBundle.CLASSIFIED,
                               ClassifiedImageBundle.MANUAL,
                               ClassifiedImageBundle.PROGRESS]

    def has_color(self):
        return self.color is not None and self.is_decided()

    def get_color(self):
        return self.color

    def is_classified(self):
        """True while the classifier decided or is deciding (not a manual decision)."""
        return self.status in [ClassifiedImageBundle.CLASSIFIED,
                               ClassifiedImageBundle.PROGRESS]

    def is_undecided(self):
        return self.status == ClassifiedImageBundle.UNDECIDED

    def reset(self):
        # Only hides the hover buttons; the classification state is kept.
        self.set_show_buttons(False)

    def select(self):
        self.selected.emit(self)
132 |
--------------------------------------------------------------------------------
/extended_qt_delegate.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from PyQt5.QtCore import Qt, QSize, QLineF, QEvent
3 | from PyQt5.QtWidgets import QStyledItemDelegate
4 | from PyQt5.QtGui import QPen, QBrush, QPainter, QColor, QMouseEvent
5 | from classified_image_datatype import ClassifiedImageBundle
6 |
7 |
class ImageableStyledItemDelegate(QStyledItemDelegate):
    """Delegate that renders ClassifiedImageBundle items in a QListView.

    Paints the thumbnail, an (optionally animated) colored border that
    reflects the classification state, and keep/delete overlay buttons.
    Any other item type falls back to the default delegate behavior.
    """

    def __init__(self, parent=None, *args):
        super().__init__(*args)
        self.setParent(parent)

    def paint(self, qp, style_option_view_item, model_index):
        """Custom painting for bundle items; default painting otherwise."""
        mid = model_index.data()
        if type(mid) is ClassifiedImageBundle:
            qp.save()
            # Thumbnail with a 4 px margin inside the item rect.
            qp.drawImage(style_option_view_item.rect.left() + 4, style_option_view_item.rect.top() + 4, mid.get_thumb())
            qp.setRenderHint(QPainter.Antialiasing)
            qp.setRenderHint(QPainter.HighQualityAntialiasing)
            if mid.has_color():
                # Dotted border while the classifier decided/decides, solid
                # for a manual decision.
                qp.setPen(QPen(QBrush(mid.get_color()), 4.0,
                               Qt.DotLine if mid.is_classified() else Qt.SolidLine,
                               Qt.SquareCap, Qt.RoundJoin))
                lines_to_draw = []
                # Total length of the border path (roughly the perimeter).
                len_of_all_lines = 2 * (mid.get_thumb().height() + mid.get_thumb().width() + 12)
                # animation_progress in [-1, 0) moves the start of the
                # visible border segment, in [0, 1] moves its end (the two
                # phases set up in ClassifiedImageBundle.init_animation).
                line_start_pos = -1 * min(0, mid.animation_progress) * len_of_all_lines
                if mid.animation_progress >= 0:
                    line_end_pos = mid.animation_progress * len_of_all_lines
                else:
                    line_end_pos = 1 * len_of_all_lines
                tx = style_option_view_item.rect.left() + 2
                ty = style_option_view_item.rect.top() + 2
                h = mid.get_thumb().height() + 4
                w = mid.get_thumb().width() + 4
                # Each branch clips the visible segment against one border
                # edge, walking the path edge by edge.
                if line_start_pos <= h and 0 < line_end_pos:
                    lines_to_draw.append(QLineF(tx,
                                                ty + line_start_pos,
                                                tx,
                                                ty + min(h, line_end_pos)))
                if line_start_pos <= h + w and h < line_end_pos:
                    lines_to_draw.append(QLineF(tx + max(line_start_pos, h) - h,
                                                ty + h,
                                                tx + min(h + w, line_end_pos) - h,
                                                ty + h))
                if line_start_pos <= 2 * h + w and h + w < line_end_pos:
                    lines_to_draw.append(QLineF(tx + w,
                                                ty + h - (max(line_start_pos - h - w, 0)),
                                                tx + w,
                                                ty + h - (min(line_end_pos - h - w, h))))
                if line_start_pos <= 2 * h + 2 * w and 2 * h + w < line_end_pos:
                    lines_to_draw.append(QLineF(tx + w - (max(line_start_pos - 2 * h - w, 0)),
                                                ty,
                                                tx + w - (min(line_end_pos - 2 * h - w, w)),
                                                ty))
                qp.drawLines(lines_to_draw)
            # Green "keep" check mark: shown when kept, undecided, or hovered.
            if mid.keep or mid.keep is None or mid.get_show_buttons():
                qp.setBrush(QColor(0, 255, 0))
                qp.setPen(QPen(QBrush(QColor(0, 255, 0)), 1.0, Qt.SolidLine, Qt.SquareCap, Qt.RoundJoin))
                qp.drawEllipse(style_option_view_item.rect.left() + mid.get_thumb().width() - 30,
                               style_option_view_item.rect.top() + mid.get_thumb().height() - 30,
                               30, 30)
                qp.setPen(QPen(QBrush(QColor(255, 255, 255)), 6.0, Qt.SolidLine, Qt.SquareCap, Qt.RoundJoin))
                qp.drawLines([
                    QLineF(style_option_view_item.rect.left() + mid.get_thumb().width() - 24,
                           style_option_view_item.rect.top() + mid.get_thumb().height() - 13,
                           style_option_view_item.rect.left() + mid.get_thumb().width() - 19,
                           style_option_view_item.rect.top() + mid.get_thumb().height() - 8),
                    QLineF(style_option_view_item.rect.left() + mid.get_thumb().width() - 19,
                           style_option_view_item.rect.top() + mid.get_thumb().height() - 8,
                           style_option_view_item.rect.left() + mid.get_thumb().width() - 7,
                           style_option_view_item.rect.top() + mid.get_thumb().height() - 20)
                ])
            # Red "delete" cross: shown when marked for deletion or hovered.
            if (mid.keep is not None and not mid.keep) or mid.get_show_buttons():
                qp.setBrush(QColor(255, 0, 0))
                qp.setPen(QPen(QBrush(QColor(255, 0, 0)), 1.0, Qt.SolidLine, Qt.SquareCap, Qt.RoundJoin))
                qp.drawEllipse(style_option_view_item.rect.left() + 8,
                               style_option_view_item.rect.top() + mid.get_thumb().height() - 30,
                               30, 30)
                qp.setPen(QPen(QBrush(QColor(255, 255, 255)), 6.0, Qt.SolidLine, Qt.SquareCap, Qt.RoundJoin))
                qp.drawLine(style_option_view_item.rect.left() + 16,
                            style_option_view_item.rect.top() + mid.get_thumb().height() - 22,
                            style_option_view_item.rect.left() + 30,
                            style_option_view_item.rect.top() + mid.get_thumb().height() - 8)
                qp.drawLine(style_option_view_item.rect.left() + 16,
                            style_option_view_item.rect.top() + mid.get_thumb().height() - 8,
                            style_option_view_item.rect.left() + 30,
                            style_option_view_item.rect.top() + mid.get_thumb().height() - 22)
            qp.restore()
        else:
            super().paint(qp, style_option_view_item, model_index)

    def sizeHint(self, style_option_view_item, model_index):
        """Thumbnail size plus the 4 px margin on every side."""
        mid = model_index.data()
        if type(mid) is ClassifiedImageBundle:
            return QSize(mid.get_thumb().width() + 8, mid.get_thumb().height() + 8)
        else:
            return super().sizeHint(style_option_view_item, model_index)

    def editorEvent(self, event, model, style_option_view_item, model_index):
        """Handles hover (shows the overlay buttons) and clicks on them.

        Left-clicking the bottom-left circle marks the bundle for deletion,
        the bottom-right circle marks it to be kept, and anywhere else on
        the thumbnail selects the image.
        """
        if type(event) != QMouseEvent:
            return super().editorEvent(event, model, style_option_view_item, model_index)
        mid = model_index.data()
        if type(mid) is ClassifiedImageBundle:
            # Cursor position relative to this item's rect.
            x_in_delegate = event.pos().x() - style_option_view_item.rect.left()
            y_in_delegate = event.pos().y() - style_option_view_item.rect.top()
            thumb_w = model_index.data().get_thumb().width()
            thumb_h = model_index.data().get_thumb().height()
            if event.type() == QEvent.MouseMove:
                # Hide buttons everywhere, then re-show them on the hovered item.
                model.reset_whole_list()
                if 4 < x_in_delegate < 4 + thumb_w and 4 < y_in_delegate < 4 + thumb_h:
                    model_index.data().set_show_buttons(True)
            if event.type() == QEvent.MouseButtonPress and event.button() == Qt.LeftButton:
                if 9 <= x_in_delegate <= 39 and thumb_h - 30 <= y_in_delegate <= thumb_h:
                    model_index.data().set_manual(False)
                elif thumb_w - 30 <= x_in_delegate <= thumb_w and thumb_h - 30 <= y_in_delegate <= thumb_h:
                    model_index.data().set_manual(True)
                elif 4 < x_in_delegate < 4 + thumb_w and 4 < y_in_delegate < 4 + thumb_h:
                    model_index.data().select()
        return super().editorEvent(event, model, style_option_view_item, model_index)
120 |
--------------------------------------------------------------------------------
/generic_list_model.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from PyQt5.QtCore import Qt, QAbstractListModel
3 |
4 |
class GenericListModel(QAbstractListModel):
    """Minimal flat Qt list model backed by a plain Python list.

    Items are expected to provide a ``data_changed`` signal (connected on
    append) and a ``reset()`` method (used by reset_whole_list).
    """

    def __init__(self, *args):
        super().__init__(*args)
        self.list = []

    def __iter__(self):
        return iter(self.list)

    def rowCount(self, parent=None, *args, **kwargs):
        """Returns the number of items; a flat model has no child rows.

        Bug fix: the previous implementation returned None (not an int)
        when parent was falsy, violating the QAbstractItemModel contract.
        """
        if parent is not None and hasattr(parent, "isValid") and parent.isValid():
            return 0  # items have no children
        return len(self.list)

    def data(self, index, role=None):
        return self.list[index.row()]

    def data_by_int_index(self, index):
        """Direct list access by integer position (bypasses QModelIndex)."""
        return self.list[index]

    def append(self, item):
        """Appends item and notifies attached views."""
        item.data_changed.connect(self.data_changed)
        self.list.append(item)
        # Bug fix: the new row is at len - 1; the old code pointed one past
        # the end of the list.
        new_index = self.createIndex(len(self.list) - 1, 0, item)
        self.dataChanged.emit(new_index, new_index, [Qt.EditRole])

    def pop(self, index):
        """Removes the item at index and notifies attached views."""
        self.list.pop(index)
        if not self.list:
            # Bug fix: the old code crashed (IndexError) when the last item
            # was removed.  NOTE(review): begin/endRemoveRows would be the
            # canonical notification here -- TODO consider.
            return
        i = min(index, len(self.list) - 1)
        new_index = self.createIndex(i, 0, self.list[i])
        self.dataChanged.emit(new_index, new_index, [Qt.EditRole])

    def data_changed(self, item):
        """Slot: forwards an item's change notification through setData."""
        model_index = self.createIndex(self.list.index(item), 0, item)
        self.setData(model_index, item)

    def setData(self, model_index, data, role=Qt.EditRole):
        super().setData(model_index, data, role=role)
        self.dataChanged.emit(model_index, model_index, [role])

    def reset_whole_list(self):
        """Resets the transient UI state of every item."""
        for item in self.list:
            item.reset()

    def clear(self):
        self.list = []

    def is_empty(self):
        return len(self.list) <= 0
52 |
--------------------------------------------------------------------------------
/inference.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from os import path, listdir
3 | from skimage.io import imread
4 | from model import create_model
5 | import numpy as np
6 | import json
7 | import re
8 |
9 |
def load_model(input_size):
    """Builds the network from the saved settings and loads the trained weights.

    input_size: spatial shape (plus channels) of the images to classify.
    """
    with open('unsharpDetectorSettings.json', 'r') as settings_file:
        cfg = json.load(settings_file)
    net = create_model(input_size,
                       cfg["l1fc"], cfg["l1fs"], cfg["l1st"],
                       cfg["l2fc"], cfg["l2fs"], cfg["l2st"],
                       cfg["l3fc"], cfg["l3fs"],
                       cfg["eac_size"],
                       cfg["res_c"], cfg["res_fc"], cfg["res_fs"])
    net.load_weights("unsharpDetectorWeights.hdf5")
    return net
21 |
22 |
def inference(model, img_list):
    """Runs the model on all given images in one batch and returns the predictions."""
    batch = len(img_list)
    return model.predict(img_list, batch_size=batch)
25 |
26 |
27 | if __name__ == "__main__":
28 | filename_regex = re.compile(r".*\.(jpg|JPG|jpeg|JPEG|png|PNG|bmp|BMP)$")
29 | img_path = "validation_data/good/"
30 | filenames = listdir(path.abspath(img_path))
31 | for filename in filenames:
32 | if filename_regex.match(filename):
33 | print("reading " + str(path.join(path.abspath(img_path), filename)))
34 | data = np.array([
35 | imread(path.join(path.abspath(img_path), filename)) / 255
36 | ])
37 | trained_model = load_model(data.shape[1:])
38 | print(inference(trained_model, data))
39 |
--------------------------------------------------------------------------------
/inference_gui.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import division, print_function, unicode_literals
3 | from PyQt5.QtCore import Qt, QRect, pyqtSignal, QSize
4 | from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout, QVBoxLayout, QSizePolicy
5 | from PyQt5.QtWidgets import QPushButton, QLabel, QFileDialog, QSplitter, QScrollArea
6 | from PyQt5.QtWidgets import QListView, QRadioButton, QSlider
7 | from PyQt5.QtGui import QPainter, QColor
8 | from skimage.io import imread
9 | from extended_qt_delegate import ImageableStyledItemDelegate
10 | from inferencing_list import InferencingList
11 | from classified_image_datatype import ClassifiedImageBundle
12 | import sys
13 | import os
14 | import re
15 |
16 |
class ImageWidget(QWidget):
    """Widget that paints a single QImage at its native size."""

    def __init__(self, img):
        super().__init__()
        self.img = img
        self.rect = QRect(0, 0, 128, 128)
        self.setSizePolicy(QSizePolicy(QSizePolicy.Maximum, QSizePolicy.Maximum))
        self.set_img(img)

    def set_img(self, img):
        """Replaces the displayed image and resizes the widget accordingly."""
        self.img = img
        if not self.img:
            # Placeholder geometry while no image is set.
            self.rect = QRect(0, 0, 128, 128)
            self.setFixedSize(QSize(128, 128))
        else:
            self.setMaximumSize(self.img.size())
            self.setFixedSize(self.img.size())
            self.rect = QRect(0, 0, self.img.width(), self.img.height())
        self.updateGeometry()
        self.update()

    def minimumSizeHint(self):
        if not self.img:
            return QSize(20, 20)
        return QSize(self.img.width(), self.img.height())

    def sizeHint(self):
        return self.minimumSizeHint()

    def paintEvent(self, e):
        painter = QPainter()
        painter.begin(self)
        self.draw(painter)
        painter.end()

    def draw(self, qp):
        if self.img:
            qp.drawImage(0, 0, self.img)
55 |
56 |
class ThumbnailList(QWidget):
    """Scrollable thumbnail list with a size slider.

    Loads all images of a directory into the inferencing list model (which
    classifies them in a background thread) and emits img_selected when the
    user picks one.
    """

    img_selected = pyqtSignal(ClassifiedImageBundle)

    def __init__(self):
        super().__init__()
        self.images_list = InferencingList()
        self.selected = 0
        self.thumb_width = 128
        size_policy = QSizePolicy()
        size_policy.setVerticalPolicy(QSizePolicy.MinimumExpanding)
        self.setSizePolicy(size_policy)
        self.layout = QVBoxLayout()
        self.layout.setContentsMargins(4, 4, 4, 0)
        size_row = QHBoxLayout()
        slider_label = QLabel()
        slider_label.setText("Thumbnailgröße:")
        slider_label.setMinimumHeight(12)
        size_row.addWidget(slider_label, alignment=Qt.AlignLeading)
        slider = QSlider()
        slider.setOrientation(Qt.Horizontal)
        slider.setMinimum(64)
        slider.setMaximum(512)
        size_row.addWidget(slider, alignment=Qt.AlignLeading)
        self.thumb_size_label = QLabel()
        size_row.addWidget(self.thumb_size_label, alignment=Qt.AlignLeading)
        self.layout.addLayout(size_row)
        self.t_list = QListView()
        self.t_list.setMinimumWidth(self.thumb_width)
        self.t_list.setSizePolicy(QSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.Expanding))
        self.t_list.setMouseTracking(True)  # hover events drive the overlay buttons
        self.t_list.setItemDelegate(ImageableStyledItemDelegate(parent=self.t_list))
        self.t_list.setSpacing(1)
        self.t_list.setModel(self.images_list)
        self.layout.addWidget(self.t_list, stretch=1)
        slider.valueChanged.connect(self.slider_changed)
        slider.setValue(self.thumb_width)
        self.setLayout(self.layout)

    def load_images(self, path):
        """Clears the model and loads every supported image from path."""
        self.images_list.clear()
        filename_regex = re.compile(r".*\.(jpg|JPG|jpeg|JPEG|png|PNG|bmp|BMP)$")
        for filename in os.listdir(path):
            if filename_regex.match(filename):
                np_img = imread(os.path.join(path, filename))
                if len(np_img.shape) < 2:
                    continue  # not at least 2-D pixel data
                img_bundle = ClassifiedImageBundle()
                img_bundle.set_filename(os.path.join(path, filename))
                img_bundle.set_np_image(np_img, self.thumb_width)
                img_bundle.selected.connect(self.select_image)
                self.images_list.append(img_bundle)
        self.t_list.setMinimumWidth(self.thumb_width)
        self.t_list.updateGeometry()
        if not self.images_list.is_empty():
            self.img_selected.emit(self.images_list.data_by_int_index(0))

    def select_image(self, image_bundle):
        self.img_selected.emit(image_bundle)

    def delete_images(self):
        """Deletes every image marked for removal from the model and from disk.

        Bug fix: the old implementation popped items from the model while
        iterating over it, which skipped the element following each
        removal.  Indices are now collected first and removed in reverse
        order so remaining indices stay valid.
        """
        doomed = [i for i, bundle in enumerate(self.images_list)
                  if bundle.is_decided() and bundle.keep is not None and
                  not bundle.keep and bundle.filename is not None]
        for i in reversed(doomed):
            bundle = self.images_list.data_by_int_index(i)
            self.images_list.pop(i)
            os.remove(bundle.filename)

    def stop_worker_thread(self):
        self.images_list.stop_worker_thread()

    def slider_changed(self, value):
        """Rescales all thumbnails when the size slider moves."""
        self.thumb_size_label.setText(str(value))
        self.thumb_width = value
        for bundle in self.images_list:
            bundle.create_thumb(self.thumb_width)
        self.t_list.setMinimumWidth(self.thumb_width)
        self.t_list.updateGeometry()
134 |
135 |
class PreviewArea(QWidget):
    """Shows the selected image at full size with keep/delete radio buttons."""

    def __init__(self):
        super().__init__()
        self.bundle = None
        # Guards against feedback loops while the radio buttons are updated
        # programmatically from the bundle state.
        self.manual_change = True
        size_policy = QSizePolicy()
        size_policy.setHorizontalPolicy(QSizePolicy.Expanding)
        size_policy.setVerticalPolicy(QSizePolicy.Expanding)
        self.setSizePolicy(size_policy)
        layout = QVBoxLayout()
        layout.setContentsMargins(0, 4, 0, 0)
        this_row = QHBoxLayout()
        this_row.addSpacing(4)
        selection_label = QLabel()
        selection_label.setText("Dieses Bild: ")
        this_row.addWidget(selection_label)
        self.keep_button = QRadioButton()
        self.keep_button.setText("behalten")
        self.keep_button.setMaximumHeight(14)
        self.keep_button.toggled.connect(self.mark_bundle)
        this_row.addWidget(self.keep_button)
        self.discard_button = QRadioButton()
        self.discard_button.setText("löschen")
        self.discard_button.setMaximumHeight(14)
        this_row.addWidget(self.discard_button)
        this_row.addStretch(1)
        layout.addLayout(this_row)
        img_scroll_area = QScrollArea()
        img_scroll_area.setSizePolicy(QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding))
        self.img_widget = ImageWidget(None)
        img_scroll_area.setWidget(self.img_widget)
        layout.addWidget(img_scroll_area, stretch=1)
        layout.addStretch()
        self.setLayout(layout)

    def set_image(self, img_d):
        """Displays img_d and tracks its state changes."""
        self.manual_change = False
        # Bug fix: disconnect the previously shown bundle first; otherwise
        # every selection added one more connection and stale bundles kept
        # triggering bundle_changed().
        if self.bundle is not None:
            try:
                self.bundle.data_changed.disconnect(self.bundle_changed)
            except TypeError:
                pass  # was not connected
        self.bundle = img_d
        self.bundle.data_changed.connect(self.bundle_changed)
        self.img_widget.set_img(img_d.get_image())
        self.bundle_changed()
        self.update()
        self.manual_change = True

    def mark_bundle(self, keep=False):
        """Slot for the radio buttons: stores a user decision on the bundle."""
        if self.manual_change:
            self.manual_change = False
            self.bundle.set_manual(keep)
            self.manual_change = True

    def bundle_changed(self):
        """Synchronizes the radio buttons with the bundle's keep state."""
        if self.bundle.keep is None:
            # Temporarily drop auto-exclusivity so both buttons can be
            # unchecked at the same time.
            self.discard_button.setAutoExclusive(False)
            self.keep_button.setAutoExclusive(False)
            self.discard_button.setChecked(False)
            self.keep_button.setChecked(False)
            self.discard_button.setAutoExclusive(True)
            self.keep_button.setAutoExclusive(True)
        elif not self.bundle.keep:
            self.discard_button.setChecked(True)
        else:
            self.keep_button.setChecked(True)
198 |
199 |
class InferenceInterface(QWidget):
    """Main window: directory picker, thumbnail list and preview pane."""

    def __init__(self):
        super().__init__(flags=Qt.WindowTitleHint | Qt.WindowCloseButtonHint |
                         Qt.WindowMinimizeButtonHint | Qt.WindowMaximizeButtonHint)
        self.path = None
        self.setGeometry(200, 100, 1280, 720)
        self.setWindowTitle("Unsharp Detector")
        root = QVBoxLayout()
        top_row = QHBoxLayout()
        select_button = QPushButton()
        select_button.setText("Pfad auswählen")
        select_button.clicked.connect(self.open_path_select_dialog)
        top_row.addWidget(select_button, alignment=Qt.AlignLeading)
        self.path_label = QLabel()
        top_row.addWidget(self.path_label, alignment=Qt.AlignLeading)
        top_row.addStretch()
        cleanup_button = QPushButton()
        cleanup_button.setText("Bilder aufräumen")
        cleanup_button.clicked.connect(self.delete_images)
        cleanup_button.setStyleSheet("background-color: #BB0000; color: #FFFFFF; font-weight: bold;")
        top_row.addWidget(cleanup_button, alignment=Qt.AlignTrailing)
        root.addLayout(top_row, stretch=0)
        splitter = QSplitter()
        splitter.setOrientation(Qt.Horizontal)
        self.thumbnail_list = ThumbnailList()
        self.thumbnail_list.img_selected.connect(self.img_selected)
        splitter.addWidget(self.thumbnail_list)
        self.preview_area = PreviewArea()
        splitter.addWidget(self.preview_area)
        splitter.setSizes([176, self.width() - 176])
        splitter.setSizePolicy(QSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.MinimumExpanding))
        root.addWidget(splitter)
        self.setLayout(root)
        self.show()

    def open_path_select_dialog(self):
        """Lets the user pick an image directory and loads its contents."""
        chooser = QFileDialog()
        chooser.setWindowTitle("Pfad der Bilder auswählen")
        chooser.setModal(False)
        chooser.setFileMode(QFileDialog.Directory)
        if not chooser.exec():
            self.path = None
            self.path_label.setText("Kein Pfad ausgewählt.")
            return
        self.path = chooser.selectedFiles()[0]
        self.thumbnail_list.load_images(self.path)
        self.path_label.setText("Path: " + self.path)

    def img_selected(self, img_d):
        self.preview_area.set_image(img_d)

    def delete_images(self):
        self.thumbnail_list.delete_images()

    def closeEvent(self, close_event):
        # Shut the background classification thread down before closing.
        self.thumbnail_list.stop_worker_thread()
        super().closeEvent(close_event)
257 |
258 |
259 | if __name__ == "__main__":
260 | app_object = QApplication(sys.argv)
261 | window = InferenceInterface()
262 | status = app_object.exec_()
263 |
--------------------------------------------------------------------------------
/inferencing_list.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import division, print_function, unicode_literals
3 | from generic_list_model import GenericListModel
4 | from classified_image_datatype import ClassifiedImageBundle
5 | from threading import Thread
6 | from queue import Queue, Empty
7 | from inference import load_model
8 | import numpy as np
9 |
10 |
def inferencer(work_queue):
    """Worker-thread loop: classifies ClassifiedImageBundle items from work_queue.

    A boolean False on the queue shuts the worker down.  The previous
    implementation duplicated the dequeue/dispatch logic before and inside
    the loop and would crash if the first item were the bool True (it then
    called bundle methods on a bool); each item is now handled in exactly
    one place.
    """
    while True:
        data = work_queue.get()
        if isinstance(data, bool):
            if not data:
                break  # shutdown sentinel
            continue   # ignore a stray True instead of crashing
        if isinstance(data, ClassifiedImageBundle):
            data.set_progress()
            # A fresh model per bundle: input sizes differ between images.
            model = load_model(data.get_np_array().shape)
            prediction = model.predict(np.array([data.get_np_array() / 255]), batch_size=1)
            print(prediction[0])
            data.set_classification(prediction[0])
        work_queue.task_done()
29 |
30 |
class InferencingList(GenericListModel):
    """List model that feeds its undecided image bundles to a worker thread.

    The worker (see inferencer()) consumes bundles from work_queue; a
    boolean False on the queue is the shutdown sentinel.
    """

    def __init__(self, *args):
        super().__init__(*args)
        self.work_queue = Queue()
        # Bundles currently sitting in the queue; mirrors work_queue so
        # membership can be checked without draining it.
        self.queued_bundles = []
        self.inferencer_thread = Thread(
            target=inferencer,
            args=(self.work_queue,))
        self.inferencer_thread.start()

    def stop_worker_thread(self):
        """Drains the queue, sends the shutdown sentinel and joins the worker."""
        self.clear_queue()
        self.work_queue.put(False)
        self.inferencer_thread.join()

    def update_queue(self):
        """Re-synchronizes the work queue with the current list contents.

        If any queued bundle was removed from the list or is no longer
        undecided, the whole queue is rebuilt; then every undecided,
        not-yet-queued bundle is enqueued and its border animation started.
        """
        clear_necessary = False
        for item in self.queued_bundles:
            if item not in self.list or not item.is_undecided():
                clear_necessary = True
        if clear_necessary:
            self.clear_queue()
        for item in self.list:
            if item.is_undecided() and item not in self.queued_bundles:
                self.work_queue.put(item)
                self.queued_bundles.append(item)
                item.ani.start()

    def clear_queue(self):
        """Removes all pending items from the work queue without processing them."""
        while not self.work_queue.empty():
            try:
                self.work_queue.get(False)
            except Empty:
                break  # raced with the worker; queue is empty now
            self.work_queue.task_done()
        self.queued_bundles = []

    def append(self, item):
        # Keep the worker fed whenever the list grows ...
        super().append(item)
        self.update_queue()

    def data_changed(self, item):
        # ... or an item's state changes ...
        super().data_changed(item)
        self.update_queue()

    def clear(self):
        # ... or the list is emptied.
        super().clear()
        self.update_queue()
79 |
--------------------------------------------------------------------------------
/model.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from tensorflow.keras.models import Model
3 | from tensorflow.keras.layers import Conv2D, LeakyReLU, Dense, GlobalMaxPool2D, GlobalAveragePooling2D
4 | from tensorflow.keras.layers import Input, Concatenate, MaxPool2D, AveragePooling2D, Flatten, Add
5 | from GlobalVarianceLayer import GlobalVarianceLayer
6 | from VarianceLayer import VarianceLayer
7 | from EdgeAndCenterExtractionLayer import EdgeAndCenterExtractionLayer
8 | import tensorflow as tf
9 | import numpy as np
10 |
11 |
def laplacian_group_initializer(shape, dtype=None):
    """Kernel initializer seeding Laplacian-like cross kernels plus noise.

    With probability 0.5 each, the vertical and/or horizontal neighbors of
    the center element are set to 1 and the center is set so the kernel
    sums to zero (a discrete Laplacian); small Gaussian noise is always
    added.  The ``dtype`` argument is accepted for the Keras initializer
    interface but, as before, not applied to the result.

    NOTE(review): the cross is only placed for 2-D shapes; Conv2D passes a
    4-D (h, w, in, out) shape, for which only the noise and the (zero)
    center adjustment apply -- TODO confirm this is intended.
    """
    # Bug fix: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin float is the equivalent dtype.
    kernel = np.zeros(shape, dtype=float)
    if np.random.random() < 0.5 and kernel.shape[0] >= 3 and len(kernel.shape) == 2:
        kernel[int(kernel.shape[0] // 2) - 1, int(kernel.shape[1] // 2)] = 1
        kernel[int(kernel.shape[0] // 2) + 1, int(kernel.shape[1] // 2)] = 1
    if np.random.random() < 0.5 and kernel.shape[1] >= 3 and len(kernel.shape) == 2:
        kernel[int(kernel.shape[0] // 2), int(kernel.shape[1] // 2) - 1] = 1
        kernel[int(kernel.shape[0] // 2), int(kernel.shape[1] // 2) + 1] = 1
    # Center element balances the cross so the kernel sums to zero.
    kernel[tuple(map(lambda x: int(np.floor(x / 2)), kernel.shape))] = -np.sum(kernel)
    return kernel + np.random.normal(0.0, 0.005, shape) * 1.0
22 |
23 |
def create_model(input_shape, l1fc, l1fs, l1st, l2fc, l2fs, l2st, l3fc, l3fs, eac_size, res_c, res_fc, res_fs):
    """Builds the sharpness-classification network.

    Three stacked convolutions (c1..c3) plus an optional residual stack
    feed a large set of global statistics (variance, max, mean over the
    full maps, over edge/center crops and over pooled grids) into a single
    2-way softmax.

    Parameters (values come from unsharpDetectorSettings.json):
        input_shape: (height, width[, channels]) of the input images.
        l1fc, l1fs, l1st: filter count, kernel size and stride of conv 1.
        l2fc, l2fs, l2st: filter count, kernel size and stride of conv 2.
        l3fc, l3fs: filter count and kernel size of conv 3 (stride 1).
        eac_size: crop width of EdgeAndCenterExtractionLayer and tile size
            of all grid poolings and VarianceLayers.
        res_c: number of residual convolutions (0 disables the branch).
        res_fc, res_fs: filter count and kernel size of the residual convs.

    Returns:
        An uncompiled Model mapping an RGB image to a 2-class softmax.
    """
    inp = Input(shape=(input_shape[0], input_shape[1], 3))
    # Conv 1 is seeded with Laplacian-like kernels to bias it toward
    # high-frequency/edge responses (see laplacian_group_initializer).
    c1 = Conv2D(l1fc, kernel_size=l1fs, strides=l1st, use_bias=True, padding="same",
                data_format="channels_last", kernel_initializer=laplacian_group_initializer)(inp)
    l1 = LeakyReLU(alpha=0.2)(c1)
    # Edge-and-center crop after stage 1; set_shape restores the static
    # shape information the custom layer loses.
    eac1_obj = EdgeAndCenterExtractionLayer(width=eac_size)
    eac1 = eac1_obj(l1)
    eac1.set_shape(eac1_obj.compute_output_shape(l1.shape))
    c2 = Conv2D(l2fc, kernel_size=l2fs, strides=l2st, use_bias=True, padding="same",
                data_format="channels_last")(l1)
    l2 = LeakyReLU(alpha=0.2)(c2)
    eac2_obj = EdgeAndCenterExtractionLayer(width=eac_size)
    eac2 = eac2_obj(l2)
    eac2.set_shape(eac2_obj.compute_output_shape(l2.shape))
    c3 = Conv2D(l3fc, kernel_size=l3fs, strides=1, use_bias=True, padding="same",
                data_format="channels_last")(l2)
    # Optional residual stack: each iteration activates the last conv and
    # adds the previous-but-one layer before the next convolution.
    last_layer = c3
    prev_layer = None
    for i in range(res_c):
        res_act = LeakyReLU(alpha=0.2)(last_layer)
        if prev_layer is not None:
            res_act = Add()([res_act, prev_layer])
        prev_layer = last_layer
        last_layer = Conv2D(res_fc, kernel_size=res_fs, strides=1, use_bias=True, padding="same",
                            data_format="channels_last")(res_act)
    eac3_obj = EdgeAndCenterExtractionLayer(width=eac_size)
    eac3 = eac3_obj(c3)
    eac3.set_shape(eac3_obj.compute_output_shape(c3.shape))
    # Coarse (eac_size x eac_size)-tiled grids of the conv-3 crop.
    eac3_max_grid = MaxPool2D((eac_size, eac_size), strides=eac_size,
                              padding="valid", data_format="channels_last")(eac3)
    eac3_avg_grid = AveragePooling2D((eac_size, eac_size), strides=eac_size,
                                     padding="valid", data_format="channels_last")(eac3)
    # Global statistics from every stage feed the classifier head.
    features = [GlobalVarianceLayer()(c1),
                GlobalVarianceLayer()(c2),
                GlobalVarianceLayer()(c3),
                GlobalMaxPool2D(data_format="channels_last")(c1),
                GlobalMaxPool2D(data_format="channels_last")(c2),
                GlobalMaxPool2D(data_format="channels_last")(c3),
                GlobalAveragePooling2D(data_format="channels_last")(c1),
                GlobalAveragePooling2D(data_format="channels_last")(c2),
                GlobalAveragePooling2D(data_format="channels_last")(c3),
                GlobalMaxPool2D(data_format="channels_last")(eac1),
                GlobalMaxPool2D(data_format="channels_last")(eac2),
                GlobalMaxPool2D(data_format="channels_last")(eac3),
                GlobalAveragePooling2D(data_format="channels_last")(eac1),
                GlobalAveragePooling2D(data_format="channels_last")(eac2),
                GlobalAveragePooling2D(data_format="channels_last")(eac3),
                GlobalVarianceLayer()(eac1),
                GlobalVarianceLayer()(eac2),
                GlobalVarianceLayer()(eac3),
                Flatten()(VarianceLayer((eac_size, eac_size))(eac1)),
                Flatten()(VarianceLayer((eac_size, eac_size))(eac2)),
                Flatten()(VarianceLayer((eac_size, eac_size))(eac3)),
                GlobalVarianceLayer()(eac3_max_grid),
                GlobalVarianceLayer()(eac3_avg_grid),
                Flatten()(eac3_max_grid),
                Flatten()(eac3_avg_grid)
                ]
    if res_c > 0:
        # Same statistics again for the output of the residual stack.
        res_eac = EdgeAndCenterExtractionLayer(width=eac_size)(last_layer)
        features.append(GlobalVarianceLayer()(last_layer))
        features.append(GlobalMaxPool2D()(last_layer))
        features.append(GlobalAveragePooling2D()(last_layer))
        features.append(GlobalVarianceLayer()(res_eac))
        features.append(GlobalMaxPool2D()(res_eac))
        features.append(GlobalAveragePooling2D()(res_eac))
        features.append(Flatten()(VarianceLayer((eac_size, eac_size))(res_eac)))
        res_eac_max_grid = MaxPool2D((eac_size, eac_size), strides=eac_size,
                                     padding="valid", data_format="channels_last")(res_eac)
        res_eac_avg_grid = AveragePooling2D((eac_size, eac_size), strides=eac_size,
                                            padding="valid", data_format="channels_last")(res_eac)
        features.append(GlobalVarianceLayer()(res_eac_max_grid))
        features.append(GlobalVarianceLayer()(res_eac_avg_grid))
        features.append(Flatten()(res_eac_max_grid))
        features.append(Flatten()(res_eac_avg_grid))
    feature_vector = Concatenate()(features)
    o = Dense(2, activation="softmax", use_bias=True, name="output")(feature_vector)
    return Model(inputs=inp, outputs=o)
102 |
--------------------------------------------------------------------------------
/queue_manager.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # -*- coding: utf-8 -*-
3 | from __future__ import division, print_function, unicode_literals
4 | from pymongo import MongoClient
5 | from time import sleep
6 | from secret_settings import mongo_url, db_name
7 | import argparse
8 |
9 | client = MongoClient(host=mongo_url)
10 | db = client[db_name]
11 | running_experiments = []
12 |
13 |
def start_experiment(config):
    """Run a Sacred experiment with *config* and remember its database id.

    The import of ``ex`` is deferred so that importing this module does not
    trigger the heavy setup in ``train``.
    """
    from train import ex
    run = ex.run(config_updates=config)
    # find_one returns None when nothing matches, which is clearer than
    # indexing a cursor and catching IndexError.
    db_entry = db.runs.find_one({'config': run.config})
    if db_entry is None:
        print("ERROR: Newly created experiment not found.")
    else:
        running_experiments.append(db_entry['_id'])
22 |
23 |
def check_for_work():
    """Prune finished experiments and, if idle, start the next queued one.

    Does nothing while an experiment started by this process is still
    RUNNING. Returns None in every case.
    """
    # Iterate over a copy: the original removed items from
    # running_experiments while iterating it, which silently skips the
    # element that follows each removal.
    for _id in list(running_experiments):
        try:
            if db.runs.find({'_id': _id})[0]['status'] != 'RUNNING':
                running_experiments.remove(_id)
        except IndexError:
            # The run vanished from the database; forget it.
            running_experiments.remove(_id)
    if len(running_experiments) > 0:
        return None
    try:
        queued_run = db.runs.find({'status': 'QUEUED'})[0]
    except IndexError:
        # Nothing queued.
        return None
    config = queued_run['config']
    print("Starting an experiment with the following configuration:")
    print(config)
    # Remove the queue entry before starting so it cannot be picked up twice.
    db.runs.delete_one({'_id': queued_run['_id']})
    start_experiment(config)
42 |
43 |
def main_loop():
    """Poll for queued experiments forever, sleeping between checks."""
    poll_interval_seconds = 10
    while True:
        check_for_work()
        sleep(poll_interval_seconds)
48 |
49 |
def print_dict(d, indentation=2):
    """Pretty-print a (possibly nested) dict, keys sorted, two-space steps."""
    prefix = " " * indentation
    for key, value in sorted(d.items()):
        # isinstance instead of type(...) == dict so dict subclasses work too.
        if isinstance(value, dict):
            print(prefix + key + ":")
            print_dict(value, indentation=indentation + 2)
        else:
            print(prefix + key + ": " + str(value))
57 |
58 |
def list_experiments(status='QUEUED'):
    """Print the id and configuration of every run with the given status."""
    print("These Experiments have the status '" + status + "':")
    # 'run' instead of 'ex' to avoid confusion with the Sacred experiment.
    for run in db.runs.find({'status': status}):
        print("Experiment No " + str(run['_id']))
        print_dict(run['config'], indentation=2)
        print("----------------------------------------")
65 |
66 |
if __name__ == "__main__":
    # CLI: without flags the manager polls the database and runs queued
    # experiments; -l lists the queue, -c clears it.
    parser = argparse.ArgumentParser(description="Manage queued Sacred experiments.\n" +
                                                 "If called without parameters the queue_manager will fetch " +
                                                 "experiments from the database and run them.")
    parser.add_argument('-l', '--list', action='store_true', help="Show the list of queued experiments.")
    parser.add_argument('-c', '--clear', action='store_true', help="Clear the list of queued experiments.")
    args = parser.parse_args()
    if args.clear:
        # Drop every queued run from the database.
        db.runs.delete_many({'status': 'QUEUED'})
    if args.list:
        list_experiments()
    elif not args.clear:
        # Neither --list nor --clear: enter the worker loop (blocks forever).
        main_loop()
80 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | wheel
2 | numpy
3 | scipy
4 | scikit-image
5 | tensorflow
6 | sacred
7 | matplotlib
8 | pymongo
9 | pyqt5
--------------------------------------------------------------------------------
/requirements_gpu.txt:
--------------------------------------------------------------------------------
1 | wheel
2 | numpy
3 | scipy
4 | scikit-image
5 | tensorflow-gpu
6 | sacred
7 | matplotlib
8 | pymongo
9 | pyqt5
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from sacred import Experiment
3 | from sacred.observers import MongoObserver
4 | from sacred.utils import apply_backspaces_and_linefeeds
5 | from tensorflow.keras.optimizers import Adam
6 | import tensorflow.keras.backend as K
7 | from tensorflow.keras.losses import categorical_crossentropy
8 | from tensorflow.keras.callbacks import Callback, ModelCheckpoint
9 | from model import create_model
10 | from TrainingDataGenerator import UnsharpTrainingDataGenerator
11 | from ValidationDataProvider import UnsharpValidationDataProvider
12 | from secret_settings import mongo_url, db_name
13 | import json
14 | import os
15 |
# Sacred experiment; every run is recorded in MongoDB via the observer.
ex = Experiment("UnsharpDetector")
ex.observers.append(MongoObserver(url=mongo_url, db_name=db_name))
# Collapse progress-bar control characters in the captured stdout.
ex.captured_out_filter = apply_backspaces_and_linefeeds
# Last validation accuracy seen; returned by run() as the experiment result.
last_result = None
20 |
21 |
@ex.capture
def log_training_performance_batch(_run, loss, accuracy):
    """Record per-batch loss and accuracy on the Sacred run."""
    for metric, value in (("batch_loss", loss), ("batch_accuracy", accuracy)):
        _run.log_scalar(metric, float(value))
26 |
27 |
@ex.capture
def log_training_performance_epoch(_run, loss, accuracy):
    """Record per-epoch training loss and accuracy on the Sacred run."""
    for metric, value in (("loss", loss), ("accuracy", accuracy)):
        _run.log_scalar(metric, float(value))
32 |
33 |
@ex.capture
def log_validation_performance(_run, val_loss, val_accuracy):
    """Record validation metrics and publish accuracy as the run result."""
    global last_result
    accuracy = float(val_accuracy)
    _run.log_scalar("validation_loss", float(val_loss))
    _run.log_scalar("validation_accuracy", accuracy)
    _run.result = accuracy
    # Also kept module-global so run() can return it after training.
    last_result = accuracy
41 |
42 |
@ex.capture
def log_lr(_run, lr):
    """Record the current (decayed) learning rate on the Sacred run."""
    _run.log_scalar("lr", float(lr))
46 |
47 |
class LogPerformance(Callback):
    """Keras callback that forwards metrics to Sacred and drives the GUI.

    Parameters:
        model: the compiled Keras model (used for preview predictions and
            to read the optimizer state for learning-rate logging).
        gui_callback: callable or None; receives (x, y, prediction, epoch)
            every tenth batch to update the preview window.
        data_generator: the training data generator, indexable by batch.
        bs: batch size for the preview prediction calls.
    """
    def __init__(self, model, gui_callback, data_generator, bs):
        super().__init__()
        self.model = model
        self.data_generator = data_generator
        self.gui_callback = gui_callback
        self.bs = bs
        self.epoch = 0

    def on_epoch_begin(self, epoch, logs=None):
        # Remember the epoch so on_batch_begin can report it to the GUI.
        self.epoch = epoch

    def on_batch_begin(self, batch, logs=None):
        # Every tenth batch push fresh predictions to the preview window.
        if self.gui_callback and batch % 10 == 0:
            x, y = self.data_generator[batch]
            prediction = self.model.predict(x, batch_size=self.bs)
            self.gui_callback(x, y, prediction, self.epoch)

    def on_batch_end(self, batch, logs=None):
        # logs=None instead of the mutable default {} the original used.
        logs = logs or {}
        log_training_performance_batch(loss=logs.get("loss"), accuracy=logs.get("acc"))

    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        # Reconstruct Adam's effective time-based-decay learning rate:
        # lr_t = lr / (1 + decay * iterations)
        lr = self.model.optimizer.lr
        decay = self.model.optimizer.decay
        iterations = self.model.optimizer.iterations
        lr_with_decay = lr / (1. + decay * K.cast(iterations, K.dtype(decay)))
        log_lr(lr=K.eval(lr_with_decay))
        log_training_performance_epoch(loss=logs.get("loss"), accuracy=logs.get("acc"))
        log_validation_performance(val_loss=logs.get("val_loss"), val_accuracy=logs.get("val_acc"))
77 |
78 |
@ex.config
def config():
    """Sacred configuration: all model and training hyper-parameters."""
    # Input patch size (height, width) fed to the network.
    input_size = (256, 256)
    # Batch size.
    bs = 12
    # Initial learning rate and time-based decay for Adam.
    lr = 0.002
    lr_decay = 0.005
    # Augmentation rates for the training data generator.
    blur_rate = 0.5
    mask_rate = 0.2
    noise_rate = 0.2
    # Minimum blur / shake strengths for generated "blurry" samples
    # (units defined by the generator -- presumably pixels; confirm there).
    min_blur = 0.5
    min_shake = 2.5
    # Convolution stack: lNfc = filter count, lNfs = kernel size, lNst = stride.
    l1fc = 8
    l1fs = (9, 9)
    l1st = 2
    l2fc = 16
    l2fs = (3, 3)
    l2st = 2
    l3fc = 32
    l3fs = (3, 3)
    # Residual block stack; disabled when res_c == 0.
    res_c = 0
    res_fc = l3fc
    res_fs = (3, 3)
    # Width of the edge/center extraction windows.
    eac_size = 16
    # Folders scanned for training images.
    image_folders = [
        "../../Bilder/20190228-Antwerpen/",
        "../../Bilder/CC-Photos/",
        "../../Bilder/SparkMakerFHD/",
        "../../Bilder/20191117-TelAviv/",
        "../../Bilder/20190906-Toskana/"
    ]
    epochs = 50
    # Show the live training preview window.
    use_gui = True
    # Resume from unsharpDetectorWeights.hdf5 if the file exists.
    load_weights = False
112 |
113 |
@ex.capture
def validate(model, x, y, bs):
    """Compute and log validation loss and accuracy for one batch.

    Returns the scalar cross-entropy loss of *model* on (x, y).
    """
    prediction = model.predict(x, batch_size=bs)
    validation_loss = K.eval(K.mean(categorical_crossentropy(K.constant(y), K.constant(prediction))))
    # The original omitted val_accuracy, which log_validation_performance
    # requires and which is not a config entry Sacred could inject -- the
    # call would raise a TypeError. Derive it from the class predictions.
    # (Assumes y is a one-hot numpy array like the generator output --
    # TODO confirm against the caller.)
    validation_accuracy = float((prediction.argmax(axis=-1) == y.argmax(axis=-1)).mean())
    log_validation_performance(val_loss=validation_loss, val_accuracy=validation_accuracy)
    return validation_loss
120 |
121 |
@ex.capture
def get_model(input_size, l1fc, l1fs, l1st, l2fc, l2fs, l2st, l3fc, l3fs, eac_size, res_c, res_fc, res_fs):
    """Build the unsharp-detector Keras model from the Sacred config values."""
    return create_model(input_size, l1fc, l1fs, l1st, l2fc, l2fs, l2st, l3fc, l3fs, eac_size, res_c, res_fc, res_fs)
125 |
126 |
@ex.capture
def get_model_config_settings(l1fc, l1fs, l1st, l2fc, l2fs, l2st, l3fc, l3fs, eac_size, res_c, res_fc, res_fs):
    """Collect the model hyper-parameters into a dict for serialization."""
    names = ("l1fc", "l1fs", "l1st",
             "l2fc", "l2fs", "l2st",
             "l3fc", "l3fs",
             "eac_size",
             "res_c", "res_fc", "res_fs")
    values = (l1fc, l1fs, l1st,
              l2fc, l2fs, l2st,
              l3fc, l3fs,
              eac_size,
              res_c, res_fc, res_fs)
    return dict(zip(names, values))
136 |
137 |
@ex.capture
def train(gui_callback, input_size, bs, lr, lr_decay, image_folders, epochs, load_weights,
          blur_rate, mask_rate, noise_rate, min_blur, min_shake):
    """Compile the model, set up the data pipelines and run model.fit.

    All parameters are injected by Sacred from the config; gui_callback may
    be None for headless runs. Writes the model config to
    unsharpDetectorSettings.json and checkpoints every epoch to
    unsharpDetectorWeights.hdf5.
    """
    optimizer = Adam(lr, decay=lr_decay)
    model = get_model()
    model.compile(optimizer, loss=categorical_crossentropy, metrics=["accuracy"])
    # summary() prints itself and returns None; the original wrapped it in
    # print(), which additionally printed the literal "None".
    model.summary()
    data_generator = UnsharpTrainingDataGenerator(image_folders, batch_size=bs, target_size=input_size,
                                                  blur_rate=blur_rate, mask_rate=mask_rate, noise_rate=noise_rate,
                                                  min_blur=min_blur, min_shake=min_shake)
    data_generator.on_epoch_end()
    validation_data_provider = UnsharpValidationDataProvider("validation_data", batch_size=bs, target_size=input_size)
    # Persist the architecture settings so inference can rebuild the model.
    with open('unsharpDetectorSettings.json', 'w') as json_file:
        json_file.write(json.dumps(get_model_config_settings()))
    if load_weights and os.path.exists("unsharpDetectorWeights.hdf5"):
        model.load_weights("unsharpDetectorWeights.hdf5")
    else:
        # Create the weights file immediately so other tools can find it.
        model.save("unsharpDetectorWeights.hdf5", include_optimizer=True)
    model.fit(x=data_generator,
              validation_data=validation_data_provider,
              callbacks=[ModelCheckpoint("unsharpDetectorWeights.hdf5", monitor='val_loss',
                                         save_best_only=False, mode='auto', period=1),
                         LogPerformance(model, gui_callback, data_generator, bs)],
              epochs=epochs,
              use_multiprocessing=True,
              workers=8, max_queue_size=30)
164 |
165 |
@ex.automain
def run(use_gui):
    """Experiment entry point: optionally start the GUI, train, return result."""
    gui_callback, gui_thread = None, None
    if use_gui:
        # Imported lazily so headless runs never touch PyQt.
        from training_gui import init_gui
        gui_callback, _queue, gui_thread = init_gui()
    train(gui_callback)
    if gui_thread:
        # Wait for the preview window to be closed before finishing the run.
        gui_thread.join()
    return last_result
177 |
--------------------------------------------------------------------------------
/training_gui.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import sys
3 | from PyQt5.QtWidgets import QApplication, QWidget
4 | from PyQt5.QtGui import QImage, QPainter, QPixmap, QFont, QColor
5 | from PyQt5.QtCore import QRect, Qt
6 | from threading import Thread
7 | from queue import Queue
8 | from visualization_helpers import convert_image
9 | import numpy as np
10 |
11 |
class TrainingPreview(QWidget):
    """Window that shows a training batch with label and prediction colors.

    Each image tile is 256x256 px with a 20 px bar underneath: the left
    half shows the ground-truth label color, the right half the prediction
    color (green/red channels encode the two class scores).
    """
    def __init__(self, feedback_queue):
        super().__init__()
        self.feedback_queue = feedback_queue
        self.setWindowTitle("Training preview")
        self.resize(4 * 256, 3 * 276)
        self.setMinimumWidth(256)
        # Placeholder content until the first batch arrives.
        self.pixmaps = [QPixmap(QImage(
            convert_image(np.zeros((256, 256, 3), dtype=np.float32)),
            256, 256, QImage.Format_RGB32))]
        self.labels = [{"color": QColor(0, 255, 0)}]
        self.predictions = [{"color": QColor(128, 128, 0)}]
        self.white = QColor(255, 255, 255)
        self.font = QFont('Sans-Serif', 12, QFont.Normal)
        self.show()

    def paintEvent(self, e):
        qp = QPainter()
        qp.begin(self)
        qp.setRenderHint(QPainter.Antialiasing)
        qp.setRenderHint(QPainter.HighQualityAntialiasing)
        self.draw(qp)
        qp.end()

    def draw(self, qp):
        """Paint all tiles in a grid as wide as the window allows."""
        size = self.size()
        # Number of 256 px tiles that fit in one row.
        line_len = size.width()//256
        qp.setFont(self.font)
        qp.setPen(self.white)
        for i, pixmap in enumerate(self.pixmaps):
            qp.drawPixmap(QRect((i % line_len) * 256, (i // line_len) * 276, 256, 256),
                          pixmap, QRect(0, 0, 256, 256))
            qp.setBrush(self.labels[i]["color"])
            qp.drawRect((i % line_len) * 256, (i // line_len) * 276 + 256, 128, 20)
            qp.setBrush(self.predictions[i]["color"])
            qp.drawRect((i % line_len) * 256 + 128, (i // line_len) * 276 + 256, 128, 20)
        # A dead block of commented-out drawing code (rounded rects, cursor
        # offsets from an unrelated text widget) was removed here.

    def show_data(self, images, labels, predictions, epoch):
        """Replace the displayed batch; called via the training callback."""
        self.setWindowTitle("Training preview | Epoch: " + str(epoch))
        # NOTE(review): debug artifact -- writes every preview batch to disk
        # on each update; consider removing once no longer needed.
        from skimage.io import imsave
        imsave("test_data.png", np.clip(np.concatenate(images, axis=0), 0, 1))
        self.pixmaps = []
        self.labels = []
        self.predictions = []
        for i, img in enumerate(images):
            # NOTE(review): QImage expects (width, height); img.shape is
            # (rows, cols, channels), so this only matches for square images.
            qimage = QImage(convert_image(img * 255), img.shape[0], img.shape[1], QImage.Format_RGB32)
            # .copy() detaches the pixmap from the temporary image buffer.
            self.pixmaps.append(QPixmap().fromImage(qimage, flags=(Qt.AutoColor | Qt.DiffuseDither)).copy())
            self.labels.append({
                "color": QColor(int(labels[i][0] * 255), int(labels[i][1] * 255), 0)
            })
            self.predictions.append({
                "color": QColor(int(predictions[i][0] * 255), int(predictions[i][1] * 255), 0)
            })
        self.update()
85 |
86 |
def run_gui(feedback_queue):
    """Run the Qt event loop; publish the show_data callback, then the exit status."""
    app = QApplication(sys.argv)
    preview = TrainingPreview(feedback_queue)
    feedback_queue.put({"callback": preview.show_data})
    exit_status = app.exec_()
    feedback_queue.put({"stop": exit_status})
93 |
94 |
def init_gui():
    """Start the GUI thread and wait for its show_data callback.

    Returns (callback_or_None, feedback_queue, gui_thread).
    """
    queue = Queue()
    thread = Thread(target=run_gui, args=(queue,))
    thread.start()
    # Block until the GUI thread has created the window.
    answer = queue.get(True)
    if "callback" not in answer:
        print("ERROR: No Callback in init answer!")
        return None, queue, thread
    return answer["callback"], queue, thread
105 |
106 |
if __name__ == "__main__":
    # Manual smoke test: start the GUI, feed it one generated batch with
    # dummy predictions, then wait for the window to be closed.
    clb, fq, thread = init_gui()
    from TrainingDataGenerator import UnsharpTrainingDataGenerator
    g = UnsharpTrainingDataGenerator(["../../Bilder/Bilder der Woche/"], batch_size=7)
    g.on_epoch_end()
    x, y = g.__getitem__(0)
    print("x.shape: " + str(x.shape))
    print("y.shape: " + str(y.shape))
    clb(x, y, np.array([[0.2, 0.8], [0.9, 0.1],
                        [0.3, 0.7], [0.3, 0.7],
                        [0.3, 0.7], [0.3, 0.7],
                        [0.3, 0.7]], dtype=np.float32), 0)
    # Block until the GUI reports its exit status, then shut down cleanly.
    feedback = fq.get()
    if "stop" in feedback.keys():
        print("stopping")
        thread.join()
        print("join finished")
        sys.exit(feedback["stop"])
125 |
--------------------------------------------------------------------------------
/unsharpDetectorSettings.json:
--------------------------------------------------------------------------------
1 | {"l1fc": 8, "l1fs": [9, 9], "l1st": 2, "l2fc": 16, "l2fs": [3, 3], "l2st": 2, "l3fc": 32, "l3fs": [3, 3], "eac_size": 16, "res_c": 0, "res_fc": 32, "res_fs": [3, 3]}
--------------------------------------------------------------------------------
/unsharpDetectorWeights.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/unsharpDetectorWeights.hdf5
--------------------------------------------------------------------------------
/validation_data/bad/art_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/art_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/ball_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/ball_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/benchy3d_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/benchy3d_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/carpet_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/carpet_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/catview_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/catview_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/chaos_key_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/chaos_key_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/console_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/console_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/ct_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/ct_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/desk_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/desk_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/dsgvo_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/dsgvo_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/esp32_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/esp32_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/fabric_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/fabric_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/garden_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/garden_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/headphones_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/headphones_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/heise_garden_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/heise_garden_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/keyboard2_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/keyboard2_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/keyboard_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/keyboard_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/led_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/led_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/mechanic_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/mechanic_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/metal_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/metal_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/netzteil_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/netzteil_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/paper_bag_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/paper_bag_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/pina_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/pina_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/plastic_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/plastic_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/printed_lamp_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/printed_lamp_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/skin_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/skin_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/squirrel_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/squirrel_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/star_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/star_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/switch_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/switch_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/telephone_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/telephone_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/tinkerstuff_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/tinkerstuff_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/trees_and_sky_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/trees_and_sky_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/vote_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/vote_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/bad/wall_blurry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/bad/wall_blurry.jpg
--------------------------------------------------------------------------------
/validation_data/good/art_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/art_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/ball_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/ball_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/benchy3d_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/benchy3d_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/carpet_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/carpet_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/catview_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/catview_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/circuit_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/circuit_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/console_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/console_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/ct_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/ct_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/desk_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/desk_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/dsgvo_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/dsgvo_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/esp32_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/esp32_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/fabric_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/fabric_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/garden_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/garden_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/headphones_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/headphones_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/heise_garden_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/heise_garden_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/keyboard2_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/keyboard2_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/keyboard_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/keyboard_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/led_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/led_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/mechanic_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/mechanic_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/metal_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/metal_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/netzteil_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/netzteil_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/paper_bag_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/paper_bag_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/pina_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/pina_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/plastic_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/plastic_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/printed_lamp_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/printed_lamp_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/skin_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/skin_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/squirrel_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/squirrel_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/star_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/star_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/switch_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/switch_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/telephone_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/telephone_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/tinkerstuff_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/tinkerstuff_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/trees_and_sky_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/trees_and_sky_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/vote_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/vote_sharp.jpg
--------------------------------------------------------------------------------
/validation_data/good/wall_sharp.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pinae/UnsharpDetector/7df706dbf314a7cbe0a7279a9e1504d01fd36150/validation_data/good/wall_sharp.jpg
--------------------------------------------------------------------------------
/visualization_helpers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import numpy as np
3 |
4 |
def convert_image(numpy_array):
    """Pack an (H, W, 3) channel image into a single (H, W) uint32 plane.

    Byte layout of each output value is 0xAARRGGBB with the alpha byte
    fixed at 0xff (fully opaque); channels 0/1/2 of the input land in the
    R/G/B byte positions respectively.
    """
    height, width = numpy_array.shape[0], numpy_array.shape[1]
    # Start with the opaque alpha byte, then shift it up one byte at a
    # time while folding in each color channel — identical arithmetic to
    # nesting three np.left_shift calls.
    packed = np.zeros((height, width), dtype=np.uint32) + 0xff
    for channel in range(3):
        packed = np.left_shift(packed, 8) + numpy_array[:, :, channel].astype(np.uint32)
    return packed
13 |
14 |
def generate_y_image(batch_y, dtype=float):
    """Render a batch of 2-value labels as a (20, 256 * batch_size, 3) image strip.

    Each label pair in ``batch_y`` (shape ``(batch_size, 2)``) becomes a
    256-pixel-wide band: channels 0 and 1 carry the two label values and
    channel 2 is zero. The one-pixel-high line is then repeated 20 times
    vertically so the strip is visible when displayed.

    The default dtype is the builtin ``float`` (i.e. float64); the original
    code used ``np.float``, an alias removed in NumPy 1.24, which now raises
    AttributeError at import time.
    """
    batch_size = batch_y.shape[0]
    # Widen each label pair to a 256-pixel horizontal band.
    batch_y_img_line = np.repeat(batch_y.astype(dtype).reshape(1, batch_size, 2), 256, axis=1)
    # Append an all-zero third channel, then stretch to 20 rows tall.
    return np.repeat(
        np.concatenate([batch_y_img_line,
                        np.zeros((1, 256 * batch_size, 1), dtype=dtype)], axis=2),
        20, axis=0)
22 |
--------------------------------------------------------------------------------