├── .gitignore ├── Digital image processing.ipynb ├── Human Emotions Recognition.ipynb ├── LICENSE ├── Lunar crater detection.py └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 160 | #.idea/ 161 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 2.1, February 1999 3 | 4 | Copyright (C) 1991, 1999 Free Software Foundation, Inc. 5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 8 | 9 | [This is the first released version of the Lesser GPL. It also counts 10 | as the successor of the GNU Library Public License, version 2, hence 11 | the version number 2.1.] 12 | 13 | Preamble 14 | 15 | The licenses for most software are designed to take away your 16 | freedom to share and change it. By contrast, the GNU General Public 17 | Licenses are intended to guarantee your freedom to share and change 18 | free software--to make sure the software is free for all its users. 19 | 20 | This license, the Lesser General Public License, applies to some 21 | specially designated software packages--typically libraries--of the 22 | Free Software Foundation and other authors who decide to use it. You 23 | can use it too, but we suggest you first think carefully about whether 24 | this license or the ordinary General Public License is the better 25 | strategy to use in any particular case, based on the explanations below. 26 | 27 | When we speak of free software, we are referring to freedom of use, 28 | not price. Our General Public Licenses are designed to make sure that 29 | you have the freedom to distribute copies of free software (and charge 30 | for this service if you wish); that you receive source code or can get 31 | it if you want it; that you can change the software and use pieces of 32 | it in new free programs; and that you are informed that you can do 33 | these things. 34 | 35 | To protect your rights, we need to make restrictions that forbid 36 | distributors to deny you these rights or to ask you to surrender these 37 | rights. These restrictions translate to certain responsibilities for 38 | you if you distribute copies of the library or if you modify it. 39 | 40 | For example, if you distribute copies of the library, whether gratis 41 | or for a fee, you must give the recipients all the rights that we gave 42 | you. You must make sure that they, too, receive or can get the source 43 | code. If you link other code with the library, you must provide 44 | complete object files to the recipients, so that they can relink them 45 | with the library after making changes to the library and recompiling 46 | it. And you must show them these terms so they know their rights. 47 | 48 | We protect your rights with a two-step method: (1) we copyright the 49 | library, and (2) we offer you this license, which gives you legal 50 | permission to copy, distribute and/or modify the library. 51 | 52 | To protect each distributor, we want to make it very clear that 53 | there is no warranty for the free library. Also, if the library is 54 | modified by someone else and passed on, the recipients should know 55 | that what they have is not the original version, so that the original 56 | author's reputation will not be affected by problems that might be 57 | introduced by others. 58 | 59 | Finally, software patents pose a constant threat to the existence of 60 | any free program. We wish to make sure that a company cannot 61 | effectively restrict the users of a free program by obtaining a 62 | restrictive license from a patent holder. Therefore, we insist that 63 | any patent license obtained for a version of the library must be 64 | consistent with the full freedom of use specified in this license. 65 | 66 | Most GNU software, including some libraries, is covered by the 67 | ordinary GNU General Public License. This license, the GNU Lesser 68 | General Public License, applies to certain designated libraries, and 69 | is quite different from the ordinary General Public License. We use 70 | this license for certain libraries in order to permit linking those 71 | libraries into non-free programs. 72 | 73 | When a program is linked with a library, whether statically or using 74 | a shared library, the combination of the two is legally speaking a 75 | combined work, a derivative of the original library. The ordinary 76 | General Public License therefore permits such linking only if the 77 | entire combination fits its criteria of freedom. The Lesser General 78 | Public License permits more lax criteria for linking other code with 79 | the library. 80 | 81 | We call this license the "Lesser" General Public License because it 82 | does Less to protect the user's freedom than the ordinary General 83 | Public License. It also provides other free software developers Less 84 | of an advantage over competing non-free programs. These disadvantages 85 | are the reason we use the ordinary General Public License for many 86 | libraries. However, the Lesser license provides advantages in certain 87 | special circumstances. 88 | 89 | For example, on rare occasions, there may be a special need to 90 | encourage the widest possible use of a certain library, so that it becomes 91 | a de-facto standard. To achieve this, non-free programs must be 92 | allowed to use the library. A more frequent case is that a free 93 | library does the same job as widely used non-free libraries. In this 94 | case, there is little to gain by limiting the free library to free 95 | software only, so we use the Lesser General Public License. 96 | 97 | In other cases, permission to use a particular library in non-free 98 | programs enables a greater number of people to use a large body of 99 | free software. For example, permission to use the GNU C Library in 100 | non-free programs enables many more people to use the whole GNU 101 | operating system, as well as its variant, the GNU/Linux operating 102 | system. 103 | 104 | Although the Lesser General Public License is Less protective of the 105 | users' freedom, it does ensure that the user of a program that is 106 | linked with the Library has the freedom and the wherewithal to run 107 | that program using a modified version of the Library. 108 | 109 | The precise terms and conditions for copying, distribution and 110 | modification follow. Pay close attention to the difference between a 111 | "work based on the library" and a "work that uses the library". The 112 | former contains code derived from the library, whereas the latter must 113 | be combined with the library in order to run. 114 | 115 | GNU LESSER GENERAL PUBLIC LICENSE 116 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 117 | 118 | 0. This License Agreement applies to any software library or other 119 | program which contains a notice placed by the copyright holder or 120 | other authorized party saying it may be distributed under the terms of 121 | this Lesser General Public License (also called "this License"). 122 | Each licensee is addressed as "you". 123 | 124 | A "library" means a collection of software functions and/or data 125 | prepared so as to be conveniently linked with application programs 126 | (which use some of those functions and data) to form executables. 127 | 128 | The "Library", below, refers to any such software library or work 129 | which has been distributed under these terms. A "work based on the 130 | Library" means either the Library or any derivative work under 131 | copyright law: that is to say, a work containing the Library or a 132 | portion of it, either verbatim or with modifications and/or translated 133 | straightforwardly into another language. (Hereinafter, translation is 134 | included without limitation in the term "modification".) 135 | 136 | "Source code" for a work means the preferred form of the work for 137 | making modifications to it. For a library, complete source code means 138 | all the source code for all modules it contains, plus any associated 139 | interface definition files, plus the scripts used to control compilation 140 | and installation of the library. 141 | 142 | Activities other than copying, distribution and modification are not 143 | covered by this License; they are outside its scope. The act of 144 | running a program using the Library is not restricted, and output from 145 | such a program is covered only if its contents constitute a work based 146 | on the Library (independent of the use of the Library in a tool for 147 | writing it). Whether that is true depends on what the Library does 148 | and what the program that uses the Library does. 149 | 150 | 1. You may copy and distribute verbatim copies of the Library's 151 | complete source code as you receive it, in any medium, provided that 152 | you conspicuously and appropriately publish on each copy an 153 | appropriate copyright notice and disclaimer of warranty; keep intact 154 | all the notices that refer to this License and to the absence of any 155 | warranty; and distribute a copy of this License along with the 156 | Library. 157 | 158 | You may charge a fee for the physical act of transferring a copy, 159 | and you may at your option offer warranty protection in exchange for a 160 | fee. 161 | 162 | 2. You may modify your copy or copies of the Library or any portion 163 | of it, thus forming a work based on the Library, and copy and 164 | distribute such modifications or work under the terms of Section 1 165 | above, provided that you also meet all of these conditions: 166 | 167 | a) The modified work must itself be a software library. 168 | 169 | b) You must cause the files modified to carry prominent notices 170 | stating that you changed the files and the date of any change. 171 | 172 | c) You must cause the whole of the work to be licensed at no 173 | charge to all third parties under the terms of this License. 174 | 175 | d) If a facility in the modified Library refers to a function or a 176 | table of data to be supplied by an application program that uses 177 | the facility, other than as an argument passed when the facility 178 | is invoked, then you must make a good faith effort to ensure that, 179 | in the event an application does not supply such function or 180 | table, the facility still operates, and performs whatever part of 181 | its purpose remains meaningful. 182 | 183 | (For example, a function in a library to compute square roots has 184 | a purpose that is entirely well-defined independent of the 185 | application. Therefore, Subsection 2d requires that any 186 | application-supplied function or table used by this function must 187 | be optional: if the application does not supply it, the square 188 | root function must still compute square roots.) 189 | 190 | These requirements apply to the modified work as a whole. If 191 | identifiable sections of that work are not derived from the Library, 192 | and can be reasonably considered independent and separate works in 193 | themselves, then this License, and its terms, do not apply to those 194 | sections when you distribute them as separate works. But when you 195 | distribute the same sections as part of a whole which is a work based 196 | on the Library, the distribution of the whole must be on the terms of 197 | this License, whose permissions for other licensees extend to the 198 | entire whole, and thus to each and every part regardless of who wrote 199 | it. 200 | 201 | Thus, it is not the intent of this section to claim rights or contest 202 | your rights to work written entirely by you; rather, the intent is to 203 | exercise the right to control the distribution of derivative or 204 | collective works based on the Library. 205 | 206 | In addition, mere aggregation of another work not based on the Library 207 | with the Library (or with a work based on the Library) on a volume of 208 | a storage or distribution medium does not bring the other work under 209 | the scope of this License. 210 | 211 | 3. You may opt to apply the terms of the ordinary GNU General Public 212 | License instead of this License to a given copy of the Library. To do 213 | this, you must alter all the notices that refer to this License, so 214 | that they refer to the ordinary GNU General Public License, version 2, 215 | instead of to this License. (If a newer version than version 2 of the 216 | ordinary GNU General Public License has appeared, then you can specify 217 | that version instead if you wish.) Do not make any other change in 218 | these notices. 219 | 220 | Once this change is made in a given copy, it is irreversible for 221 | that copy, so the ordinary GNU General Public License applies to all 222 | subsequent copies and derivative works made from that copy. 223 | 224 | This option is useful when you wish to copy part of the code of 225 | the Library into a program that is not a library. 226 | 227 | 4. You may copy and distribute the Library (or a portion or 228 | derivative of it, under Section 2) in object code or executable form 229 | under the terms of Sections 1 and 2 above provided that you accompany 230 | it with the complete corresponding machine-readable source code, which 231 | must be distributed under the terms of Sections 1 and 2 above on a 232 | medium customarily used for software interchange. 233 | 234 | If distribution of object code is made by offering access to copy 235 | from a designated place, then offering equivalent access to copy the 236 | source code from the same place satisfies the requirement to 237 | distribute the source code, even though third parties are not 238 | compelled to copy the source along with the object code. 239 | 240 | 5. A program that contains no derivative of any portion of the 241 | Library, but is designed to work with the Library by being compiled or 242 | linked with it, is called a "work that uses the Library". Such a 243 | work, in isolation, is not a derivative work of the Library, and 244 | therefore falls outside the scope of this License. 245 | 246 | However, linking a "work that uses the Library" with the Library 247 | creates an executable that is a derivative of the Library (because it 248 | contains portions of the Library), rather than a "work that uses the 249 | library". The executable is therefore covered by this License. 250 | Section 6 states terms for distribution of such executables. 251 | 252 | When a "work that uses the Library" uses material from a header file 253 | that is part of the Library, the object code for the work may be a 254 | derivative work of the Library even though the source code is not. 255 | Whether this is true is especially significant if the work can be 256 | linked without the Library, or if the work is itself a library. The 257 | threshold for this to be true is not precisely defined by law. 258 | 259 | If such an object file uses only numerical parameters, data 260 | structure layouts and accessors, and small macros and small inline 261 | functions (ten lines or less in length), then the use of the object 262 | file is unrestricted, regardless of whether it is legally a derivative 263 | work. (Executables containing this object code plus portions of the 264 | Library will still fall under Section 6.) 265 | 266 | Otherwise, if the work is a derivative of the Library, you may 267 | distribute the object code for the work under the terms of Section 6. 268 | Any executables containing that work also fall under Section 6, 269 | whether or not they are linked directly with the Library itself. 270 | 271 | 6. As an exception to the Sections above, you may also combine or 272 | link a "work that uses the Library" with the Library to produce a 273 | work containing portions of the Library, and distribute that work 274 | under terms of your choice, provided that the terms permit 275 | modification of the work for the customer's own use and reverse 276 | engineering for debugging such modifications. 277 | 278 | You must give prominent notice with each copy of the work that the 279 | Library is used in it and that the Library and its use are covered by 280 | this License. You must supply a copy of this License. If the work 281 | during execution displays copyright notices, you must include the 282 | copyright notice for the Library among them, as well as a reference 283 | directing the user to the copy of this License. Also, you must do one 284 | of these things: 285 | 286 | a) Accompany the work with the complete corresponding 287 | machine-readable source code for the Library including whatever 288 | changes were used in the work (which must be distributed under 289 | Sections 1 and 2 above); and, if the work is an executable linked 290 | with the Library, with the complete machine-readable "work that 291 | uses the Library", as object code and/or source code, so that the 292 | user can modify the Library and then relink to produce a modified 293 | executable containing the modified Library. (It is understood 294 | that the user who changes the contents of definitions files in the 295 | Library will not necessarily be able to recompile the application 296 | to use the modified definitions.) 297 | 298 | b) Use a suitable shared library mechanism for linking with the 299 | Library. A suitable mechanism is one that (1) uses at run time a 300 | copy of the library already present on the user's computer system, 301 | rather than copying library functions into the executable, and (2) 302 | will operate properly with a modified version of the library, if 303 | the user installs one, as long as the modified version is 304 | interface-compatible with the version that the work was made with. 305 | 306 | c) Accompany the work with a written offer, valid for at 307 | least three years, to give the same user the materials 308 | specified in Subsection 6a, above, for a charge no more 309 | than the cost of performing this distribution. 310 | 311 | d) If distribution of the work is made by offering access to copy 312 | from a designated place, offer equivalent access to copy the above 313 | specified materials from the same place. 314 | 315 | e) Verify that the user has already received a copy of these 316 | materials or that you have already sent this user a copy. 317 | 318 | For an executable, the required form of the "work that uses the 319 | Library" must include any data and utility programs needed for 320 | reproducing the executable from it. However, as a special exception, 321 | the materials to be distributed need not include anything that is 322 | normally distributed (in either source or binary form) with the major 323 | components (compiler, kernel, and so on) of the operating system on 324 | which the executable runs, unless that component itself accompanies 325 | the executable. 326 | 327 | It may happen that this requirement contradicts the license 328 | restrictions of other proprietary libraries that do not normally 329 | accompany the operating system. Such a contradiction means you cannot 330 | use both them and the Library together in an executable that you 331 | distribute. 332 | 333 | 7. You may place library facilities that are a work based on the 334 | Library side-by-side in a single library together with other library 335 | facilities not covered by this License, and distribute such a combined 336 | library, provided that the separate distribution of the work based on 337 | the Library and of the other library facilities is otherwise 338 | permitted, and provided that you do these two things: 339 | 340 | a) Accompany the combined library with a copy of the same work 341 | based on the Library, uncombined with any other library 342 | facilities. This must be distributed under the terms of the 343 | Sections above. 344 | 345 | b) Give prominent notice with the combined library of the fact 346 | that part of it is a work based on the Library, and explaining 347 | where to find the accompanying uncombined form of the same work. 348 | 349 | 8. You may not copy, modify, sublicense, link with, or distribute 350 | the Library except as expressly provided under this License. Any 351 | attempt otherwise to copy, modify, sublicense, link with, or 352 | distribute the Library is void, and will automatically terminate your 353 | rights under this License. However, parties who have received copies, 354 | or rights, from you under this License will not have their licenses 355 | terminated so long as such parties remain in full compliance. 356 | 357 | 9. You are not required to accept this License, since you have not 358 | signed it. However, nothing else grants you permission to modify or 359 | distribute the Library or its derivative works. These actions are 360 | prohibited by law if you do not accept this License. Therefore, by 361 | modifying or distributing the Library (or any work based on the 362 | Library), you indicate your acceptance of this License to do so, and 363 | all its terms and conditions for copying, distributing or modifying 364 | the Library or works based on it. 365 | 366 | 10. Each time you redistribute the Library (or any work based on the 367 | Library), the recipient automatically receives a license from the 368 | original licensor to copy, distribute, link with or modify the Library 369 | subject to these terms and conditions. You may not impose any further 370 | restrictions on the recipients' exercise of the rights granted herein. 371 | You are not responsible for enforcing compliance by third parties with 372 | this License. 373 | 374 | 11. If, as a consequence of a court judgment or allegation of patent 375 | infringement or for any other reason (not limited to patent issues), 376 | conditions are imposed on you (whether by court order, agreement or 377 | otherwise) that contradict the conditions of this License, they do not 378 | excuse you from the conditions of this License. If you cannot 379 | distribute so as to satisfy simultaneously your obligations under this 380 | License and any other pertinent obligations, then as a consequence you 381 | may not distribute the Library at all. For example, if a patent 382 | license would not permit royalty-free redistribution of the Library by 383 | all those who receive copies directly or indirectly through you, then 384 | the only way you could satisfy both it and this License would be to 385 | refrain entirely from distribution of the Library. 386 | 387 | If any portion of this section is held invalid or unenforceable under any 388 | particular circumstance, the balance of the section is intended to apply, 389 | and the section as a whole is intended to apply in other circumstances. 390 | 391 | It is not the purpose of this section to induce you to infringe any 392 | patents or other property right claims or to contest validity of any 393 | such claims; this section has the sole purpose of protecting the 394 | integrity of the free software distribution system which is 395 | implemented by public license practices. Many people have made 396 | generous contributions to the wide range of software distributed 397 | through that system in reliance on consistent application of that 398 | system; it is up to the author/donor to decide if he or she is willing 399 | to distribute software through any other system and a licensee cannot 400 | impose that choice. 401 | 402 | This section is intended to make thoroughly clear what is believed to 403 | be a consequence of the rest of this License. 404 | 405 | 12. If the distribution and/or use of the Library is restricted in 406 | certain countries either by patents or by copyrighted interfaces, the 407 | original copyright holder who places the Library under this License may add 408 | an explicit geographical distribution limitation excluding those countries, 409 | so that distribution is permitted only in or among countries not thus 410 | excluded. In such case, this License incorporates the limitation as if 411 | written in the body of this License. 412 | 413 | 13. The Free Software Foundation may publish revised and/or new 414 | versions of the Lesser General Public License from time to time. 415 | Such new versions will be similar in spirit to the present version, 416 | but may differ in detail to address new problems or concerns. 417 | 418 | Each version is given a distinguishing version number. If the Library 419 | specifies a version number of this License which applies to it and 420 | "any later version", you have the option of following the terms and 421 | conditions either of that version or of any later version published by 422 | the Free Software Foundation. If the Library does not specify a 423 | license version number, you may choose any version ever published by 424 | the Free Software Foundation. 425 | 426 | 14. If you wish to incorporate parts of the Library into other free 427 | programs whose distribution conditions are incompatible with these, 428 | write to the author to ask for permission. For software which is 429 | copyrighted by the Free Software Foundation, write to the Free 430 | Software Foundation; we sometimes make exceptions for this. Our 431 | decision will be guided by the two goals of preserving the free status 432 | of all derivatives of our free software and of promoting the sharing 433 | and reuse of software generally. 434 | 435 | NO WARRANTY 436 | 437 | 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO 438 | WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 439 | EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR 440 | OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY 441 | KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE 442 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 443 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE 444 | LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME 445 | THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 446 | 447 | 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN 448 | WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY 449 | AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU 450 | FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR 451 | CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE 452 | LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING 453 | RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A 454 | FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF 455 | SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 456 | DAMAGES. 457 | 458 | END OF TERMS AND CONDITIONS 459 | 460 | How to Apply These Terms to Your New Libraries 461 | 462 | If you develop a new library, and you want it to be of the greatest 463 | possible use to the public, we recommend making it free software that 464 | everyone can redistribute and change. You can do so by permitting 465 | redistribution under these terms (or, alternatively, under the terms of the 466 | ordinary General Public License). 467 | 468 | To apply these terms, attach the following notices to the library. It is 469 | safest to attach them to the start of each source file to most effectively 470 | convey the exclusion of warranty; and each file should have at least the 471 | "copyright" line and a pointer to where the full notice is found. 472 | 473 | 474 | Copyright (C) 475 | 476 | This library is free software; you can redistribute it and/or 477 | modify it under the terms of the GNU Lesser General Public 478 | License as published by the Free Software Foundation; either 479 | version 2.1 of the License, or (at your option) any later version. 480 | 481 | This library is distributed in the hope that it will be useful, 482 | but WITHOUT ANY WARRANTY; without even the implied warranty of 483 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 484 | Lesser General Public License for more details. 485 | 486 | You should have received a copy of the GNU Lesser General Public 487 | License along with this library; if not, write to the Free Software 488 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 489 | USA 490 | 491 | Also add information on how to contact you by electronic and paper mail. 492 | 493 | You should also get your employer (if you work as a programmer) or your 494 | school, if any, to sign a "copyright disclaimer" for the library, if 495 | necessary. Here is a sample; alter the names: 496 | 497 | Yoyodyne, Inc., hereby disclaims all copyright interest in the 498 | library `Frob' (a library for tweaking knobs) written by James Random 499 | Hacker. 500 | 501 | , 1 April 1990 502 | Ty Coon, President of Vice 503 | 504 | That's all there is to it! 505 | -------------------------------------------------------------------------------- /Lunar crater detection.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Actividad_2_Carlos_González_Fernando_Pocino.ipynb 3 | 4 | Automatically generated by Colaboratory. 5 | 6 | Original file is located at 7 | https://colab.research.google.com/drive/1cRWVirhIEmAJ5Z8G2WZ6N8_gKTEk1-0H 8 | 9 | Fernando Pocino Martín y Carlos González Subirana 4ºIngeniería Biomédica 10 | 11 | Bienvenidos a la Actividad 2, donde pondremos en práctica todo lo aprendido durante el bloque 4 (Detección de Objetos con Deep Learning). Esta actividad la realizaremos en clase, se terminará en casa, se hará por parejas y se entregará el día 12 de Noviembre (incluido). 12 | 13 | **MODELO YA CREADO** 14 | 15 | Este es el mismo código que el del creador, lo que cambia es tras el segundo # que he cargado todos los datos tuyos en lugar del codigo que tenia el creador subiendo sus propios datos. 16 | """ 17 | 18 | # Download TorchVision repo to use some files from 19 | # references/detection 20 | !git clone https://github.com/pytorch/vision.git 21 | !cd vision 22 | !git checkout v0.8.2 23 | 24 | !cp ./vision/references/detection/utils.py ./ 25 | !cp ./vision/references/detection/transforms.py ./ 26 | !cp ./vision/references/detection/coco_eval.py ./ 27 | !cp ./vision/references/detection/engine.py ./ 28 | !cp ./vision/references/detection/coco_utils.py ./ 29 | 30 | !pip install cython 31 | # Install pycocotools, the version by default in Colab 32 | !pip install -U 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI' 33 | !pip install -U albumentations 34 | !pip install -U opencv-python 35 | 36 | #Copy and unify the train and validation datasets into one folder for images and another for labels 37 | !pip install gdown 38 | !gdown https://drive.google.com/uc?id=1hrHgANwgC8VyXLhXgxLYHTrILpeJ5bLl 39 | !unzip /content/mars_and_moon.zip 40 | 41 | import os 42 | import numpy as np 43 | import torch 44 | import torchvision 45 | from torchvision.models.detection.faster_rcnn import FastRCNNPredictor 46 | import utils 47 | import transforms as T 48 | import albumentations as A 49 | import cv2 50 | import time 51 | from albumentations.pytorch.transforms import ToTensorV2 52 | import matplotlib 53 | import matplotlib.pyplot as plt 54 | import matplotlib.patches as patches 55 | from sklearn.model_selection import KFold 56 | import random 57 | 58 | """Part 2: Dataset class setup 59 | The following code defines the Dataset class. The contructor defines the transformations to be performed on the data and image and boundry boxes locations as well as class names are defined. The faster RCNN pretrained network as imported from the torchvision library requires the class to contain a getitem method to rerieve indvidual images and boxes as well as len method to return number of images in data set. 60 | 61 | In getitem I have added syntex to convert the box coordinates from the (normalized x,y,w,h) format to the required (xmin,ymin,xmax,ymax) by the Faster RCNN model, using convert_box_cord method. I have also added a condition to create a dummy box object with class 0(background) because importing empty .txt files for some of the images raised errors. 62 | 63 | Finally the "target" library was populated with the their required keys i.e. boundry boxes, labels etc 64 | """ 65 | 66 | class CraterDataset(object): 67 | def __init__(self, root, transforms): 68 | self.root = root 69 | self.transforms = transforms 70 | # load all image files, sorting them to 71 | # ensure that they are aligned 72 | self.imgs = list(sorted(os.listdir(os.path.join(self.root, "images")))) 73 | self.annots = list(sorted(os.listdir(os.path.join(self.root, "labels")))) 74 | self.classes = ['Background','Crater'] 75 | 76 | # Converts boundry box formats, this version assumes single class only! 77 | def convert_box_cord(self,bboxs, format_from, format_to, img_shape): 78 | if format_from == 'normxywh': 79 | if format_to == 'xyminmax': 80 | xw = bboxs[:, (1, 3)] * img_shape[1] 81 | yh = bboxs[:, (2, 4)] * img_shape[0] 82 | xmin = xw[:, 0] - xw[:, 1] / 2 83 | xmax = xw[:, 0] + xw[:, 1] / 2 84 | ymin = yh[:, 0] - yh[:, 1] / 2 85 | ymax = yh[:, 0] + yh[:, 1] / 2 86 | coords_converted = np.column_stack((xmin, ymin, xmax, ymax)) 87 | 88 | return coords_converted 89 | 90 | def __getitem__(self, idx): 91 | # load images and boxes 92 | img_path = os.path.join(self.root, "images", self.imgs[idx]) 93 | annot_path = os.path.join(self.root, "labels", self.annots[idx]) 94 | img = cv2.imread(img_path) 95 | img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB).astype(np.float32) 96 | img= img/255.0 97 | 98 | # retrieve bbox list and format to required type, 99 | # if annotation file is empty, fill dummy box with label 0 100 | if os.path.getsize(annot_path) != 0: 101 | bboxs = np.loadtxt(annot_path, ndmin=2) 102 | bboxs = self.convert_box_cord(bboxs, 'normxywh', 'xyminmax', img.shape) 103 | num_objs = len(bboxs) 104 | bboxs = torch.as_tensor(bboxs, dtype=torch.float32) 105 | # there is only one class 106 | labels = torch.ones((num_objs,), dtype=torch.int64) 107 | # suppose all instances are not crowd 108 | iscrowd = torch.zeros((num_objs,), dtype=torch.int64) 109 | else: 110 | bboxs = torch.as_tensor([[0, 0, 640, 640]], dtype=torch.float32) 111 | labels = torch.zeros((1,), dtype=torch.int64) 112 | iscrowd = torch.zeros((1,), dtype=torch.int64) 113 | 114 | area = (bboxs[:, 3] - bboxs[:, 1]) * (bboxs[:, 2] - bboxs[:, 0]) 115 | image_id = torch.tensor([idx]) 116 | 117 | target = {} 118 | target["boxes"] = bboxs 119 | target["labels"] = labels 120 | target["image_id"] = image_id 121 | target["area"] = area 122 | target["iscrowd"] = iscrowd 123 | 124 | if self.transforms is not None: 125 | sample = self.transforms(image=img, 126 | bboxes=target['boxes'], 127 | labels=labels) 128 | img = sample['image'] 129 | target['boxes'] = torch.tensor(sample['bboxes']) 130 | target['labels'] = torch.tensor(sample['labels']) 131 | if target['boxes'].ndim == 1: 132 | target['boxes'] = torch.as_tensor([[0, 0, 640, 640]], dtype=torch.float32) 133 | target['labels'] = torch.zeros((1,), dtype=torch.int64) 134 | return img, target 135 | 136 | def __len__(self): 137 | return len(self.imgs) 138 | 139 | """Part 3: Model Configuration 140 | For this project Faster RCNN with a resnet50 backbone pre-trained on COCO dataset was loaded. the ROI pre-trained head was replaced with an untrained head to be trained on our dataset. 141 | 142 | Different backones were tested such as mobilenet_v3_large_fpn and resnet50_fpn_v2 which was finally selected. However, I was unable to upgrade the torchvision version from 0.12 > 0.13 which containes the updated backbone on the kaggle platform so I reverted to using v1 for this notebook. 143 | 144 | get_transform function defines the augmentations to be implemented on the dataset before being passes to the model. I have opted to change the augmentation methods used from the default torchvision provided to albumentation. The main reason being that using torchvision augmentation as is, requires customization of each transform method to be able to adjust the box coordinates accordingly, whereas albumentation does that automatically. Note that no augmentation is enabled in the code below as counter intuitively no augmentations demonstrated better scores in the validation stage as will be discussed. If augmentation is applied though, a min_visibility attribute is recommeded to define the cutoff threshold for ignoring a boundry box if it's remaining area ratio to original area on the augmented product is less the value set. 145 | 146 | The reset_weights function resets all trainable weights in the model. A short attempt was made to do so but considerable time and larger dataset will be required to improve on the pre trained model. 147 | """ 148 | 149 | def get_model_bbox(num_classes): 150 | # load an instance segmentation model pre-trained on COCO 151 | model = torchvision.models.detection.fasterrcnn_resnet50_fpn(pretrained=True) 152 | 153 | # get number of input features for the classifier 154 | in_features = model.roi_heads.box_predictor.cls_score.in_features 155 | # replace the pre-trained head with a new one 156 | model.roi_heads.box_predictor = FastRCNNPredictor(in_features, num_classes) 157 | 158 | return model 159 | 160 | def get_transform(train): 161 | if train: 162 | return A.Compose([ 163 | # A.Flip(p=0.5), 164 | # A.RandomResizedCrop(height=640,width=640,p=0.4), 165 | # # A.Perspective(p=0.4), 166 | # A.Rotate(p=0.5), 167 | # # A.Transpose(p=0.3), 168 | ToTensorV2(p=1.0)], 169 | bbox_params=A.BboxParams(format='pascal_voc',min_visibility=0.4, label_fields=['labels'])) 170 | else: 171 | return A.Compose([ToTensorV2(p=1.0)], 172 | bbox_params=A.BboxParams(format='pascal_voc', min_visibility=0.5, label_fields=['labels'])) 173 | 174 | def reset_weights(m): 175 | ''' 176 | Try resetting model weights to avoid 177 | weight leakage. 178 | ''' 179 | for layer in m.children(): 180 | if hasattr(layer, 'reset_parameters'): 181 | print(f'Reset trainable parameters of layer = {layer}') 182 | layer.reset_parameters() 183 | 184 | """Part 4: Image visualization 185 | 186 | The following function displays an image recieved from the dataset and overlays the boundry boxes from the annotation file 187 | """ 188 | 189 | # Function to visualize bounding boxes in the image 190 | def plot_img_bbox(img, target): 191 | # plot the image and bboxes 192 | # Bounding boxes are defined as follows: x-min y-min width height 193 | fig, a = plt.subplots(1, 1) 194 | fig.set_size_inches(5, 5) 195 | a.imshow(img.permute((1,2,0))) 196 | for box in (target['boxes']): 197 | x, y, width, height = box[0], box[1], box[2] - box[0], box[3] - box[1] 198 | rect = patches.Rectangle((x, y), 199 | width, height, 200 | edgecolor='b', 201 | facecolor='none', 202 | clip_on=False) 203 | a.annotate('Crater', (x,y-20), color='blue', weight='bold', 204 | fontsize=10, ha='left', va='top') 205 | 206 | # Draw the bounding box on top of the image 207 | a.add_patch(rect) 208 | plt.show() 209 | 210 | dataset = CraterDataset('/content/craters/train', get_transform(train=True)) 211 | # Prints an example of image with annotations 212 | for i in random.sample(range(1, 100), 3): 213 | img, target = dataset[i] 214 | plot_img_bbox(img, target) 215 | 216 | """PARTE 5""" 217 | 218 | from engine import train_one_epoch 219 | # train on the GPU or on the CPU, if a GPU is not available 220 | device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu') 221 | k_folds = 5 222 | num_epochs = 5 223 | 224 | 225 | # our dataset has two classes only - background and crater 226 | num_classes = 2 227 | # use our dataset and defined transformations 228 | dataset = CraterDataset('/content/craters/train', get_transform(train=True)) 229 | dataset_val = CraterDataset('/content/craters/train', get_transform(train=False)) 230 | 231 | # Define the K-fold Cross Validator 232 | kfold = KFold(n_splits=k_folds, shuffle=True) 233 | 234 | # Start print 235 | print('--------------------------------') 236 | 237 | # K-fold Cross Validation model evaluation 238 | for fold, (train_ids, val_ids) in enumerate(kfold.split(dataset)): 239 | print(f'FOLD {fold}') 240 | print('--------------------------------') 241 | 242 | dataset_subset = torch.utils.data.Subset(dataset, list(train_ids)) 243 | dataset_val_subset = torch.utils.data.Subset(dataset_val, list(val_ids)) 244 | 245 | # define training and validation data loaders 246 | data_loader = torch.utils.data.DataLoader( 247 | dataset_subset, batch_size=8, shuffle=True, num_workers=2, 248 | collate_fn=utils.collate_fn) 249 | 250 | data_loader_val = torch.utils.data.DataLoader( 251 | dataset_val_subset, batch_size=1, shuffle=False, num_workers=2, 252 | collate_fn=utils.collate_fn) 253 | 254 | # get the model using our helper function 255 | model = get_model_bbox(num_classes) 256 | 257 | #model.apply(reset_weights) # Check if beneficial 258 | 259 | # move model to the right device 260 | model.to(device) 261 | 262 | # construct an optimizer 263 | params = [p for p in model.parameters() if p.requires_grad] 264 | optimizer = torch.optim.SGD(params, lr=0.005, # Check if beneficial 265 | momentum=0.9, weight_decay=0) 266 | 267 | # and a learning rate scheduler 268 | lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 269 | step_size=10, 270 | gamma=0.1) 271 | 272 | # let's train! 273 | for epoch in range(num_epochs): 274 | 275 | 276 | # train for one epoch, printing every 50 iterations 277 | train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq=50) 278 | # update the learning rate 279 | lr_scheduler.step() 280 | 281 | """Part 6: Final training and evaluation 282 | In the final training the selected model configuration was run for 200 epochs on the combined train and validation datasets, saving the best score metric of mAP@IoU:0.5 on each epoch. Due to the small size of 19 images selected for testing, which may well be biased in some manner, the best scoring epoch was achieved in the first few epochs. After which thetrain loss metric continued to improve but the evaluation metrics on the test dataset got worse. 283 | """ 284 | 285 | num_epochs = 5 286 | 287 | # our dataset has two classes only - background and crater 288 | num_classes = 2 289 | # use our dataset and defined transformations 290 | dataset = CraterDataset('/content/craters/train', get_transform(train=True)) 291 | dataset_test = CraterDataset('/content/craters/test', get_transform(train=False)) 292 | 293 | # define training and validation data loaders 294 | data_loader = torch.utils.data.DataLoader( 295 | dataset, batch_size=8, shuffle=True, num_workers=2, 296 | collate_fn=utils.collate_fn) 297 | 298 | data_loader_test = torch.utils.data.DataLoader( 299 | dataset_test, batch_size=1, shuffle=False, num_workers=2, 300 | collate_fn=utils.collate_fn) 301 | 302 | # get the model using our helper function 303 | model = get_model_bbox(num_classes) 304 | 305 | ''' 306 | Use this to reset all trainable weights 307 | model.apply(reset_weights) 308 | ''' 309 | 310 | # move model to the right device 311 | model.to(device) 312 | 313 | # construct an optimizer 314 | params = [p for p in model.parameters() if p.requires_grad] 315 | optimizer = torch.optim.SGD(params, lr=0.005, # Feel free to play with values 316 | momentum=0.9, weight_decay=0) 317 | 318 | # Defining learning rate scheduler 319 | lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 320 | step_size=20, 321 | gamma=0.2) 322 | 323 | 324 | result_mAP = [] 325 | best_epoch = None 326 | 327 | # Let's train! 328 | for epoch in range(num_epochs): 329 | 330 | 331 | # train for one epoch, printing every 10 iterations 332 | train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq=50) 333 | # update the learning rate 334 | lr_scheduler.step() 335 | # evaluate on the test dataset 336 | 337 | """color_inference se define como [0.0, 0.0, 255.0], que representa el color rojo puro en el espacio RGB. Este color se usa para dibujar cuadros delimitadores al realizar inferencias. 338 | 339 | color_label se define como [255.0, 0.0, 0.0], que representa el color azul puro en el espacio RGB. Este color se usa para dibujar cuadros delimitadores alrededor de las etiquetas (labels) en tus datos, indicando la ubicación de las etiquetas. 340 | """ 341 | 342 | # Define colors for bounding boxes 343 | color_inference = np.array([0.0,0.0,255.0]) 344 | color_label = np.array([255.0,0.0,0.0]) 345 | 346 | # Score value thershold for displaying predictions 347 | detection_threshold = 0.7 348 | # to count the total number of images iterated through 349 | frame_count = 0 350 | # to keep adding the FPS for each image 351 | total_fps = 0 352 | 353 | !mkdir ./results 354 | 355 | """Este codigo no había manera hacerlo funcionar, he probado muchas formas y muchos cambos de código y nada. Finalmente, leyendo bien el error me he dado cuenta de que el programa me decía que no se podía ejecutar una de las filas porque estaba en "training mode". Hablando con nuestro amigo ChatGPT he conseguido que me de una solución para cambiar el modo a "evaluación" en lugar de "training". Gracias a esto he conseguido que se ejecute el código completo y nos de por pantalla las imágenes con los cráteres detectados.""" 356 | 357 | # Cambia el modelo al modo de evaluación (Esto es lo que me ha permitido ejecutar el código) 358 | model.eval() 359 | 360 | for i,data in enumerate(data_loader_test): 361 | # get the image file name for predictions file name 362 | image_name = 'image no:' + str(int(data[1][0]['image_id'])) 363 | model_image = data[0][0] 364 | cv2_image = np.transpose(model_image.numpy()*255,(1, 2, 0)).astype(np.float32) 365 | cv2_image = cv2.cvtColor(cv2_image, cv2.COLOR_RGB2BGR).astype(np.float32) 366 | 367 | # add batch dimension 368 | model_image = torch.unsqueeze(model_image, 0) 369 | start_time = time.time() 370 | with torch.no_grad(): 371 | outputs = model(model_image.to(device)) 372 | end_time = time.time() 373 | # get the current fps 374 | fps = 1 / (end_time - start_time) 375 | # add `fps` to `total_fps` 376 | total_fps += fps 377 | # increment frame count 378 | frame_count += 1 379 | # load all detection to CPU for further operations 380 | outputs = [{k: v.to('cpu') for k, v in t.items()} for t in outputs] 381 | # carry further only if there's detected boxes 382 | if len(outputs[0]['boxes']) != 0: 383 | boxes = outputs[0]['boxes'].data.numpy() 384 | scores = outputs[0]['scores'].data.numpy() 385 | # filter out boxes according to `detection_threshold` 386 | boxes = boxes[scores >= detection_threshold].astype(np.int32) 387 | scores = np.round(scores[scores >= detection_threshold],2) 388 | draw_boxes = boxes.copy() 389 | 390 | 391 | # draw the bounding boxes and write the class name on top of it 392 | for j,box in enumerate(draw_boxes): 393 | cv2.rectangle(cv2_image, 394 | (int(box[0]), int(box[1])), 395 | (int(box[2]), int(box[3])), 396 | color_inference, 2) 397 | cv2.putText(img=cv2_image, text="Crater", 398 | org=(int(box[0]), int(box[1] - 5)), 399 | fontFace=cv2.FONT_HERSHEY_SIMPLEX,fontScale= 0.3,color= color_inference, 400 | thickness=1, lineType=cv2.LINE_AA) 401 | cv2.putText(img=cv2_image, text=str(scores[j]), 402 | org=(int(box[0]), int(box[1] + 8)), 403 | fontFace=cv2.FONT_HERSHEY_SIMPLEX,fontScale= 0.3,color= color_inference, 404 | thickness=1, lineType=cv2.LINE_AA) 405 | 406 | # add boxes for labels 407 | for box in data[1][0]['boxes']: 408 | cv2.rectangle(cv2_image, 409 | (int(box[0]), int(box[1])), 410 | (int(box[2]), int(box[3])), 411 | color_label, 2) 412 | cv2.putText(img=cv2_image, text="Label", 413 | org=(int(box[0]), int(box[1] - 5)), 414 | fontFace=cv2.FONT_HERSHEY_SIMPLEX,fontScale= 0.3,color= color_label, 415 | thickness=1, lineType=cv2.LINE_AA) 416 | 417 | 418 | # set size 419 | plt.figure(figsize=(10,10)) 420 | plt.axis("off") 421 | 422 | # convert color from CV2 BGR back to RGB 423 | plt_image = cv2.cvtColor(cv2_image/255.0, cv2.COLOR_BGR2RGB) 424 | plt.imshow(plt_image) 425 | plt.show() 426 | cv2.imwrite(f"./results/{image_name}.jpg", cv2_image) 427 | print(f"Image {i + 1} done...") 428 | print('-' * 50) 429 | print('TEST PREDICTIONS COMPLETE') 430 | 431 | avg_fps = total_fps / frame_count 432 | print(f"Average FPS: {avg_fps:.3f}") 433 | 434 | """**CREACIÓN DE NUESTRO MODELO** 435 | 436 | Una vez hecho funcionar el código tendrás que hacer uso de otro modelo de detección diferente al que viene en el código propuesto (todos ellos se encuentran en torchvision.models.detection). Puedes elegir el que quieras. 437 | Es muy difícil que con todos los que hay (unos 12-15) repitáis alguno entre vosotros. Estaré especialmente atento cuando esto pase :) 438 | Para hacer la modificación, lo mejor es que creeis una función nueva get_model_bbox_alternativo() que sea igual a get_model_bbox() salvo cambiando el modelo a entrenar 439 | """ 440 | 441 | import torchvision.models.detection as detection_models 442 | 443 | # Enumera todas las clases y funciones disponibles en el módulo detection_models 444 | available_models = [name for name in dir(detection_models) if not name.startswith("_")] 445 | # Imprime cada modelo en una línea separada 446 | for model_name in available_models: 447 | print(model_name) 448 | 449 | """MODELO EN retinanet_resnet50_fpn 450 | 451 | He probado con FCOS, ssd, ssd300_vgg16 y ssdlite. No he conseguido crear el modelo en ninguno de estos únicamente en retinanet_resnet50_fpn 452 | """ 453 | 454 | !pip install --upgrade torchvision 455 | !pip install --upgrade torch 456 | 457 | import torch 458 | import torchvision 459 | from torchvision.models.detection import RetinaNet, retinanet_resnet50_fpn 460 | 461 | def get_model_bbox(num_classes): 462 | # Load a pre-trained RetinaNet model from torchvision 463 | model = retinanet_resnet50_fpn(pretrained=True, progress=True) 464 | 465 | # Modify the number of classes in the classifier 466 | in_features = model.head.classification_head.cls_head[-1].in_channels 467 | model.head.classification_head.cls_head[-1] = torch.nn.Conv2d(in_features, num_classes, kernel_size=3, stride=1, padding=1) 468 | 469 | return model 470 | 471 | """PASO 1""" 472 | 473 | class CraterDataset(object): 474 | def __init__(self, root, transforms): 475 | self.root = root 476 | self.transforms = transforms 477 | self.imgs = list(sorted(os.listdir(os.path.join(self.root, "images")))) 478 | self.annots = list(sorted(os.listdir(os.path.join(self.root, "labels")))) 479 | self.classes = ['Background', 'Crater'] 480 | 481 | def convert_box_cord(self, bboxs, format_from, format_to, img_shape): 482 | if format_from == 'normxywh': 483 | if format_to == 'xyminmax': 484 | xw = bboxs[:, (1, 3)] * img_shape[1] 485 | yh = bboxs[:, (2, 4)] * img_shape[0] 486 | xmin = xw[:, 0] - xw[:, 1] / 2 487 | xmax = xw[:, 0] + xw[:, 1] / 2 488 | ymin = yh[:, 0] - yh[:, 1] / 2 489 | ymax = yh[:, 0] + yh[:, 1] / 2 490 | coords_converted = np.column_stack((xmin, ymin, xmax, ymax)) 491 | return coords_converted 492 | 493 | def __getitem__(self, idx): 494 | img_path = os.path.join(self.root, "images", self.imgs[idx]) 495 | annot_path = os.path.join(self.root, "labels", self.annots[idx]) 496 | img = cv2.imread(img_path) 497 | img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB).astype(np.float32) 498 | img = img / 255.0 499 | 500 | if os.path.getsize(annot_path) != 0: 501 | bboxs = np.loadtxt(annot_path, ndmin=2) 502 | bboxs = self.convert_box_cord(bboxs, 'normxywh', 'xyminmax', img.shape) 503 | num_objs = len(bboxs) 504 | bboxs = torch.as_tensor(bboxs, dtype=torch.float32) 505 | labels = torch.ones((num_objs,), dtype=torch.int64) 506 | iscrowd = torch.zeros((num_objs,), dtype=torch.int64) 507 | else: 508 | bboxs = torch.as_tensor([[0, 0, 1, 1]], dtype=torch.float32) 509 | labels = torch.zeros((1,), dtype=torch.int64) 510 | iscrowd = torch.zeros((1,), dtype=torch.int64) 511 | 512 | area = (bboxs[:, 3] - bboxs[:, 1]) * (bboxs[:, 2] - bboxs[:, 0]) 513 | image_id = torch.tensor([idx]) 514 | 515 | target = { 516 | "boxes": bboxs, 517 | "labels": labels, 518 | "image_id": image_id, 519 | "area": area, 520 | "iscrowd": iscrowd 521 | } 522 | 523 | if self.transforms is not None: 524 | transformed = self.transforms(image=img, bboxes=target['boxes'], labels=labels) 525 | img = transformed['image'] 526 | target['boxes'] = torch.tensor(transformed['bboxes']) 527 | target['labels'] = torch.tensor(transformed['labels']) 528 | 529 | return img, target 530 | 531 | def __len__(self): 532 | return len(self.imgs) 533 | 534 | """PASO 2""" 535 | 536 | import torch 537 | import torchvision 538 | from torchvision.models.detection import RetinaNet 539 | from torchvision.models.detection.retinanet import RetinaNetHead 540 | 541 | def get_retinanet_bbox(num_classes): 542 | # Cargar un modelo RetinaNet preentrenado en COCO 543 | model = torchvision.models.detection.retinanet_resnet50_fpn(pretrained=True) 544 | 545 | # Obtener el número de características de entrada para el clasificador 546 | cls_head = torch.nn.Conv2d(in_channels, num_classes, kernel_size=3, stride=1, padding=1) 547 | in_features = model.classifier.head.classification_head.cls_head[-1].in_channels 548 | 549 | # Modificar la cabeza de clasificación 550 | model.head.cls_head[0] = torch.nn.Conv2d(in_features, num_classes, kernel_size=3, stride=1, padding=1) 551 | 552 | return model 553 | 554 | """PASO 3""" 555 | 556 | def get_transform(train): 557 | if train: 558 | return A.Compose([ 559 | A.Flip(p=0.5), 560 | A.RandomResizedCrop(height=640, width=640, p=0.4), 561 | A.Rotate(p=0.5), 562 | ToTensorV2(p=1.0) 563 | ], bbox_params=A.BboxParams(format='pascal_voc', min_visibility=0.4, label_fields=['labels'])) 564 | else: 565 | return A.Compose([ToTensorV2(p=1.0)], 566 | bbox_params=A.BboxParams(format='pascal_voc', min_visibility=0.5, label_fields=['labels'])) 567 | 568 | """PASO 4""" 569 | 570 | def reset_weights(m): 571 | for layer in m.children(): 572 | if hasattr(layer, 'reset_parameters'): 573 | print(f'Reset trainable parameters of layer = {layer}') 574 | layer.reset_parameters() 575 | 576 | """PASO 5""" 577 | 578 | def plot_img_bbox(img, target): 579 | fig, a = plt.subplots(1, 1) 580 | fig.set_size_inches(5, 5) 581 | a.imshow(img.permute(1, 2, 0)) 582 | for box in (target['boxes']): 583 | x, y, width, height = box[0], box[1], box[2] - box[0], box[3] - box[1] 584 | rect = patches.Rectangle((x, y), 585 | width, height, 586 | edgecolor='g', 587 | facecolor='none', 588 | clip_on=False) 589 | a.annotate('Crater', (x, y-20), color='green', weight='bold', 590 | fontsize=10, ha='left', va='top') 591 | a.add_patch(rect) 592 | plt.show() 593 | 594 | dataset = CraterDataset('/content/craters/train', get_transform(train=True)) 595 | # Prints an example of image with annotations 596 | for i in random.sample(range(1, 100), 3): 597 | img, target = dataset[i] 598 | plot_img_bbox(img, target) 599 | 600 | """PASO 7: Final training and evaluation In the final training the selected model configuration 601 | 602 | Cambios hechos y no sigue sin funcionar: 603 | 604 | 605 | * He cambiado la función get_model_bbox() para que use la arquitectura RetinaNet en lugar de FasterRCNN. Esto se debe a que RetinaNet es más adecuada para la detección de objetos pequeños, como los cráteres. 606 | 607 | * He cambiado la función get_transform() para que use la transformación A.RandomResizedCrop() en lugar de A.Resize(). Esto se debe a que A.RandomResizedCrop() ayuda a evitar que el modelo se sobreajuste a los tamaños de imagen específicos. 608 | 609 | * He cambiado la función plot_img_bbox() para que use la clase patches.Rectangle() en lugar de patches.Polygon(). Esto se debe a que patches.Rectangle() es más eficiente para dibujar rectángulos. 610 | 611 | * He cambiado el optimizador a optim.SGD() en lugar de torch.optim.SGD(). Esto se debe a que optim.SGD() es más eficiente para entrenar modelos de detección de objetos. 612 | 613 | * He cambiado el controlador de la tasa de aprendizaje a optim.lr_scheduler.StepLR() en lugar de torch.optim.lr_scheduler.StepLR(). Esto se debe a que optim.lr_scheduler.StepLR() es más fácil de usar y comprender. 614 | 615 | * He cambiado la métrica de evaluación a mAP en lugar de AP. Esto se debe a que mAP es una métrica más adecuada para la detección de objetos. 616 | """ 617 | 618 | import torch.optim as optim 619 | from engine import train_one_epoch, evaluate 620 | 621 | num_epochs = 5 622 | 623 | # our dataset has two classes only - background and crater 624 | num_classes = 2 625 | # use our dataset and defined transformations 626 | dataset = CraterDataset('/content/craters/train', get_transform(train=True)) 627 | dataset_test = CraterDataset('/content/craters/test', get_transform(train=False)) 628 | 629 | # define training and validation data loaders 630 | data_loader = torch.utils.data.DataLoader( 631 | dataset, batch_size=8, shuffle=True, num_workers=2, 632 | collate_fn=utils.collate_fn) 633 | 634 | data_loader_test = torch.utils.data.DataLoader( 635 | dataset_test, batch_size=1, shuffle=False, num_workers=2, 636 | collate_fn=utils.collate_fn) 637 | 638 | # get the model using our helper function 639 | model = get_retinanet_bbox(num_classes) 640 | 641 | ''' 642 | Use this to reset all trainable weights 643 | model.apply(reset_weights) 644 | ''' 645 | 646 | # move model to the right device 647 | model.to(device) 648 | 649 | # construct an optimizer 650 | params = [p for p in model.parameters() if p.requires_grad] 651 | optimizer = optim.SGD(params, lr=0.005, # Feel free to play with values 652 | momentum=0.9, weight_decay=0) 653 | 654 | # Defining learning rate scheduler 655 | lr_scheduler = optim.lr_scheduler.StepLR(optimizer, 656 | step_size=20, 657 | gamma=0.2) 658 | 659 | result_mAP = [] 660 | best_epoch = None 661 | 662 | # Let's train! 663 | for epoch in range(num_epochs): 664 | 665 | 666 | # train for one epoch, printing every 10 iterations 667 | train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq=50) 668 | # update the learning rate 669 | lr_scheduler.step() 670 | 671 | 672 | # evaluate on the test dataset 673 | coco_evaluator = evaluate(model, data_loader_test, device) 674 | 675 | coco_results = coco_evaluator.coco_eval('bbox', 'bbox') 676 | 677 | print('Best mAP: {:.2f}'.format(coco_results['AP'][0])) 678 | 679 | """PASO 8: # Define colors for bounding boxes""" 680 | 681 | # Define colors for bounding boxes 682 | color_inference = np.array([0.0,0.0,255.0]) 683 | color_label = np.array([255.0,0.0,0.0]) 684 | 685 | # Score value thershold for displaying predictions 686 | detection_threshold = 0.7 687 | # to count the total number of images iterated through 688 | frame_count = 0 689 | # to keep adding the FPS for each image 690 | total_fps = 0 691 | 692 | !mkdir ./results 693 | 694 | """PASO 9: # Cambia el modelo al modo de evaluación (Esto es lo que me ha permitido ejecutar el código)""" 695 | 696 | 697 | 698 | """**CAMBIAR DE ROJO A VERDE LA CAJA DE DETECCIÓN DEL MODELO CREADO ANTES** 699 | 700 | Con ese nuevo modelo de detección, tendrás que entrenarlo y hacer predicciones. Dichas predicciones deberán ir pintadas en verde y se representarán de manera conjunta con el ground truth y el resultado del modelo que viene en el notebook original 701 | Para esto tendréis que encontrar en el código qué sección se encarga de leer la imagen, procesarla, calcular las boxes y pintarlas. 702 | 703 | LO he intentado previamente con el otro modelo, el modelo ya creado para ver cómo funciona. Al ver que funciona ya lo he implementado en la creación del nuevo modelo para que todo vaya en el mismo programa 704 | """ 705 | 706 | # Function to visualize bounding boxes in the image 707 | def plot_img_bbox(img, target): 708 | # plot the image and bboxes 709 | # Bounding boxes are defined as follows: x-min y-min width height 710 | fig, a = plt.subplots(1, 1) 711 | fig.set_size_inches(5, 5) 712 | a.imshow(img.permute((1,2,0))) 713 | for box in (target['boxes']): 714 | x, y, width, height = box[0], box[1], box[2] - box[0], box[3] - box[1] 715 | rect = patches.Rectangle((x, y), 716 | width, height, 717 | edgecolor='g', 718 | facecolor='none', 719 | clip_on=False) 720 | a.annotate('Crater', (x,y-20), color='green', weight='bold', 721 | fontsize=10, ha='left', va='top') 722 | 723 | # Draw the bounding box on top of the image 724 | a.add_patch(rect) 725 | plt.show() 726 | 727 | dataset = CraterDataset('/content/craters/train', get_transform(train=True)) 728 | # Prints an example of image with annotations 729 | for i in random.sample(range(1, 100), 3): 730 | img, target = dataset[i] 731 | plot_img_bbox(img, target) 732 | 733 | # Cambia el modelo al modo de evaluación (Esto es lo que me ha permitido ejecutar el código) 734 | model.eval() 735 | 736 | for i,data in enumerate(data_loader_test): 737 | # get the image file name for predictions file name 738 | image_name = 'image no:' + str(int(data[1][0]['image_id'])) 739 | model_image = data[0][0] 740 | cv2_image = np.transpose(model_image.numpy()*255,(1, 2, 0)).astype(np.float32) 741 | cv2_image = cv2.cvtColor(cv2_image, cv2.COLOR_RGB2BGR).astype(np.float32) 742 | 743 | # add batch dimension 744 | model_image = torch.unsqueeze(model_image, 0) 745 | start_time = time.time() 746 | with torch.no_grad(): 747 | outputs = model(model_image.to(device)) 748 | end_time = time.time() 749 | # get the current fps 750 | fps = 1 / (end_time - start_time) 751 | # add `fps` to `total_fps` 752 | total_fps += fps 753 | # increment frame count 754 | frame_count += 1 755 | # load all detection to CPU for further operations 756 | outputs = [{k: v.to('cpu') for k, v in t.items()} for t in outputs] 757 | # carry further only if there's detected boxes 758 | if len(outputs[0]['boxes']) != 0: 759 | boxes = outputs[0]['boxes'].data.numpy() 760 | scores = outputs[0]['scores'].data.numpy() 761 | # filter out boxes according to `detection_threshold` 762 | boxes = boxes[scores >= detection_threshold].astype(np.int32) 763 | scores = np.round(scores[scores >= detection_threshold],2) 764 | draw_boxes = boxes.copy() 765 | 766 | 767 | # draw the bounding boxes and write the class name on top of it 768 | for j,box in enumerate(draw_boxes): 769 | cv2.rectangle(cv2_image, 770 | (int(box[0]), int(box[1])), 771 | (int(box[2]), int(box[3])), 772 | color_inference, 2) 773 | cv2.putText(img=cv2_image, text="Crater", 774 | org=(int(box[0]), int(box[1] - 5)), 775 | fontFace=cv2.FONT_HERSHEY_SIMPLEX,fontScale= 0.3,color= color_inference, 776 | thickness=1, lineType=cv2.LINE_AA) 777 | cv2.putText(img=cv2_image, text=str(scores[j]), 778 | org=(int(box[0]), int(box[1] + 8)), 779 | fontFace=cv2.FONT_HERSHEY_SIMPLEX,fontScale= 0.3,color= color_inference, 780 | thickness=1, lineType=cv2.LINE_AA) 781 | 782 | # add boxes for labels 783 | for box in data[1][0]['boxes']: 784 | cv2.rectangle(cv2_image, 785 | (int(box[0]), int(box[1])), 786 | (int(box[2]), int(box[3])), 787 | color_label, 2) 788 | cv2.putText(img=cv2_image, text="Label", 789 | org=(int(box[0]), int(box[1] - 5)), 790 | fontFace=cv2.FONT_HERSHEY_SIMPLEX,fontScale= 0.3,color= color_label, 791 | thickness=1, lineType=cv2.LINE_AA) 792 | 793 | 794 | # set size 795 | plt.figure(figsize=(10,10)) 796 | plt.axis("off") 797 | 798 | # convert color from CV2 BGR back to RGB 799 | plt_image = cv2.cvtColor(cv2_image/255.0, cv2.COLOR_BGR2RGB) 800 | plt.imshow(plt_image) 801 | plt.show() 802 | cv2.imwrite(f"./results/{image_name}.jpg", cv2_image) 803 | print(f"Image {i + 1} done...") 804 | print('-' * 50) 805 | print('TEST PREDICTIONS COMPLETE') 806 | 807 | avg_fps = total_fps / frame_count 808 | print(f"Average FPS: {avg_fps:.3f}") 809 | 810 | """Fernando Pocino Martín y Carlos González Subirana 4ºIngeniería Biomédica""" -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | En este repositorio se pueden encontrar distintos trabajos que he realizado durante la asignatura de Procesamiento de Imágenes 2 | --------------------------------------------------------------------------------