├── .coveragerc ├── .gitignore ├── .travis.yml ├── AUTHORS.rst ├── CHANGELOG.rst ├── CONTRIBUTING.rst ├── LICENSE.txt ├── Pipfile ├── Pipfile.lock ├── README.rst ├── docs ├── Makefile ├── _static │ └── .gitignore ├── api │ ├── modules.rst │ ├── unet.datasets.rst │ └── unet.rst ├── authors.rst ├── changelog.rst ├── conf.py ├── contributing.rst ├── galaxies.png ├── index.rst ├── installation.rst ├── license.rst ├── requirements.txt ├── rfi.png ├── toy_problem.png ├── unet.png └── usage.rst ├── notebooks ├── circles.ipynb └── oxford_pets.ipynb ├── scripts ├── circles.py └── oxford_iiit_pet.py ├── setup.cfg ├── setup.py ├── src ├── __init__.py └── unet │ ├── __init__.py │ ├── callbacks.py │ ├── datasets │ ├── __init__.py │ ├── circles.py │ └── oxford_iiit_pet.py │ ├── metrics.py │ ├── schedulers.py │ ├── trainer.py │ ├── unet.py │ └── utils.py └── tests ├── conftest.py ├── test_schedulers.py ├── test_trainer.py ├── test_unet.py └── test_utils.py /.coveragerc: -------------------------------------------------------------------------------- 1 | # .coveragerc to control coverage.py 2 | [run] 3 | branch = True 4 | source = unet 5 | # omit = bad_file.py 6 | 7 | [paths] 8 | source = 9 | src/ 10 | */site-packages/ 11 | 12 | [report] 13 | # Regexes for lines to exclude from consideration 14 | exclude_lines = 15 | # Have to re-enable the standard pragma 16 | pragma: no cover 17 | 18 | # Don't complain about missing debug-only code: 19 | def __repr__ 20 | if self\.debug 21 | 22 | # Don't complain if tests don't hit defensive assertion code: 23 | raise AssertionError 24 | raise NotImplementedError 25 | 26 | # Don't complain if non-runnable code isn't run: 27 | if 0: 28 | if __name__ == .__main__.: 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Temporary and binary files 2 | *~ 3 | *.py[cod] 4 | *.so 5 | *.cfg 6 | !.isort.cfg 7 | !setup.cfg 8 | *.orig 9 | *.log 10 | *.pot 11 | __pycache__/* 12 | .cache/* 13 | .*.swp 14 | */.ipynb_checkpoints/* 15 | .DS_Store 16 | 17 | # Project files 18 | .ropeproject 19 | .project 20 | .pydevproject 21 | .settings 22 | .idea 23 | tags 24 | 25 | # Package files 26 | *.egg 27 | *.eggs/ 28 | .installed.cfg 29 | *.egg-info 30 | 31 | # Unittest and coverage 32 | htmlcov/* 33 | .coverage 34 | .tox 35 | junit.xml 36 | coverage.xml 37 | .pytest_cache/ 38 | 39 | # Build and docs folder/files 40 | build/* 41 | dist/* 42 | sdist/* 43 | docs/_rst/* 44 | docs/_build/* 45 | cover/* 46 | MANIFEST 47 | 48 | # Per-project virtualenvs 49 | .venv*/ 50 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | matrix: 3 | include: 4 | - python: 3.7 5 | - python: 3.8 6 | 7 | install: 8 | - pip install pipenv --upgrade 9 | - pipenv install --dev 10 | 11 | script: 12 | - python -m pytest 13 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * `@jakeret `_ 9 | 10 | Contributors 11 | ------------ 12 | * `@tdrobbins `_ 13 | * `@ck090 `_ 14 | * `gokarslan `_ 15 | 16 | Citations 17 | --------- 18 | 19 | As you use **unet** for your exciting discoveries, please cite the paper that describes the package: 20 | 
20 | 21 | `J. Akeret, C. Chang, A. Lucchi, A. Refregier, Published in Astronomy and Computing (2017) `_ 22 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | Version 0.1 6 | =========== 7 | 8 | - Feature A added 9 | - FIX: nasty bug #1729 fixed 10 | - add your changes here! 11 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Contributing 3 | ============ 4 | 5 | Contributions are welcome, and they are greatly appreciated! Every 6 | little bit helps, and credit will always be given. 7 | 8 | You can contribute in many ways: 9 | 10 | Types of Contributions 11 | ---------------------- 12 | 13 | Report Bugs 14 | ~~~~~~~~~~~ 15 | 16 | If you are reporting a bug, please include: 17 | 18 | * Your operating system name and version. 19 | * Any details about your local setup that might be helpful in troubleshooting. 20 | * Detailed steps to reproduce the bug. 21 | 22 | Fix Bugs 23 | ~~~~~~~~ 24 | 25 | Implement Features 26 | ~~~~~~~~~~~~~~~~~~ 27 | 28 | Write Documentation 29 | ~~~~~~~~~~~~~~~~~~~ 30 | 31 | Tensorflow Unet could always use more documentation, whether as part of the 32 | official Tensorflow Unet docs, in docstrings, or even on the web in blog posts, 33 | articles, and such. 34 | 35 | Submit Feedback 36 | ~~~~~~~~~~~~~~~ 37 | 38 | If you are proposing a feature: 39 | 40 | * Explain in detail how it would work. 41 | * Keep the scope as narrow as possible, to make it easier to implement. 42 | * Remember that this is a volunteer-driven project, and that contributions 43 | are welcome :) 44 | 45 | Pull Request Guidelines 46 | ----------------------- 47 | 48 | Before you submit a pull request, check that it meets these guidelines: 49 | 50 | 1. The pull request should include tests. 51 | 2. If the pull request adds functionality, the docs should be updated. Put 52 | your new functionality into a function with a docstring, and add the 53 | feature to the list in README.rst. 54 | 3. The pull request should work for Python 3.7 and 3.8. 55 | Make sure that the tests pass for all supported Python versions. 56 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too.
21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. 
The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. 
For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 
204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 
268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. 
But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 
387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. 
You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. 
"Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 
564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 
628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | <one line to give the program's name and a brief idea of what it does.> 635 | Copyright (C) <year> <name of author> 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see <https://www.gnu.org/licenses/>. 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | <program> Copyright (C) <year> <name of author> 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | <https://www.gnu.org/licenses/>. 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | <https://www.gnu.org/philosophy/why-not-lgpl.html>.
675 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | pytest = "*" 8 | matplotlib = ">=3.2.0" 9 | seaborn = ">=0.10.0" 10 | jupyterlab = ">=2.0.1" 11 | sphinx = ">=3.0.0" 12 | tensorflow-datasets = ">=2.1.0" 13 | unet = {editable = true,path = "."} 14 | 15 | [packages] 16 | tensorflow = ">=2.3.0" 17 | numpy = ">=1.18.1" 18 | 19 | [requires] 20 | python_version = "3.7" 21 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "a0bab43ee83fb3e0f936a8fc6064fd553804f596ed8bdc3fe406681eb5c177e1" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.7" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "absl-py": { 20 | "hashes": [ 21 | "sha256:673cccb88d810e5627d0c1c818158485d106f65a583880e2f730c997399bcfa7", 22 | "sha256:b3d9eb5119ff6e0a0125f6dabf2f9fae02f8acae7be70576002fac27235611c5" 23 | ], 24 | "version": "==0.11.0" 25 | }, 26 | "astunparse": { 27 | "hashes": [ 28 | "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872", 29 | "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8" 30 | ], 31 | "version": "==1.6.3" 32 | }, 33 | "cachetools": { 34 | "hashes": [ 35 | "sha256:1d9d5f567be80f7c07d765e21b814326d78c61eb0c3a637dffc0e5d1796cb2e2", 36 | "sha256:f469e29e7aa4cff64d8de4aad95ce76de8ea1125a16c68e0d93f65c3c3dc92e9" 37 | ], 38 | "markers": "python_version ~= '3.5'", 39 | "version": "==4.2.1" 40 | }, 41 | "certifi": { 42 | "hashes": [ 43 | "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", 44 | "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" 45 | ], 46 | "version": "==2020.12.5" 47 | }, 48 | "chardet": { 49 | "hashes": [ 50 | "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", 51 | "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" 52 | ], 53 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 54 | "version": "==4.0.0" 55 | }, 56 | "flatbuffers": { 57 | "hashes": [ 58 | "sha256:63bb9a722d5e373701913e226135b28a6f6ac200d5cc7b4d919fa38d73b44610", 59 | "sha256:9e9ef47fa92625c4721036e7c4124182668dc6021d9e7c73704edd395648deb9" 60 | ], 61 | "version": "==1.12" 62 | }, 63 | "gast": { 64 | "hashes": [ 65 | "sha256:8f46f5be57ae6889a4e16e2ca113b1703ef17f2b0abceb83793eaba9e1351a45", 66 | "sha256:b881ef288a49aa81440d2c5eb8aeefd4c2bb8993d5f50edae7413a85bfdb3b57" 67 | ], 68 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 69 | "version": "==0.3.3" 70 | }, 71 | "google-auth": { 72 | "hashes": [ 73 | "sha256:0b0e026b412a0ad096e753907559e4bdb180d9ba9f68dd9036164db4fdc4ad2e", 74 | "sha256:ce752cc51c31f479dbf9928435ef4b07514b20261b021c7383bee4bda646acb8" 75 | ], 76 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", 77 | "version": "==1.24.0" 78 | }, 79 | "google-auth-oauthlib": { 80 | "hashes": [ 81 | "sha256:65b65bc39ad8cab15039b35e5898455d3d66296d0584d96fe0e79d67d04c51d9", 82 | 
"sha256:d4d98c831ea21d574699978827490a41b94f05d565c617fe1b420e88f1fc8d8d" 83 | ], 84 | "markers": "python_version >= '3.6'", 85 | "version": "==0.4.2" 86 | }, 87 | "google-pasta": { 88 | "hashes": [ 89 | "sha256:4612951da876b1a10fe3960d7226f0c7682cf901e16ac06e473b267a5afa8954", 90 | "sha256:b32482794a366b5366a32c92a9a9201b107821889935a02b3e51f6b432ea84ed", 91 | "sha256:c9f2c8dfc8f96d0d5808299920721be30c9eec37f2389f28904f454565c8a16e" 92 | ], 93 | "version": "==0.2.0" 94 | }, 95 | "grpcio": { 96 | "hashes": [ 97 | "sha256:01d3046fe980be25796d368f8fc5ff34b7cf5e1444f3789a017a7fe794465639", 98 | "sha256:07b430fa68e5eecd78e2ad529ab80f6a234b55fc1b675fe47335ccbf64c6c6c8", 99 | "sha256:0e3edd8cdb71809d2455b9dbff66b4dd3d36c321e64bfa047da5afdfb0db332b", 100 | "sha256:0f3f09269ffd3fded430cd89ba2397eabbf7e47be93983b25c187cdfebb302a7", 101 | "sha256:1376a60f9bfce781b39973f100b5f67e657b5be479f2fd8a7d2a408fc61c085c", 102 | "sha256:14c0f017bfebbc18139551111ac58ecbde11f4bc375b73a53af38927d60308b6", 103 | "sha256:182c64ade34c341398bf71ec0975613970feb175090760ab4f51d1e9a5424f05", 104 | "sha256:1ada89326a364a299527c7962e5c362dbae58c67b283fe8383c4d952b26565d5", 105 | "sha256:1ce6f5ff4f4a548c502d5237a071fa617115df58ea4b7bd41dac77c1ab126e9c", 106 | "sha256:1d384a61f96a1fc6d5d3e0b62b0a859abc8d4c3f6d16daba51ebf253a3e7df5d", 107 | "sha256:25959a651420dd4a6fd7d3e8dee53f4f5fd8c56336a64963428e78b276389a59", 108 | "sha256:28677f057e2ef11501860a7bc15de12091d40b95dd0fddab3c37ff1542e6b216", 109 | "sha256:378fe80ec5d9353548eb2a8a43ea03747a80f2e387c4f177f2b3ff6c7d898753", 110 | "sha256:3afb058b6929eba07dba9ae6c5b555aa1d88cb140187d78cc510bd72d0329f28", 111 | "sha256:4396b1d0f388ae875eaf6dc05cdcb612c950fd9355bc34d38b90aaa0665a0d4b", 112 | "sha256:4775bc35af9cd3b5033700388deac2e1d611fa45f4a8dcb93667d94cb25f0444", 113 | "sha256:5bddf9d53c8df70061916c3bfd2f468ccf26c348bb0fb6211531d895ed5e4c72", 114 | "sha256:6d869a3e8e62562b48214de95e9231c97c53caa7172802236cd5d60140d7cddd", 115 | "sha256:6f7947dad606c509d067e5b91a92b250aa0530162ab99e4737090f6b17eb12c4", 116 | "sha256:7cda998b7b551503beefc38db9be18c878cfb1596e1418647687575cdefa9273", 117 | "sha256:99bac0e2c820bf446662365df65841f0c2a55b0e2c419db86eaf5d162ddae73e", 118 | "sha256:9c0d8f2346c842088b8cbe3e14985b36e5191a34bf79279ba321a4bf69bd88b7", 119 | "sha256:a8004b34f600a8a51785e46859cd88f3386ef67cccd1cfc7598e3d317608c643", 120 | "sha256:ac7028d363d2395f3d755166d0161556a3f99500a5b44890421ccfaaf2aaeb08", 121 | "sha256:be98e3198ec765d0a1e27f69d760f69374ded8a33b953dcfe790127731f7e690", 122 | "sha256:c31e8a219650ddae1cd02f5a169e1bffe66a429a8255d3ab29e9363c73003b62", 123 | "sha256:c4966d746dccb639ef93f13560acbe9630681c07f2b320b7ec03fe2c8f0a1f15", 124 | "sha256:c58825a3d8634cd634d8f869afddd4d5742bdb59d594aea4cea17b8f39269a55", 125 | "sha256:ce617e1c4a39131f8527964ac9e700eb199484937d7a0b3e52655a3ba50d5fb9", 126 | "sha256:e28e4c0d4231beda5dee94808e3a224d85cbaba3cfad05f2192e6f4ec5318053", 127 | "sha256:e467af6bb8f5843f5a441e124b43474715cfb3981264e7cd227343e826dcc3ce", 128 | "sha256:e6786f6f7be0937614577edcab886ddce91b7c1ea972a07ef9972e9f9ecbbb78", 129 | "sha256:e811ce5c387256609d56559d944a974cc6934a8eea8c76e7c86ec388dc06192d", 130 | "sha256:ec10d5f680b8e95a06f1367d73c5ddcc0ed04a3f38d6e4c9346988fb0cea2ffa", 131 | "sha256:ef9bd7fdfc0a063b4ed0efcab7906df5cae9bbcf79d05c583daa2eba56752b00", 132 | "sha256:f03dfefa9075dd1c6c5cc27b1285c521434643b09338d8b29e1d6a27b386aa82", 133 | "sha256:f12900be4c3fd2145ba94ab0d80b7c3d71c9e6414cfee2f31b1c20188b5c281f", 134 | 
"sha256:f53f2dfc8ff9a58a993e414a016c8b21af333955ae83960454ad91798d467c7b", 135 | "sha256:f7d508691301027033215d3662dab7e178f54d5cca2329f26a71ae175d94b83f" 136 | ], 137 | "version": "==1.32.0" 138 | }, 139 | "h5py": { 140 | "hashes": [ 141 | "sha256:063947eaed5f271679ed4ffa36bb96f57bc14f44dd4336a827d9a02702e6ce6b", 142 | "sha256:13c87efa24768a5e24e360a40e0bc4c49bcb7ce1bb13a3a7f9902cec302ccd36", 143 | "sha256:16ead3c57141101e3296ebeed79c9c143c32bdd0e82a61a2fc67e8e6d493e9d1", 144 | "sha256:3dad1730b6470fad853ef56d755d06bb916ee68a3d8272b3bab0c1ddf83bb99e", 145 | "sha256:51ae56894c6c93159086ffa2c94b5b3388c0400548ab26555c143e7cfa05b8e5", 146 | "sha256:54817b696e87eb9e403e42643305f142cd8b940fe9b3b490bbf98c3b8a894cf4", 147 | "sha256:549ad124df27c056b2e255ea1c44d30fb7a17d17676d03096ad5cd85edb32dc1", 148 | "sha256:64f74da4a1dd0d2042e7d04cf8294e04ddad686f8eba9bb79e517ae582f6668d", 149 | "sha256:6998be619c695910cb0effe5eb15d3a511d3d1a5d217d4bd0bebad1151ec2262", 150 | "sha256:6ef7ab1089e3ef53ca099038f3c0a94d03e3560e6aff0e9d6c64c55fb13fc681", 151 | "sha256:769e141512b54dee14ec76ed354fcacfc7d97fea5a7646b709f7400cf1838630", 152 | "sha256:79b23f47c6524d61f899254f5cd5e486e19868f1823298bc0c29d345c2447172", 153 | "sha256:7be5754a159236e95bd196419485343e2b5875e806fe68919e087b6351f40a70", 154 | "sha256:84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d", 155 | "sha256:86868dc07b9cc8cb7627372a2e6636cdc7a53b7e2854ad020c9e9d8a4d3fd0f5", 156 | "sha256:8bb1d2de101f39743f91512a9750fb6c351c032e5cd3204b4487383e34da7f75", 157 | "sha256:a5f82cd4938ff8761d9760af3274acf55afc3c91c649c50ab18fcff5510a14a5", 158 | "sha256:aac4b57097ac29089f179bbc2a6e14102dd210618e94d77ee4831c65f82f17c0", 159 | "sha256:bffbc48331b4a801d2f4b7dac8a72609f0b10e6e516e5c480a3e3241e091c878", 160 | "sha256:c0d4b04bbf96c47b6d360cd06939e72def512b20a18a8547fa4af810258355d5", 161 | "sha256:c54a2c0dd4957776ace7f95879d81582298c5daf89e77fb8bee7378f132951de", 162 | "sha256:cbf28ae4b5af0f05aa6e7551cee304f1d317dbed1eb7ac1d827cee2f1ef97a99", 163 | "sha256:d35f7a3a6cefec82bfdad2785e78359a0e6a5fbb3f605dd5623ce88082ccd681", 164 | "sha256:d3c59549f90a891691991c17f8e58c8544060fdf3ccdea267100fa5f561ff62f", 165 | "sha256:d7ae7a0576b06cb8e8a1c265a8bc4b73d05fdee6429bffc9a26a6eb531e79d72", 166 | "sha256:ecf4d0b56ee394a0984de15bceeb97cbe1fe485f1ac205121293fc44dcf3f31f", 167 | "sha256:f0e25bb91e7a02efccb50aba6591d3fe2c725479e34769802fcdd4076abfa917", 168 | "sha256:f23951a53d18398ef1344c186fb04b26163ca6ce449ebd23404b153fd111ded9", 169 | "sha256:ff7d241f866b718e4584fa95f520cb19405220c501bd3a53ee11871ba5166ea2" 170 | ], 171 | "version": "==2.10.0" 172 | }, 173 | "idna": { 174 | "hashes": [ 175 | "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", 176 | "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" 177 | ], 178 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 179 | "version": "==2.10" 180 | }, 181 | "importlib-metadata": { 182 | "hashes": [ 183 | "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771", 184 | "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d" 185 | ], 186 | "markers": "python_version < '3.8'", 187 | "version": "==3.4.0" 188 | }, 189 | "keras-preprocessing": { 190 | "hashes": [ 191 | "sha256:7b82029b130ff61cc99b55f3bd27427df4838576838c5b2f65940e4fcec99a7b", 192 | "sha256:add82567c50c8bc648c14195bf544a5ce7c1f76761536956c3d2978970179ef3" 193 | ], 194 | "version": "==1.1.2" 195 | }, 196 | "markdown": { 197 | "hashes": [ 198 | 
"sha256:5d9f2b5ca24bc4c7a390d22323ca4bad200368612b5aaa7796babf971d2b2f18", 199 | "sha256:c109c15b7dc20a9ac454c9e6025927d44460b85bd039da028d85e2b6d0bcc328" 200 | ], 201 | "markers": "python_version >= '3.6'", 202 | "version": "==3.3.3" 203 | }, 204 | "numpy": { 205 | "hashes": [ 206 | "sha256:012426a41bc9ab63bb158635aecccc7610e3eff5d31d1eb43bc099debc979d94", 207 | "sha256:06fab248a088e439402141ea04f0fffb203723148f6ee791e9c75b3e9e82f080", 208 | "sha256:0eef32ca3132a48e43f6a0f5a82cb508f22ce5a3d6f67a8329c81c8e226d3f6e", 209 | "sha256:1ded4fce9cfaaf24e7a0ab51b7a87be9038ea1ace7f34b841fe3b6894c721d1c", 210 | "sha256:2e55195bc1c6b705bfd8ad6f288b38b11b1af32f3c8289d6c50d47f950c12e76", 211 | "sha256:2ea52bd92ab9f768cc64a4c3ef8f4b2580a17af0a5436f6126b08efbd1838371", 212 | "sha256:36674959eed6957e61f11c912f71e78857a8d0604171dfd9ce9ad5cbf41c511c", 213 | "sha256:384ec0463d1c2671170901994aeb6dce126de0a95ccc3976c43b0038a37329c2", 214 | "sha256:39b70c19ec771805081578cc936bbe95336798b7edf4732ed102e7a43ec5c07a", 215 | "sha256:400580cbd3cff6ffa6293df2278c75aef2d58d8d93d3c5614cd67981dae68ceb", 216 | "sha256:43d4c81d5ffdff6bae58d66a3cd7f54a7acd9a0e7b18d97abb255defc09e3140", 217 | "sha256:50a4a0ad0111cc1b71fa32dedd05fa239f7fb5a43a40663269bb5dc7877cfd28", 218 | "sha256:603aa0706be710eea8884af807b1b3bc9fb2e49b9f4da439e76000f3b3c6ff0f", 219 | "sha256:6149a185cece5ee78d1d196938b2a8f9d09f5a5ebfbba66969302a778d5ddd1d", 220 | "sha256:759e4095edc3c1b3ac031f34d9459fa781777a93ccc633a472a5468587a190ff", 221 | "sha256:7fb43004bce0ca31d8f13a6eb5e943fa73371381e53f7074ed21a4cb786c32f8", 222 | "sha256:811daee36a58dc79cf3d8bdd4a490e4277d0e4b7d103a001a4e73ddb48e7e6aa", 223 | "sha256:8b5e972b43c8fc27d56550b4120fe6257fdc15f9301914380b27f74856299fea", 224 | "sha256:99abf4f353c3d1a0c7a5f27699482c987cf663b1eac20db59b8c7b061eabd7fc", 225 | "sha256:a0d53e51a6cb6f0d9082decb7a4cb6dfb33055308c4c44f53103c073f649af73", 226 | "sha256:a12ff4c8ddfee61f90a1633a4c4afd3f7bcb32b11c52026c92a12e1325922d0d", 227 | "sha256:a4646724fba402aa7504cd48b4b50e783296b5e10a524c7a6da62e4a8ac9698d", 228 | "sha256:a76f502430dd98d7546e1ea2250a7360c065a5fdea52b2dffe8ae7180909b6f4", 229 | "sha256:a9d17f2be3b427fbb2bce61e596cf555d6f8a56c222bd2ca148baeeb5e5c783c", 230 | "sha256:ab83f24d5c52d60dbc8cd0528759532736b56db58adaa7b5f1f76ad551416a1e", 231 | "sha256:aeb9ed923be74e659984e321f609b9ba54a48354bfd168d21a2b072ed1e833ea", 232 | "sha256:c843b3f50d1ab7361ca4f0b3639bf691569493a56808a0b0c54a051d260b7dbd", 233 | "sha256:cae865b1cae1ec2663d8ea56ef6ff185bad091a5e33ebbadd98de2cfa3fa668f", 234 | "sha256:cc6bd4fd593cb261332568485e20a0712883cf631f6f5e8e86a52caa8b2b50ff", 235 | "sha256:cf2402002d3d9f91c8b01e66fbb436a4ed01c6498fffed0e4c7566da1d40ee1e", 236 | "sha256:d051ec1c64b85ecc69531e1137bb9751c6830772ee5c1c426dbcfe98ef5788d7", 237 | "sha256:d6631f2e867676b13026e2846180e2c13c1e11289d67da08d71cacb2cd93d4aa", 238 | "sha256:dbd18bcf4889b720ba13a27ec2f2aac1981bd41203b3a3b27ba7a33f88ae4827", 239 | "sha256:df609c82f18c5b9f6cb97271f03315ff0dbe481a2a02e56aeb1b1a985ce38e60" 240 | ], 241 | "index": "pypi", 242 | "version": "==1.19.5" 243 | }, 244 | "oauthlib": { 245 | "hashes": [ 246 | "sha256:bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889", 247 | "sha256:df884cd6cbe20e32633f1db1072e9356f53638e4361bef4e8b03c9127c9328ea" 248 | ], 249 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 250 | "version": "==3.1.0" 251 | }, 252 | "opt-einsum": { 253 | "hashes": [ 254 | 
"sha256:2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147", 255 | "sha256:59f6475f77bbc37dcf7cd748519c0ec60722e91e63ca114e68821c0c54a46549" 256 | ], 257 | "markers": "python_version >= '3.5'", 258 | "version": "==3.3.0" 259 | }, 260 | "protobuf": { 261 | "hashes": [ 262 | "sha256:0e247612fadda953047f53301a7b0407cb0c3cb4ae25a6fde661597a04039b3c", 263 | "sha256:0fc96785262042e4863b3f3b5c429d4636f10d90061e1840fce1baaf59b1a836", 264 | "sha256:1c51fda1bbc9634246e7be6016d860be01747354ed7015ebe38acf4452f470d2", 265 | "sha256:1d63eb389347293d8915fb47bee0951c7b5dab522a4a60118b9a18f33e21f8ce", 266 | "sha256:22bcd2e284b3b1d969c12e84dc9b9a71701ec82d8ce975fdda19712e1cfd4e00", 267 | "sha256:2a7e2fe101a7ace75e9327b9c946d247749e564a267b0515cf41dfe450b69bac", 268 | "sha256:43b554b9e73a07ba84ed6cf25db0ff88b1e06be610b37656e292e3cbb5437472", 269 | "sha256:4b74301b30513b1a7494d3055d95c714b560fbb630d8fb9956b6f27992c9f980", 270 | "sha256:4e75105c9dfe13719b7293f75bd53033108f4ba03d44e71db0ec2a0e8401eafd", 271 | "sha256:5b7a637212cc9b2bcf85dd828b1178d19efdf74dbfe1ddf8cd1b8e01fdaaa7f5", 272 | "sha256:5e9806a43232a1fa0c9cf5da8dc06f6910d53e4390be1fa06f06454d888a9142", 273 | "sha256:629b03fd3caae7f815b0c66b41273f6b1900a579e2ccb41ef4493a4f5fb84f3a", 274 | "sha256:72230ed56f026dd664c21d73c5db73ebba50d924d7ba6b7c0d81a121e390406e", 275 | "sha256:86a75477addde4918e9a1904e5c6af8d7b691f2a3f65587d73b16100fbe4c3b2", 276 | "sha256:8971c421dbd7aad930c9bd2694122f332350b6ccb5202a8b7b06f3f1a5c41ed5", 277 | "sha256:9616f0b65a30851e62f1713336c931fcd32c057202b7ff2cfbfca0fc7d5e3043", 278 | "sha256:b0d5d35faeb07e22a1ddf8dce620860c8fe145426c02d1a0ae2688c6e8ede36d", 279 | "sha256:ecc33531a213eee22ad60e0e2aaea6c8ba0021f0cce35dbf0ab03dee6e2a23a1" 280 | ], 281 | "version": "==3.14.0" 282 | }, 283 | "pyasn1": { 284 | "hashes": [ 285 | "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", 286 | "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", 287 | "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", 288 | "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", 289 | "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", 290 | "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", 291 | "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8", 292 | "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", 293 | "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", 294 | "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776", 295 | "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", 296 | "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", 297 | "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3" 298 | ], 299 | "version": "==0.4.8" 300 | }, 301 | "pyasn1-modules": { 302 | "hashes": [ 303 | "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8", 304 | "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199", 305 | "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811", 306 | "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed", 307 | "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4", 308 | "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e", 309 | "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74", 310 | 
"sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb", 311 | "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45", 312 | "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd", 313 | "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0", 314 | "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d", 315 | "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405" 316 | ], 317 | "version": "==0.2.8" 318 | }, 319 | "requests": { 320 | "hashes": [ 321 | "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", 322 | "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" 323 | ], 324 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 325 | "version": "==2.25.1" 326 | }, 327 | "requests-oauthlib": { 328 | "hashes": [ 329 | "sha256:7f71572defaecd16372f9006f33c2ec8c077c3cfa6f5911a9a90202beb513f3d", 330 | "sha256:b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a", 331 | "sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc" 332 | ], 333 | "version": "==1.3.0" 334 | }, 335 | "rsa": { 336 | "hashes": [ 337 | "sha256:69805d6b69f56eb05b62daea3a7dbd7aa44324ad1306445e05da8060232d00f4", 338 | "sha256:a8774e55b59fd9fc893b0d05e9bfc6f47081f46ff5b46f39ccf24631b7be356b" 339 | ], 340 | "markers": "python_version >= '3.6'", 341 | "version": "==4.7" 342 | }, 343 | "six": { 344 | "hashes": [ 345 | "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", 346 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" 347 | ], 348 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 349 | "version": "==1.15.0" 350 | }, 351 | "tensorboard": { 352 | "hashes": [ 353 | "sha256:7b8c53c396069b618f6f276ec94fc45d17e3282d668979216e5d30be472115e4" 354 | ], 355 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 356 | "version": "==2.4.1" 357 | }, 358 | "tensorboard-plugin-wit": { 359 | "hashes": [ 360 | "sha256:2a80d1c551d741e99b2f197bb915d8a133e24adb8da1732b840041860f91183a" 361 | ], 362 | "version": "==1.8.0" 363 | }, 364 | "tensorflow": { 365 | "hashes": [ 366 | "sha256:0e427b1350be6dbe572f971947c5596fdbb152081f227808d8becd894bf40282", 367 | "sha256:22723b8e1fa83b34f56c349b16a57aaff913b404451fcf70981f2b1d6e0c64fc", 368 | "sha256:2357112319303da1b5459a621fd0503c2b2cd97b6c33c4903abd46b3c3e380e2", 369 | "sha256:36d5acd60aac48e34bd545d0ce1fb8b3fceebff6b8782436defd0f71c12203bd", 370 | "sha256:4a04081647b89a8fb602895b29ffc559e3c20aac8bde1d4c5ecd2a65adce5d35", 371 | "sha256:55368ba0bedb513ba0e36a2543a588b5276e9b2ca99fa3232a9a176601a7bab5", 372 | "sha256:e1f2799cc86861680d8515167f103e2207a8cab92a4afe5471e4839330591f08", 373 | "sha256:eedcf578afde5e6e69c75d796bed41093451cd1ab54afb438760e40fb74a09de", 374 | "sha256:efa9daa4b3701a4e439b24b74c1e4b66844aee8ae5263fb3cc12281ac9cc9f67" 375 | ], 376 | "index": "pypi", 377 | "version": "==2.4.1" 378 | }, 379 | "tensorflow-estimator": { 380 | "hashes": [ 381 | "sha256:5b7b7bf2debe19a8794adacc43e8ba6459daa4efaf54d3302623994a359b17f0" 382 | ], 383 | "version": "==2.4.0" 384 | }, 385 | "termcolor": { 386 | "hashes": [ 387 | "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b" 388 | ], 389 | "version": "==1.1.0" 390 | }, 391 | "typing-extensions": { 392 | "hashes": [ 393 | "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", 394 | 
"sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", 395 | "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" 396 | ], 397 | "markers": "python_version < '3.8'", 398 | "version": "==3.7.4.3" 399 | }, 400 | "urllib3": { 401 | "hashes": [ 402 | "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", 403 | "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" 404 | ], 405 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", 406 | "version": "==1.26.3" 407 | }, 408 | "werkzeug": { 409 | "hashes": [ 410 | "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43", 411 | "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c" 412 | ], 413 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 414 | "version": "==1.0.1" 415 | }, 416 | "wheel": { 417 | "hashes": [ 418 | "sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e", 419 | "sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e" 420 | ], 421 | "markers": "python_version >= '3'", 422 | "version": "==0.36.2" 423 | }, 424 | "wrapt": { 425 | "hashes": [ 426 | "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7" 427 | ], 428 | "version": "==1.12.1" 429 | }, 430 | "zipp": { 431 | "hashes": [ 432 | "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108", 433 | "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb" 434 | ], 435 | "markers": "python_version >= '3.6'", 436 | "version": "==3.4.0" 437 | } 438 | }, 439 | "develop": { 440 | "absl-py": { 441 | "hashes": [ 442 | "sha256:673cccb88d810e5627d0c1c818158485d106f65a583880e2f730c997399bcfa7", 443 | "sha256:b3d9eb5119ff6e0a0125f6dabf2f9fae02f8acae7be70576002fac27235611c5" 444 | ], 445 | "version": "==0.11.0" 446 | }, 447 | "alabaster": { 448 | "hashes": [ 449 | "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", 450 | "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02" 451 | ], 452 | "version": "==0.7.12" 453 | }, 454 | "anyio": { 455 | "hashes": [ 456 | "sha256:01cce0087b8fd8b6b7e629dc11505dcde02f916ce903332892cb2ae9817b597d", 457 | "sha256:35075abd32cf20fd7e0be2fee3614e80b92d5392eba257c8d2f33de3df7ca237" 458 | ], 459 | "markers": "python_full_version >= '3.6.2'", 460 | "version": "==2.0.2" 461 | }, 462 | "appnope": { 463 | "hashes": [ 464 | "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442", 465 | "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a" 466 | ], 467 | "markers": "sys_platform == 'darwin' and platform_system == 'Darwin'", 468 | "version": "==0.1.2" 469 | }, 470 | "argon2-cffi": { 471 | "hashes": [ 472 | "sha256:05a8ac07c7026542377e38389638a8a1e9b78f1cd8439cd7493b39f08dd75fbf", 473 | "sha256:0bf066bc049332489bb2d75f69216416329d9dc65deee127152caeb16e5ce7d5", 474 | "sha256:18dee20e25e4be86680b178b35ccfc5d495ebd5792cd00781548d50880fee5c5", 475 | "sha256:392c3c2ef91d12da510cfb6f9bae52512a4552573a9e27600bdb800e05905d2b", 476 | "sha256:57358570592c46c420300ec94f2ff3b32cbccd10d38bdc12dc6979c4a8484fbc", 477 | "sha256:6678bb047373f52bcff02db8afab0d2a77d83bde61cfecea7c5c62e2335cb203", 478 | "sha256:6ea92c980586931a816d61e4faf6c192b4abce89aa767ff6581e6ddc985ed003", 479 | "sha256:77e909cc756ef81d6abb60524d259d959bab384832f0c651ed7dcb6e5ccdbb78", 480 | 
"sha256:7d455c802727710e9dfa69b74ccaab04568386ca17b0ad36350b622cd34606fe", 481 | "sha256:8a84934bd818e14a17943de8099d41160da4a336bcc699bb4c394bbb9b94bd32", 482 | "sha256:9bee3212ba4f560af397b6d7146848c32a800652301843df06b9e8f68f0f7361", 483 | "sha256:9dfd5197852530294ecb5795c97a823839258dfd5eb9420233c7cfedec2058f2", 484 | "sha256:b160416adc0f012fb1f12588a5e6954889510f82f698e23ed4f4fa57f12a0647", 485 | "sha256:ba7209b608945b889457f949cc04c8e762bed4fe3fec88ae9a6b7765ae82e496", 486 | "sha256:cc0e028b209a5483b6846053d5fd7165f460a1f14774d79e632e75e7ae64b82b", 487 | "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d", 488 | "sha256:da7f0445b71db6d3a72462e04f36544b0de871289b0bc8a7cc87c0f5ec7079fa", 489 | "sha256:e2db6e85c057c16d0bd3b4d2b04f270a7467c147381e8fd73cbbe5bc719832be" 490 | ], 491 | "version": "==20.1.0" 492 | }, 493 | "async-generator": { 494 | "hashes": [ 495 | "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b", 496 | "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144" 497 | ], 498 | "markers": "python_version >= '3.5'", 499 | "version": "==1.10" 500 | }, 501 | "attrs": { 502 | "hashes": [ 503 | "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", 504 | "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" 505 | ], 506 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 507 | "version": "==20.3.0" 508 | }, 509 | "babel": { 510 | "hashes": [ 511 | "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", 512 | "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" 513 | ], 514 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 515 | "version": "==2.9.0" 516 | }, 517 | "backcall": { 518 | "hashes": [ 519 | "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e", 520 | "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255" 521 | ], 522 | "version": "==0.2.0" 523 | }, 524 | "bleach": { 525 | "hashes": [ 526 | "sha256:2d3b3f7e7d69148bb683b26a3f21eabcf62fa8fb7bc75d0e7a13bcecd9568d4d", 527 | "sha256:c6ad42174219b64848e2e2cd434e44f56cd24a93a9b4f8bc52cfed55a1cd5aad" 528 | ], 529 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 530 | "version": "==3.2.3" 531 | }, 532 | "certifi": { 533 | "hashes": [ 534 | "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", 535 | "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" 536 | ], 537 | "version": "==2020.12.5" 538 | }, 539 | "cffi": { 540 | "hashes": [ 541 | "sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e", 542 | "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d", 543 | "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a", 544 | "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec", 545 | "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362", 546 | "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668", 547 | "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c", 548 | "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b", 549 | "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06", 550 | "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698", 551 | 
"sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2", 552 | "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c", 553 | "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7", 554 | "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009", 555 | "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03", 556 | "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b", 557 | "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909", 558 | "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53", 559 | "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35", 560 | "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26", 561 | "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b", 562 | "sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01", 563 | "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb", 564 | "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293", 565 | "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd", 566 | "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d", 567 | "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3", 568 | "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d", 569 | "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e", 570 | "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca", 571 | "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d", 572 | "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775", 573 | "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375", 574 | "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b", 575 | "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b", 576 | "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f" 577 | ], 578 | "version": "==1.14.4" 579 | }, 580 | "chardet": { 581 | "hashes": [ 582 | "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", 583 | "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" 584 | ], 585 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 586 | "version": "==4.0.0" 587 | }, 588 | "cycler": { 589 | "hashes": [ 590 | "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d", 591 | "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8" 592 | ], 593 | "version": "==0.10.0" 594 | }, 595 | "decorator": { 596 | "hashes": [ 597 | "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760", 598 | "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7" 599 | ], 600 | "version": "==4.4.2" 601 | }, 602 | "defusedxml": { 603 | "hashes": [ 604 | "sha256:6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93", 605 | "sha256:f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5" 606 | ], 607 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 608 | "version": "==0.6.0" 609 | }, 610 | "dill": { 611 | "hashes": [ 612 | "sha256:78370261be6ea49037ace8c17e0b7dd06d0393af6513cc23f9b222d9367ce389", 613 | "sha256:efb7f6cb65dba7087c1e111bb5390291ba3616741f96840bfc75792a1a9b5ded" 614 | ], 615 | "markers": "python_version >= 
'2.6' and python_version != '3.0'", 616 | "version": "==0.3.3" 617 | }, 618 | "docutils": { 619 | "hashes": [ 620 | "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", 621 | "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" 622 | ], 623 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 624 | "version": "==0.16" 625 | }, 626 | "entrypoints": { 627 | "hashes": [ 628 | "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", 629 | "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451" 630 | ], 631 | "markers": "python_version >= '2.7'", 632 | "version": "==0.3" 633 | }, 634 | "future": { 635 | "hashes": [ 636 | "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" 637 | ], 638 | "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", 639 | "version": "==0.18.2" 640 | }, 641 | "googleapis-common-protos": { 642 | "hashes": [ 643 | "sha256:560716c807117394da12cecb0a54da5a451b5cf9866f1d37e9a5e2329a665351", 644 | "sha256:c8961760f5aad9a711d37b675be103e0cc4e9a39327e0d6d857872f698403e24" 645 | ], 646 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 647 | "version": "==1.52.0" 648 | }, 649 | "idna": { 650 | "hashes": [ 651 | "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", 652 | "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" 653 | ], 654 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 655 | "version": "==2.10" 656 | }, 657 | "imagesize": { 658 | "hashes": [ 659 | "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", 660 | "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" 661 | ], 662 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 663 | "version": "==1.2.0" 664 | }, 665 | "importlib-metadata": { 666 | "hashes": [ 667 | "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771", 668 | "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d" 669 | ], 670 | "markers": "python_version < '3.8'", 671 | "version": "==3.4.0" 672 | }, 673 | "importlib-resources": { 674 | "hashes": [ 675 | "sha256:885b8eae589179f661c909d699a546cf10d83692553e34dca1bf5eb06f7f6217", 676 | "sha256:bfdad047bce441405a49cf8eb48ddce5e56c696e185f59147a8b79e75e9e6380" 677 | ], 678 | "markers": "python_version < '3.9'", 679 | "version": "==5.1.0" 680 | }, 681 | "iniconfig": { 682 | "hashes": [ 683 | "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", 684 | "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" 685 | ], 686 | "version": "==1.1.1" 687 | }, 688 | "ipykernel": { 689 | "hashes": [ 690 | "sha256:4ed205700001a83b5832d4821c46a5733f1bf4b1c55744314ae3c756be6b6095", 691 | "sha256:697103d218e9a8828025af7986e033c89e0b36e2b6eb84a5bda4739b9a27f3cb" 692 | ], 693 | "markers": "python_version >= '3.5'", 694 | "version": "==5.4.3" 695 | }, 696 | "ipython": { 697 | "hashes": [ 698 | "sha256:c987e8178ced651532b3b1ff9965925bfd445c279239697052561a9ab806d28f", 699 | "sha256:cbb2ef3d5961d44e6a963b9817d4ea4e1fa2eb589c371a470fed14d8d40cbd6a" 700 | ], 701 | "markers": "python_version >= '3.7'", 702 | "version": "==7.19.0" 703 | }, 704 | "ipython-genutils": { 705 | "hashes": [ 706 | "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8", 707 | 
"sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8" 708 | ], 709 | "version": "==0.2.0" 710 | }, 711 | "jedi": { 712 | "hashes": [ 713 | "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93", 714 | "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707" 715 | ], 716 | "markers": "python_version >= '3.6'", 717 | "version": "==0.18.0" 718 | }, 719 | "jinja2": { 720 | "hashes": [ 721 | "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", 722 | "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" 723 | ], 724 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 725 | "version": "==2.11.2" 726 | }, 727 | "json5": { 728 | "hashes": [ 729 | "sha256:703cfee540790576b56a92e1c6aaa6c4b0d98971dc358ead83812aa4d06bdb96", 730 | "sha256:af1a1b9a2850c7f62c23fde18be4749b3599fd302f494eebf957e2ada6b9e42c" 731 | ], 732 | "version": "==0.9.5" 733 | }, 734 | "jsonschema": { 735 | "hashes": [ 736 | "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163", 737 | "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a" 738 | ], 739 | "version": "==3.2.0" 740 | }, 741 | "jupyter-client": { 742 | "hashes": [ 743 | "sha256:5eaaa41df449167ebba5e1cf6ca9b31f7fd4f71625069836e2e4fee07fe3cb13", 744 | "sha256:649ca3aca1e28f27d73ef15868a7c7f10d6e70f761514582accec3ca6bb13085" 745 | ], 746 | "markers": "python_version >= '3.5'", 747 | "version": "==6.1.11" 748 | }, 749 | "jupyter-core": { 750 | "hashes": [ 751 | "sha256:0a451c9b295e4db772bdd8d06f2f1eb31caeec0e81fbb77ba37d4a3024e3b315", 752 | "sha256:aa1f9496ab3abe72da4efe0daab0cb2233997914581f9a071e07498c6add8ed3" 753 | ], 754 | "markers": "python_version >= '3.6'", 755 | "version": "==4.7.0" 756 | }, 757 | "jupyter-server": { 758 | "hashes": [ 759 | "sha256:26a98cd5c45b8ebd1e10215586c350a8fa3ca2971e757ee6bf517a180f9933ae", 760 | "sha256:49fd3f9f6f4e866c2b8d7494baa2b6e6a7e44236006e443f2c04c407f7f55918" 761 | ], 762 | "markers": "python_version >= '3.6'", 763 | "version": "==1.2.2" 764 | }, 765 | "jupyterlab": { 766 | "hashes": [ 767 | "sha256:ad6337a3fc86e9b2a1c29fca82dfd49a75148ca28b695c94962d7808d968f64d", 768 | "sha256:ea75d43d9a054e9192b78ae1eefa72270818d1d787ec21f19db1a92d5cc8db35" 769 | ], 770 | "index": "pypi", 771 | "version": "==3.0.5" 772 | }, 773 | "jupyterlab-pygments": { 774 | "hashes": [ 775 | "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008", 776 | "sha256:cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146" 777 | ], 778 | "version": "==0.1.2" 779 | }, 780 | "jupyterlab-server": { 781 | "hashes": [ 782 | "sha256:2af96b04bacf49a17bd2abdd461a219ab62724c39aea2d39ba95ded4be9a171a", 783 | "sha256:9d459d5aba43e626f41cce76d9d00c025e4591fa85feee2d36670295ed1a51fa" 784 | ], 785 | "markers": "python_version >= '3.6'", 786 | "version": "==2.1.3" 787 | }, 788 | "kiwisolver": { 789 | "hashes": [ 790 | "sha256:0cd53f403202159b44528498de18f9285b04482bab2a6fc3f5dd8dbb9352e30d", 791 | "sha256:1e1bc12fb773a7b2ffdeb8380609f4f8064777877b2225dec3da711b421fda31", 792 | "sha256:225e2e18f271e0ed8157d7f4518ffbf99b9450fca398d561eb5c4a87d0986dd9", 793 | "sha256:232c9e11fd7ac3a470d65cd67e4359eee155ec57e822e5220322d7b2ac84fbf0", 794 | "sha256:31dfd2ac56edc0ff9ac295193eeaea1c0c923c0355bf948fbd99ed6018010b72", 795 | "sha256:33449715e0101e4d34f64990352bce4095c8bf13bed1b390773fc0a7295967b3", 796 | "sha256:401a2e9afa8588589775fe34fc22d918ae839aaaf0c0e96441c0fdbce6d8ebe6", 797 | 
"sha256:44a62e24d9b01ba94ae7a4a6c3fb215dc4af1dde817e7498d901e229aaf50e4e", 798 | "sha256:50af681a36b2a1dee1d3c169ade9fdc59207d3c31e522519181e12f1b3ba7000", 799 | "sha256:563c649cfdef27d081c84e72a03b48ea9408c16657500c312575ae9d9f7bc1c3", 800 | "sha256:5989db3b3b34b76c09253deeaf7fbc2707616f130e166996606c284395da3f18", 801 | "sha256:5a7a7dbff17e66fac9142ae2ecafb719393aaee6a3768c9de2fd425c63b53e21", 802 | "sha256:5c3e6455341008a054cccee8c5d24481bcfe1acdbc9add30aa95798e95c65621", 803 | "sha256:5f6ccd3dd0b9739edcf407514016108e2280769c73a85b9e59aa390046dbf08b", 804 | "sha256:72c99e39d005b793fb7d3d4e660aed6b6281b502e8c1eaf8ee8346023c8e03bc", 805 | "sha256:78751b33595f7f9511952e7e60ce858c6d64db2e062afb325985ddbd34b5c131", 806 | "sha256:834ee27348c4aefc20b479335fd422a2c69db55f7d9ab61721ac8cd83eb78882", 807 | "sha256:8be8d84b7d4f2ba4ffff3665bcd0211318aa632395a1a41553250484a871d454", 808 | "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248", 809 | "sha256:a357fd4f15ee49b4a98b44ec23a34a95f1e00292a139d6015c11f55774ef10de", 810 | "sha256:a53d27d0c2a0ebd07e395e56a1fbdf75ffedc4a05943daf472af163413ce9598", 811 | "sha256:acef3d59d47dd85ecf909c359d0fd2c81ed33bdff70216d3956b463e12c38a54", 812 | "sha256:b38694dcdac990a743aa654037ff1188c7a9801ac3ccc548d3341014bc5ca278", 813 | "sha256:b9edd0110a77fc321ab090aaa1cfcaba1d8499850a12848b81be2222eab648f6", 814 | "sha256:c08e95114951dc2090c4a630c2385bef681cacf12636fb0241accdc6b303fd81", 815 | "sha256:c5518d51a0735b1e6cee1fdce66359f8d2b59c3ca85dc2b0813a8aa86818a030", 816 | "sha256:c8fd0f1ae9d92b42854b2979024d7597685ce4ada367172ed7c09edf2cef9cb8", 817 | "sha256:ca3820eb7f7faf7f0aa88de0e54681bddcb46e485beb844fcecbcd1c8bd01689", 818 | "sha256:cf8b574c7b9aa060c62116d4181f3a1a4e821b2ec5cbfe3775809474113748d4", 819 | "sha256:d3155d828dec1d43283bd24d3d3e0d9c7c350cdfcc0bd06c0ad1209c1bbc36d0", 820 | "sha256:f8d6f8db88049a699817fd9178782867bf22283e3813064302ac59f61d95be05", 821 | "sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9" 822 | ], 823 | "markers": "python_version >= '3.6'", 824 | "version": "==1.3.1" 825 | }, 826 | "markupsafe": { 827 | "hashes": [ 828 | "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", 829 | "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", 830 | "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", 831 | "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", 832 | "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", 833 | "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", 834 | "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", 835 | "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", 836 | "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", 837 | "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", 838 | "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", 839 | "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", 840 | "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", 841 | "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", 842 | "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", 843 | "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", 844 | "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", 845 | 
"sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", 846 | "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", 847 | "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", 848 | "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", 849 | "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", 850 | "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", 851 | "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", 852 | "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", 853 | "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", 854 | "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", 855 | "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", 856 | "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", 857 | "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", 858 | "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", 859 | "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", 860 | "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" 861 | ], 862 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 863 | "version": "==1.1.1" 864 | }, 865 | "matplotlib": { 866 | "hashes": [ 867 | "sha256:1de0bb6cbfe460725f0e97b88daa8643bcf9571c18ba90bb8e41432aaeca91d6", 868 | "sha256:1e850163579a8936eede29fad41e202b25923a0a8d5ffd08ce50fc0a97dcdc93", 869 | "sha256:215e2a30a2090221a9481db58b770ce56b8ef46f13224ae33afe221b14b24dc1", 870 | "sha256:348e6032f666ffd151b323342f9278b16b95d4a75dfacae84a11d2829a7816ae", 871 | "sha256:3d2eb9c1cc254d0ffa90bc96fde4b6005d09c2228f99dfd493a4219c1af99644", 872 | "sha256:3e477db76c22929e4c6876c44f88d790aacdf3c3f8f3a90cb1975c0bf37825b0", 873 | "sha256:451cc89cb33d6652c509fc6b588dc51c41d7246afdcc29b8624e256b7663ed1f", 874 | "sha256:46b1a60a04e6d884f0250d5cc8dc7bd21a9a96c584a7acdaab44698a44710bab", 875 | "sha256:5f571b92a536206f7958f7cb2d367ff6c9a1fa8229dc35020006e4cdd1ca0acd", 876 | "sha256:672960dd114e342b7c610bf32fb99d14227f29919894388b41553217457ba7ef", 877 | "sha256:7310e353a4a35477c7f032409966920197d7df3e757c7624fd842f3eeb307d3d", 878 | "sha256:746a1df55749629e26af7f977ea426817ca9370ad1569436608dc48d1069b87c", 879 | "sha256:7c155437ae4fd366e2700e2716564d1787700687443de46bcb895fe0f84b761d", 880 | "sha256:9265ae0fb35e29f9b8cc86c2ab0a2e3dcddc4dd9de4b85bf26c0f63fe5c1c2ca", 881 | "sha256:94bdd1d55c20e764d8aea9d471d2ae7a7b2c84445e0fa463f02e20f9730783e1", 882 | "sha256:9a79e5dd7bb797aa611048f5b70588b23c5be05b63eefd8a0d152ac77c4243db", 883 | "sha256:a17f0a10604fac7627ec82820439e7db611722e80c408a726cd00d8c974c2fb3", 884 | "sha256:a1acb72f095f1d58ecc2538ed1b8bca0b57df313b13db36ed34b8cdf1868e674", 885 | "sha256:aa49571d8030ad0b9ac39708ee77bd2a22f87815e12bdee52ecaffece9313ed8", 886 | "sha256:c24c05f645aef776e8b8931cb81e0f1632d229b42b6d216e30836e2e145a2b40", 887 | "sha256:cf3a7e54eff792f0815dbbe9b85df2f13d739289c93d346925554f71d484be78", 888 | "sha256:d738acfdfb65da34c91acbdb56abed46803db39af259b7f194dc96920360dbe4", 889 | "sha256:e15fa23d844d54e7b3b7243afd53b7567ee71c721f592deb0727ee85e668f96a", 890 | "sha256:ed4a9e6dcacba56b17a0a9ac22ae2c72a35b7f0ef0693aa68574f0b2df607a89", 891 | "sha256:f44149a0ef5b4991aaef12a93b8e8d66d6412e762745fea1faa61d98524e0ba9" 892 | ], 893 | "index": "pypi", 894 | "version": "==3.3.4" 895 | 
}, 896 | "mistune": { 897 | "hashes": [ 898 | "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e", 899 | "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4" 900 | ], 901 | "version": "==0.8.4" 902 | }, 903 | "nbclassic": { 904 | "hashes": [ 905 | "sha256:0248333262d6f90c2fbe05aacb4f008f1d71b5250a9f737488e0a03cfa1c6ed5", 906 | "sha256:b649436ff85dc731ba8115deef089e5abbe827d7a6dccbad42c15b8d427104e8" 907 | ], 908 | "markers": "python_version >= '3.6'", 909 | "version": "==0.2.6" 910 | }, 911 | "nbclient": { 912 | "hashes": [ 913 | "sha256:01e2d726d16eaf2cde6db74a87e2451453547e8832d142f73f72fddcd4fe0250", 914 | "sha256:4d6b116187c795c99b9dba13d46e764d596574b14c296d60670c8dfe454db364" 915 | ], 916 | "markers": "python_version >= '3.6'", 917 | "version": "==0.5.1" 918 | }, 919 | "nbconvert": { 920 | "hashes": [ 921 | "sha256:39e9f977920b203baea0be67eea59f7b37a761caa542abe80f5897ce3cf6311d", 922 | "sha256:cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002" 923 | ], 924 | "markers": "python_version >= '3.6'", 925 | "version": "==6.0.7" 926 | }, 927 | "nbformat": { 928 | "hashes": [ 929 | "sha256:1d223e64a18bfa7cdf2db2e9ba8a818312fc2a0701d2e910b58df66809385a56", 930 | "sha256:3949fdc8f5fa0b1afca16fb307546e78494fa7a7bceff880df8168eafda0e7ac" 931 | ], 932 | "markers": "python_version >= '3.5'", 933 | "version": "==5.1.2" 934 | }, 935 | "nest-asyncio": { 936 | "hashes": [ 937 | "sha256:76d6e972265063fe92a90b9cc4fb82616e07d586b346ed9d2c89a4187acea39c", 938 | "sha256:afc5a1c515210a23c461932765691ad39e8eba6551c055ac8d5546e69250d0aa" 939 | ], 940 | "markers": "python_version >= '3.5'", 941 | "version": "==1.5.1" 942 | }, 943 | "notebook": { 944 | "hashes": [ 945 | "sha256:0464b28e18e7a06cec37e6177546c2322739be07962dd13bf712bcb88361f013", 946 | "sha256:25ad93c982b623441b491e693ef400598d1a46cdf11b8c9c0b3be6c61ebbb6cd" 947 | ], 948 | "markers": "python_version >= '3.5'", 949 | "version": "==6.2.0" 950 | }, 951 | "numpy": { 952 | "hashes": [ 953 | "sha256:012426a41bc9ab63bb158635aecccc7610e3eff5d31d1eb43bc099debc979d94", 954 | "sha256:06fab248a088e439402141ea04f0fffb203723148f6ee791e9c75b3e9e82f080", 955 | "sha256:0eef32ca3132a48e43f6a0f5a82cb508f22ce5a3d6f67a8329c81c8e226d3f6e", 956 | "sha256:1ded4fce9cfaaf24e7a0ab51b7a87be9038ea1ace7f34b841fe3b6894c721d1c", 957 | "sha256:2e55195bc1c6b705bfd8ad6f288b38b11b1af32f3c8289d6c50d47f950c12e76", 958 | "sha256:2ea52bd92ab9f768cc64a4c3ef8f4b2580a17af0a5436f6126b08efbd1838371", 959 | "sha256:36674959eed6957e61f11c912f71e78857a8d0604171dfd9ce9ad5cbf41c511c", 960 | "sha256:384ec0463d1c2671170901994aeb6dce126de0a95ccc3976c43b0038a37329c2", 961 | "sha256:39b70c19ec771805081578cc936bbe95336798b7edf4732ed102e7a43ec5c07a", 962 | "sha256:400580cbd3cff6ffa6293df2278c75aef2d58d8d93d3c5614cd67981dae68ceb", 963 | "sha256:43d4c81d5ffdff6bae58d66a3cd7f54a7acd9a0e7b18d97abb255defc09e3140", 964 | "sha256:50a4a0ad0111cc1b71fa32dedd05fa239f7fb5a43a40663269bb5dc7877cfd28", 965 | "sha256:603aa0706be710eea8884af807b1b3bc9fb2e49b9f4da439e76000f3b3c6ff0f", 966 | "sha256:6149a185cece5ee78d1d196938b2a8f9d09f5a5ebfbba66969302a778d5ddd1d", 967 | "sha256:759e4095edc3c1b3ac031f34d9459fa781777a93ccc633a472a5468587a190ff", 968 | "sha256:7fb43004bce0ca31d8f13a6eb5e943fa73371381e53f7074ed21a4cb786c32f8", 969 | "sha256:811daee36a58dc79cf3d8bdd4a490e4277d0e4b7d103a001a4e73ddb48e7e6aa", 970 | "sha256:8b5e972b43c8fc27d56550b4120fe6257fdc15f9301914380b27f74856299fea", 971 | "sha256:99abf4f353c3d1a0c7a5f27699482c987cf663b1eac20db59b8c7b061eabd7fc", 
972 | "sha256:a0d53e51a6cb6f0d9082decb7a4cb6dfb33055308c4c44f53103c073f649af73", 973 | "sha256:a12ff4c8ddfee61f90a1633a4c4afd3f7bcb32b11c52026c92a12e1325922d0d", 974 | "sha256:a4646724fba402aa7504cd48b4b50e783296b5e10a524c7a6da62e4a8ac9698d", 975 | "sha256:a76f502430dd98d7546e1ea2250a7360c065a5fdea52b2dffe8ae7180909b6f4", 976 | "sha256:a9d17f2be3b427fbb2bce61e596cf555d6f8a56c222bd2ca148baeeb5e5c783c", 977 | "sha256:ab83f24d5c52d60dbc8cd0528759532736b56db58adaa7b5f1f76ad551416a1e", 978 | "sha256:aeb9ed923be74e659984e321f609b9ba54a48354bfd168d21a2b072ed1e833ea", 979 | "sha256:c843b3f50d1ab7361ca4f0b3639bf691569493a56808a0b0c54a051d260b7dbd", 980 | "sha256:cae865b1cae1ec2663d8ea56ef6ff185bad091a5e33ebbadd98de2cfa3fa668f", 981 | "sha256:cc6bd4fd593cb261332568485e20a0712883cf631f6f5e8e86a52caa8b2b50ff", 982 | "sha256:cf2402002d3d9f91c8b01e66fbb436a4ed01c6498fffed0e4c7566da1d40ee1e", 983 | "sha256:d051ec1c64b85ecc69531e1137bb9751c6830772ee5c1c426dbcfe98ef5788d7", 984 | "sha256:d6631f2e867676b13026e2846180e2c13c1e11289d67da08d71cacb2cd93d4aa", 985 | "sha256:dbd18bcf4889b720ba13a27ec2f2aac1981bd41203b3a3b27ba7a33f88ae4827", 986 | "sha256:df609c82f18c5b9f6cb97271f03315ff0dbe481a2a02e56aeb1b1a985ce38e60" 987 | ], 988 | "index": "pypi", 989 | "version": "==1.19.5" 990 | }, 991 | "packaging": { 992 | "hashes": [ 993 | "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858", 994 | "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093" 995 | ], 996 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 997 | "version": "==20.8" 998 | }, 999 | "pandas": { 1000 | "hashes": [ 1001 | "sha256:050ed2c9d825ef36738e018454e6d055c63d947c1d52010fbadd7584f09df5db", 1002 | "sha256:055647e7f4c5e66ba92c2a7dcae6c2c57898b605a3fb007745df61cc4015937f", 1003 | "sha256:23ac77a3a222d9304cb2a7934bb7b4805ff43d513add7a42d1a22dc7df14edd2", 1004 | "sha256:2de012a36cc507debd9c3351b4d757f828d5a784a5fc4e6766eafc2b56e4b0f5", 1005 | "sha256:30e9e8bc8c5c17c03d943e8d6f778313efff59e413b8dbdd8214c2ed9aa165f6", 1006 | "sha256:324e60bea729cf3b55c1bf9e88fe8b9932c26f8669d13b928e3c96b3a1453dff", 1007 | "sha256:37443199f451f8badfe0add666e43cdb817c59fa36bceedafd9c543a42f236ca", 1008 | "sha256:47ec0808a8357ab3890ce0eca39a63f79dcf941e2e7f494470fe1c9ec43f6091", 1009 | "sha256:496fcc29321e9a804d56d5aa5d7ec1320edfd1898eee2f451aa70171cf1d5a29", 1010 | "sha256:50e6c0a17ef7f831b5565fd0394dbf9bfd5d615ee4dd4bb60a3d8c9d2e872323", 1011 | "sha256:5527c5475d955c0bc9689c56865aaa2a7b13c504d6c44f0aadbf57b565af5ebd", 1012 | "sha256:57d5c7ac62925a8d2ab43ea442b297a56cc8452015e71e24f4aa7e4ed6be3d77", 1013 | "sha256:9d45f58b03af1fea4b48e44aa38a819a33dccb9821ef9e1d68f529995f8a632f", 1014 | "sha256:b26e2dabda73d347c7af3e6fed58483161c7b87a886a4e06d76ccfe55a044aa9", 1015 | "sha256:cfd237865d878da9b65cfee883da5e0067f5e2ff839e459466fb90565a77bda3", 1016 | "sha256:d7cca42dba13bfee369e2944ae31f6549a55831cba3117e17636955176004088", 1017 | "sha256:fe7de6fed43e7d086e3d947651ec89e55ddf00102f9dd5758763d56d182f0564" 1018 | ], 1019 | "markers": "python_full_version >= '3.7.1'", 1020 | "version": "==1.2.1" 1021 | }, 1022 | "pandocfilters": { 1023 | "hashes": [ 1024 | "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb" 1025 | ], 1026 | "version": "==1.4.3" 1027 | }, 1028 | "parso": { 1029 | "hashes": [ 1030 | "sha256:15b00182f472319383252c18d5913b69269590616c947747bc50bf4ac768f410", 1031 | "sha256:8519430ad07087d4c997fda3a7918f7cfa27cb58972a8c89c2a0295a1c940e9e" 1032 | ], 1033 | "markers": 
"python_version >= '3.6'", 1034 | "version": "==0.8.1" 1035 | }, 1036 | "pexpect": { 1037 | "hashes": [ 1038 | "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937", 1039 | "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c" 1040 | ], 1041 | "markers": "sys_platform != 'win32'", 1042 | "version": "==4.8.0" 1043 | }, 1044 | "pickleshare": { 1045 | "hashes": [ 1046 | "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca", 1047 | "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56" 1048 | ], 1049 | "version": "==0.7.5" 1050 | }, 1051 | "pillow": { 1052 | "hashes": [ 1053 | "sha256:165c88bc9d8dba670110c689e3cc5c71dbe4bfb984ffa7cbebf1fac9554071d6", 1054 | "sha256:1d208e670abfeb41b6143537a681299ef86e92d2a3dac299d3cd6830d5c7bded", 1055 | "sha256:22d070ca2e60c99929ef274cfced04294d2368193e935c5d6febfd8b601bf865", 1056 | "sha256:2353834b2c49b95e1313fb34edf18fca4d57446675d05298bb694bca4b194174", 1057 | "sha256:39725acf2d2e9c17356e6835dccebe7a697db55f25a09207e38b835d5e1bc032", 1058 | "sha256:3de6b2ee4f78c6b3d89d184ade5d8fa68af0848f9b6b6da2b9ab7943ec46971a", 1059 | "sha256:47c0d93ee9c8b181f353dbead6530b26980fe4f5485aa18be8f1fd3c3cbc685e", 1060 | "sha256:5e2fe3bb2363b862671eba632537cd3a823847db4d98be95690b7e382f3d6378", 1061 | "sha256:604815c55fd92e735f9738f65dabf4edc3e79f88541c221d292faec1904a4b17", 1062 | "sha256:6c5275bd82711cd3dcd0af8ce0bb99113ae8911fc2952805f1d012de7d600a4c", 1063 | "sha256:731ca5aabe9085160cf68b2dbef95fc1991015bc0a3a6ea46a371ab88f3d0913", 1064 | "sha256:7612520e5e1a371d77e1d1ca3a3ee6227eef00d0a9cddb4ef7ecb0b7396eddf7", 1065 | "sha256:7916cbc94f1c6b1301ac04510d0881b9e9feb20ae34094d3615a8a7c3db0dcc0", 1066 | "sha256:81c3fa9a75d9f1afafdb916d5995633f319db09bd773cb56b8e39f1e98d90820", 1067 | "sha256:887668e792b7edbfb1d3c9d8b5d8c859269a0f0eba4dda562adb95500f60dbba", 1068 | "sha256:93a473b53cc6e0b3ce6bf51b1b95b7b1e7e6084be3a07e40f79b42e83503fbf2", 1069 | "sha256:96d4dc103d1a0fa6d47c6c55a47de5f5dafd5ef0114fa10c85a1fd8e0216284b", 1070 | "sha256:a3d3e086474ef12ef13d42e5f9b7bbf09d39cf6bd4940f982263d6954b13f6a9", 1071 | "sha256:b02a0b9f332086657852b1f7cb380f6a42403a6d9c42a4c34a561aa4530d5234", 1072 | "sha256:b09e10ec453de97f9a23a5aa5e30b334195e8d2ddd1ce76cc32e52ba63c8b31d", 1073 | "sha256:b6f00ad5ebe846cc91763b1d0c6d30a8042e02b2316e27b05de04fa6ec831ec5", 1074 | "sha256:bba80df38cfc17f490ec651c73bb37cd896bc2400cfba27d078c2135223c1206", 1075 | "sha256:c3d911614b008e8a576b8e5303e3db29224b455d3d66d1b2848ba6ca83f9ece9", 1076 | "sha256:ca20739e303254287138234485579b28cb0d524401f83d5129b5ff9d606cb0a8", 1077 | "sha256:cb192176b477d49b0a327b2a5a4979552b7a58cd42037034316b8018ac3ebb59", 1078 | "sha256:cdbbe7dff4a677fb555a54f9bc0450f2a21a93c5ba2b44e09e54fcb72d2bd13d", 1079 | "sha256:cf6e33d92b1526190a1de904df21663c46a456758c0424e4f947ae9aa6088bf7", 1080 | "sha256:d355502dce85ade85a2511b40b4c61a128902f246504f7de29bbeec1ae27933a", 1081 | "sha256:d673c4990acd016229a5c1c4ee8a9e6d8f481b27ade5fc3d95938697fa443ce0", 1082 | "sha256:dc577f4cfdda354db3ae37a572428a90ffdbe4e51eda7849bf442fb803f09c9b", 1083 | "sha256:dd9eef866c70d2cbbea1ae58134eaffda0d4bfea403025f4db6859724b18ab3d", 1084 | "sha256:f50e7a98b0453f39000619d845be8b06e611e56ee6e8186f7f60c3b1e2f0feae" 1085 | ], 1086 | "markers": "python_version >= '3.6'", 1087 | "version": "==8.1.0" 1088 | }, 1089 | "pluggy": { 1090 | "hashes": [ 1091 | "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", 1092 | 
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" 1093 | ], 1094 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 1095 | "version": "==0.13.1" 1096 | }, 1097 | "prometheus-client": { 1098 | "hashes": [ 1099 | "sha256:9da7b32f02439d8c04f7777021c304ed51d9ec180604700c1ba72a4d44dceb03", 1100 | "sha256:b08c34c328e1bf5961f0b4352668e6c8f145b4a087e09b7296ef62cbe4693d35" 1101 | ], 1102 | "version": "==0.9.0" 1103 | }, 1104 | "promise": { 1105 | "hashes": [ 1106 | "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0" 1107 | ], 1108 | "version": "==2.3" 1109 | }, 1110 | "prompt-toolkit": { 1111 | "hashes": [ 1112 | "sha256:7e966747c18ececaec785699626b771c1ba8344c8d31759a1915d6b12fad6525", 1113 | "sha256:c96b30925025a7635471dc083ffb6af0cc67482a00611bd81aeaeeeb7e5a5e12" 1114 | ], 1115 | "markers": "python_full_version >= '3.6.1'", 1116 | "version": "==3.0.14" 1117 | }, 1118 | "protobuf": { 1119 | "hashes": [ 1120 | "sha256:0e247612fadda953047f53301a7b0407cb0c3cb4ae25a6fde661597a04039b3c", 1121 | "sha256:0fc96785262042e4863b3f3b5c429d4636f10d90061e1840fce1baaf59b1a836", 1122 | "sha256:1c51fda1bbc9634246e7be6016d860be01747354ed7015ebe38acf4452f470d2", 1123 | "sha256:1d63eb389347293d8915fb47bee0951c7b5dab522a4a60118b9a18f33e21f8ce", 1124 | "sha256:22bcd2e284b3b1d969c12e84dc9b9a71701ec82d8ce975fdda19712e1cfd4e00", 1125 | "sha256:2a7e2fe101a7ace75e9327b9c946d247749e564a267b0515cf41dfe450b69bac", 1126 | "sha256:43b554b9e73a07ba84ed6cf25db0ff88b1e06be610b37656e292e3cbb5437472", 1127 | "sha256:4b74301b30513b1a7494d3055d95c714b560fbb630d8fb9956b6f27992c9f980", 1128 | "sha256:4e75105c9dfe13719b7293f75bd53033108f4ba03d44e71db0ec2a0e8401eafd", 1129 | "sha256:5b7a637212cc9b2bcf85dd828b1178d19efdf74dbfe1ddf8cd1b8e01fdaaa7f5", 1130 | "sha256:5e9806a43232a1fa0c9cf5da8dc06f6910d53e4390be1fa06f06454d888a9142", 1131 | "sha256:629b03fd3caae7f815b0c66b41273f6b1900a579e2ccb41ef4493a4f5fb84f3a", 1132 | "sha256:72230ed56f026dd664c21d73c5db73ebba50d924d7ba6b7c0d81a121e390406e", 1133 | "sha256:86a75477addde4918e9a1904e5c6af8d7b691f2a3f65587d73b16100fbe4c3b2", 1134 | "sha256:8971c421dbd7aad930c9bd2694122f332350b6ccb5202a8b7b06f3f1a5c41ed5", 1135 | "sha256:9616f0b65a30851e62f1713336c931fcd32c057202b7ff2cfbfca0fc7d5e3043", 1136 | "sha256:b0d5d35faeb07e22a1ddf8dce620860c8fe145426c02d1a0ae2688c6e8ede36d", 1137 | "sha256:ecc33531a213eee22ad60e0e2aaea6c8ba0021f0cce35dbf0ab03dee6e2a23a1" 1138 | ], 1139 | "version": "==3.14.0" 1140 | }, 1141 | "ptyprocess": { 1142 | "hashes": [ 1143 | "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", 1144 | "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220" 1145 | ], 1146 | "markers": "os_name != 'nt'", 1147 | "version": "==0.7.0" 1148 | }, 1149 | "py": { 1150 | "hashes": [ 1151 | "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", 1152 | "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" 1153 | ], 1154 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 1155 | "version": "==1.10.0" 1156 | }, 1157 | "pycparser": { 1158 | "hashes": [ 1159 | "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", 1160 | "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" 1161 | ], 1162 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 1163 | "version": "==2.20" 1164 | }, 1165 | "pygments": { 1166 | "hashes": [ 1167 | 
"sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435", 1168 | "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337" 1169 | ], 1170 | "markers": "python_version >= '3.5'", 1171 | "version": "==2.7.4" 1172 | }, 1173 | "pyparsing": { 1174 | "hashes": [ 1175 | "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", 1176 | "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" 1177 | ], 1178 | "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", 1179 | "version": "==2.4.7" 1180 | }, 1181 | "pyrsistent": { 1182 | "hashes": [ 1183 | "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e" 1184 | ], 1185 | "markers": "python_version >= '3.5'", 1186 | "version": "==0.17.3" 1187 | }, 1188 | "pyscaffold": { 1189 | "hashes": [ 1190 | "sha256:862ba8415361c7b9947bc9aba83c12af325e61f6c1f70890759b9e9bef498ab7", 1191 | "sha256:96c3e7f5529df0b5b351e879a141e1e5c9f26211f30d493c23d8c09d9d610a6f" 1192 | ], 1193 | "markers": "python_version >= '3.4'", 1194 | "version": "==3.2.3" 1195 | }, 1196 | "pytest": { 1197 | "hashes": [ 1198 | "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9", 1199 | "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839" 1200 | ], 1201 | "index": "pypi", 1202 | "version": "==6.2.2" 1203 | }, 1204 | "python-dateutil": { 1205 | "hashes": [ 1206 | "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", 1207 | "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" 1208 | ], 1209 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 1210 | "version": "==2.8.1" 1211 | }, 1212 | "pytz": { 1213 | "hashes": [ 1214 | "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4", 1215 | "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5" 1216 | ], 1217 | "version": "==2020.5" 1218 | }, 1219 | "pyzmq": { 1220 | "hashes": [ 1221 | "sha256:082abbb95936f7475cee098153191058350878e33b8fb1dbefc82264978297e4", 1222 | "sha256:098c13c6198913c2a0690235fa74d2e49161755f66b663beaec89651554cc79c", 1223 | "sha256:0a6890d626b4f95f276a2381aea8d3435bb25ef7a2735bbc74966b105b09e758", 1224 | "sha256:42ddd761ac71dd7a386849bceffdcf4f35798caf844b762693456fc55c19c721", 1225 | "sha256:43df5e2fe06e03f41649a48e6339045fe8c68feaedef700a54440551f0ba94a3", 1226 | "sha256:46ff042f883bb22242ba5a3817fbcb2ff0cc0990827b8f925d49c176b1cb7394", 1227 | "sha256:4a70ef4e3835333e020c697ebfe3e6be172dd4ef8fe19ad047cd88678c1259c5", 1228 | "sha256:530ee5571bea541ff68c6e92819a0da0bdab9457c9b637b6c142c267c02a799e", 1229 | "sha256:544963322b1cb650de3d2f45d81bc644e5d9ada6f8f1f5718d9837cda78ee948", 1230 | "sha256:62b3c8196b2fa106552b03ed8ea7b91e1047e9a614849c87aea468f0caac4076", 1231 | "sha256:664f075d38869c6117507193ae3f3d5319491900f11b344030345c11d74863f2", 1232 | "sha256:68f8120ba7ec704d5acfabdcd1328c37806d8a23e1688a7ae3f59193c3cd46e3", 1233 | "sha256:71ff9975f23a78c14a303bf4efd8b8924830a170a8eabcffff7f5e5a5b583b9e", 1234 | "sha256:7307f6efb568a20bb56662041555d08aa2cbc71df91638344b6a088c10b44da7", 1235 | "sha256:82f59dbbdc47987f7ce0daea4d6ee21059ab9d5896bd8110215736c62762cc7f", 1236 | "sha256:84ccd4d9f8839353278480d1f06372f5fd149abcb7621f85c4ebe0924acbd110", 1237 | "sha256:8b984feb536152009e2dc306140ec47f88dd85922063d9e9e8b07f4ff5a0832a", 1238 | "sha256:a0d3aaff782ee1d423e90604c2abe4e573062e9a2008b27c01c86d94f94dbfa7", 1239 | 
"sha256:a3da3d5a66545fa127ad12784babd78859656e0c9614324d40c72d4210aa5bbe", 1240 | "sha256:b4b7e6edea41257562e9d4b28e717ee04ef078720d46ddb4c2241b9b60dbecc2", 1241 | "sha256:b7f471ecead3c4b3c88d00eeff5d78f2b2a6a9f56dd33aa96620019d83fcc3dd", 1242 | "sha256:c34ec0218319f7a78b15315038125d08ab0b37ff1fe2ce002e70b7aafe1423cf", 1243 | "sha256:d91cbc637a34e1a72ebc47da8bf21a2e6c5e386d1b04143c07c8082258e9b430", 1244 | "sha256:dbccca5b77162f610727b664804216674b1974a7a65e03a6ed638a9434cdf2b2", 1245 | "sha256:efd3685579d93f01a742827d4d364df6a3c08df25e14ea091828e3f77d054f19", 1246 | "sha256:f91a6dd45678fa6bac889267328ed9cfec56e2adeab7af2dddfa8c7e9dab24de", 1247 | "sha256:fcb790ff9df5d85d059069a7847f5696ec9296b719ed3e7e675a61a7af390e2f", 1248 | "sha256:fe714a0aeee5d5f230cb67af8e584f243adce63f32e81519dd80f605d036feea" 1249 | ], 1250 | "markers": "python_version >= '3.6'", 1251 | "version": "==21.0.2" 1252 | }, 1253 | "requests": { 1254 | "hashes": [ 1255 | "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", 1256 | "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" 1257 | ], 1258 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 1259 | "version": "==2.25.1" 1260 | }, 1261 | "scipy": { 1262 | "hashes": [ 1263 | "sha256:155225621df90fcd151e25d51c50217e412de717475999ebb76e17e310176981", 1264 | "sha256:1bc5b446600c4ff7ab36bade47180673141322f0febaa555f1c433fe04f2a0e3", 1265 | "sha256:2f1c2ebca6fd867160e70102200b1bd07b3b2d31a3e6af3c58d688c15d0d07b7", 1266 | "sha256:313785c4dab65060f9648112d025f6d2fec69a8a889c714328882d678a95f053", 1267 | "sha256:31ab217b5c27ab429d07428a76002b33662f98986095bbce5d55e0788f7e8b15", 1268 | "sha256:3d4303e3e21d07d9557b26a1707bb9fc065510ee8501c9bf22a0157249a82fd0", 1269 | "sha256:4f1d9cc977ac6a4a63c124045c1e8bf67ec37098f67c699887a93736961a00ae", 1270 | "sha256:58731bbe0103e96b89b2f41516699db9b63066e4317e31b8402891571f6d358f", 1271 | "sha256:8629135ee00cc2182ac8be8e75643b9f02235942443732c2ed69ab48edcb6614", 1272 | "sha256:876badc33eec20709d4e042a09834f5953ebdac4088d45a4f3a1f18b56885718", 1273 | "sha256:8840a9adb4ede3751f49761653d3ebf664f25195fdd42ada394ffea8903dd51d", 1274 | "sha256:aef3a2dbc436bbe8f6e0b635f0b5fe5ed024b522eee4637dbbe0b974129ca734", 1275 | "sha256:b8af26839ae343655f3ca377a5d5e5466f1d3b3ac7432a43449154fe958ae0e0", 1276 | "sha256:c0911f3180de343643f369dc5cfedad6ba9f939c2d516bddea4a6871eb000722", 1277 | "sha256:cb6dc9f82dfd95f6b9032a8d7ea70efeeb15d5b5fd6ed4e8537bb3c673580566", 1278 | "sha256:cdbc47628184a0ebeb5c08f1892614e1bd4a51f6e0d609c6eed253823a960f5b", 1279 | "sha256:d902d3a5ad7f28874c0a82db95246d24ca07ad932741df668595fe00a4819870", 1280 | "sha256:eb7928275f3560d47e5538e15e9f32b3d64cd30ea8f85f3e82987425476f53f6", 1281 | "sha256:f68d5761a2d2376e2b194c8e9192bbf7c51306ca176f1a0889990a52ef0d551f" 1282 | ], 1283 | "markers": "python_version >= '3.7'", 1284 | "version": "==1.6.0" 1285 | }, 1286 | "seaborn": { 1287 | "hashes": [ 1288 | "sha256:44e78eaed937c5a87fc7a892c329a7cc091060b67ebd1d0d306b446a74ba01ad", 1289 | "sha256:4e1cce9489449a1c6ff3c567f2113cdb41122f727e27a984950d004a88ef3c5c" 1290 | ], 1291 | "index": "pypi", 1292 | "version": "==0.11.1" 1293 | }, 1294 | "send2trash": { 1295 | "hashes": [ 1296 | "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2", 1297 | "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b" 1298 | ], 1299 | "version": "==1.5.0" 1300 | }, 1301 | "six": { 1302 | "hashes": [ 1303 | 
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", 1304 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" 1305 | ], 1306 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 1307 | "version": "==1.15.0" 1308 | }, 1309 | "sniffio": { 1310 | "hashes": [ 1311 | "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663", 1312 | "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de" 1313 | ], 1314 | "markers": "python_version >= '3.5'", 1315 | "version": "==1.2.0" 1316 | }, 1317 | "snowballstemmer": { 1318 | "hashes": [ 1319 | "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2", 1320 | "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914" 1321 | ], 1322 | "version": "==2.1.0" 1323 | }, 1324 | "sphinx": { 1325 | "hashes": [ 1326 | "sha256:41cad293f954f7d37f803d97eb184158cfd90f51195131e94875bc07cd08b93c", 1327 | "sha256:c314c857e7cd47c856d2c5adff514ac2e6495f8b8e0f886a8a37e9305dfea0d8" 1328 | ], 1329 | "index": "pypi", 1330 | "version": "==3.4.3" 1331 | }, 1332 | "sphinxcontrib-applehelp": { 1333 | "hashes": [ 1334 | "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", 1335 | "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" 1336 | ], 1337 | "markers": "python_version >= '3.5'", 1338 | "version": "==1.0.2" 1339 | }, 1340 | "sphinxcontrib-devhelp": { 1341 | "hashes": [ 1342 | "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", 1343 | "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" 1344 | ], 1345 | "markers": "python_version >= '3.5'", 1346 | "version": "==1.0.2" 1347 | }, 1348 | "sphinxcontrib-htmlhelp": { 1349 | "hashes": [ 1350 | "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", 1351 | "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" 1352 | ], 1353 | "markers": "python_version >= '3.5'", 1354 | "version": "==1.0.3" 1355 | }, 1356 | "sphinxcontrib-jsmath": { 1357 | "hashes": [ 1358 | "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", 1359 | "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" 1360 | ], 1361 | "markers": "python_version >= '3.5'", 1362 | "version": "==1.0.1" 1363 | }, 1364 | "sphinxcontrib-qthelp": { 1365 | "hashes": [ 1366 | "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", 1367 | "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" 1368 | ], 1369 | "markers": "python_version >= '3.5'", 1370 | "version": "==1.0.3" 1371 | }, 1372 | "sphinxcontrib-serializinghtml": { 1373 | "hashes": [ 1374 | "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", 1375 | "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" 1376 | ], 1377 | "markers": "python_version >= '3.5'", 1378 | "version": "==1.1.4" 1379 | }, 1380 | "tensorflow-datasets": { 1381 | "hashes": [ 1382 | "sha256:93a89cd599b24e9cb04a3a3b43cc72728fc11889f41e9d69f0bd95a8318531bc", 1383 | "sha256:be2532e7de3c7aac3e95a094c3c95b9b1f1a7f2e98daeb16d212dda4382de264" 1384 | ], 1385 | "index": "pypi", 1386 | "version": "==4.2.0" 1387 | }, 1388 | "tensorflow-metadata": { 1389 | "hashes": [ 1390 | "sha256:6fbefdff10984e525a714f241f1f033338795e9500b67071df454fc3ef044e17" 1391 | ], 1392 | "markers": "python_version >= '3.6' and python_version < '4'", 1393 | "version": "==0.27.0" 1394 | }, 1395 | "termcolor": { 1396 | 
"hashes": [ 1397 | "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b" 1398 | ], 1399 | "version": "==1.1.0" 1400 | }, 1401 | "terminado": { 1402 | "hashes": [ 1403 | "sha256:23a053e06b22711269563c8bb96b36a036a86be8b5353e85e804f89b84aaa23f", 1404 | "sha256:89e6d94b19e4bc9dce0ffd908dfaf55cc78a9bf735934e915a4a96f65ac9704c" 1405 | ], 1406 | "markers": "python_version >= '3.6'", 1407 | "version": "==0.9.2" 1408 | }, 1409 | "testpath": { 1410 | "hashes": [ 1411 | "sha256:60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e", 1412 | "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4" 1413 | ], 1414 | "version": "==0.4.4" 1415 | }, 1416 | "toml": { 1417 | "hashes": [ 1418 | "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", 1419 | "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" 1420 | ], 1421 | "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", 1422 | "version": "==0.10.2" 1423 | }, 1424 | "tornado": { 1425 | "hashes": [ 1426 | "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb", 1427 | "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c", 1428 | "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288", 1429 | "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95", 1430 | "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558", 1431 | "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe", 1432 | "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791", 1433 | "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d", 1434 | "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326", 1435 | "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b", 1436 | "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4", 1437 | "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c", 1438 | "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910", 1439 | "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5", 1440 | "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c", 1441 | "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0", 1442 | "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675", 1443 | "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd", 1444 | "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f", 1445 | "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c", 1446 | "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea", 1447 | "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6", 1448 | "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05", 1449 | "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd", 1450 | "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575", 1451 | "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a", 1452 | "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37", 1453 | "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795", 1454 | "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f", 1455 | "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32", 1456 | 
"sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c", 1457 | "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01", 1458 | "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4", 1459 | "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2", 1460 | "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921", 1461 | "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085", 1462 | "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df", 1463 | "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102", 1464 | "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5", 1465 | "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68", 1466 | "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5" 1467 | ], 1468 | "markers": "python_version >= '3.5'", 1469 | "version": "==6.1" 1470 | }, 1471 | "tqdm": { 1472 | "hashes": [ 1473 | "sha256:4621f6823bab46a9cc33d48105753ccbea671b68bab2c50a9f0be23d4065cb5a", 1474 | "sha256:fe3d08dd00a526850568d542ff9de9bbc2a09a791da3c334f3213d8d0bbbca65" 1475 | ], 1476 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 1477 | "version": "==4.56.0" 1478 | }, 1479 | "traitlets": { 1480 | "hashes": [ 1481 | "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396", 1482 | "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426" 1483 | ], 1484 | "markers": "python_version >= '3.7'", 1485 | "version": "==5.0.5" 1486 | }, 1487 | "typing-extensions": { 1488 | "hashes": [ 1489 | "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", 1490 | "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", 1491 | "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" 1492 | ], 1493 | "markers": "python_version < '3.8'", 1494 | "version": "==3.7.4.3" 1495 | }, 1496 | "unet": { 1497 | "editable": true, 1498 | "path": "." 1499 | }, 1500 | "urllib3": { 1501 | "hashes": [ 1502 | "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", 1503 | "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" 1504 | ], 1505 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", 1506 | "version": "==1.26.3" 1507 | }, 1508 | "wcwidth": { 1509 | "hashes": [ 1510 | "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", 1511 | "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" 1512 | ], 1513 | "version": "==0.2.5" 1514 | }, 1515 | "webencodings": { 1516 | "hashes": [ 1517 | "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", 1518 | "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" 1519 | ], 1520 | "version": "==0.5.1" 1521 | }, 1522 | "zipp": { 1523 | "hashes": [ 1524 | "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108", 1525 | "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb" 1526 | ], 1527 | "markers": "python_version >= '3.6'", 1528 | "version": "==3.4.0" 1529 | } 1530 | } 1531 | } 1532 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ============================= 2 | Tensorflow Unet 3 | ============================= 4 | 5 | .. 
image:: https://readthedocs.org/projects/u-net/badge/?version=latest 6 | :target: https://u-net.readthedocs.io/en/latest/?badge=latest 7 | :alt: Documentation Status 8 | 9 | .. image:: https://travis-ci.com/jakeret/unet.svg?branch=master 10 | :target: https://travis-ci.com/jakeret/unet 11 | 12 | .. image:: http://img.shields.io/badge/arXiv-1609.09077-orange.svg?style=flat 13 | :target: http://arxiv.org/abs/1609.09077 14 | 15 | .. image:: https://camo.githubusercontent.com/c8e5db7a5d15b0e7c13480a0ed81db1ae2128b80/68747470733a2f2f62696e6465722e70616e67656f2e696f2f62616467655f6c6f676f2e737667 16 | :target: https://mybinder.org/v2/gh/jakeret/unet/master?filepath=notebooks%2Fcicles.ipynb 17 | 18 | .. image:: https://camo.githubusercontent.com/52feade06f2fecbf006889a904d221e6a730c194/68747470733a2f2f636f6c61622e72657365617263682e676f6f676c652e636f6d2f6173736574732f636f6c61622d62616467652e737667 19 | :target: https://colab.research.google.com/drive/1laPoOaGcqEBB3jTvb-pGnmDU21zwtgJB 20 | 21 | This is a generic **U-Net** implementation, as proposed by `Ronneberger et al. <https://arxiv.org/abs/1505.04597>`_, developed with **Tensorflow 2**. This project is a reimplementation of the original `tf_unet <https://github.com/jakeret/tf_unet>`_. 22 | 23 | Originally, the code was developed and used for `Radio Frequency Interference mitigation using deep convolutional neural networks <http://arxiv.org/abs/1609.09077>`_. 24 | 25 | The network can be trained to perform image segmentation on arbitrary imaging data. Check out the `Usage <https://u-net.readthedocs.io/en/latest/usage.html>`_ section, the included `Jupyter notebooks <https://github.com/jakeret/unet/tree/master/notebooks>`_, the toy problem `on Google Colab `_, or the Oxford Pet Segmentation example available on `Google Colab `_. 26 | 27 | The code is not tied to a specific segmentation task, so it can be used for a toy problem such as detecting circles in a noisy image. 28 | 29 | .. image:: https://raw.githubusercontent.com/jakeret/unet/master/docs/toy_problem.png 30 | :alt: Segmentation of a toy problem. 31 | :align: center 32 | 33 | It can also be applied to more complex problems, such as the detection of radio frequency interference (RFI) in radio astronomy. 34 | 35 | .. image:: https://raw.githubusercontent.com/jakeret/unet/master/docs/rfi.png 36 | :alt: Segmentation of RFI in radio data. 37 | :align: center 38 | 39 | Or to the detection of galaxies and stars in wide-field imaging data. 40 | 41 | .. image:: https://raw.githubusercontent.com/jakeret/unet/master/docs/galaxies.png 42 | :alt: Segmentation of galaxies. 43 | :align: center 44 | 45 | 46 | The architectural elements of a U-Net consist of a contracting and an expanding path: 47 | 48 | .. image:: https://raw.githubusercontent.com/jakeret/unet/master/docs/unet.png 49 | :alt: Unet architecture. 50 | :align: center 51 | 52 | 53 | As you use **unet** for your exciting discoveries, please cite the paper that describes the package:: 54 | 55 | 56 | @article{akeret2017radio, 57 | title={Radio frequency interference mitigation using deep convolutional neural networks}, 58 | author={Akeret, Joel and Chang, Chihway and Lucchi, Aurelien and Refregier, Alexandre}, 59 | journal={Astronomy and Computing}, 60 | volume={18}, 61 | pages={35--39}, 62 | year={2017}, 63 | publisher={Elsevier} 64 | } 65 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line.
5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = ../build/sphinx/ 9 | AUTODOCDIR = api 10 | AUTODOCBUILD = sphinx-apidoc 11 | PROJECT = unet 12 | MODULEDIR = ../src/unet 13 | 14 | # User-friendly check for sphinx-build 15 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $?), 1) 16 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 17 | endif 18 | 19 | # Internal variables. 20 | PAPEROPT_a4 = -D latex_paper_size=a4 21 | PAPEROPT_letter = -D latex_paper_size=letter 22 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 23 | # the i18n builder cannot share the environment and doctrees with the others 24 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 25 | 26 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext doc-requirements 27 | 28 | help: 29 | @echo "Please use \`make ' where is one of" 30 | @echo " html to make standalone HTML files" 31 | @echo " dirhtml to make HTML files named index.html in directories" 32 | @echo " singlehtml to make a single large HTML file" 33 | @echo " pickle to make pickle files" 34 | @echo " json to make JSON files" 35 | @echo " htmlhelp to make HTML files and a HTML help project" 36 | @echo " qthelp to make HTML files and a qthelp project" 37 | @echo " devhelp to make HTML files and a Devhelp project" 38 | @echo " epub to make an epub" 39 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 40 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 41 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 42 | @echo " text to make text files" 43 | @echo " man to make manual pages" 44 | @echo " texinfo to make Texinfo files" 45 | @echo " info to make Texinfo files and run them through makeinfo" 46 | @echo " gettext to make PO message catalogs" 47 | @echo " changes to make an overview of all changed/added/deprecated items" 48 | @echo " xml to make Docutils-native XML files" 49 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 50 | @echo " linkcheck to check all external links for integrity" 51 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 52 | 53 | clean: 54 | rm -rf $(BUILDDIR)/* 55 | 56 | $(AUTODOCDIR): $(MODULEDIR) 57 | mkdir -p $@ 58 | $(AUTODOCBUILD) -f -o $@ $^ 59 | 60 | doc-requirements: $(AUTODOCDIR) 61 | 62 | html: doc-requirements 63 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 64 | @echo 65 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 66 | 67 | dirhtml: doc-requirements 68 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 69 | @echo 70 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 71 | 72 | singlehtml: doc-requirements 73 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 74 | @echo 75 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 76 | 77 | pickle: doc-requirements 78 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 79 | @echo 80 | @echo "Build finished; now you can process the pickle files." 
81 | 82 | json: doc-requirements 83 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 84 | @echo 85 | @echo "Build finished; now you can process the JSON files." 86 | 87 | htmlhelp: doc-requirements 88 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 89 | @echo 90 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 91 | ".hhp project file in $(BUILDDIR)/htmlhelp." 92 | 93 | qthelp: doc-requirements 94 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 95 | @echo 96 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 97 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 98 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/$(PROJECT).qhcp" 99 | @echo "To view the help file:" 100 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/$(PROJECT).qhc" 101 | 102 | devhelp: doc-requirements 103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 104 | @echo 105 | @echo "Build finished." 106 | @echo "To view the help file:" 107 | @echo "# mkdir -p $HOME/.local/share/devhelp/$(PROJECT)" 108 | @echo "# ln -s $(BUILDDIR)/devhelp $HOME/.local/share/devhelp/$(PROJEC)" 109 | @echo "# devhelp" 110 | 111 | epub: doc-requirements 112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 113 | @echo 114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 115 | 116 | patch-latex: 117 | find _build/latex -iname "*.tex" | xargs -- \ 118 | sed -i'' 's~includegraphics{~includegraphics\[keepaspectratio,max size={\\textwidth}{\\textheight}\]{~g' 119 | 120 | latex: doc-requirements 121 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 122 | $(MAKE) patch-latex 123 | @echo 124 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 125 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 126 | "(use \`make latexpdf' here to do that automatically)." 127 | 128 | latexpdf: doc-requirements 129 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 130 | $(MAKE) patch-latex 131 | @echo "Running LaTeX files through pdflatex..." 132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 134 | 135 | latexpdfja: doc-requirements 136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 137 | @echo "Running LaTeX files through platex and dvipdfmx..." 138 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 139 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 140 | 141 | text: doc-requirements 142 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 143 | @echo 144 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 145 | 146 | man: doc-requirements 147 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 148 | @echo 149 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 150 | 151 | texinfo: doc-requirements 152 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 153 | @echo 154 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 155 | @echo "Run \`make' in that directory to run these through makeinfo" \ 156 | "(use \`make info' here to do that automatically)." 157 | 158 | info: doc-requirements 159 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 160 | @echo "Running Texinfo files through makeinfo..." 161 | make -C $(BUILDDIR)/texinfo info 162 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 
163 | 164 | gettext: doc-requirements 165 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 166 | @echo 167 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 168 | 169 | changes: doc-requirements 170 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 171 | @echo 172 | @echo "The overview file is in $(BUILDDIR)/changes." 173 | 174 | linkcheck: doc-requirements 175 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 176 | @echo 177 | @echo "Link check complete; look for any errors in the above output " \ 178 | "or in $(BUILDDIR)/linkcheck/output.txt." 179 | 180 | doctest: doc-requirements 181 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 182 | @echo "Testing of doctests in the sources finished, look at the " \ 183 | "results in $(BUILDDIR)/doctest/output.txt." 184 | 185 | xml: doc-requirements 186 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 187 | @echo 188 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 189 | 190 | pseudoxml: doc-requirements 191 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 192 | @echo 193 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 194 | -------------------------------------------------------------------------------- /docs/_static/.gitignore: -------------------------------------------------------------------------------- 1 | # Empty directory 2 | -------------------------------------------------------------------------------- /docs/api/modules.rst: -------------------------------------------------------------------------------- 1 | unet 2 | ==== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | unet 8 | -------------------------------------------------------------------------------- /docs/api/unet.datasets.rst: -------------------------------------------------------------------------------- 1 | unet.datasets package 2 | ===================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | unet.datasets.circles module 8 | ---------------------------- 9 | 10 | .. automodule:: unet.datasets.circles 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | unet.datasets.oxford\_iiit\_pet module 16 | -------------------------------------- 17 | 18 | .. automodule:: unet.datasets.oxford_iiit_pet 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. automodule:: unet.datasets 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /docs/api/unet.rst: -------------------------------------------------------------------------------- 1 | unet package 2 | ============ 3 | 4 | unet.unet module 5 | ---------------- 6 | 7 | .. automodule:: unet.unet 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | unet.trainer module 13 | ------------------- 14 | 15 | .. automodule:: unet.trainer 16 | :members: 17 | :undoc-members: 18 | :show-inheritance: 19 | 20 | unet.utils module 21 | ----------------- 22 | 23 | .. automodule:: unet.utils 24 | :members: 25 | :undoc-members: 26 | :show-inheritance: 27 | 28 | 29 | Subpackages 30 | ----------- 31 | 32 | .. toctree:: 33 | :maxdepth: 4 34 | 35 | unet.datasets 36 | 37 | Submodules 38 | ---------- 39 | 40 | unet.callbacks module 41 | --------------------- 42 | 43 | .. 
automodule:: unet.callbacks 44 | :members: 45 | :undoc-members: 46 | :show-inheritance: 47 | 48 | unet.metrics module 49 | ------------------- 50 | 51 | .. automodule:: unet.metrics 52 | :members: 53 | :undoc-members: 54 | :show-inheritance: 55 | 56 | unet.schedulers module 57 | ---------------------- 58 | 59 | .. automodule:: unet.schedulers 60 | :members: 61 | :undoc-members: 62 | :show-inheritance: 63 | 64 | Module contents 65 | --------------- 66 | 67 | .. automodule:: unet 68 | :members: 69 | :undoc-members: 70 | :show-inheritance: 71 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. _authors: 2 | .. include:: ../AUTHORS.rst 3 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changes: 2 | .. include:: ../CHANGELOG.rst 3 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # This file is execfile()d with the current directory set to its containing dir. 4 | # 5 | # Note that not all possible configuration values are present in this 6 | # autogenerated file. 7 | # 8 | # All configuration values have a default; values that are commented out 9 | # serve to show the default. 10 | 11 | import inspect 12 | import os 13 | import sys 14 | 15 | __location__ = os.path.join(os.getcwd(), os.path.dirname( 16 | inspect.getfile(inspect.currentframe()))) 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | sys.path.insert(0, os.path.join(__location__, '../src')) 22 | 23 | # -- Run sphinx-apidoc ------------------------------------------------------ 24 | # This hack is necessary since RTD does not issue `sphinx-apidoc` before running 25 | # `sphinx-build -b html . _build/html`. See Issue: 26 | # https://github.com/rtfd/readthedocs.org/issues/1139 27 | # DON'T FORGET: Check the box "Install your project inside a virtualenv using 28 | # setup.py install" in the RTD Advanced Settings. 29 | # Additionally it helps us to avoid running apidoc manually 30 | 31 | try: # for Sphinx >= 1.7 32 | from sphinx.ext import apidoc 33 | except ImportError: 34 | from sphinx import apidoc 35 | 36 | output_dir = os.path.join(__location__, "api") 37 | module_dir = os.path.join(__location__, "../src/unet") 38 | # try: 39 | # shutil.rmtree(output_dir) 40 | # except FileNotFoundError: 41 | # pass 42 | 43 | # try: 44 | # import sphinx 45 | # from pkg_resources import parse_version 46 | # 47 | # cmd_line_template = "sphinx-apidoc -f -o {outputdir} {moduledir}" 48 | # cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir) 49 | # 50 | # args = cmd_line.split(" ") 51 | # if parse_version(sphinx.__version__) >= parse_version('1.7'): 52 | # args = args[1:] 53 | # 54 | # apidoc.main(args) 55 | # except Exception as e: 56 | # print("Running `sphinx-apidoc` failed!\n{}".format(e)) 57 | 58 | # -- General configuration ----------------------------------------------------- 59 | 60 | # If your documentation needs a minimal Sphinx version, state it here. 
61 | # needs_sphinx = '1.0' 62 | 63 | # Add any Sphinx extension module names here, as strings. They can be extensions 64 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 65 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 66 | 'sphinx.ext.autosummary', 'sphinx.ext.viewcode', 'sphinx.ext.coverage', 67 | 'sphinx.ext.doctest', 'sphinx.ext.ifconfig', 'sphinx.ext.mathjax', 68 | 'sphinx.ext.napoleon'] 69 | 70 | # Add any paths that contain templates here, relative to this directory. 71 | templates_path = ['_templates'] 72 | 73 | # The suffix of source filenames. 74 | source_suffix = '.rst' 75 | 76 | # The encoding of source files. 77 | # source_encoding = 'utf-8-sig' 78 | 79 | # The master toctree document. 80 | master_doc = 'index' 81 | 82 | # General information about the project. 83 | project = u'unet' 84 | copyright = u'2020, Akeret, Joel' 85 | 86 | # The version info for the project you're documenting, acts as replacement for 87 | # |version| and |release|, also used in various other places throughout the 88 | # built documents. 89 | # 90 | # The short X.Y version. 91 | version = '' # Is set by calling `setup.py docs` 92 | # The full version, including alpha/beta/rc tags. 93 | release = '' # Is set by calling `setup.py docs` 94 | 95 | # The language for content autogenerated by Sphinx. Refer to documentation 96 | # for a list of supported languages. 97 | # language = None 98 | 99 | # There are two options for replacing |today|: either, you set today to some 100 | # non-false value, then it is used: 101 | # today = '' 102 | # Else, today_fmt is used as the format for a strftime call. 103 | # today_fmt = '%B %d, %Y' 104 | 105 | # List of patterns, relative to source directory, that match files and 106 | # directories to ignore when looking for source files. 107 | exclude_patterns = ['_build'] 108 | 109 | # The reST default role (used for this markup: `text`) to use for all documents. 110 | # default_role = None 111 | 112 | # If true, '()' will be appended to :func: etc. cross-reference text. 113 | # add_function_parentheses = True 114 | 115 | # If true, the current module name will be prepended to all description 116 | # unit titles (such as .. function::). 117 | # add_module_names = True 118 | 119 | # If true, sectionauthor and moduleauthor directives will be shown in the 120 | # output. They are ignored by default. 121 | # show_authors = False 122 | 123 | # The name of the Pygments (syntax highlighting) style to use. 124 | pygments_style = 'sphinx' 125 | 126 | # A list of ignored prefixes for module index sorting. 127 | # modindex_common_prefix = [] 128 | 129 | # If true, keep warnings as "system message" paragraphs in the built documents. 130 | # keep_warnings = False 131 | 132 | 133 | # -- Options for HTML output --------------------------------------------------- 134 | 135 | # The theme to use for HTML and HTML Help pages. See the documentation for 136 | # a list of builtin themes. 137 | html_theme = 'default' 138 | 139 | # Theme options are theme-specific and customize the look and feel of a theme 140 | # further. For a list of options available for each theme, see the 141 | # documentation. 142 | html_theme_options = { 143 | 'sidebar_width': '300px', 144 | 'page_width': '1200px' 145 | } 146 | 147 | # Add any paths that contain custom themes here, relative to this directory. 148 | # html_theme_path = [] 149 | 150 | # The name for this set of Sphinx documents. If None, it defaults to 151 | # " v documentation". 
152 | try: 153 | from unet import __version__ as version 154 | except ImportError: 155 | pass 156 | else: 157 | release = version 158 | 159 | # A shorter title for the navigation bar. Default is the same as html_title. 160 | # html_short_title = None 161 | 162 | # The name of an image file (relative to this directory) to place at the top 163 | # of the sidebar. 164 | # html_logo = "" 165 | 166 | # The name of an image file (within the static path) to use as favicon of the 167 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 168 | # pixels large. 169 | # html_favicon = None 170 | 171 | # Add any paths that contain custom static files (such as style sheets) here, 172 | # relative to this directory. They are copied after the builtin static files, 173 | # so a file named "default.css" will overwrite the builtin "default.css". 174 | html_static_path = ['_static'] 175 | 176 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 177 | # using the given strftime format. 178 | # html_last_updated_fmt = '%b %d, %Y' 179 | 180 | # If true, SmartyPants will be used to convert quotes and dashes to 181 | # typographically correct entities. 182 | # html_use_smartypants = True 183 | 184 | # Custom sidebar templates, maps document names to template names. 185 | # html_sidebars = {} 186 | 187 | # Additional templates that should be rendered to pages, maps page names to 188 | # template names. 189 | # html_additional_pages = {} 190 | 191 | # If false, no module index is generated. 192 | # html_domain_indices = True 193 | 194 | # If false, no index is generated. 195 | # html_use_index = True 196 | 197 | # If true, the index is split into individual pages for each letter. 198 | # html_split_index = False 199 | 200 | # If true, links to the reST sources are added to the pages. 201 | # html_show_sourcelink = True 202 | 203 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 204 | # html_show_sphinx = True 205 | 206 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 207 | # html_show_copyright = True 208 | 209 | # If true, an OpenSearch description file will be output, and all pages will 210 | # contain a tag referring to it. The value of this option must be the 211 | # base URL from which the finished HTML is served. 212 | # html_use_opensearch = '' 213 | 214 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 215 | # html_file_suffix = None 216 | 217 | # Output file base name for HTML help builder. 218 | htmlhelp_basename = 'unet-doc' 219 | 220 | 221 | # -- Options for LaTeX output -------------------------------------------------- 222 | 223 | latex_elements = { 224 | # The paper size ('letterpaper' or 'a4paper'). 225 | # 'papersize': 'letterpaper', 226 | 227 | # The font size ('10pt', '11pt' or '12pt'). 228 | # 'pointsize': '10pt', 229 | 230 | # Additional stuff for the LaTeX preamble. 231 | # 'preamble': '', 232 | } 233 | 234 | # Grouping the document tree into LaTeX files. List of tuples 235 | # (source start file, target name, title, author, documentclass [howto/manual]). 236 | latex_documents = [ 237 | ('index', 'user_guide.tex', u'unet Documentation', 238 | u'Akeret, Joel', 'manual'), 239 | ] 240 | 241 | # The name of an image file (relative to this directory) to place at the top of 242 | # the title page. 243 | # latex_logo = "" 244 | 245 | # For "manual" documents, if this is true, then toplevel headings are parts, 246 | # not chapters. 
247 | # latex_use_parts = False 248 | 249 | # If true, show page references after internal links. 250 | # latex_show_pagerefs = False 251 | 252 | # If true, show URL addresses after external links. 253 | # latex_show_urls = False 254 | 255 | # Documents to append as an appendix to all manuals. 256 | # latex_appendices = [] 257 | 258 | # If false, no module index is generated. 259 | # latex_domain_indices = True 260 | 261 | # -- External mapping ------------------------------------------------------------ 262 | python_version = '.'.join(map(str, sys.version_info[0:2])) 263 | intersphinx_mapping = { 264 | 'sphinx': ('http://www.sphinx-doc.org/en/stable', None), 265 | 'python': ('https://docs.python.org/' + python_version, None), 266 | 'matplotlib': ('https://matplotlib.org', None), 267 | 'numpy': ('https://docs.scipy.org/doc/numpy', None), 268 | 'sklearn': ('http://scikit-learn.org/stable', None), 269 | 'pandas': ('http://pandas.pydata.org/pandas-docs/stable', None), 270 | 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None), 271 | } 272 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst -------------------------------------------------------------------------------- /docs/galaxies.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jakeret/unet/f557a51b6f95aae6848cab6141e6cae573934bf8/docs/galaxies.png -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | 3 | 4 | Contents 5 | ======== 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | installation 11 | usage 12 | Module Reference 13 | contributing 14 | Authors 15 | Changelog 16 | License 17 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | The project is hosted on GitHub. Get a copy by running:: 6 | 7 | $ git clone https://github.com/jakeret/unet.git 8 | 9 | 10 | Install the package like this:: 11 | 12 | $ cd unet 13 | $ pipenv install --dev 14 | 15 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | .. _license: 2 | 3 | ======= 4 | License 5 | ======= 6 | 7 | .. 
include:: ../LICENSE.txt 8 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | tensorflow>=2.0.0 2 | numpy>=1.18.1 3 | -------------------------------------------------------------------------------- /docs/rfi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jakeret/unet/f557a51b6f95aae6848cab6141e6cae573934bf8/docs/rfi.png -------------------------------------------------------------------------------- /docs/toy_problem.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jakeret/unet/f557a51b6f95aae6848cab6141e6cae573934bf8/docs/toy_problem.png -------------------------------------------------------------------------------- /docs/unet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jakeret/unet/f557a51b6f95aae6848cab6141e6cae573934bf8/docs/unet.png -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Usage 3 | ======== 4 | 5 | To use Tensorflow Unet in a project:: 6 | 7 | import unet 8 | from unet.datasets import circles 9 | 10 | #loading the datasets 11 | train_dataset, validation_dataset = circles.load_data(100, nx=200, ny=200, 12 | splits=(0.8, 0.2)) 13 | 14 | #building the model 15 | unet_model = unet.build_model(channels=circles.channels, 16 | num_classes=circles.classes, 17 | layer_depth=3, 18 | filters_root=16) 19 | 20 | unet.finalize_model(unet_model) 21 | 22 | #training and validating the model 23 | trainer = unet.Trainer(checkpoint_callback=False) 24 | trainer.fit(unet_model, 25 | train_dataset, 26 | validation_dataset, 27 | epochs=5, 28 | batch_size=1) 29 | 30 | 31 | Once the model is trained, it can be saved using Tensorflow's save format:: 32 | 33 | from unet import custom_objects 34 | unet_model.save(<path_to_model>) 35 | 36 | 37 | and loaded by using:: 38 | 39 | from unet import custom_objects 40 | reconstructed_model = tf.keras.models.load_model(<path_to_model>, custom_objects=custom_objects) 41 | 42 | 43 | Keep track of the learning progress using *Tensorboard*. **unet** automatically outputs relevant summaries. 44 | 45 | .. image:: https://raw.githubusercontent.com/jakeret/unet/master/docs/stats.png 46 | :align: center 47 | 48 | -------------------------------------------------------------------------------- /scripts/circles.py: -------------------------------------------------------------------------------- 1 | # unet is free software: you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation, either version 3 of the License, or 4 | # (at your option) any later version. 5 | # 6 | # unet is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 9 | # GNU General Public License for more details. 10 | # 11 | # You should have received a copy of the GNU General Public License 12 | # along with unet. If not, see <http://www.gnu.org/licenses/>.
13 | import logging 14 | 15 | import numpy as np 16 | 17 | import unet 18 | from unet.datasets import circles 19 | 20 | LEARNING_RATE = 1e-3 21 | 22 | logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s') 23 | np.random.seed(98765) 24 | 25 | 26 | def train(): 27 | unet_model = unet.build_model(channels=circles.channels, 28 | num_classes=circles.classes, 29 | layer_depth=3, 30 | filters_root=16) 31 | 32 | unet.finalize_model(unet_model, 33 | learning_rate=LEARNING_RATE) 34 | 35 | trainer = unet.Trainer(name="circles", 36 | learning_rate_scheduler=unet.SchedulerType.WARMUP_LINEAR_DECAY, 37 | warmup_proportion=0.1, 38 | learning_rate=LEARNING_RATE) 39 | 40 | train_dataset, validation_dataset, test_dataset = circles.load_data(100, nx=272, ny=272, r_max=20) 41 | 42 | trainer.fit(unet_model, 43 | train_dataset, 44 | validation_dataset, 45 | test_dataset, 46 | epochs=25, 47 | batch_size=5) 48 | 49 | return unet_model 50 | 51 | 52 | if __name__ == '__main__': 53 | train() 54 | -------------------------------------------------------------------------------- /scripts/oxford_iiit_pet.py: -------------------------------------------------------------------------------- 1 | # unet is free software: you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation, either version 3 of the License, or 4 | # (at your option) any later version. 5 | # 6 | # unet is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 9 | # GNU General Public License for more details. 10 | # 11 | # You should have received a copy of the GNU General Public License 12 | # along with unet. If not, see . 13 | import logging 14 | 15 | import numpy as np 16 | from tensorflow.keras import losses, metrics 17 | 18 | import unet 19 | from unet.datasets import oxford_iiit_pet 20 | 21 | LEARNING_RATE = 1e-3 22 | 23 | logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s') 24 | np.random.seed(98765) 25 | 26 | 27 | def train(): 28 | unet_model = unet.build_model(*oxford_iiit_pet.IMAGE_SIZE, 29 | channels=oxford_iiit_pet.channels, 30 | num_classes=oxford_iiit_pet.classes, 31 | layer_depth=4, 32 | filters_root=64, 33 | padding="same" 34 | ) 35 | 36 | unet.finalize_model(unet_model, 37 | loss=losses.SparseCategoricalCrossentropy(), 38 | metrics=[metrics.SparseCategoricalAccuracy()], 39 | auc=False, 40 | learning_rate=LEARNING_RATE) 41 | 42 | trainer = unet.Trainer(name="oxford_iiit_pet") 43 | 44 | train_dataset, validation_dataset = oxford_iiit_pet.load_data() 45 | 46 | trainer.fit(unet_model, 47 | train_dataset, 48 | validation_dataset, 49 | epochs=25, 50 | batch_size=1) 51 | 52 | return unet_model 53 | 54 | 55 | if __name__ == '__main__': 56 | train() 57 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # This file is used to configure your project. 2 | # Read more about the various options under: 3 | # http://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files 4 | 5 | [metadata] 6 | name = unet 7 | description = Add a short description here! 
8 | author = Akeret, Joel 9 | author-email = joel.akeret@gmail.com 10 | license = GPLv3 11 | long-description = file: README.rst 12 | long-description-content-type = text/x-rst; charset=UTF-8 13 | url = https://github.com/pyscaffold/pyscaffold/ 14 | project-urls = 15 | Documentation = https://pyscaffold.org/ 16 | # Change if running only on Windows, Mac or Linux (comma-separated) 17 | platforms = any 18 | # Add here all kinds of additional classifiers as defined under 19 | # https://pypi.python.org/pypi?%3Aaction=list_classifiers 20 | classifiers = 21 | Development Status :: 4 - Beta 22 | Programming Language :: Python 23 | 24 | [options] 25 | zip_safe = False 26 | packages = find: 27 | include_package_data = True 28 | package_dir = 29 | =src 30 | # DON'T CHANGE THE FOLLOWING LINE! IT WILL BE UPDATED BY PYSCAFFOLD! 31 | setup_requires = pyscaffold>=3.2a0,<3.3a0 32 | # Add here dependencies of your project (semicolon/line-separated), e.g. 33 | #install_requires = 34 | # tensorflow>=2.0.0 35 | # numpy>=1.18.1 36 | # The usage of test_requires is discouraged, see `Dependency Management` docs 37 | # tests_require = pytest; pytest-cov 38 | # Require a specific Python version, e.g. Python 2.7 or >= 3.4 39 | # python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* 40 | 41 | [options.packages.find] 42 | where = src 43 | exclude = 44 | tests 45 | 46 | [options.extras_require] 47 | # Add here additional requirements for extra features, to install with: 48 | # `pip install unet[PDF]` like: 49 | # PDF = ReportLab; RXP 50 | # Add here test requirements (semicolon/line-separated) 51 | testing = 52 | pytest 53 | pytest-cov 54 | 55 | [options.entry_points] 56 | # Add here console scripts like: 57 | # console_scripts = 58 | # script_name = unet.module:function 59 | # For example: 60 | # console_scripts = 61 | # fibonacci = unet.skeleton:run 62 | # And any other entry points, for example: 63 | # pyscaffold.cli = 64 | # awesome = pyscaffoldext.awesome.extension:AwesomeExtension 65 | 66 | [test] 67 | # py.test options when running `python setup.py test` 68 | # addopts = --verbose 69 | extras = True 70 | 71 | [tool:pytest] 72 | # Options for py.test: 73 | # Specify command line options as you would do when invoking py.test directly. 74 | # e.g. --cov-report html (or xml) for html/xml output or --junitxml junit.xml 75 | # in order to write a coverage file that can be read by Jenkins. 76 | addopts = 77 | --verbose 78 | norecursedirs = 79 | dist 80 | build 81 | .tox 82 | testpaths = tests 83 | 84 | [aliases] 85 | dists = bdist_wheel 86 | 87 | [bdist_wheel] 88 | # Use this option if your package is pure-python 89 | universal = 1 90 | 91 | [build_sphinx] 92 | source_dir = docs 93 | build_dir = build/sphinx 94 | 95 | [devpi:upload] 96 | # Options for the devpi: PyPI server and packaging tool 97 | # VCS export must be deactivated since we are using setuptools-scm 98 | no-vcs = 1 99 | formats = bdist_wheel 100 | 101 | [flake8] 102 | # Some sane defaults for the code style checker flake8 103 | exclude = 104 | .tox 105 | build 106 | dist 107 | .eggs 108 | docs/conf.py 109 | 110 | [pyscaffold] 111 | # PyScaffold's parameters when the project was created. 112 | # This will be used when updating. Do not change! 
113 | version = 3.2.3 114 | package = unet 115 | extensions = 116 | no_skeleton 117 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Setup file for unet. 4 | Use setup.cfg to configure your project. 5 | 6 | This file was generated with PyScaffold 3.2.3. 7 | PyScaffold helps you to put up the scaffold of your new Python project. 8 | Learn more under: https://pyscaffold.org/ 9 | """ 10 | import sys 11 | 12 | from pkg_resources import VersionConflict, require 13 | from setuptools import setup 14 | 15 | try: 16 | require('setuptools>=38.3') 17 | except VersionConflict: 18 | print("Error: version of setuptools is too old (<38.3)!") 19 | sys.exit(1) 20 | 21 | 22 | if __name__ == "__main__": 23 | setup(use_pyscaffold=True) 24 | -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jakeret/unet/f557a51b6f95aae6848cab6141e6cae573934bf8/src/__init__.py -------------------------------------------------------------------------------- /src/unet/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from pkg_resources import get_distribution, DistributionNotFound 3 | 4 | from unet import metrics 5 | from unet.schedulers import SchedulerType 6 | from unet.trainer import Trainer 7 | from unet.unet import build_model, finalize_model 8 | 9 | try: 10 | # Change here if project is renamed and does not equal the package name 11 | dist_name = __name__ 12 | __version__ = get_distribution(dist_name).version 13 | except DistributionNotFound: 14 | __version__ = 'unknown' 15 | finally: 16 | del get_distribution, DistributionNotFound 17 | 18 | __all__ = [build_model, 19 | finalize_model, 20 | SchedulerType, 21 | Trainer] 22 | 23 | 24 | custom_objects = {'mean_iou': metrics.mean_iou, 25 | 'dice_coefficient': metrics.dice_coefficient} -------------------------------------------------------------------------------- /src/unet/callbacks.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import numpy as np 4 | import tensorflow as tf 5 | from tensorflow.keras import backend as K 6 | from tensorflow.keras.callbacks import TensorBoard, Callback 7 | 8 | from unet import utils 9 | 10 | 11 | class TensorBoardImageSummary(Callback): 12 | 13 | def __init__(self, name, 14 | logdir: str, 15 | dataset: tf.data.Dataset, 16 | max_outputs: int = None): 17 | self.name = name 18 | self.logdir = str(Path(logdir) / "summaries") 19 | if max_outputs is None: 20 | max_outputs = self.images.shape[0] 21 | self.max_outputs = max_outputs 22 | 23 | self.dataset = dataset.take(self.max_outputs) 24 | 25 | self.file_writer = tf.summary.create_file_writer(self.logdir) 26 | 27 | super().__init__() 28 | 29 | def on_epoch_end(self, epoch, logs=None): 30 | predictions = self.model.predict(self.dataset.batch(batch_size=1)) 31 | 32 | self._log_histogramms(epoch, predictions) 33 | 34 | self._log_image_summaries(epoch, predictions) 35 | 36 | self.file_writer.flush() 37 | 38 | def _log_image_summaries(self, epoch, predictions): 39 | cropped_images, cropped_labels = list(self.dataset 40 | .map(utils.crop_image_and_label_to_shape(predictions.shape[1:])) 41 | .take(self.max_outputs) 42 | 
.batch(self.max_outputs))[0] 43 | 44 | output = self.combine_to_image(cropped_images.numpy(), 45 | cropped_labels.numpy(), 46 | predictions) 47 | 48 | with self.file_writer.as_default(): 49 | tf.summary.image(self.name, 50 | output, 51 | step=epoch, 52 | max_outputs=self.max_outputs) 53 | 54 | def combine_to_image(self, images: np.array, labels: np.array, predictions: np.array) -> np.array: 55 | """ 56 | Concatenates the three tensors to one RGB image 57 | 58 | :param images: images tensor, shape [None, nx, ny, channels] 59 | :param labels: labels tensor, shape [None, nx, ny, 1] for sparse or [None, nx, ny, classes] for one-hot 60 | :param predictions: labels tensor, shape [None, nx, ny, classes] 61 | 62 | :return: image tensor, shape [None, nx, 3 x ny, 3] 63 | """ 64 | 65 | if predictions.shape[-1] == 2: 66 | mask = predictions[..., :1] 67 | else: 68 | mask = np.argmax(predictions, axis=-1)[..., np.newaxis] 69 | 70 | output = np.concatenate((utils.to_rgb(images), 71 | utils.to_rgb(labels[..., :1]), 72 | utils.to_rgb(mask)), 73 | axis=2) 74 | return output 75 | 76 | def _log_histogramms(self, epoch, predictions): 77 | with self.file_writer.as_default(): 78 | tf.summary.histogram(self.name + "_prediction_histograms", 79 | predictions, 80 | step=epoch, 81 | buckets=30, 82 | description=None) 83 | 84 | 85 | class TensorBoardWithLearningRate(TensorBoard): 86 | def on_epoch_end(self, batch, logs=None): 87 | logs = logs or {} 88 | logs['learning_rate'] = K.get_value(self.model.optimizer.lr) 89 | super().on_epoch_end(batch, logs) 90 | -------------------------------------------------------------------------------- /src/unet/datasets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jakeret/unet/f557a51b6f95aae6848cab6141e6cae573934bf8/src/unet/datasets/__init__.py -------------------------------------------------------------------------------- /src/unet/datasets/circles.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple, List 2 | 3 | import numpy as np 4 | import tensorflow as tf 5 | 6 | channels = 1 7 | classes = 2 8 | 9 | 10 | def load_data(count:int, splits:Tuple[float]=(0.7, 0.2, 0.1), **kwargs) -> List[tf.data.Dataset]: 11 | return [tf.data.Dataset.from_tensor_slices(_build_samples(int(split * count), **kwargs)) 12 | for split in splits] 13 | 14 | 15 | def _build_samples(sample_count:int, nx:int, ny:int, **kwargs) -> Tuple[np.array, np.array]: 16 | images = np.empty((sample_count, nx, ny, 1)) 17 | labels = np.empty((sample_count, nx, ny, 2)) 18 | for i in range(sample_count): 19 | image, mask = _create_image_and_mask(nx, ny, **kwargs) 20 | images[i] = image 21 | labels[i, ..., 0] = ~mask 22 | labels[i, ..., 1] = mask 23 | return images, labels 24 | 25 | 26 | def _create_image_and_mask(nx, ny, cnt=10, r_min=3, r_max=10, border=32, sigma=20): 27 | image = np.ones((nx, ny, 1)) 28 | mask = np.zeros((nx, ny), dtype=np.bool) 29 | 30 | for _ in range(cnt): 31 | a = np.random.randint(border, nx - border) 32 | b = np.random.randint(border, ny - border) 33 | r = np.random.randint(r_min, r_max) 34 | h = np.random.randint(1, 255) 35 | 36 | y, x = np.ogrid[-a:nx - a, -b:ny - b] 37 | m = x * x + y * y <= r * r 38 | mask = np.logical_or(mask, m) 39 | 40 | image[m] = h 41 | 42 | image += np.random.normal(scale=sigma, size=image.shape) 43 | image -= np.amin(image) 44 | image /= np.amax(image) 45 | 46 | return image, mask 47 | 
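
A minimal usage sketch for this toy dataset, assuming the package is importable as ``unet`` (the ``nx``/``ny`` values below are illustrative and not taken from the repository; the printed shapes follow ``_build_samples`` above)::

    from unet.datasets import circles

    # the default splits=(0.7, 0.2, 0.1) yield three tf.data.Dataset objects
    train, validation, test = circles.load_data(100, nx=172, ny=172)

    # each element is an (image, one-hot label) pair
    image, label = next(iter(train))
    print(image.shape)  # (172, 172, 1) -> circles.channels == 1
    print(label.shape)  # (172, 172, 2) -> circles.classes == 2 (background, circle)

Note that ``nx`` and ``ny`` must be passed explicitly, since ``_build_samples`` defines no defaults for them.
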
-------------------------------------------------------------------------------- /src/unet/datasets/oxford_iiit_pet.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple, Dict 2 | 3 | import tensorflow as tf 4 | import tensorflow_datasets as tfds 5 | from tensorflow_datasets.core import DatasetInfo 6 | 7 | tfds.disable_progress_bar() 8 | 9 | IMAGE_SIZE = (128, 128) 10 | channels = 3 11 | classes = 3 12 | 13 | 14 | def normalize(input_image, input_mask): 15 | input_image = tf.cast(input_image, tf.float32) / 255.0 16 | input_mask -= 1 17 | return input_image, input_mask 18 | 19 | 20 | def load_image_train(datapoint): 21 | input_image = tf.image.resize(datapoint['image'], IMAGE_SIZE) 22 | input_mask = tf.image.resize(datapoint['segmentation_mask'], IMAGE_SIZE) 23 | 24 | if tf.random.uniform(()) > 0.5: 25 | input_image = tf.image.flip_left_right(input_image) 26 | input_mask = tf.image.flip_left_right(input_mask) 27 | 28 | input_image, input_mask = normalize(input_image, input_mask) 29 | 30 | return input_image, input_mask 31 | 32 | 33 | def load_image_test(datapoint): 34 | input_image = tf.image.resize(datapoint['image'], IMAGE_SIZE) 35 | input_mask = tf.image.resize(datapoint['segmentation_mask'], IMAGE_SIZE) 36 | 37 | input_image, input_mask = normalize(input_image, input_mask) 38 | 39 | return input_image, input_mask 40 | 41 | 42 | def load_data(buffer_size=1000, **kwargs) -> Tuple[tf.data.Dataset, tf.data.Dataset]: 43 | dataset, info = _load_without_checksum_verification(**kwargs) 44 | train = dataset['train'].map(load_image_train, num_parallel_calls=tf.data.experimental.AUTOTUNE) 45 | test = dataset['test'].map(load_image_test) 46 | train_dataset = train.cache().shuffle(buffer_size).take(info.splits["train"].num_examples) 47 | return train_dataset, test 48 | 49 | 50 | def _load_without_checksum_verification(**kwargs) -> Tuple[Dict, DatasetInfo]: 51 | builder = tfds.builder('oxford_iiit_pet:3.2.0') 52 | # by setting register_checksums as True to pass the check 53 | config = tfds.download.DownloadConfig(register_checksums=True) 54 | builder.download_and_prepare(download_config=config) 55 | dataset = builder.as_dataset() 56 | 57 | return dataset, (builder.info) 58 | -------------------------------------------------------------------------------- /src/unet/metrics.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | 4 | def mean_iou(y_true, y_pred): 5 | y_true = tf.cast(y_true, tf.dtypes.float64) 6 | y_pred = tf.cast(y_pred, tf.dtypes.float64) 7 | I = tf.reduce_sum(y_pred * y_true, axis=(1, 2)) 8 | U = tf.reduce_sum(y_pred + y_true, axis=(1, 2)) - I 9 | return tf.reduce_mean(I / U) 10 | 11 | 12 | def dice_coefficient(y_true, y_pred, smooth=1): 13 | intersection = tf.reduce_sum(y_true * y_pred, axis=[1, 2, 3]) 14 | union = tf.reduce_sum(y_true, axis=[1, 2, 3]) + tf.reduce_sum(y_pred, axis=[1, 2, 3]) 15 | dice = tf.reduce_mean((2. 
* intersection + smooth) / (union + smooth), axis=0) 16 | return dice 17 | -------------------------------------------------------------------------------- /src/unet/schedulers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from enum import Enum 3 | from typing import Callable 4 | 5 | import tensorflow as tf 6 | import tensorflow.keras.backend as K 7 | 8 | logger = logging.getLogger(__name__) 9 | 10 | 11 | class SchedulerType(Enum): 12 | WARMUP_LINEAR_DECAY = "warmup-linear-decay" 13 | 14 | 15 | def get(scheduler:SchedulerType, train_dataset_size:int, learning_rate:float, **hyperparams): 16 | if scheduler == SchedulerType.WARMUP_LINEAR_DECAY: 17 | batch_size = hyperparams["batch_size"] 18 | steps_per_epoch = (train_dataset_size + batch_size - 1) // batch_size 19 | total_steps = steps_per_epoch * hyperparams["epochs"] 20 | warmup_steps = int(total_steps * hyperparams["warmup_proportion"]) 21 | logger.info("Total steps %s, warum steps %s", total_steps, warmup_steps) 22 | 23 | schedule = WarmupLinearDecaySchedule(warmup_steps, total_steps, learning_rate) 24 | return LearningRateScheduler(schedule, steps_per_epoch, verbose=0) 25 | else: 26 | raise ValueError("Unknown scheduler %s"%scheduler) 27 | 28 | 29 | class LearningRateScheduler(tf.keras.callbacks.Callback): 30 | # Currently, the optimizers in TF2 don't properly support LR schedulers as callable. 31 | # As alternative we have to use a Keras callback which only allows for updating the LR per batch instead per step 32 | 33 | """Learning rate scheduler. 34 | Arguments: 35 | schedule: a function that takes an step index as input 36 | (integer, indexed from 0) and returns a new 37 | learning rate as output (float). 38 | verbose: int. 0: quiet, 1: update messages. 39 | """ 40 | 41 | def __init__(self, schedule:Callable[[int], float], steps_per_epoch:int, verbose=0): 42 | super(LearningRateScheduler, self).__init__() 43 | self.schedule = schedule 44 | self.steps_per_epoch = steps_per_epoch 45 | self.verbose = verbose 46 | self._current_step = 0 47 | 48 | def on_train_batch_begin(self, batch, logs=None): 49 | new_lr = self.schedule(self._current_step) 50 | 51 | K.set_value(self.model.optimizer.lr, new_lr) 52 | 53 | self._current_step += 1 54 | 55 | if self.verbose > 0: 56 | logger.info('\nBatch %05d: LearningRateScheduler changing learning rate to %s.', batch + 1, new_lr) 57 | 58 | def on_epoch_end(self, epoch, logs=None): 59 | logs = logs or {} 60 | logs['learning_rate'] = K.get_value(self.model.optimizer.lr) 61 | 62 | def on_train_batch_end(self, batch, logs=None): 63 | logs = logs or {} 64 | logs['learning_rate'] = K.get_value(self.model.optimizer.lr) 65 | 66 | 67 | class WarmupLinearDecaySchedule: 68 | """ Linear warmup and then linear decay. 69 | Linearly increases learning rate from 0 to 1 over `warmup_steps` training steps. 70 | Linearly decreases learning rate from 1. to 0. over remaining `t_total - warmup_steps` steps. 
71 | """ 72 | def __init__(self, warmup_steps, total_steps, learning_rate, min_lr=0.0): 73 | self.warmup_steps = warmup_steps 74 | self.total_steps = total_steps 75 | self.initial_learning_rate = learning_rate 76 | self.min_lr = min_lr 77 | self.decay_steps = max(1.0, self.total_steps - self.warmup_steps) 78 | 79 | def __call__(self, step): 80 | if step < self.warmup_steps: 81 | learning_rate = self.initial_learning_rate * float(step) / max(1., self.warmup_steps) 82 | else: 83 | decay_factor = max(0, (self.total_steps - step) / self.decay_steps) 84 | learning_rate = self.min_lr + (self.initial_learning_rate - self.min_lr) * decay_factor 85 | 86 | return learning_rate 87 | -------------------------------------------------------------------------------- /src/unet/trainer.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from pathlib import Path 3 | from typing import Union, List, Optional, Tuple 4 | 5 | import tensorflow as tf 6 | from tensorflow.keras import Model 7 | from tensorflow.keras.callbacks import Callback 8 | from tensorflow.keras.callbacks import ModelCheckpoint, TensorBoard 9 | 10 | from unet import utils, schedulers 11 | from unet.callbacks import TensorBoardWithLearningRate, TensorBoardImageSummary 12 | from unet.schedulers import SchedulerType 13 | 14 | 15 | class Trainer: 16 | """ 17 | Fits a given model to a datasets and configres learning rate schedulers and 18 | various callbacks 19 | 20 | :param name: Name of the model, used to build the target log directory if no explicit path is given 21 | :param log_dir_path: Path to the directory where the model and tensorboard summaries should be stored 22 | :param checkpoint_callback: Flag if checkpointing should be enabled. Alternatively a callback instance can be passed 23 | :param tensorboard_callback: Flag if information should be stored for tensorboard. Alternatively a callback instance can be passed 24 | :param tensorboard_images_callback: Flag if intermediate predictions should be stored in Tensorboard. Alternatively a callback instance can be passed 25 | :param callbacks: List of additional callbacks 26 | :param learning_rate_scheduler: The learning rate to be used. 
Either None for a constant learning rate, a `Callback` or a `SchedulerType` 27 | :param scheduler_opts: Further kwargs passed to the learning rate scheduler 28 | """ 29 | 30 | def __init__(self, 31 | name: Optional[str]="unet", 32 | log_dir_path: Optional[Union[Path, str]]=None, 33 | checkpoint_callback: Optional[Union[TensorBoard, bool]] = True, 34 | tensorboard_callback: Optional[Union[TensorBoard, bool]] = True, 35 | tensorboard_images_callback: Optional[Union[TensorBoardImageSummary, bool]] = True, 36 | callbacks: Union[List[Callback], None]=None, 37 | learning_rate_scheduler: Optional[Union[SchedulerType, Callback]]=None, 38 | **scheduler_opts, 39 | ): 40 | self.checkpoint_callback = checkpoint_callback 41 | self.tensorboard_callback = tensorboard_callback 42 | self.tensorboard_images_callback = tensorboard_images_callback 43 | self.callbacks = callbacks 44 | self.learning_rate_scheduler = learning_rate_scheduler 45 | self.scheduler_opts=scheduler_opts 46 | 47 | if log_dir_path is None: 48 | log_dir_path = build_log_dir_path(name) 49 | if isinstance(log_dir_path, Path): 50 | log_dir_path = str(log_dir_path) 51 | 52 | self.log_dir_path = log_dir_path 53 | 54 | def fit(self, 55 | model: Model, 56 | train_dataset: tf.data.Dataset, 57 | validation_dataset: Optional[tf.data.Dataset]=None, 58 | test_dataset: Optional[tf.data.Dataset]=None, 59 | epochs=10, 60 | batch_size=1, 61 | **fit_kwargs): 62 | """ 63 | Fits the model to the given data 64 | 65 | :param model: The model to be fit 66 | :param train_dataset: The dataset used for training 67 | :param validation_dataset: (Optional) The dataset used for validation 68 | :param test_dataset: (Optional) The dataset used for test 69 | :param epochs: Number of epochs 70 | :param batch_size: Size of minibatches 71 | :param fit_kwargs: Further kwargs passd to `model.fit` 72 | """ 73 | 74 | prediction_shape = self._get_output_shape(model, train_dataset)[1:] 75 | 76 | learning_rate_scheduler = self._build_learning_rate_scheduler(train_dataset=train_dataset, 77 | batch_size=batch_size, 78 | epochs=epochs, 79 | **self.scheduler_opts) 80 | 81 | callbacks = self._build_callbacks(train_dataset, 82 | validation_dataset) 83 | 84 | if learning_rate_scheduler: 85 | callbacks += [learning_rate_scheduler] 86 | 87 | train_dataset = train_dataset.map(utils.crop_labels_to_shape(prediction_shape)).batch(batch_size) 88 | 89 | if validation_dataset: 90 | validation_dataset = validation_dataset.map(utils.crop_labels_to_shape(prediction_shape)).batch(batch_size) 91 | 92 | history = model.fit(train_dataset, 93 | validation_data=validation_dataset, 94 | epochs=epochs, 95 | callbacks=callbacks, 96 | **fit_kwargs) 97 | 98 | self.evaluate(model, test_dataset, prediction_shape) 99 | 100 | return history 101 | 102 | def _get_output_shape(self, 103 | model: Model, 104 | train_dataset: tf.data.Dataset): 105 | return model.predict(train_dataset 106 | .take(count=1) 107 | .batch(batch_size=1) 108 | ).shape 109 | 110 | def _build_callbacks(self, 111 | train_dataset: Optional[tf.data.Dataset], 112 | validation_dataset: Optional[tf.data.Dataset]) -> List[Callback]: 113 | if self.callbacks: 114 | callbacks = self.callbacks 115 | else: 116 | callbacks = [] 117 | 118 | if isinstance(self.checkpoint_callback, Callback): 119 | callbacks.append(self.checkpoint_callback) 120 | elif self.checkpoint_callback: 121 | callbacks.append(ModelCheckpoint(self.log_dir_path, 122 | save_best_only=True)) 123 | 124 | if isinstance(self.tensorboard_callback, Callback): 125 | 
callbacks.append(self.tensorboard_callback) 126 | elif self.tensorboard_callback: 127 | callbacks.append(TensorBoardWithLearningRate(self.log_dir_path)) 128 | 129 | if isinstance(self.tensorboard_images_callback, Callback): 130 | callbacks.append(self.tensorboard_images_callback) 131 | elif self.tensorboard_images_callback: 132 | tensorboard_image_summary = TensorBoardImageSummary("train", 133 | self.log_dir_path, 134 | dataset=train_dataset, 135 | max_outputs=6) 136 | callbacks.append(tensorboard_image_summary) 137 | 138 | if validation_dataset: 139 | tensorboard_image_summary = TensorBoardImageSummary("validation", 140 | self.log_dir_path, 141 | dataset=validation_dataset, 142 | max_outputs=6) 143 | callbacks.append(tensorboard_image_summary) 144 | 145 | return callbacks 146 | 147 | def _build_learning_rate_scheduler(self, 148 | train_dataset: tf.data.Dataset, 149 | **scheduler_opts 150 | ) -> Optional[Callback]: 151 | 152 | if self.learning_rate_scheduler is None: 153 | return None 154 | 155 | if isinstance(self.learning_rate_scheduler, Callback): 156 | return self.learning_rate_scheduler 157 | 158 | elif isinstance(self.learning_rate_scheduler, SchedulerType): 159 | train_dataset_size = tf.data.experimental.cardinality(train_dataset).numpy() 160 | learning_rate_scheduler = schedulers.get( 161 | scheduler=self.learning_rate_scheduler, 162 | train_dataset_size=train_dataset_size, 163 | **scheduler_opts) 164 | 165 | return learning_rate_scheduler 166 | 167 | def evaluate(self, 168 | model:Model, 169 | test_dataset: Optional[tf.data.Dataset]=None, 170 | shape:Tuple[int, int, int]=None): 171 | 172 | if test_dataset: 173 | model.evaluate(test_dataset 174 | .map(utils.crop_labels_to_shape(shape)) 175 | .batch(batch_size=1) 176 | ) 177 | 178 | 179 | def build_log_dir_path(root: Optional[str]= "unet") -> str: 180 | return str(Path(root) / datetime.now().strftime("%Y-%m-%dT%H-%M_%S")) 181 | -------------------------------------------------------------------------------- /src/unet/unet.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union, Callable, List 2 | 3 | import numpy as np 4 | import tensorflow as tf 5 | from tensorflow.keras import Model, Input 6 | from tensorflow.keras import layers 7 | from tensorflow.keras import losses 8 | from tensorflow.keras.initializers import TruncatedNormal 9 | from tensorflow.keras.optimizers import Adam 10 | 11 | import unet.metrics 12 | 13 | 14 | class ConvBlock(layers.Layer): 15 | 16 | def __init__(self, layer_idx, filters_root, kernel_size, dropout_rate, padding, activation, **kwargs): 17 | super(ConvBlock, self).__init__(**kwargs) 18 | self.layer_idx=layer_idx 19 | self.filters_root=filters_root 20 | self.kernel_size=kernel_size 21 | self.dropout_rate=dropout_rate 22 | self.padding=padding 23 | self.activation=activation 24 | 25 | filters = _get_filter_count(layer_idx, filters_root) 26 | self.conv2d_1 = layers.Conv2D(filters=filters, 27 | kernel_size=(kernel_size, kernel_size), 28 | kernel_initializer=_get_kernel_initializer(filters, kernel_size), 29 | strides=1, 30 | padding=padding) 31 | self.dropout_1 = layers.Dropout(rate=dropout_rate) 32 | self.activation_1 = layers.Activation(activation) 33 | 34 | self.conv2d_2 = layers.Conv2D(filters=filters, 35 | kernel_size=(kernel_size, kernel_size), 36 | kernel_initializer=_get_kernel_initializer(filters, kernel_size), 37 | strides=1, 38 | padding=padding) 39 | self.dropout_2 = layers.Dropout(rate=dropout_rate) 40 | self.activation_2 = 
layers.Activation(activation) 41 | 42 | def call(self, inputs, training=None, **kwargs): 43 | x = inputs 44 | x = self.conv2d_1(x) 45 | 46 | if training: 47 | x = self.dropout_1(x) 48 | x = self.activation_1(x) 49 | x = self.conv2d_2(x) 50 | 51 | if training: 52 | x = self.dropout_2(x) 53 | 54 | x = self.activation_2(x) 55 | return x 56 | 57 | def get_config(self): 58 | return dict(layer_idx=self.layer_idx, 59 | filters_root=self.filters_root, 60 | kernel_size=self.kernel_size, 61 | dropout_rate=self.dropout_rate, 62 | padding=self.padding, 63 | activation=self.activation, 64 | **super(ConvBlock, self).get_config(), 65 | ) 66 | 67 | 68 | class UpconvBlock(layers.Layer): 69 | 70 | def __init__(self, layer_idx, filters_root, kernel_size, pool_size, padding, activation, **kwargs): 71 | super(UpconvBlock, self).__init__(**kwargs) 72 | self.layer_idx=layer_idx 73 | self.filters_root=filters_root 74 | self.kernel_size=kernel_size 75 | self.pool_size=pool_size 76 | self.padding=padding 77 | self.activation=activation 78 | 79 | filters = _get_filter_count(layer_idx + 1, filters_root) 80 | self.upconv = layers.Conv2DTranspose(filters // 2, 81 | kernel_size=(pool_size, pool_size), 82 | kernel_initializer=_get_kernel_initializer(filters, kernel_size), 83 | strides=pool_size, padding=padding) 84 | 85 | self.activation_1 = layers.Activation(activation) 86 | 87 | def call(self, inputs, **kwargs): 88 | x = inputs 89 | x = self.upconv(x) 90 | x = self.activation_1(x) 91 | return x 92 | 93 | def get_config(self): 94 | return dict(layer_idx=self.layer_idx, 95 | filters_root=self.filters_root, 96 | kernel_size=self.kernel_size, 97 | pool_size=self.pool_size, 98 | padding=self.padding, 99 | activation=self.activation, 100 | **super(UpconvBlock, self).get_config(), 101 | ) 102 | 103 | class CropConcatBlock(layers.Layer): 104 | 105 | def call(self, x, down_layer, **kwargs): 106 | x1_shape = tf.shape(down_layer) 107 | x2_shape = tf.shape(x) 108 | 109 | height_diff = (x1_shape[1] - x2_shape[1]) // 2 110 | width_diff = (x1_shape[2] - x2_shape[2]) // 2 111 | 112 | down_layer_cropped = down_layer[:, 113 | height_diff: (x2_shape[1] + height_diff), 114 | width_diff: (x2_shape[2] + width_diff), 115 | :] 116 | 117 | x = tf.concat([down_layer_cropped, x], axis=-1) 118 | return x 119 | 120 | 121 | def build_model(nx: Optional[int] = None, 122 | ny: Optional[int] = None, 123 | channels: int = 1, 124 | num_classes: int = 2, 125 | layer_depth: int = 5, 126 | filters_root: int = 64, 127 | kernel_size: int = 3, 128 | pool_size: int = 2, 129 | dropout_rate: float = 0.5, 130 | padding:str="valid", 131 | activation:Union[str, Callable]="relu") -> Model: 132 | """ 133 | Constructs a U-Net model 134 | 135 | :param nx: (Optional) image size on x-axis 136 | :param ny: (Optional) image size on y-axis 137 | :param channels: number of channels of the input tensors 138 | :param num_classes: number of classes 139 | :param layer_depth: total depth of unet 140 | :param filters_root: number of filters in top unet layer 141 | :param kernel_size: size of the convolution kernels 142 | :param pool_size: size of maxpool layers 143 | :param dropout_rate: rate of dropout 144 | :param padding: padding to be used in convolutions 145 | :param activation: activation to be used 146 | 147 | :return: A TF Keras model 148 | """ 149 | 150 | inputs = Input(shape=(nx, ny, channels), name="inputs") 151 | 152 | x = inputs 153 | contracting_layers = {} 154 | 155 | conv_params = dict(filters_root=filters_root, 156 | kernel_size=kernel_size, 157 |
dropout_rate=dropout_rate, 158 | padding=padding, 159 | activation=activation) 160 | 161 | for layer_idx in range(0, layer_depth - 1): 162 | x = ConvBlock(layer_idx, **conv_params)(x) 163 | contracting_layers[layer_idx] = x 164 | x = layers.MaxPooling2D((pool_size, pool_size))(x) 165 | 166 | x = ConvBlock(layer_idx + 1, **conv_params)(x) 167 | 168 | for layer_idx in range(layer_idx, -1, -1): 169 | x = UpconvBlock(layer_idx, 170 | filters_root, 171 | kernel_size, 172 | pool_size, 173 | padding, 174 | activation)(x) 175 | x = CropConcatBlock()(x, contracting_layers[layer_idx]) 176 | x = ConvBlock(layer_idx, **conv_params)(x) 177 | 178 | x = layers.Conv2D(filters=num_classes, 179 | kernel_size=(1, 1), 180 | kernel_initializer=_get_kernel_initializer(filters_root, kernel_size), 181 | strides=1, 182 | padding=padding)(x) 183 | 184 | x = layers.Activation(activation)(x) 185 | outputs = layers.Activation("softmax", name="outputs")(x) 186 | model = Model(inputs, outputs, name="unet") 187 | 188 | return model 189 | 190 | 191 | def _get_filter_count(layer_idx, filters_root): 192 | return 2 ** layer_idx * filters_root 193 | 194 | 195 | def _get_kernel_initializer(filters, kernel_size): 196 | stddev = np.sqrt(2 / (kernel_size ** 2 * filters)) 197 | return TruncatedNormal(stddev=stddev) 198 | 199 | 200 | def finalize_model(model: Model, 201 | loss: Optional[Union[Callable, str]]=losses.categorical_crossentropy, 202 | optimizer: Optional[tf.keras.optimizers.Optimizer] = None, 203 | metrics:Optional[List[Union[Callable,str]]]=None, 204 | dice_coefficient: bool=True, 205 | auc: bool=True, 206 | mean_iou: bool=True, 207 | **opt_kwargs): 208 | """ 209 | Configures the model for training by setting loss, optimizer, and tracked metrics 210 | 211 | :param model: the model to compile 212 | :param loss: the loss to be optimized. Defaults to `categorical_crossentropy` 213 | :param optimizer: the optimizer to use. Defaults to `Adam` 214 | :param metrics: List of metrics to track. Defaults to `categorical_crossentropy` and `categorical_accuracy` if not given 215 | :param dice_coefficient: Flag if the dice coefficient metric should be tracked 216 | :param auc: Flag if the area under the curve metric should be tracked 217 | :param mean_iou: Flag if the mean intersection over union (IoU) metric should be tracked 218 | :param opt_kwargs: keyword arguments passed to the default optimizer (Adam), e.g.
learning rate 219 | """ 220 | 221 | if optimizer is None: 222 | optimizer = Adam(**opt_kwargs) 223 | 224 | if metrics is None: 225 | metrics = ['categorical_crossentropy', 226 | 'categorical_accuracy', 227 | ] 228 | 229 | if mean_iou: 230 | metrics += [unet.metrics.mean_iou] 231 | 232 | if dice_coefficient: 233 | metrics += [unet.metrics.dice_coefficient] 234 | 235 | if auc: 236 | metrics += [tf.keras.metrics.AUC()] 237 | 238 | model.compile(loss=loss, 239 | optimizer=optimizer, 240 | metrics=metrics, 241 | ) 242 | -------------------------------------------------------------------------------- /src/unet/utils.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple 2 | 3 | import numpy as np 4 | 5 | 6 | def crop_to_shape(data, shape: Tuple[int, int, int]): 7 | """ 8 | Crops the array to the given image shape by removing the border 9 | 10 | :param data: the array to crop, expects a tensor of shape [batches, nx, ny, channels] 11 | :param shape: the target shape [batches, nx, ny, channels] 12 | """ 13 | diff_nx = (data.shape[0] - shape[0]) 14 | diff_ny = (data.shape[1] - shape[1]) 15 | 16 | if diff_nx == 0 and diff_ny == 0: 17 | return data 18 | 19 | offset_nx_left = diff_nx // 2 20 | offset_nx_right = diff_nx - offset_nx_left 21 | offset_ny_left = diff_ny // 2 22 | offset_ny_right = diff_ny - offset_ny_left 23 | 24 | cropped = data[offset_nx_left:(-offset_nx_right), offset_ny_left:(-offset_ny_right)] 25 | 26 | assert cropped.shape[0] == shape[0] 27 | assert cropped.shape[1] == shape[1] 28 | return cropped 29 | 30 | 31 | def crop_labels_to_shape(shape: Tuple[int, int, int]): 32 | def crop(image, label): 33 | return image, crop_to_shape(label, shape) 34 | return crop 35 | 36 | 37 | def crop_image_and_label_to_shape(shape: Tuple[int, int, int]): 38 | def crop(image, label): 39 | return crop_to_shape(image, shape), \ 40 | crop_to_shape(label, shape) 41 | return crop 42 | 43 | 44 | def to_rgb(img: np.array): 45 | """ 46 | Converts the given array into a RGB image and normalizes the values to [0, 1). 47 | If the number of channels is less than 3, the array is tiled such that it has 3 channels. 48 | If the number of channels is greater than 3, only the first 3 channels are used 49 | 50 | :param img: the array to convert [bs, nx, ny, channels] 51 | 52 | :returns img: the rgb image [bs, nx, ny, 3] 53 | """ 54 | img = img.astype(np.float32) 55 | img = np.atleast_3d(img) 56 | 57 | channels = img.shape[-1] 58 | if channels == 1: 59 | img = np.tile(img, 3) 60 | 61 | elif channels == 2: 62 | img = np.concatenate((img, img[..., :1]), axis=-1) 63 | 64 | elif channels > 3: 65 | img = img[..., :3] 66 | 67 | img[np.isnan(img)] = 0 68 | img -= np.amin(img) 69 | if np.amax(img) != 0: 70 | img /= np.amax(img) 71 | 72 | return img 73 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Dummy conftest.py for unet. 4 | 5 | If you don't know what this is for, just leave it empty. 
6 | Read more about conftest.py under: 7 | https://pytest.org/latest/plugins.html 8 | """ 9 | 10 | # import pytest 11 | -------------------------------------------------------------------------------- /tests/test_schedulers.py: -------------------------------------------------------------------------------- 1 | from unet import schedulers 2 | 3 | 4 | class TestWarmupLinearDecaySchedule: 5 | 6 | def test_schedule(self): 7 | warmup_steps = 2 8 | total_steps = 10 9 | learning_rate = 10 10 | min_lr = 0.0 11 | scheduler = schedulers.WarmupLinearDecaySchedule(warmup_steps, total_steps, learning_rate, min_lr=min_lr) 12 | 13 | assert scheduler(step=0) == 0 14 | assert 0 < scheduler(step=warmup_steps - 1) < learning_rate 15 | assert scheduler(step=warmup_steps) == learning_rate 16 | 17 | assert min_lr < scheduler(step=warmup_steps + 1) < learning_rate 18 | assert min_lr < scheduler(step=total_steps - 1) < learning_rate 19 | assert scheduler(step=total_steps) == min_lr 20 | assert scheduler(step=total_steps + 1) == min_lr 21 | -------------------------------------------------------------------------------- /tests/test_trainer.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock 2 | 3 | import numpy as np 4 | import tensorflow as tf 5 | from tensorflow.keras.callbacks import ModelCheckpoint 6 | 7 | import unet 8 | from unet.callbacks import TensorBoardWithLearningRate, TensorBoardImageSummary 9 | from unet.schedulers import LearningRateScheduler 10 | 11 | 12 | def _build_dataset(items=5, image_shape=(10, 10, 3), label_shape=(10, 10, 2)): 13 | images = np.ones((items, *image_shape)) 14 | labels = np.ones((items, *label_shape)) 15 | return tf.data.Dataset.from_tensor_slices((images, labels)) 16 | 17 | 18 | class TestTrainer: 19 | 20 | def test_fit(self, tmp_path): 21 | output_shape = (8, 8, 2) 22 | image_shape = (10, 10, 3) 23 | epochs = 5 24 | shuffle = True 25 | batch_size = 10 26 | 27 | model = Mock(name="model") 28 | model.predict().shape = (None, *output_shape) 29 | 30 | mock_callback = Mock() 31 | trainer = unet.Trainer(name="test", 32 | log_dir_path=str(tmp_path), 33 | checkpoint_callback=True, 34 | tensorboard_callback=True, 35 | tensorboard_images_callback=True, 36 | callbacks=[mock_callback], 37 | learning_rate_scheduler=unet.SchedulerType.WARMUP_LINEAR_DECAY, 38 | warmup_proportion=0.1, 39 | learning_rate=1.0) 40 | 41 | train_dataset = _build_dataset(image_shape=image_shape) 42 | validation_dataset = _build_dataset(image_shape=image_shape) 43 | test_dataset = _build_dataset(image_shape=image_shape) 44 | 45 | trainer.fit(model, 46 | train_dataset=train_dataset, 47 | validation_dataset=validation_dataset, 48 | test_dataset=test_dataset, 49 | epochs=epochs, 50 | batch_size=batch_size, 51 | shuffle=shuffle) 52 | 53 | args, kwargs = model.fit.call_args 54 | train_dataset = args[0] 55 | validation_dataset = kwargs["validation_data"] 56 | 57 | assert tuple(train_dataset.element_spec[0].shape) == (None, *image_shape) 58 | assert tuple(train_dataset.element_spec[1].shape) == (None, *output_shape) 59 | assert train_dataset._batch_size.numpy() == batch_size 60 | 61 | assert validation_dataset._batch_size.numpy() == batch_size 62 | assert tuple(validation_dataset.element_spec[0].shape) == (None, *image_shape) 63 | assert tuple(validation_dataset.element_spec[1].shape) == (None, *output_shape) 64 | 65 | callbacks = kwargs["callbacks"] 66 | callback_types = [type(callback) for callback in callbacks] 67 | assert mock_callback in callbacks 
68 | assert ModelCheckpoint in callback_types 69 | assert TensorBoardWithLearningRate in callback_types 70 | assert TensorBoardImageSummary in callback_types 71 | assert LearningRateScheduler in callback_types 72 | 73 | assert kwargs["epochs"] == epochs 74 | assert kwargs["shuffle"] == shuffle 75 | 76 | args, kwargs = model.evaluate.call_args 77 | test_dataset = args[0] 78 | assert tuple(test_dataset.element_spec[0].shape) == (None, *image_shape) 79 | assert tuple(test_dataset.element_spec[1].shape) == (None, *output_shape) 80 | -------------------------------------------------------------------------------- /tests/test_unet.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock, patch 2 | 3 | import numpy as np 4 | import tensorflow as tf 5 | from tensorflow.keras import layers 6 | from tensorflow.keras import losses 7 | 8 | from unet import unet, custom_objects 9 | 10 | 11 | class TestConvBlock: 12 | 13 | def test_serialization(self): 14 | conv_block = unet.ConvBlock(layer_idx=1, 15 | filters_root=16, 16 | kernel_size=3, 17 | dropout_rate=0.1, 18 | padding="same", 19 | activation="relu", 20 | name="conv_block_test") 21 | 22 | config = conv_block.get_config() 23 | new_conv_block = unet.ConvBlock.from_config(config) 24 | 25 | assert new_conv_block.layer_idx == conv_block.layer_idx 26 | assert new_conv_block.filters_root == conv_block.filters_root 27 | assert new_conv_block.kernel_size == conv_block.kernel_size 28 | assert new_conv_block.dropout_rate == conv_block.dropout_rate 29 | assert new_conv_block.padding == conv_block.padding 30 | assert new_conv_block.activation == conv_block.activation 31 | assert new_conv_block.activation == conv_block.activation 32 | 33 | 34 | class TestUpconvBlock: 35 | 36 | def test_serialization(self): 37 | upconv_block = unet.UpconvBlock(layer_idx=1, 38 | filters_root=16, 39 | kernel_size=3, 40 | pool_size=2, 41 | padding="same", 42 | activation="relu", 43 | name="upconv_block_test") 44 | 45 | config = upconv_block.get_config() 46 | new_upconv_block = unet.UpconvBlock.from_config(config) 47 | 48 | assert new_upconv_block.layer_idx == upconv_block.layer_idx 49 | assert new_upconv_block.filters_root == upconv_block.filters_root 50 | assert new_upconv_block.kernel_size == upconv_block.kernel_size 51 | assert new_upconv_block.pool_size == upconv_block.pool_size 52 | assert new_upconv_block.padding == upconv_block.padding 53 | assert new_upconv_block.activation == upconv_block.activation 54 | assert new_upconv_block.activation == upconv_block.activation 55 | 56 | 57 | class TestCropConcatBlock(): 58 | 59 | def test_uneven_concat(self): 60 | layer = unet.CropConcatBlock() 61 | down_tensor = np.ones([1, 61, 61, 32]) 62 | up_tensor = np.ones([1, 52, 52, 32]) 63 | 64 | concat_tensor = layer(up_tensor, down_tensor) 65 | 66 | assert concat_tensor.shape == (1, 52, 52, 64) 67 | 68 | 69 | class TestUnetModel: 70 | 71 | def test_serialization(self, tmpdir): 72 | save_path = str(tmpdir / "unet_model") 73 | unet_model = unet.build_model(layer_depth=3, filters_root=2) 74 | unet.finalize_model(unet_model) 75 | unet_model.save(save_path) 76 | 77 | reconstructed_model = tf.keras.models.load_model(save_path, custom_objects=custom_objects) 78 | assert reconstructed_model is not None 79 | 80 | def test_build_model(self): 81 | nx = 572 82 | ny = 572 83 | channels = 3 84 | num_classes = 2 85 | kernel_size = 3 86 | pool_size = 2 87 | filters_root = 64 88 | layer_depth = 5 89 | model = unet.build_model(nx=nx, 90 | ny=ny, 
91 | channels=channels, 92 | num_classes=num_classes, 93 | layer_depth=layer_depth, 94 | filters_root=filters_root, 95 | kernel_size=kernel_size, 96 | pool_size=pool_size) 97 | 98 | input_shape = model.get_layer("inputs").output.shape 99 | assert tuple(input_shape) == (None, nx, ny, channels) 100 | output_shape = model.get_layer("outputs").output.shape 101 | assert tuple(output_shape) == (None, 388, 388, num_classes) 102 | 103 | filters_per_layer = [filters_root, 128, 256, 512, 1024, 512, 256, 128, filters_root] 104 | conv2D_layers = _collect_conv2d_layers(model) 105 | 106 | assert len(conv2D_layers) == 2 * len(filters_per_layer) + 1 107 | 108 | for conv2D_layer in conv2D_layers[:-1]: 109 | assert conv2D_layer.kernel_size == (kernel_size, kernel_size) 110 | 111 | for i, filters in enumerate(filters_per_layer): 112 | assert conv2D_layers[i*2].filters == filters 113 | assert conv2D_layers[i*2+1].filters == filters 114 | 115 | maxpool_layers = [layer for layer in model.layers if type(layer) == layers.MaxPool2D] 116 | 117 | assert len(maxpool_layers) == layer_depth - 1 118 | 119 | for maxpool_layer in maxpool_layers[:-1]: 120 | assert maxpool_layer.pool_size == (pool_size, pool_size) 121 | 122 | @patch.object(unet, "Adam") 123 | def test_finalize_model(self, AdamMock:Mock): 124 | adam_instance = Mock() 125 | AdamMock.return_value = adam_instance 126 | metric_mock = Mock(name="metric") 127 | model = Mock(name="model") 128 | 129 | loss = losses.binary_crossentropy 130 | learning_rate = 1.0 131 | 132 | unet.finalize_model(model, 133 | loss=loss, 134 | optimizer=None, 135 | metrics=[metric_mock], 136 | dice_coefficient=True, 137 | auc=True, 138 | mean_iou=True, 139 | learning_rate=learning_rate) 140 | 141 | __, kwargs = AdamMock.call_args 142 | assert kwargs["learning_rate"] == learning_rate 143 | 144 | args, kwargs = model.compile.call_args 145 | assert kwargs["loss"] == loss 146 | assert kwargs["optimizer"] == adam_instance 147 | 148 | metrics = kwargs["metrics"] 149 | assert len(metrics) == 4 150 | assert metrics[0] == metric_mock 151 | 152 | 153 | def _collect_conv2d_layers(model): 154 | conv2d_layers = [] 155 | for layer in model.layers: 156 | if type(layer) == layers.Conv2D: 157 | conv2d_layers.append(layer) 158 | elif type(layer) == unet.ConvBlock: 159 | conv2d_layers.append(layer.conv2d_1) 160 | conv2d_layers.append(layer.conv2d_2) 161 | 162 | return conv2d_layers 163 | 164 | 165 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from unet import utils 5 | 6 | 7 | @pytest.mark.parametrize("channels", [ 8 | 1,2,3,4 9 | ]) 10 | def test_to_rgb(channels): 11 | tensor = np.random.normal(size=(5, 12, 12, channels)) 12 | 13 | tensor[1, 5, 5, 0] = np.nan 14 | 15 | rgb_img = utils.to_rgb(tensor) 16 | 17 | assert rgb_img.shape[:2] == tensor.shape[:2] 18 | assert rgb_img.shape[3] == 3 19 | 20 | assert rgb_img.min() == 0 21 | assert rgb_img.max() == 1 22 | --------------------------------------------------------------------------------
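The end-to-end sketch below (not a file in this repository) shows how the pieces above fit together: building a model with `build_model`, compiling it with `finalize_model`, and training it via `Trainer` with the warmup/linear-decay schedule. It only relies on the public API exercised in tests/test_trainer.py and tests/test_unet.py; the synthetic data, shapes and hyperparameters are illustrative placeholders, not recommended settings, and the package-level exports (`unet.Trainer`, `unet.SchedulerType`) are assumed to be available as they are in the tests.

import numpy as np
import tensorflow as tf

import unet                            # package: Trainer, SchedulerType (as used in tests/test_trainer.py)
from unet import unet as unet_module   # module: build_model, finalize_model (as used in tests/test_unet.py)

# Build a small U-Net; "same" padding keeps predictions at the input resolution,
# so the labels below need no cropping by the Trainer.
model = unet_module.build_model(channels=3,
                                num_classes=2,
                                layer_depth=3,
                                filters_root=16,
                                padding="same")
unet_module.finalize_model(model, learning_rate=1e-3)

# Synthetic placeholder data: 8 RGB images of 64x64 pixels with one-hot labels.
images = np.random.rand(8, 64, 64, 3).astype(np.float32)
labels = np.zeros((8, 64, 64, 2), dtype=np.float32)
labels[..., 0] = 1.0
train_dataset = tf.data.Dataset.from_tensor_slices((images, labels))
validation_dataset = tf.data.Dataset.from_tensor_slices((images, labels))

# The Trainer wires up checkpointing, TensorBoard summaries and the
# warmup/linear-decay learning rate schedule before delegating to model.fit.
trainer = unet.Trainer(name="unet-demo",
                       learning_rate_scheduler=unet.SchedulerType.WARMUP_LINEAR_DECAY,
                       warmup_proportion=0.1,
                       learning_rate=1e-3)
trainer.fit(model,
            train_dataset,
            validation_dataset=validation_dataset,
            epochs=2,
            batch_size=2)

With "valid" padding (the default) the output map is smaller than the input, which is why Trainer.fit maps `utils.crop_labels_to_shape` over the datasets; the "same" padding used here simply makes the toy example self-contained.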