├── LICENSE ├── README.md ├── ds_book ├── .DS_Store ├── _bibliography │ └── references.bib ├── _build │ ├── .doctrees │ │ ├── docs │ │ │ ├── Lesson1a_Intro_ML_NN_DL.doctree │ │ │ ├── Lesson1a_annex.doctree │ │ │ ├── Lesson1b_Intro_TensorFlow_Keras.doctree │ │ │ ├── Lesson1b_annex.doctree │ │ │ ├── Lesson2a_get_planet_NICFI.doctree │ │ │ ├── Lesson2b_prep_data_ML_segmentation.doctree │ │ │ ├── Lesson3_deeplearning_crop_segmentation.doctree │ │ │ ├── Lesson4_evaluation.doctree │ │ │ ├── Lesson5_dealing_with_limited_data.doctree │ │ │ ├── appendix.doctree │ │ │ └── index.doctree │ │ ├── environment.pickle │ │ └── glue_cache.json │ ├── html │ │ ├── .buildinfo │ │ ├── _images │ │ │ ├── Keras_functional_API.jpg │ │ │ ├── Lesson2b_prep_data_ML_segmentation_35_1.png │ │ │ ├── Lesson3_deeplearning_crop_segmentation_31_0.png │ │ │ ├── Lesson3_deeplearning_crop_segmentation_45_0.png │ │ │ ├── Lesson3_deeplearning_crop_segmentation_47_0.png │ │ │ ├── Lesson3_deeplearning_crop_segmentation_49_0.png │ │ │ ├── Lesson4_evaluation_21_1.png │ │ │ ├── Lesson4_evaluation_23_1.png │ │ │ ├── Unet_mobilenetv2_arch_arch.png │ │ │ ├── cm.png │ │ │ ├── epoch50_testimage.png │ │ │ ├── epoch50_testimage1.png │ │ │ ├── lulc_labeling.gif │ │ │ ├── marine_debris.png │ │ │ └── neuron-structure.jpg │ │ ├── _panels_static │ │ │ ├── panels-main.c949a650a448cc0ae9fd3441c0e17fb0.css │ │ │ └── panels-variables.06eb56fa6e07937060861dad626602ad.css │ │ ├── _sources │ │ │ └── docs │ │ │ │ ├── Lesson1a_Intro_ML_NN_DL.ipynb │ │ │ │ ├── Lesson1a_annex.ipynb │ │ │ │ ├── Lesson1b_Intro_TensorFlow_Keras.ipynb │ │ │ │ ├── Lesson1b_annex.ipynb │ │ │ │ ├── Lesson2a_get_planet_NICFI.ipynb │ │ │ │ ├── Lesson2b_prep_data_ML_segmentation.ipynb │ │ │ │ ├── Lesson3_deeplearning_crop_segmentation.ipynb │ │ │ │ ├── Lesson4_evaluation.ipynb │ │ │ │ ├── Lesson5_dealing_with_limited_data.ipynb │ │ │ │ ├── appendix.ipynb │ │ │ │ └── index.md │ │ ├── _static │ │ │ ├── __init__.py │ │ │ ├── __pycache__ │ │ │ │ └── __init__.cpython-36.pyc │ │ │ ├── basic.css │ │ │ ├── check-solid.svg │ │ │ ├── clipboard.min.js │ │ │ ├── copy-button.svg │ │ │ ├── copybutton.css │ │ │ ├── copybutton.js │ │ │ ├── copybutton_funcs.js │ │ │ ├── css │ │ │ │ ├── index.c5995385ac14fb8791e8eb36b4908be2.css │ │ │ │ └── theme.css │ │ │ ├── doctools.js │ │ │ ├── documentation_options.js │ │ │ ├── ds.png │ │ │ ├── file.png │ │ │ ├── images │ │ │ │ ├── logo_binder.svg │ │ │ │ ├── logo_colab.png │ │ │ │ └── logo_jupyterhub.svg │ │ │ ├── jquery-3.5.1.js │ │ │ ├── jquery.js │ │ │ ├── js │ │ │ │ └── index.1c5a1a01449ed65a7b51.js │ │ │ ├── language_data.js │ │ │ ├── minus.png │ │ │ ├── mystnb.css │ │ │ ├── panels-main.c949a650a448cc0ae9fd3441c0e17fb0.css │ │ │ ├── panels-variables.06eb56fa6e07937060861dad626602ad.css │ │ │ ├── plus.png │ │ │ ├── pygments.css │ │ │ ├── searchtools.js │ │ │ ├── sphinx-book-theme.12a9622fbb08dcb3a2a40b2c02b83a57.js │ │ │ ├── sphinx-book-theme.css │ │ │ ├── sphinx-book-theme.e2363ea40746bee74734a24ffefccd78.css │ │ │ ├── sphinx-thebe.css │ │ │ ├── sphinx-thebe.js │ │ │ ├── togglebutton.css │ │ │ ├── togglebutton.js │ │ │ ├── underscore-1.12.0.js │ │ │ ├── underscore.js │ │ │ ├── vendor │ │ │ │ └── fontawesome │ │ │ │ │ └── 5.13.0 │ │ │ │ │ ├── LICENSE.txt │ │ │ │ │ ├── css │ │ │ │ │ └── all.min.css │ │ │ │ │ └── webfonts │ │ │ │ │ ├── fa-brands-400.eot │ │ │ │ │ ├── fa-brands-400.svg │ │ │ │ │ ├── fa-brands-400.ttf │ │ │ │ │ ├── fa-brands-400.woff │ │ │ │ │ ├── fa-brands-400.woff2 │ │ │ │ │ ├── fa-regular-400.eot │ │ │ │ │ ├── fa-regular-400.svg │ │ │ │ │ ├── 
fa-regular-400.ttf │ │ │ │ │ ├── fa-regular-400.woff │ │ │ │ │ ├── fa-regular-400.woff2 │ │ │ │ │ ├── fa-solid-900.eot │ │ │ │ │ ├── fa-solid-900.svg │ │ │ │ │ ├── fa-solid-900.ttf │ │ │ │ │ ├── fa-solid-900.woff │ │ │ │ │ └── fa-solid-900.woff2 │ │ │ └── webpack-macros.html │ │ ├── docs │ │ │ ├── Lesson1a_Intro_ML_NN_DL.html │ │ │ ├── Lesson1a_annex.html │ │ │ ├── Lesson1b_Intro_TensorFlow_Keras.html │ │ │ ├── Lesson1b_annex.html │ │ │ ├── Lesson2a_get_planet_NICFI.html │ │ │ ├── Lesson2b_prep_data_ML_segmentation.html │ │ │ ├── Lesson3_deeplearning_crop_segmentation.html │ │ │ ├── Lesson4_evaluation.html │ │ │ ├── Lesson5_dealing_with_limited_data.html │ │ │ ├── appendix.html │ │ │ └── index.html │ │ ├── genindex.html │ │ ├── index.html │ │ ├── objects.inv │ │ ├── search.html │ │ └── searchindex.js │ └── jupyter_execute │ │ └── docs │ │ ├── Lesson1a_Intro_ML_NN_DL.ipynb │ │ ├── Lesson1a_Intro_ML_NN_DL.py │ │ ├── Lesson1a_Intro_ML_NN_DL_4_0.jpeg │ │ ├── Lesson1a_annex.ipynb │ │ ├── Lesson1a_annex.py │ │ ├── Lesson1b_Intro_TensorFlow_Keras.ipynb │ │ ├── Lesson1b_Intro_TensorFlow_Keras.py │ │ ├── Lesson1b_annex.ipynb │ │ ├── Lesson1b_annex.py │ │ ├── Lesson2a_get_planet_NICFI.ipynb │ │ ├── Lesson2a_get_planet_NICFI.py │ │ ├── Lesson2b_prep_data_ML_segmentation.ipynb │ │ ├── Lesson2b_prep_data_ML_segmentation.py │ │ ├── Lesson2b_prep_data_ML_segmentation_35_1.png │ │ ├── Lesson3_deeplearning_crop_segmentation.ipynb │ │ ├── Lesson3_deeplearning_crop_segmentation.py │ │ ├── Lesson3_deeplearning_crop_segmentation_31_0.png │ │ ├── Lesson3_deeplearning_crop_segmentation_45_0.png │ │ ├── Lesson3_deeplearning_crop_segmentation_47_0.png │ │ ├── Lesson3_deeplearning_crop_segmentation_49_0.png │ │ ├── Lesson4_evaluation.ipynb │ │ ├── Lesson4_evaluation.py │ │ ├── Lesson4_evaluation_21_1.png │ │ ├── Lesson4_evaluation_23_1.png │ │ ├── Lesson5_dealing_with_limited_data.ipynb │ │ ├── Lesson5_dealing_with_limited_data.py │ │ ├── appendix.ipynb │ │ └── appendix.py ├── _config.yml ├── _toc.yml ├── covers │ └── .DS_Store ├── docs │ ├── .DS_Store │ ├── Lesson1a_Intro_ML_NN_DL.ipynb │ ├── Lesson1a_annex.ipynb │ ├── Lesson1b_Intro_TensorFlow_Keras.ipynb │ ├── Lesson1b_annex.ipynb │ ├── Lesson2a_get_planet_NICFI.ipynb │ ├── Lesson2b_prep_data_ML_segmentation.ipynb │ ├── Lesson3_deeplearning_crop_segmentation.ipynb │ ├── Lesson4_evaluation.ipynb │ ├── Lesson5_dealing_with_limited_data.ipynb │ ├── appendix.ipynb │ ├── images │ │ ├── .DS_Store │ │ ├── Keras_functional_API.jpg │ │ ├── Unet.png │ │ ├── Unet_mobilenetv2_arch_arch.png │ │ ├── cm.png │ │ ├── epoch50_testimage.png │ │ ├── epoch50_testimage1.png │ │ ├── kakarla2021.png │ │ ├── loss_curve.png │ │ ├── lulc_labeling.gif │ │ ├── marine_debris.png │ │ ├── neuralnet_basic.png │ │ └── neuron-structure.jpg │ └── index.md └── ds.png └── environment.yml /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Deep Learning with TensorFlow and EO Data 2 | 3 | Complete file set for [Jupyter Book](https://beta.jupyterbook.org/intro.html) 4 | 5 | Author: Development Seed 6 | 7 | Date: 04 October 2021 8 | 9 | ISBN: (to come) 10 | 11 | 12 | Notebook tutorials demonstrating advanced techniques for using deep learning with TensorFlow and Earth observation data. 13 | 14 | 15 | 16 | **How to run the executable book code**: 17 | 18 | A major advantage of executable books is that the reader can run the source code, modify it, and experiment with it. No downloading, installation, or configuration is required. Simply go to 19 | 20 | [https://developmentseed.org/tensorflow-eo-training/docs/index.html](https://developmentseed.org/tensorflow-eo-training/docs/index.html), 21 | 22 | and in the left menu select any chapter below the Introduction, click the "rocket" icon at the top right of the screen, and choose "Colab". 23 | 24 | 25 | **Links**: 26 | 27 | - Jupyter Book: 28 | [https://developmentseed.org/tensorflow-eo-training/docs/index.html](https://developmentseed.org/tensorflow-eo-training/docs/index.html) 29 | 30 | 31 | -------------------------------------------------------------------------------- /ds_book/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/.DS_Store -------------------------------------------------------------------------------- /ds_book/_bibliography/references.bib: -------------------------------------------------------------------------------- 1 | @book{kandel2012principles, 2 | title={Principles of Neural Science, Fifth Edition}, 3 | author={Kandel, E.R. and Schwartz, J.H. and Jessell, T.M. and Siegelbaum, S.A. and Hudspeth, A.J.}, 4 | isbn={9780071810012}, 5 | lccn={2012023071}, 6 | url={https://books.google.pl/books?id=Z2yVUTnlIQsC}, 7 | year={2012}, 8 | publisher={McGraw-Hill Education} 9 | } 10 | 11 | @book{muller2012neural, 12 | title={Neural Networks: An Introduction}, 13 | author={M{\"u}ller, B. and Reinhardt, J. and Strickland, M.T.}, 14 | isbn={9783642577604}, 15 | series={Physics of Neural Networks}, 16 | url={https://books.google.pl/books?id=on0QBwAAQBAJ}, 17 | year={2012}, 18 | publisher={Springer Berlin Heidelberg} 19 | } 20 | 21 | @book{feldman2013neural, 22 | title={Neural Networks: A Systematic Introduction}, 23 | author={Feldman, J.
and Rojas, R.}, 24 | isbn={9783642610684}, 25 | lccn={96008636}, 26 | year={2013}, 27 | publisher={Springer Berlin Heidelberg} 28 | } 29 | -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/Lesson1a_Intro_ML_NN_DL.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/Lesson1a_Intro_ML_NN_DL.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/Lesson1a_annex.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/Lesson1a_annex.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/Lesson1b_Intro_TensorFlow_Keras.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/Lesson1b_Intro_TensorFlow_Keras.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/Lesson1b_annex.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/Lesson1b_annex.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/Lesson2a_get_planet_NICFI.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/Lesson2a_get_planet_NICFI.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/Lesson2b_prep_data_ML_segmentation.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/Lesson2b_prep_data_ML_segmentation.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/Lesson3_deeplearning_crop_segmentation.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/Lesson3_deeplearning_crop_segmentation.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/Lesson4_evaluation.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/Lesson4_evaluation.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/Lesson5_dealing_with_limited_data.doctree: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/Lesson5_dealing_with_limited_data.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/appendix.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/appendix.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/docs/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/docs/index.doctree -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/.doctrees/environment.pickle -------------------------------------------------------------------------------- /ds_book/_build/.doctrees/glue_cache.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /ds_book/_build/html/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 
3 | config: 18aa08e9fb8029c2bb5eb55ec0913020 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /ds_book/_build/html/_images/Keras_functional_API.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/Keras_functional_API.jpg -------------------------------------------------------------------------------- /ds_book/_build/html/_images/Lesson2b_prep_data_ML_segmentation_35_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/Lesson2b_prep_data_ML_segmentation_35_1.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/Lesson3_deeplearning_crop_segmentation_31_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/Lesson3_deeplearning_crop_segmentation_31_0.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/Lesson3_deeplearning_crop_segmentation_45_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/Lesson3_deeplearning_crop_segmentation_45_0.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/Lesson3_deeplearning_crop_segmentation_47_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/Lesson3_deeplearning_crop_segmentation_47_0.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/Lesson3_deeplearning_crop_segmentation_49_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/Lesson3_deeplearning_crop_segmentation_49_0.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/Lesson4_evaluation_21_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/Lesson4_evaluation_21_1.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/Lesson4_evaluation_23_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/Lesson4_evaluation_23_1.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/Unet_mobilenetv2_arch_arch.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/Unet_mobilenetv2_arch_arch.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/cm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/cm.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/epoch50_testimage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/epoch50_testimage.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/epoch50_testimage1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/epoch50_testimage1.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/lulc_labeling.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/lulc_labeling.gif -------------------------------------------------------------------------------- /ds_book/_build/html/_images/marine_debris.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/marine_debris.png -------------------------------------------------------------------------------- /ds_book/_build/html/_images/neuron-structure.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_images/neuron-structure.jpg -------------------------------------------------------------------------------- /ds_book/_build/html/_panels_static/panels-main.c949a650a448cc0ae9fd3441c0e17fb0.css: -------------------------------------------------------------------------------- 1 | details.dropdown .summary-title{padding-right:3em !important;-moz-user-select:none;-ms-user-select:none;-webkit-user-select:none;user-select:none}details.dropdown:hover{cursor:pointer}details.dropdown .summary-content{cursor:default}details.dropdown summary{list-style:none;padding:1em}details.dropdown summary .octicon.no-title{vertical-align:middle}details.dropdown[open] summary .octicon.no-title{visibility:hidden}details.dropdown summary::-webkit-details-marker{display:none}details.dropdown summary:focus{outline:none}details.dropdown summary:hover .summary-up svg,details.dropdown summary:hover .summary-down svg{opacity:1}details.dropdown .summary-up svg,details.dropdown .summary-down svg{display:block;opacity:.6}details.dropdown .summary-up,details.dropdown 
.summary-down{pointer-events:none;position:absolute;right:1em;top:.75em}details.dropdown[open] .summary-down{visibility:hidden}details.dropdown:not([open]) .summary-up{visibility:hidden}details.dropdown.fade-in[open] summary~*{-moz-animation:panels-fade-in .5s ease-in-out;-webkit-animation:panels-fade-in .5s ease-in-out;animation:panels-fade-in .5s ease-in-out}details.dropdown.fade-in-slide-down[open] summary~*{-moz-animation:panels-fade-in .5s ease-in-out, panels-slide-down .5s ease-in-out;-webkit-animation:panels-fade-in .5s ease-in-out, panels-slide-down .5s ease-in-out;animation:panels-fade-in .5s ease-in-out, panels-slide-down .5s ease-in-out}@keyframes panels-fade-in{0%{opacity:0}100%{opacity:1}}@keyframes panels-slide-down{0%{transform:translate(0, -10px)}100%{transform:translate(0, 0)}}.octicon{display:inline-block;fill:currentColor;vertical-align:text-top}.tabbed-content{box-shadow:0 -.0625rem var(--tabs-color-overline),0 .0625rem var(--tabs-color-underline);display:none;order:99;padding-bottom:.75rem;padding-top:.75rem;width:100%}.tabbed-content>:first-child{margin-top:0 !important}.tabbed-content>:last-child{margin-bottom:0 !important}.tabbed-content>.tabbed-set{margin:0}.tabbed-set{border-radius:.125rem;display:flex;flex-wrap:wrap;margin:1em 0;position:relative}.tabbed-set>input{opacity:0;position:absolute}.tabbed-set>input:checked+label{border-color:var(--tabs-color-label-active);color:var(--tabs-color-label-active)}.tabbed-set>input:checked+label+.tabbed-content{display:block}.tabbed-set>input:focus+label{outline-style:auto}.tabbed-set>input:not(.focus-visible)+label{outline:none;-webkit-tap-highlight-color:transparent}.tabbed-set>label{border-bottom:.125rem solid transparent;color:var(--tabs-color-label-inactive);cursor:pointer;font-size:var(--tabs-size-label);font-weight:700;padding:1em 1.25em .5em;transition:color 250ms;width:auto;z-index:1}html .tabbed-set>label:hover{color:var(--tabs-color-label-active)} 2 | -------------------------------------------------------------------------------- /ds_book/_build/html/_panels_static/panels-variables.06eb56fa6e07937060861dad626602ad.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --tabs-color-label-active: hsla(231, 99%, 66%, 1); 3 | --tabs-color-label-inactive: rgba(178, 206, 245, 0.62); 4 | --tabs-color-overline: rgb(207, 236, 238); 5 | --tabs-color-underline: rgb(207, 236, 238); 6 | --tabs-size-label: 1rem; 7 | } -------------------------------------------------------------------------------- /ds_book/_build/html/_sources/docs/Lesson1b_Intro_TensorFlow_Keras.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Introduction to TensorFlow and Keras" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Objectives" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "The goal of this notebook is to teach some basics of the TensorFlow framework and the Keras API. " 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "### What is TensorFlow?\n", 29 | "\n", 30 | "[TensorFlow](https://www.tensorflow.org/guide]) is an open-source framework developed in late 2015 by Google for building various machine learning and deep learning models. 
TensorFlow is free and open-source, thanks to the Apache Open Source license.\n", 31 | "\n", 32 | "The main objective of using TensorFlow is to reduce the complexity of implementing computations on large numerical data sets. In practice, these large computations can manifest as training and inference with machine learning or deep learning models.\n", 33 | "\n", 34 | "TensorFlow was designed to operate with multiple CPUs or GPUs, as well as a growing number of mobile operating systems. The framework includes wrappers in Python, C++, and Java.\n", 35 | "\n", 36 | "#### How does it work?\n", 37 | "TensorFlow accepts inputs as a multi-dimensional array called a Tensor, which allows the programmer to create dataflow graphs and structures specifying how data travels through. The framework is designed to support creation of a flowchart of operations to be applied to input Tensors, which travel in one direction and out the other.\n", 38 | "\n", 39 | "#### TensorFlow's structure\n", 40 | "There are three main components to TensorFlow's structure.\n", 41 | "\n", 42 | "1. preprocessing the data\n", 43 | "2. building the model\n", 44 | "3. training and estimating the model\n", 45 | "\n", 46 | "The name TensorFlow derives from the way in which the framework receives input in the form of a multi-dimensional array, i.e. the tensors. These tensors travel sequentially through the specified flowchart of the operations, entering at one end and culminating as output at the other end. \n", 47 | "\n", 48 | "#### What are the key TensorFlow components?\n", 49 | "**Tensor**\n", 50 | "\n", 51 | "A tensor is an n-dimensional vector or matrix. In theory, a tensor may represent any form of data. The values belonging to a tensor all share the same data type and often the same shape / dimensionality. A tensor can describe the input data and the output of a calculation. The main difference between a tensor and a conventional n-dimensional array is that tensors are immutable.\n", 52 | "\n", 53 | "\n", 54 | "**Graphs**\n", 55 | "\n", 56 | "TensorFlow uses a graph framework. Graphs collect and summarize all of the calculations and offer several benefits:\n", 57 | "\n", 58 | "1. They are designed to work on CPUs or GPUs, as well as on mobile devices.\n", 59 | "2. Graphs are portable, which enables the computations to be saved for immediate or later usage. Otherwise stated, the graph can be frozen and run at a later time.\n", 60 | "3. Graph calculations are executed by linking tensors together.\n", 61 | "4. For each tensor, there is a node and an edge. The node carries out the mathematical process and produces endpoint outputs. The input/output connections are represented by the edges. \n", 62 | "5. All nodes are linked together, so the graph itself is a depiction of the operations and relationships that exist between the nodes.\n", 63 | "\n", 64 | "In TensorFlow, all operations are carried out within a graph, which in effect is a series of computations that happen in order. Each individual operation is referred to as an op node.
\n", 65 | "\n", 66 | ":::{figure-md} TFgraph-fig\n", 67 | "\n", 68 | "\n", 69 | "TensorFlow graph example (from [https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4](https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4)).\n", 70 | ":::\n", 71 | "\n", 72 | "\n", 73 | "#### Why do so many people like TensorFlow?\n", 74 | "TensorFlow is intentionally user-friendly, with helpful plugins to visualize model training and a useful software debugging tool. As well, TensorFlow is highly scalable, with easy deployment on both CPUs and GPUs." 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "metadata": {}, 80 | "source": [ 81 | "### What is Keras?\n", 82 | "\n", 83 | "[Keras](https://keras.io/about/) is an API built on Python which reduces the cognitive load associated with programming models through human readability and simple and consistent structures.\n", 84 | "\n", 85 | "Keras is what some might call a wrapper for TensorFlow. It is intended for rapid experimentation.\n", 86 | "\n", 87 | "The main components of Keras include:\n", 88 | "1. A models API, which enables one to construct a model with varying levels of complexity depending on use case. We will use the [Functional API](https://keras.io/guides/functional_api/) subclass.\n", 89 | "2. A layers API, which allows one to define the tensor in/tensor out computation functions.\n", 90 | "3. A callback API, which enables one to program specific actions to occur during training, such as logging training metrics, visualizing interim/internal states and statistics of the model during training, and performing early stopping when the model converges.\n", 91 | "4. A data preprocessing API, which offers support for prepping raw data from disk to model ready Tensor format.\n", 92 | "5. An optimizer API where all of the state of the art optimizers can be plugged in. Learning rate decay / scheduling can also be implemented as part of this API.\n", 93 | "6. A metrics API which is used for assessing the performance of the model during training. A metric is the target to optimize during training, with specific metrics chosen for specific modeling objectives.\n", 94 | "7. A loss API that informs the model quantitatively how much it should try to minimize during training by providing a measure of error.
Similar to metrics, specific loss functions are selected for specific modeling objectives.\n", 95 | "\n", 96 | "With the Functional API, our main workflow will follow the diagram below.\n", 97 | "\n", 98 | ":::{figure-md} Keras-fig\n", 99 | "\n", 100 | "\n", 101 | "Keras Functional API diagram (from [https://miro.com/app/board/o9J_lhnKhVE=/](https://miro.com/app/board/o9J_lhnKhVE=/)).\n", 102 | ":::" 103 | ] 104 | } 105 | ], 106 | "metadata": { 107 | "kernelspec": { 108 | "display_name": "Python 3", 109 | "language": "python", 110 | "name": "python3" 111 | }, 112 | "language_info": { 113 | "codemirror_mode": { 114 | "name": "ipython", 115 | "version": 3 116 | }, 117 | "file_extension": ".py", 118 | "mimetype": "text/x-python", 119 | "name": "python", 120 | "nbconvert_exporter": "python", 121 | "pygments_lexer": "ipython3", 122 | "version": "3.8.11" 123 | } 124 | }, 125 | "nbformat": 4, 126 | "nbformat_minor": 2 127 | } 128 | -------------------------------------------------------------------------------- /ds_book/_build/html/_sources/docs/Lesson1b_annex.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Introduction to TensorFlow and Keras" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Objectives" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "The goal of this notebook is to teach some basics of the TensorFlow framework and the Keras API." 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "### What is TensorFlow?\n", 29 | "\n", 30 | "[TensorFlow](https://www.tensorflow.org/guide) is an open-source framework developed by Google for building various machine learning and deep learning models. Although originally released in late 2015, the first stable version arrived in 2017. TensorFlow is free and open-source, thanks to the Apache Open Source license.\n", 31 | "\n", 32 | "The main objective of using TensorFlow is to reduce the complexity of implementing computations on large numerical data sets. In practice, these large computations can manifest as training and inference with machine learning or deep learning models.\n", 33 | "\n", 34 | "TensorFlow was designed to operate with multiple CPUs or GPUs, as well as a growing number of mobile operating systems. The framework includes wrappers in Python, C++, and Java.\n", 35 | "\n", 36 | "#### How does it work?\n", 37 | "TensorFlow accepts inputs as a multi-dimensional array called a Tensor, which allows the programmer to create dataflow graphs and structures specifying how data travels through. The framework is designed to support creation of a flowchart of operations to be applied to input Tensors, which travel in one direction and out the other.\n", 38 | "\n", 39 | "#### TensorFlow’s structure\n", 40 | "There are three main components to TensorFlow's structure.\n", 41 | "\n", 42 | "1. preprocessing the data\n", 43 | "2. building the model\n", 44 | "3. training and estimating the model\n", 45 | "\n", 46 | "The name TensorFlow derives from the way in which the framework receives input in the form of a multi-dimensional array, i.e. the tensors. These tensors travel sequentially through the specified flowchart of the operations, entering at one end and culminating as output at the other end.
\n", 47 | "\n", 48 | "#### What are TensorFlow components?\n", 49 | "**Tensor**\n", 50 | "\n", 51 | "Tensors, the basic unit of data in this framework, are involved in every computation of TensorFlow. A tensor is an n-dimensional vector or matrix. In theory, a tensor may represent any form of data. The values belonging to a tensor all share the same data type and often the same shape / dimensionality. A tensor can describe the input data and the output of a calculation. \n", 52 | "\n", 53 | "In TensorFlow, all operations are carried out within a graph, which in effect is a series of computations that happen in order. Each individual operation is referred to as an op node. \n", 54 | "\n", 55 | "**Graphs**\n", 56 | "\n", 57 | "TensorFlow uses a graph framework. Graphs collect and summarize all of the calculations and offer several benefits:\n", 58 | "\n", 59 | "1. They are designed to work on CPUs or GPUs, as well as on mobile devices.\n", 60 | "2. Graphs are portable, which enables the computations to be saved for immediate or later usage. Otherwise stated, the graph can be frozen and run at a later time.\n", 61 | "3. Graph calculations are executed by linking tensors together.\n", 62 | "4. For each tensor, there is a node and an edge. The node carries out the mathematical process and produces endpoint outputs. The input/output connections are represented by the edges.\n", 63 | "5. All nodes are linked together, so the graph itself is a depiction of the operations and relationships that exist between the nodes.\n", 64 | "\n", 65 | ":::{figure-md} TFgraph-fig\n", 66 | "\n", 67 | "\n", 68 | "TensorFlow graph example (from [https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4](https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4)).\n", 69 | ":::\n", 70 | "\n", 71 | "#### Why do so many people like TensorFlow?\n", 72 | "TensorFlow is intentionally user-friendly, with helpful plugins to visualize model training and a useful software debugging tool. As well, TensorFlow is highly scalable, with easy deployment on both CPUs and GPUs." 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "metadata": {}, 78 | "source": [ 79 | "### What is Keras?\n", 80 | "\n", 81 | "[Keras](https://keras.io/about/) is an API built on Python, with human readability at the forefront of its design. With simple and consistent structures and methods extensible across many machine learning and deep learning applications, Keras reduces the cognitive load associated with programming models. Furthermore, the API seeks to minimize the need for programmer interaction by abstracting many complexities into easily callable functions. Lastly, Keras features clear & actionable error messaging, complemented by comprehensive and digestible documentation and developer guides.\n", 82 | "\n", 83 | "Keras is what some might call a wrapper for TensorFlow. That is, Keras simplifies a programmer's interaction with TensorFlow through refinement of key methods and constructs.\n", 84 | "\n", 85 | "Importantly, Keras is intended for rapid experimentation.\n", 86 | "\n", 87 | "The main components of Keras include:\n", 88 | "1. A models API, which enables one to construct a model with varying levels of complexity depending on use case. We will use the [Functional API](https://keras.io/guides/functional_api/) subclass.\n", 89 | "2. A layers API, which allows one to define the tensor in/tensor out computation functions.\n", 90 | "3.
A callback API, which enables one to program specific actions to occur during training, such as logging training metrics, visualizing interim/internal states and statistics of the model during training, and performing early stopping when the model converges.\n", 91 | "4. A data preprocessing API, which offers support for prepping raw data from disk to model ready Tensor format.\n", 92 | "5. An optimizer API where all of the state of the art optimizers can be plugged in. Learning rate decay / scheduling can also be implemented as part of this API.\n", 93 | "6. A metrics API which is used for assessing the performance of the model during training. A metric is the target to optimize during training, with specific metrics chosen for specific modeling objectives.\n", 94 | "7. A loss API that informs the model quantitatively how much it should try to minimize during training by providing a measure of error. Similar to metrics, specific loss functions are selected for specific modeling objectives.\n", 95 | "\n", 96 | "With the Functional API, our main workflow will follow the diagram below.\n", 97 | "\n", 98 | ":::{figure-md} Keras-fig\n", 99 | "\n", 100 | "\n", 101 | "Keras Functional API diagram (from [https://miro.com/app/board/o9J_lhnKhVE=/](https://miro.com/app/board/o9J_lhnKhVE=/)).\n", 102 | ":::" 103 | ] 104 | } 105 | ], 106 | "metadata": { 107 | "kernelspec": { 108 | "display_name": "Python 3", 109 | "language": "python", 110 | "name": "python3" 111 | }, 112 | "language_info": { 113 | "codemirror_mode": { 114 | "name": "ipython", 115 | "version": 3 116 | }, 117 | "file_extension": ".py", 118 | "mimetype": "text/x-python", 119 | "name": "python", 120 | "nbconvert_exporter": "python", 121 | "pygments_lexer": "ipython3", 122 | "version": "3.8.11" 123 | } 124 | }, 125 | "nbformat": 4, 126 | "nbformat_minor": 2 127 | } 128 | -------------------------------------------------------------------------------- /ds_book/_build/html/_sources/docs/appendix.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Appendix" 8 | ] 9 | } 10 | ], 11 | "metadata": { 12 | "kernelspec": { 13 | "display_name": "Python 3", 14 | "language": "python", 15 | "name": "python3" 16 | }, 17 | "language_info": { 18 | "codemirror_mode": { 19 | "name": "ipython", 20 | "version": 3 21 | }, 22 | "file_extension": ".py", 23 | "mimetype": "text/x-python", 24 | "name": "python", 25 | "nbconvert_exporter": "python", 26 | "pygments_lexer": "ipython3", 27 | "version": "3.8.11" 28 | } 29 | }, 30 | "nbformat": 4, 31 | "nbformat_minor": 4 32 | } 33 | -------------------------------------------------------------------------------- /ds_book/_build/html/_sources/docs/index.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # Deep Learning with TensorFlow:
 Tutorials for modeling LULC. 5 | 6 | 7 | Authored by [**Development Seed**](https://developmentseed.org/) engineers [**Lillianne Thomas**](https://github.com/lillythomas) and [**Ryan Avery**](https://github.com/rbavery) 8 | 9 | 10 | 11 | These materials are designed to provide TensorFlow expertise through tutorials and science support, including suggestions for acquiring and processing input data, for training, testing, and evaluating TensorFlow models, and for different TensorFlow and deep learning techniques demonstrated in Colab notebooks using real data. 12 | 13 | The content of this workshop assumes general familiarity with geospatial data such as satellite imagery, raster and vector formats, file formats such as [GeoTIFF](https://earthdata.nasa.gov/esdis/eso/standards-and-references/geotiff) and [GeoJSON](https://geojson.org/), the [Python](https://www.python.org/) programming language and [Google Colab](https://research.google.com/colaboratory/). Having knowledge of [numpy](https://numpy.org/), [rasterio](https://rasterio.readthedocs.io/en/latest/), [geopandas](https://geopandas.org/en/stable/) and [scikit-learn](https://scikit-learn.org/stable/) is a plus. 14 | 15 | 16 | ```{admonition} Links 17 | :class: tip 18 | 19 | - Jupyter Book: 20 | [https://developmentseed.github.io/tensorflow-eo-training/docs/index.html](https://developmentseed.github.io/tensorflow-eo-training/docs/index.html) 21 | 22 | ``` 23 | 24 | 25 | ```{admonition} How to run the notebook code 26 | :class: important 27 | 28 | A major advantage of executable books is that the reader can run the source code, modify it, and experiment with it. No downloading, installation, or configuration is required. Simply go to 29 | 30 | [https://developmentseed.github.io/tensorflow-eo-training/docs/index.html](https://developmentseed.github.io/tensorflow-eo-training/docs/index.html), 31 | 32 | and in the left menu select any topic, click the "rocket" icon at the top right of the screen, and choose "Colab". This will launch the page in a virtual runtime environment hosted by Google. From there, the code can be run using a free GPU. 33 | 34 | To run the code locally, each topic can be downloaded in the form of a 35 | [Jupyter](https://jupyter.org) notebook by clicking the "arrow-down" icon at the top right of the screen. 36 | 37 | ``` 38 | 39 | ```{admonition} How to access the data 40 | :class: important 41 | 42 | These tutorials will make use of open source data hosted on [Radiant Earth MLHub](https://mlhub.earth/). Please register an account with MLHub and obtain your unique API key in advance of starting these tutorials. 43 | 44 | ``` 45 | 46 | ```{admonition} $~$ 47 | Built with the [Jupyter Book 48 | 2.0](https://beta.jupyterbook.org/intro.html) tool set, as part of the 49 | [ExecutableBookProject](https://ebp.jupyterbook.org/en/latest/).
50 | ``` 51 | 52 | 53 | 54 | ISBN: *(tbd) 55 | 56 | 57 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/__init__.py -------------------------------------------------------------------------------- /ds_book/_build/html/_static/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /ds_book/_build/html/_static/check-solid.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/clipboard.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * clipboard.js v2.0.8 3 | * https://clipboardjs.com/ 4 | * 5 | * Licensed MIT © Zeno Rocha 6 | */ 7 | !function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var e,n,o,r=1 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/copybutton.css: -------------------------------------------------------------------------------- 1 | /* Copy buttons */ 2 | button.copybtn { 3 | position: absolute; 4 | display: flex; 5 | top: .3em; 6 | right: .5em; 7 | width: 1.7em; 8 | height: 1.7em; 9 | opacity: 0; 10 | transition: opacity 0.3s, border .3s, background-color .3s; 11 | user-select: none; 12 | padding: 0; 13 | border: none; 14 | outline: none; 15 | border-radius: 0.4em; 16 | border: #e1e1e1 1px solid; 17 | background-color: rgb(245, 245, 245); 18 | } 19 | 20 | button.copybtn.success { 21 | border-color: #22863a; 22 | } 23 | 24 | button.copybtn img { 25 | width: 100%; 26 | padding: .2em; 27 | } 28 | 29 | div.highlight { 30 | position: relative; 31 | } 32 | 33 | .highlight:hover button.copybtn { 34 | opacity: 1; 35 | } 36 | 37 | .highlight button.copybtn:hover { 38 | background-color: rgb(235, 235, 235); 39 | } 40 | 41 | .highlight button.copybtn:active { 42 | background-color: rgb(187, 187, 187); 43 | } 44 | 45 | /** 46 | * A minimal CSS-only tooltip copied from: 47 | * https://codepen.io/mildrenben/pen/rVBrpK 48 | * 49 | * To use, write HTML like the following: 50 | * 51 | *

<p class="o-tooltip--left" data-tooltip="Hey">Short</p>
52 | */ 53 | .o-tooltip--left { 54 | position: relative; 55 | } 56 | 57 | .o-tooltip--left:after { 58 | opacity: 0; 59 | visibility: hidden; 60 | position: absolute; 61 | content: attr(data-tooltip); 62 | padding: .2em; 63 | font-size: .8em; 64 | left: -.2em; 65 | background: grey; 66 | color: white; 67 | white-space: nowrap; 68 | z-index: 2; 69 | border-radius: 2px; 70 | transform: translateX(-102%) translateY(0); 71 | transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); 72 | } 73 | 74 | .o-tooltip--left:hover:after { 75 | display: block; 76 | opacity: 1; 77 | visibility: visible; 78 | transform: translateX(-100%) translateY(0); 79 | transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); 80 | transition-delay: .5s; 81 | } 82 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/copybutton.js: -------------------------------------------------------------------------------- 1 | // Localization support 2 | const messages = { 3 | 'en': { 4 | 'copy': 'Copy', 5 | 'copy_to_clipboard': 'Copy to clipboard', 6 | 'copy_success': 'Copied!', 7 | 'copy_failure': 'Failed to copy', 8 | }, 9 | 'es' : { 10 | 'copy': 'Copiar', 11 | 'copy_to_clipboard': 'Copiar al portapapeles', 12 | 'copy_success': '¡Copiado!', 13 | 'copy_failure': 'Error al copiar', 14 | }, 15 | 'de' : { 16 | 'copy': 'Kopieren', 17 | 'copy_to_clipboard': 'In die Zwischenablage kopieren', 18 | 'copy_success': 'Kopiert!', 19 | 'copy_failure': 'Fehler beim Kopieren', 20 | }, 21 | 'fr' : { 22 | 'copy': 'Copier', 23 | 'copy_to_clipboard': 'Copié dans le presse-papier', 24 | 'copy_success': 'Copié !', 25 | 'copy_failure': 'Échec de la copie', 26 | }, 27 | 'ru': { 28 | 'copy': 'Скопировать', 29 | 'copy_to_clipboard': 'Скопировать в буфер', 30 | 'copy_success': 'Скопировано!', 31 | 'copy_failure': 'Не удалось скопировать', 32 | }, 33 | 'zh-CN': { 34 | 'copy': '复制', 35 | 'copy_to_clipboard': '复制到剪贴板', 36 | 'copy_success': '复制成功!', 37 | 'copy_failure': '复制失败', 38 | } 39 | } 40 | 41 | let locale = 'en' 42 | if( document.documentElement.lang !== undefined 43 | && messages[document.documentElement.lang] !== undefined ) { 44 | locale = document.documentElement.lang 45 | } 46 | 47 | let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; 48 | if (doc_url_root == '#') { 49 | doc_url_root = ''; 50 | } 51 | 52 | const path_static = `${doc_url_root}_static/`; 53 | 54 | /** 55 | * Set up copy/paste for code blocks 56 | */ 57 | 58 | const runWhenDOMLoaded = cb => { 59 | if (document.readyState != 'loading') { 60 | cb() 61 | } else if (document.addEventListener) { 62 | document.addEventListener('DOMContentLoaded', cb) 63 | } else { 64 | document.attachEvent('onreadystatechange', function() { 65 | if (document.readyState == 'complete') cb() 66 | }) 67 | } 68 | } 69 | 70 | const codeCellId = index => `codecell${index}` 71 | 72 | // Clears selected text since ClipboardJS will select the text when copying 73 | const clearSelection = () => { 74 | if (window.getSelection) { 75 | window.getSelection().removeAllRanges() 76 | } else if (document.selection) { 77 | document.selection.empty() 78 | } 79 | } 80 | 81 | // Changes tooltip text for two seconds, then changes it back 82 | const temporarilyChangeTooltip = (el, oldText, newText) => { 83 | el.setAttribute('data-tooltip', newText) 84 | el.classList.add('success') 85 | setTimeout(() => el.setAttribute('data-tooltip', oldText), 2000) 86 | setTimeout(() => 
el.classList.remove('success'), 2000) 87 | } 88 | 89 | // Changes the copy button icon for two seconds, then changes it back 90 | const temporarilyChangeIcon = (el) => { 91 | img = el.querySelector("img"); 92 | img.setAttribute('src', `${path_static}check-solid.svg`) 93 | setTimeout(() => img.setAttribute('src', `${path_static}copy-button.svg`), 2000) 94 | } 95 | 96 | const addCopyButtonToCodeCells = () => { 97 | // If ClipboardJS hasn't loaded, wait a bit and try again. This 98 | // happens because we load ClipboardJS asynchronously. 99 | if (window.ClipboardJS === undefined) { 100 | setTimeout(addCopyButtonToCodeCells, 250) 101 | return 102 | } 103 | 104 | // Add copybuttons to all of our code cells 105 | const codeCells = document.querySelectorAll('div.highlight pre') 106 | codeCells.forEach((codeCell, index) => { 107 | const id = codeCellId(index) 108 | codeCell.setAttribute('id', id) 109 | 110 | const clipboardButton = id => 111 | `` 114 | codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) 115 | }) 116 | 117 | function escapeRegExp(string) { 118 | return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string 119 | } 120 | 121 | // Callback when a copy button is clicked. Will be passed the node that was clicked 122 | // should then grab the text and replace pieces of text that shouldn't be used in output 123 | function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { 124 | 125 | var regexp; 126 | var match; 127 | 128 | // Do we check for line continuation characters and "HERE-documents"? 129 | var useLineCont = !!lineContinuationChar 130 | var useHereDoc = !!hereDocDelim 131 | 132 | // create regexp to capture prompt and remaining line 133 | if (isRegexp) { 134 | regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') 135 | } else { 136 | regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') 137 | } 138 | 139 | const outputLines = []; 140 | var promptFound = false; 141 | var gotLineCont = false; 142 | var gotHereDoc = false; 143 | const lineGotPrompt = []; 144 | for (const line of textContent.split('\n')) { 145 | match = line.match(regexp) 146 | if (match || gotLineCont || gotHereDoc) { 147 | promptFound = regexp.test(line) 148 | lineGotPrompt.push(promptFound) 149 | if (removePrompts && promptFound) { 150 | outputLines.push(match[2]) 151 | } else { 152 | outputLines.push(line) 153 | } 154 | gotLineCont = line.endsWith(lineContinuationChar) & useLineCont 155 | if (line.includes(hereDocDelim) & useHereDoc) 156 | gotHereDoc = !gotHereDoc 157 | } else if (!onlyCopyPromptLines) { 158 | outputLines.push(line) 159 | } else if (copyEmptyLines && line.trim() === '') { 160 | outputLines.push(line) 161 | } 162 | } 163 | 164 | // If no lines with the prompt were found then just use original lines 165 | if (lineGotPrompt.some(v => v === true)) { 166 | textContent = outputLines.join('\n'); 167 | } 168 | 169 | // Remove a trailing newline to avoid auto-running when pasting 170 | if (textContent.endsWith("\n")) { 171 | textContent = textContent.slice(0, -1) 172 | } 173 | return textContent 174 | } 175 | 176 | 177 | var copyTargetText = (trigger) => { 178 | var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); 179 | return formatCopyText(target.innerText, '', false, true, true, true, '', '') 180 | } 181 | 182 | // Initialize with a callback so we can modify the text 
before copy 183 | const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) 184 | 185 | // Update UI with error/success messages 186 | clipboard.on('success', event => { 187 | clearSelection() 188 | temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) 189 | temporarilyChangeIcon(event.trigger) 190 | }) 191 | 192 | clipboard.on('error', event => { 193 | temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) 194 | }) 195 | } 196 | 197 | runWhenDOMLoaded(addCopyButtonToCodeCells) -------------------------------------------------------------------------------- /ds_book/_build/html/_static/copybutton_funcs.js: -------------------------------------------------------------------------------- 1 | function escapeRegExp(string) { 2 | return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string 3 | } 4 | 5 | // Callback when a copy button is clicked. Will be passed the node that was clicked 6 | // should then grab the text and replace pieces of text that shouldn't be used in output 7 | export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { 8 | 9 | var regexp; 10 | var match; 11 | 12 | // Do we check for line continuation characters and "HERE-documents"? 13 | var useLineCont = !!lineContinuationChar 14 | var useHereDoc = !!hereDocDelim 15 | 16 | // create regexp to capture prompt and remaining line 17 | if (isRegexp) { 18 | regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') 19 | } else { 20 | regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') 21 | } 22 | 23 | const outputLines = []; 24 | var promptFound = false; 25 | var gotLineCont = false; 26 | var gotHereDoc = false; 27 | const lineGotPrompt = []; 28 | for (const line of textContent.split('\n')) { 29 | match = line.match(regexp) 30 | if (match || gotLineCont || gotHereDoc) { 31 | promptFound = regexp.test(line) 32 | lineGotPrompt.push(promptFound) 33 | if (removePrompts && promptFound) { 34 | outputLines.push(match[2]) 35 | } else { 36 | outputLines.push(line) 37 | } 38 | gotLineCont = line.endsWith(lineContinuationChar) & useLineCont 39 | if (line.includes(hereDocDelim) & useHereDoc) 40 | gotHereDoc = !gotHereDoc 41 | } else if (!onlyCopyPromptLines) { 42 | outputLines.push(line) 43 | } else if (copyEmptyLines && line.trim() === '') { 44 | outputLines.push(line) 45 | } 46 | } 47 | 48 | // If no lines with the prompt were found then just use original lines 49 | if (lineGotPrompt.some(v => v === true)) { 50 | textContent = outputLines.join('\n'); 51 | } 52 | 53 | // Remove a trailing newline to avoid auto-running when pasting 54 | if (textContent.endsWith("\n")) { 55 | textContent = textContent.slice(0, -1) 56 | } 57 | return textContent 58 | } 59 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/css/theme.css: -------------------------------------------------------------------------------- 1 | :root { 2 | /***************************************************************************** 3 | * Theme config 4 | **/ 5 | --pst-header-height: 60px; 6 | 7 | /***************************************************************************** 8 | * Font size 9 | **/ 10 | --pst-font-size-base: 15px; /* base font size - applied at body / html level */ 11 | 12 | /* heading font sizes */ 13 | 
--pst-font-size-h1: 36px; 14 | --pst-font-size-h2: 32px; 15 | --pst-font-size-h3: 26px; 16 | --pst-font-size-h4: 21px; 17 | --pst-font-size-h5: 18px; 18 | --pst-font-size-h6: 16px; 19 | 20 | /* smaller then heading font sizes*/ 21 | --pst-font-size-milli: 12px; 22 | 23 | --pst-sidebar-font-size: .9em; 24 | --pst-sidebar-caption-font-size: .9em; 25 | 26 | /***************************************************************************** 27 | * Font family 28 | **/ 29 | /* These are adapted from https://systemfontstack.com/ */ 30 | --pst-font-family-base-system: -apple-system, BlinkMacSystemFont, Segoe UI, "Helvetica Neue", 31 | Arial, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol; 32 | --pst-font-family-monospace-system: "SFMono-Regular", Menlo, Consolas, Monaco, 33 | Liberation Mono, Lucida Console, monospace; 34 | 35 | --pst-font-family-base: var(--pst-font-family-base-system); 36 | --pst-font-family-heading: var(--pst-font-family-base); 37 | --pst-font-family-monospace: var(--pst-font-family-monospace-system); 38 | 39 | /***************************************************************************** 40 | * Color 41 | * 42 | * Colors are defined in rgb string way, "red, green, blue" 43 | **/ 44 | --pst-color-primary: 19, 6, 84; 45 | --pst-color-success: 40, 167, 69; 46 | --pst-color-info: 0, 123, 255; /*23, 162, 184;*/ 47 | --pst-color-warning: 255, 193, 7; 48 | --pst-color-danger: 220, 53, 69; 49 | --pst-color-text-base: 51, 51, 51; 50 | 51 | --pst-color-h1: var(--pst-color-primary); 52 | --pst-color-h2: var(--pst-color-primary); 53 | --pst-color-h3: var(--pst-color-text-base); 54 | --pst-color-h4: var(--pst-color-text-base); 55 | --pst-color-h5: var(--pst-color-text-base); 56 | --pst-color-h6: var(--pst-color-text-base); 57 | --pst-color-paragraph: var(--pst-color-text-base); 58 | --pst-color-link: 0, 91, 129; 59 | --pst-color-link-hover: 227, 46, 0; 60 | --pst-color-headerlink: 198, 15, 15; 61 | --pst-color-headerlink-hover: 255, 255, 255; 62 | --pst-color-preformatted-text: 34, 34, 34; 63 | --pst-color-preformatted-background: 250, 250, 250; 64 | --pst-color-inline-code: 232, 62, 140; 65 | 66 | --pst-color-active-navigation: 19, 6, 84; 67 | --pst-color-navbar-link: 77, 77, 77; 68 | --pst-color-navbar-link-hover: var(--pst-color-active-navigation); 69 | --pst-color-navbar-link-active: var(--pst-color-active-navigation); 70 | --pst-color-sidebar-link: 77, 77, 77; 71 | --pst-color-sidebar-link-hover: var(--pst-color-active-navigation); 72 | --pst-color-sidebar-link-active: var(--pst-color-active-navigation); 73 | --pst-color-sidebar-expander-background-hover: 244, 244, 244; 74 | --pst-color-sidebar-caption: 77, 77, 77; 75 | --pst-color-toc-link: 119, 117, 122; 76 | --pst-color-toc-link-hover: var(--pst-color-active-navigation); 77 | --pst-color-toc-link-active: var(--pst-color-active-navigation); 78 | 79 | /***************************************************************************** 80 | * Icon 81 | **/ 82 | 83 | /* font awesome icons*/ 84 | --pst-icon-check-circle: '\f058'; 85 | --pst-icon-info-circle: '\f05a'; 86 | --pst-icon-exclamation-triangle: '\f071'; 87 | --pst-icon-exclamation-circle: '\f06a'; 88 | --pst-icon-times-circle: '\f057'; 89 | --pst-icon-lightbulb: '\f0eb'; 90 | 91 | /***************************************************************************** 92 | * Admonitions 93 | **/ 94 | 95 | --pst-color-admonition-default: var(--pst-color-info); 96 | --pst-color-admonition-note: var(--pst-color-info); 97 | --pst-color-admonition-attention: var(--pst-color-warning); 
98 | --pst-color-admonition-caution: var(--pst-color-warning); 99 | --pst-color-admonition-warning: var(--pst-color-warning); 100 | --pst-color-admonition-danger: var(--pst-color-danger); 101 | --pst-color-admonition-error: var(--pst-color-danger); 102 | --pst-color-admonition-hint: var(--pst-color-success); 103 | --pst-color-admonition-tip: var(--pst-color-success); 104 | --pst-color-admonition-important: var(--pst-color-success); 105 | 106 | --pst-icon-admonition-default: var(--pst-icon-info-circle); 107 | --pst-icon-admonition-note: var(--pst-icon-info-circle); 108 | --pst-icon-admonition-attention: var(--pst-icon-exclamation-circle); 109 | --pst-icon-admonition-caution: var(--pst-icon-exclamation-triangle); 110 | --pst-icon-admonition-warning: var(--pst-icon-exclamation-triangle); 111 | --pst-icon-admonition-danger: var(--pst-icon-exclamation-triangle); 112 | --pst-icon-admonition-error: var(--pst-icon-times-circle); 113 | --pst-icon-admonition-hint: var(--pst-icon-lightbulb); 114 | --pst-icon-admonition-tip: var(--pst-icon-lightbulb); 115 | --pst-icon-admonition-important: var(--pst-icon-exclamation-circle); 116 | 117 | } 118 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for all documentation. 6 | * 7 | * :copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | /** 18 | * make the code below compatible with browsers without 19 | * an installed firebug like debugger 20 | if (!window.console || !console.firebug) { 21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir", 22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", 23 | "profile", "profileEnd"]; 24 | window.console = {}; 25 | for (var i = 0; i < names.length; ++i) 26 | window.console[names[i]] = function() {}; 27 | } 28 | */ 29 | 30 | /** 31 | * small helper function to urldecode strings 32 | * 33 | * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL 34 | */ 35 | jQuery.urldecode = function(x) { 36 | if (!x) { 37 | return x 38 | } 39 | return decodeURIComponent(x.replace(/\+/g, ' ')); 40 | }; 41 | 42 | /** 43 | * small helper function to urlencode strings 44 | */ 45 | jQuery.urlencode = encodeURIComponent; 46 | 47 | /** 48 | * This function returns the parsed url parameters of the 49 | * current request. Multiple values per key are supported, 50 | * it will always return arrays of strings for the value parts. 51 | */ 52 | jQuery.getQueryParameters = function(s) { 53 | if (typeof s === 'undefined') 54 | s = document.location.search; 55 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 56 | var result = {}; 57 | for (var i = 0; i < parts.length; i++) { 58 | var tmp = parts[i].split('=', 2); 59 | var key = jQuery.urldecode(tmp[0]); 60 | var value = jQuery.urldecode(tmp[1]); 61 | if (key in result) 62 | result[key].push(value); 63 | else 64 | result[key] = [value]; 65 | } 66 | return result; 67 | }; 68 | 69 | /** 70 | * highlight a given string on a jquery object by wrapping it in 71 | * span elements with the given class name. 
72 | */ 73 | jQuery.fn.highlightText = function(text, className) { 74 | function highlight(node, addItems) { 75 | if (node.nodeType === 3) { 76 | var val = node.nodeValue; 77 | var pos = val.toLowerCase().indexOf(text); 78 | if (pos >= 0 && 79 | !jQuery(node.parentNode).hasClass(className) && 80 | !jQuery(node.parentNode).hasClass("nohighlight")) { 81 | var span; 82 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 83 | if (isInSVG) { 84 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 85 | } else { 86 | span = document.createElement("span"); 87 | span.className = className; 88 | } 89 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 90 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 91 | document.createTextNode(val.substr(pos + text.length)), 92 | node.nextSibling)); 93 | node.nodeValue = val.substr(0, pos); 94 | if (isInSVG) { 95 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 96 | var bbox = node.parentElement.getBBox(); 97 | rect.x.baseVal.value = bbox.x; 98 | rect.y.baseVal.value = bbox.y; 99 | rect.width.baseVal.value = bbox.width; 100 | rect.height.baseVal.value = bbox.height; 101 | rect.setAttribute('class', className); 102 | addItems.push({ 103 | "parent": node.parentNode, 104 | "target": rect}); 105 | } 106 | } 107 | } 108 | else if (!jQuery(node).is("button, select, textarea")) { 109 | jQuery.each(node.childNodes, function() { 110 | highlight(this, addItems); 111 | }); 112 | } 113 | } 114 | var addItems = []; 115 | var result = this.each(function() { 116 | highlight(this, addItems); 117 | }); 118 | for (var i = 0; i < addItems.length; ++i) { 119 | jQuery(addItems[i].parent).before(addItems[i].target); 120 | } 121 | return result; 122 | }; 123 | 124 | /* 125 | * backward compatibility for jQuery.browser 126 | * This will be supported until firefox bug is fixed. 127 | */ 128 | if (!jQuery.browser) { 129 | jQuery.uaMatch = function(ua) { 130 | ua = ua.toLowerCase(); 131 | 132 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 133 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 134 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 135 | /(msie) ([\w.]+)/.exec(ua) || 136 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 137 | []; 138 | 139 | return { 140 | browser: match[ 1 ] || "", 141 | version: match[ 2 ] || "0" 142 | }; 143 | }; 144 | jQuery.browser = {}; 145 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 146 | } 147 | 148 | /** 149 | * Small JavaScript module for the documentation. 150 | */ 151 | var Documentation = { 152 | 153 | init : function() { 154 | this.fixFirefoxAnchorBug(); 155 | this.highlightSearchWords(); 156 | this.initIndexTable(); 157 | if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) { 158 | this.initOnKeyListeners(); 159 | } 160 | }, 161 | 162 | /** 163 | * i18n support 164 | */ 165 | TRANSLATIONS : {}, 166 | PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, 167 | LOCALE : 'unknown', 168 | 169 | // gettext and ngettext don't access this so that the functions 170 | // can safely bound to a different name (_ = Documentation.gettext) 171 | gettext : function(string) { 172 | var translated = Documentation.TRANSLATIONS[string]; 173 | if (typeof translated === 'undefined') 174 | return string; 175 | return (typeof translated === 'string') ? 
translated : translated[0]; 176 | }, 177 | 178 | ngettext : function(singular, plural, n) { 179 | var translated = Documentation.TRANSLATIONS[singular]; 180 | if (typeof translated === 'undefined') 181 | return (n == 1) ? singular : plural; 182 | return translated[Documentation.PLURALEXPR(n)]; 183 | }, 184 | 185 | addTranslations : function(catalog) { 186 | for (var key in catalog.messages) 187 | this.TRANSLATIONS[key] = catalog.messages[key]; 188 | this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); 189 | this.LOCALE = catalog.locale; 190 | }, 191 | 192 | /** 193 | * add context elements like header anchor links 194 | */ 195 | addContextElements : function() { 196 | $('div[id] > :header:first').each(function() { 197 | $('\u00B6'). 198 | attr('href', '#' + this.id). 199 | attr('title', _('Permalink to this headline')). 200 | appendTo(this); 201 | }); 202 | $('dt[id]').each(function() { 203 | $('\u00B6'). 204 | attr('href', '#' + this.id). 205 | attr('title', _('Permalink to this definition')). 206 | appendTo(this); 207 | }); 208 | }, 209 | 210 | /** 211 | * workaround a firefox stupidity 212 | * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 213 | */ 214 | fixFirefoxAnchorBug : function() { 215 | if (document.location.hash && $.browser.mozilla) 216 | window.setTimeout(function() { 217 | document.location.href += ''; 218 | }, 10); 219 | }, 220 | 221 | /** 222 | * highlight the search words provided in the url in the text 223 | */ 224 | highlightSearchWords : function() { 225 | var params = $.getQueryParameters(); 226 | var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; 227 | if (terms.length) { 228 | var body = $('div.body'); 229 | if (!body.length) { 230 | body = $('body'); 231 | } 232 | window.setTimeout(function() { 233 | $.each(terms, function() { 234 | body.highlightText(this.toLowerCase(), 'highlighted'); 235 | }); 236 | }, 10); 237 | $('') 239 | .appendTo($('#searchbox')); 240 | } 241 | }, 242 | 243 | /** 244 | * init the domain index toggle buttons 245 | */ 246 | initIndexTable : function() { 247 | var togglers = $('img.toggler').click(function() { 248 | var src = $(this).attr('src'); 249 | var idnum = $(this).attr('id').substr(7); 250 | $('tr.cg-' + idnum).toggle(); 251 | if (src.substr(-9) === 'minus.png') 252 | $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); 253 | else 254 | $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); 255 | }).css('display', ''); 256 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { 257 | togglers.click(); 258 | } 259 | }, 260 | 261 | /** 262 | * helper function to hide the search marks again 263 | */ 264 | hideSearchWords : function() { 265 | $('#searchbox .highlight-link').fadeOut(300); 266 | $('span.highlighted').removeClass('highlighted'); 267 | }, 268 | 269 | /** 270 | * make the url absolute 271 | */ 272 | makeURL : function(relativeURL) { 273 | return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; 274 | }, 275 | 276 | /** 277 | * get the current relative url 278 | */ 279 | getCurrentURL : function() { 280 | var path = document.location.pathname; 281 | var parts = path.split(/\//); 282 | $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { 283 | if (this === '..') 284 | parts.pop(); 285 | }); 286 | var url = parts.join('/'); 287 | return path.substring(url.lastIndexOf('/') + 1, path.length - 1); 288 | }, 289 | 290 | initOnKeyListeners: function() { 291 | $(document).keydown(function(event) { 292 | var activeElementType = document.activeElement.tagName; 
293 | // don't navigate when in search box, textarea, dropdown or button 294 | if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT' 295 | && activeElementType !== 'BUTTON' && !event.altKey && !event.ctrlKey && !event.metaKey 296 | && !event.shiftKey) { 297 | switch (event.keyCode) { 298 | case 37: // left 299 | var prevHref = $('link[rel="prev"]').prop('href'); 300 | if (prevHref) { 301 | window.location.href = prevHref; 302 | return false; 303 | } 304 | case 39: // right 305 | var nextHref = $('link[rel="next"]').prop('href'); 306 | if (nextHref) { 307 | window.location.href = nextHref; 308 | return false; 309 | } 310 | } 311 | } 312 | }); 313 | } 314 | }; 315 | 316 | // quick alias for translations 317 | _ = Documentation.gettext; 318 | 319 | $(document).ready(function() { 320 | Documentation.init(); 321 | }); 322 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | var DOCUMENTATION_OPTIONS = { 2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), 3 | VERSION: '', 4 | LANGUAGE: 'None', 5 | COLLAPSE_INDEX: false, 6 | BUILDER: 'html', 7 | FILE_SUFFIX: '.html', 8 | LINK_SUFFIX: '.html', 9 | HAS_SOURCE: true, 10 | SOURCELINK_SUFFIX: '', 11 | NAVIGATION_WITH_KEYS: true 12 | }; -------------------------------------------------------------------------------- /ds_book/_build/html/_static/ds.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/ds.png -------------------------------------------------------------------------------- /ds_book/_build/html/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/file.png -------------------------------------------------------------------------------- /ds_book/_build/html/_static/images/logo_binder.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 10 | logo 11 | 12 | 13 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/images/logo_colab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/images/logo_colab.png -------------------------------------------------------------------------------- /ds_book/_build/html/_static/images/logo_jupyterhub.svg: -------------------------------------------------------------------------------- 1 | logo_jupyterhubHub 2 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/language_data.js: -------------------------------------------------------------------------------- 1 | /* 2 | * language_data.js 3 | * ~~~~~~~~~~~~~~~~ 4 | * 5 | * This script contains the language-specific data used by searchtools.js, 6 | * namely the list of stopwords, stemmer, scorer and splitter. 7 | * 8 | * :copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS. 
9 | * :license: BSD, see LICENSE for details. 10 | * 11 | */ 12 | 13 | var stopwords = ["a","and","are","as","at","be","but","by","for","if","in","into","is","it","near","no","not","of","on","or","such","that","the","their","then","there","these","they","this","to","was","will","with"]; 14 | 15 | 16 | /* Non-minified version is copied as a separate JS file, is available */ 17 | 18 | /** 19 | * Porter Stemmer 20 | */ 21 | var Stemmer = function() { 22 | 23 | var step2list = { 24 | ational: 'ate', 25 | tional: 'tion', 26 | enci: 'ence', 27 | anci: 'ance', 28 | izer: 'ize', 29 | bli: 'ble', 30 | alli: 'al', 31 | entli: 'ent', 32 | eli: 'e', 33 | ousli: 'ous', 34 | ization: 'ize', 35 | ation: 'ate', 36 | ator: 'ate', 37 | alism: 'al', 38 | iveness: 'ive', 39 | fulness: 'ful', 40 | ousness: 'ous', 41 | aliti: 'al', 42 | iviti: 'ive', 43 | biliti: 'ble', 44 | logi: 'log' 45 | }; 46 | 47 | var step3list = { 48 | icate: 'ic', 49 | ative: '', 50 | alize: 'al', 51 | iciti: 'ic', 52 | ical: 'ic', 53 | ful: '', 54 | ness: '' 55 | }; 56 | 57 | var c = "[^aeiou]"; // consonant 58 | var v = "[aeiouy]"; // vowel 59 | var C = c + "[^aeiouy]*"; // consonant sequence 60 | var V = v + "[aeiou]*"; // vowel sequence 61 | 62 | var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 63 | var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 64 | var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 65 | var s_v = "^(" + C + ")?" + v; // vowel in stem 66 | 67 | this.stemWord = function (w) { 68 | var stem; 69 | var suffix; 70 | var firstch; 71 | var origword = w; 72 | 73 | if (w.length < 3) 74 | return w; 75 | 76 | var re; 77 | var re2; 78 | var re3; 79 | var re4; 80 | 81 | firstch = w.substr(0,1); 82 | if (firstch == "y") 83 | w = firstch.toUpperCase() + w.substr(1); 84 | 85 | // Step 1a 86 | re = /^(.+?)(ss|i)es$/; 87 | re2 = /^(.+?)([^s])s$/; 88 | 89 | if (re.test(w)) 90 | w = w.replace(re,"$1$2"); 91 | else if (re2.test(w)) 92 | w = w.replace(re2,"$1$2"); 93 | 94 | // Step 1b 95 | re = /^(.+?)eed$/; 96 | re2 = /^(.+?)(ed|ing)$/; 97 | if (re.test(w)) { 98 | var fp = re.exec(w); 99 | re = new RegExp(mgr0); 100 | if (re.test(fp[1])) { 101 | re = /.$/; 102 | w = w.replace(re,""); 103 | } 104 | } 105 | else if (re2.test(w)) { 106 | var fp = re2.exec(w); 107 | stem = fp[1]; 108 | re2 = new RegExp(s_v); 109 | if (re2.test(stem)) { 110 | w = stem; 111 | re2 = /(at|bl|iz)$/; 112 | re3 = new RegExp("([^aeiouylsz])\\1$"); 113 | re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 114 | if (re2.test(w)) 115 | w = w + "e"; 116 | else if (re3.test(w)) { 117 | re = /.$/; 118 | w = w.replace(re,""); 119 | } 120 | else if (re4.test(w)) 121 | w = w + "e"; 122 | } 123 | } 124 | 125 | // Step 1c 126 | re = /^(.+?)y$/; 127 | if (re.test(w)) { 128 | var fp = re.exec(w); 129 | stem = fp[1]; 130 | re = new RegExp(s_v); 131 | if (re.test(stem)) 132 | w = stem + "i"; 133 | } 134 | 135 | // Step 2 136 | re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; 137 | if (re.test(w)) { 138 | var fp = re.exec(w); 139 | stem = fp[1]; 140 | suffix = fp[2]; 141 | re = new RegExp(mgr0); 142 | if (re.test(stem)) 143 | w = stem + step2list[suffix]; 144 | } 145 | 146 | // Step 3 147 | re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; 148 | if (re.test(w)) { 149 | var fp = re.exec(w); 150 | stem = fp[1]; 151 | suffix = fp[2]; 152 | re = new RegExp(mgr0); 153 | if (re.test(stem)) 154 | w = stem + step3list[suffix]; 155 | } 156 | 157 | 
// Step 4 158 | re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; 159 | re2 = /^(.+?)(s|t)(ion)$/; 160 | if (re.test(w)) { 161 | var fp = re.exec(w); 162 | stem = fp[1]; 163 | re = new RegExp(mgr1); 164 | if (re.test(stem)) 165 | w = stem; 166 | } 167 | else if (re2.test(w)) { 168 | var fp = re2.exec(w); 169 | stem = fp[1] + fp[2]; 170 | re2 = new RegExp(mgr1); 171 | if (re2.test(stem)) 172 | w = stem; 173 | } 174 | 175 | // Step 5 176 | re = /^(.+?)e$/; 177 | if (re.test(w)) { 178 | var fp = re.exec(w); 179 | stem = fp[1]; 180 | re = new RegExp(mgr1); 181 | re2 = new RegExp(meq1); 182 | re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 183 | if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) 184 | w = stem; 185 | } 186 | re = /ll$/; 187 | re2 = new RegExp(mgr1); 188 | if (re.test(w) && re2.test(w)) { 189 | re = /.$/; 190 | w = w.replace(re,""); 191 | } 192 | 193 | // and turn initial Y back to y 194 | if (firstch == "y") 195 | w = firstch.toLowerCase() + w.substr(1); 196 | return w; 197 | } 198 | } 199 | 200 | 201 | 202 | 203 | var splitChars = (function() { 204 | var result = {}; 205 | var singles = [96, 180, 187, 191, 215, 247, 749, 885, 903, 907, 909, 930, 1014, 1648, 206 | 1748, 1809, 2416, 2473, 2481, 2526, 2601, 2609, 2612, 2615, 2653, 2702, 207 | 2706, 2729, 2737, 2740, 2857, 2865, 2868, 2910, 2928, 2948, 2961, 2971, 208 | 2973, 3085, 3089, 3113, 3124, 3213, 3217, 3241, 3252, 3295, 3341, 3345, 209 | 3369, 3506, 3516, 3633, 3715, 3721, 3736, 3744, 3748, 3750, 3756, 3761, 210 | 3781, 3912, 4239, 4347, 4681, 4695, 4697, 4745, 4785, 4799, 4801, 4823, 211 | 4881, 5760, 5901, 5997, 6313, 7405, 8024, 8026, 8028, 8030, 8117, 8125, 212 | 8133, 8181, 8468, 8485, 8487, 8489, 8494, 8527, 11311, 11359, 11687, 11695, 213 | 11703, 11711, 11719, 11727, 11735, 12448, 12539, 43010, 43014, 43019, 43587, 214 | 43696, 43713, 64286, 64297, 64311, 64317, 64319, 64322, 64325, 65141]; 215 | var i, j, start, end; 216 | for (i = 0; i < singles.length; i++) { 217 | result[singles[i]] = true; 218 | } 219 | var ranges = [[0, 47], [58, 64], [91, 94], [123, 169], [171, 177], [182, 184], [706, 709], 220 | [722, 735], [741, 747], [751, 879], [888, 889], [894, 901], [1154, 1161], 221 | [1318, 1328], [1367, 1368], [1370, 1376], [1416, 1487], [1515, 1519], [1523, 1568], 222 | [1611, 1631], [1642, 1645], [1750, 1764], [1767, 1773], [1789, 1790], [1792, 1807], 223 | [1840, 1868], [1958, 1968], [1970, 1983], [2027, 2035], [2038, 2041], [2043, 2047], 224 | [2070, 2073], [2075, 2083], [2085, 2087], [2089, 2307], [2362, 2364], [2366, 2383], 225 | [2385, 2391], [2402, 2405], [2419, 2424], [2432, 2436], [2445, 2446], [2449, 2450], 226 | [2483, 2485], [2490, 2492], [2494, 2509], [2511, 2523], [2530, 2533], [2546, 2547], 227 | [2554, 2564], [2571, 2574], [2577, 2578], [2618, 2648], [2655, 2661], [2672, 2673], 228 | [2677, 2692], [2746, 2748], [2750, 2767], [2769, 2783], [2786, 2789], [2800, 2820], 229 | [2829, 2830], [2833, 2834], [2874, 2876], [2878, 2907], [2914, 2917], [2930, 2946], 230 | [2955, 2957], [2966, 2968], [2976, 2978], [2981, 2983], [2987, 2989], [3002, 3023], 231 | [3025, 3045], [3059, 3076], [3130, 3132], [3134, 3159], [3162, 3167], [3170, 3173], 232 | [3184, 3191], [3199, 3204], [3258, 3260], [3262, 3293], [3298, 3301], [3312, 3332], 233 | [3386, 3388], [3390, 3423], [3426, 3429], [3446, 3449], [3456, 3460], [3479, 3481], 234 | [3518, 3519], [3527, 3584], [3636, 3647], [3655, 3663], [3674, 3712], [3717, 3718], 235 | [3723, 3724], [3726, 3731], [3752, 3753], 
[3764, 3772], [3774, 3775], [3783, 3791], 236 | [3802, 3803], [3806, 3839], [3841, 3871], [3892, 3903], [3949, 3975], [3980, 4095], 237 | [4139, 4158], [4170, 4175], [4182, 4185], [4190, 4192], [4194, 4196], [4199, 4205], 238 | [4209, 4212], [4226, 4237], [4250, 4255], [4294, 4303], [4349, 4351], [4686, 4687], 239 | [4702, 4703], [4750, 4751], [4790, 4791], [4806, 4807], [4886, 4887], [4955, 4968], 240 | [4989, 4991], [5008, 5023], [5109, 5120], [5741, 5742], [5787, 5791], [5867, 5869], 241 | [5873, 5887], [5906, 5919], [5938, 5951], [5970, 5983], [6001, 6015], [6068, 6102], 242 | [6104, 6107], [6109, 6111], [6122, 6127], [6138, 6159], [6170, 6175], [6264, 6271], 243 | [6315, 6319], [6390, 6399], [6429, 6469], [6510, 6511], [6517, 6527], [6572, 6592], 244 | [6600, 6607], [6619, 6655], [6679, 6687], [6741, 6783], [6794, 6799], [6810, 6822], 245 | [6824, 6916], [6964, 6980], [6988, 6991], [7002, 7042], [7073, 7085], [7098, 7167], 246 | [7204, 7231], [7242, 7244], [7294, 7400], [7410, 7423], [7616, 7679], [7958, 7959], 247 | [7966, 7967], [8006, 8007], [8014, 8015], [8062, 8063], [8127, 8129], [8141, 8143], 248 | [8148, 8149], [8156, 8159], [8173, 8177], [8189, 8303], [8306, 8307], [8314, 8318], 249 | [8330, 8335], [8341, 8449], [8451, 8454], [8456, 8457], [8470, 8472], [8478, 8483], 250 | [8506, 8507], [8512, 8516], [8522, 8525], [8586, 9311], [9372, 9449], [9472, 10101], 251 | [10132, 11263], [11493, 11498], [11503, 11516], [11518, 11519], [11558, 11567], 252 | [11622, 11630], [11632, 11647], [11671, 11679], [11743, 11822], [11824, 12292], 253 | [12296, 12320], [12330, 12336], [12342, 12343], [12349, 12352], [12439, 12444], 254 | [12544, 12548], [12590, 12592], [12687, 12689], [12694, 12703], [12728, 12783], 255 | [12800, 12831], [12842, 12880], [12896, 12927], [12938, 12976], [12992, 13311], 256 | [19894, 19967], [40908, 40959], [42125, 42191], [42238, 42239], [42509, 42511], 257 | [42540, 42559], [42592, 42593], [42607, 42622], [42648, 42655], [42736, 42774], 258 | [42784, 42785], [42889, 42890], [42893, 43002], [43043, 43055], [43062, 43071], 259 | [43124, 43137], [43188, 43215], [43226, 43249], [43256, 43258], [43260, 43263], 260 | [43302, 43311], [43335, 43359], [43389, 43395], [43443, 43470], [43482, 43519], 261 | [43561, 43583], [43596, 43599], [43610, 43615], [43639, 43641], [43643, 43647], 262 | [43698, 43700], [43703, 43704], [43710, 43711], [43715, 43738], [43742, 43967], 263 | [44003, 44015], [44026, 44031], [55204, 55215], [55239, 55242], [55292, 55295], 264 | [57344, 63743], [64046, 64047], [64110, 64111], [64218, 64255], [64263, 64274], 265 | [64280, 64284], [64434, 64466], [64830, 64847], [64912, 64913], [64968, 65007], 266 | [65020, 65135], [65277, 65295], [65306, 65312], [65339, 65344], [65371, 65381], 267 | [65471, 65473], [65480, 65481], [65488, 65489], [65496, 65497]]; 268 | for (i = 0; i < ranges.length; i++) { 269 | start = ranges[i][0]; 270 | end = ranges[i][1]; 271 | for (j = start; j <= end; j++) { 272 | result[j] = true; 273 | } 274 | } 275 | return result; 276 | })(); 277 | 278 | function splitQuery(query) { 279 | var result = []; 280 | var start = -1; 281 | for (var i = 0; i < query.length; i++) { 282 | if (splitChars[query.charCodeAt(i)]) { 283 | if (start !== -1) { 284 | result.push(query.slice(start, i)); 285 | start = -1; 286 | } 287 | } else if (start === -1) { 288 | start = i; 289 | } 290 | } 291 | if (start !== -1) { 292 | result.push(query.slice(start)); 293 | } 294 | return result; 295 | } 296 | 297 | 298 | 
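
The header comment in `language_data.js` above notes that the file supplies the stopword list, Porter stemmer, and query splitter consumed by `searchtools.js`. As a rough sketch of how those pieces fit together (not part of the built assets; `preprocessQuery` is a hypothetical helper name, and the exact pipeline in `searchtools.js` may differ), a search routine might normalize a query like this:

```js
// Illustrative sketch only: combines the globals defined in language_data.js
// (splitQuery, stopwords, Stemmer) the way searchtools.js-style code typically does.
function preprocessQuery(query) {
  var stemmer = new Stemmer();
  return splitQuery(query)                                    // split on the non-word characters in splitChars
    .map(function (term) { return term.toLowerCase(); })      // normalize case
    .filter(function (term) {                                 // drop common English stopwords
      return stopwords.indexOf(term) === -1;
    })
    .map(function (term) { return stemmer.stemWord(term); }); // reduce each term to its Porter stem
}

// Example: preprocessQuery("training the segmentation models")
// returns ["train", "segment", "model"]
```
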
-------------------------------------------------------------------------------- /ds_book/_build/html/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/minus.png -------------------------------------------------------------------------------- /ds_book/_build/html/_static/mystnb.css: -------------------------------------------------------------------------------- 1 | /* Whole cell */ 2 | div.container.cell { 3 | padding-left: 0; 4 | margin-bottom: 1em; 5 | } 6 | 7 | /* Removing all background formatting so we can control at the div level */ 8 | .cell_input div.highlight, .cell_input pre, .cell_output .output * { 9 | border: none; 10 | box-shadow: none; 11 | } 12 | 13 | .cell_output .output pre, .cell_input pre { 14 | margin: 0px; 15 | } 16 | 17 | /* Input cells */ 18 | div.cell div.cell_input { 19 | padding-left: 0em; 20 | padding-right: 0em; 21 | border: 1px #ccc solid; 22 | background-color: #f7f7f7; 23 | border-left-color: green; 24 | border-left-width: medium; 25 | } 26 | 27 | div.cell_input > div, div.cell_output div.output > div.highlight { 28 | margin: 0em !important; 29 | border: none !important; 30 | } 31 | 32 | /* All cell outputs */ 33 | .cell_output { 34 | padding-left: 1em; 35 | padding-right: 0em; 36 | margin-top: 1em; 37 | } 38 | 39 | /* Outputs from jupyter_sphinx overrides to remove extra CSS */ 40 | div.section div.jupyter_container { 41 | padding: .4em; 42 | margin: 0 0 .4em 0; 43 | background-color: none; 44 | border: none; 45 | -moz-box-shadow: none; 46 | -webkit-box-shadow: none; 47 | box-shadow: none; 48 | } 49 | 50 | /* Text outputs from cells */ 51 | .cell_output .output.text_plain, 52 | .cell_output .output.traceback, 53 | .cell_output .output.stream, 54 | .cell_output .output.stderr 55 | { 56 | background: #fcfcfc; 57 | margin-top: 1em; 58 | margin-bottom: 0em; 59 | box-shadow: none; 60 | } 61 | 62 | .cell_output .output.text_plain, 63 | .cell_output .output.stream, 64 | .cell_output .output.stderr { 65 | border: 1px solid #f7f7f7; 66 | } 67 | 68 | .cell_output .output.stderr { 69 | background: #fdd; 70 | } 71 | 72 | .cell_output .output.traceback { 73 | border: 1px solid #ffd6d6; 74 | } 75 | 76 | /* Math align to the left */ 77 | .cell_output .MathJax_Display { 78 | text-align: left !important; 79 | } 80 | 81 | /* Pandas tables. 
Pulled from the Jupyter / nbsphinx CSS */ 82 | div.cell_output table { 83 | border: none; 84 | border-collapse: collapse; 85 | border-spacing: 0; 86 | color: black; 87 | font-size: 1em; 88 | table-layout: fixed; 89 | } 90 | div.cell_output thead { 91 | border-bottom: 1px solid black; 92 | vertical-align: bottom; 93 | } 94 | div.cell_output tr, 95 | div.cell_output th, 96 | div.cell_output td { 97 | text-align: right; 98 | vertical-align: middle; 99 | padding: 0.5em 0.5em; 100 | line-height: normal; 101 | white-space: normal; 102 | max-width: none; 103 | border: none; 104 | } 105 | div.cell_output th { 106 | font-weight: bold; 107 | } 108 | div.cell_output tbody tr:nth-child(odd) { 109 | background: #f5f5f5; 110 | } 111 | div.cell_output tbody tr:hover { 112 | background: rgba(66, 165, 245, 0.2); 113 | } 114 | 115 | 116 | /* Inline text from `paste` operation */ 117 | 118 | span.pasted-text { 119 | font-weight: bold; 120 | } 121 | 122 | span.pasted-inline img { 123 | max-height: 2em; 124 | } 125 | 126 | tbody span.pasted-inline img { 127 | max-height: none; 128 | } 129 | 130 | /* Font colors for translated ANSI escape sequences 131 | Color values are adapted from share/jupyter/nbconvert/templates/classic/static/style.css 132 | */ 133 | div.highlight .-Color-Bold { 134 | font-weight: bold; 135 | } 136 | div.highlight .-Color[class*=-Black] { 137 | color :#3E424D 138 | } 139 | div.highlight .-Color[class*=-Red] { 140 | color: #E75C58 141 | } 142 | div.highlight .-Color[class*=-Green] { 143 | color: #00A250 144 | } 145 | div.highlight .-Color[class*=-Yellow] { 146 | color: yellow 147 | } 148 | div.highlight .-Color[class*=-Blue] { 149 | color: #208FFB 150 | } 151 | div.highlight .-Color[class*=-Magenta] { 152 | color: #D160C4 153 | } 154 | div.highlight .-Color[class*=-Cyan] { 155 | color: #60C6C8 156 | } 157 | div.highlight .-Color[class*=-White] { 158 | color: #C5C1B4 159 | } 160 | div.highlight .-Color[class*=-BGBlack] { 161 | background-color: #3E424D 162 | } 163 | div.highlight .-Color[class*=-BGRed] { 164 | background-color: #E75C58 165 | } 166 | div.highlight .-Color[class*=-BGGreen] { 167 | background-color: #00A250 168 | } 169 | div.highlight .-Color[class*=-BGYellow] { 170 | background-color: yellow 171 | } 172 | div.highlight .-Color[class*=-BGBlue] { 173 | background-color: #208FFB 174 | } 175 | div.highlight .-Color[class*=-BGMagenta] { 176 | background-color: #D160C4 177 | } 178 | div.highlight .-Color[class*=-BGCyan] { 179 | background-color: #60C6C8 180 | } 181 | div.highlight .-Color[class*=-BGWhite] { 182 | background-color: #C5C1B4 183 | } 184 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/panels-main.c949a650a448cc0ae9fd3441c0e17fb0.css: -------------------------------------------------------------------------------- 1 | details.dropdown .summary-title{padding-right:3em !important;-moz-user-select:none;-ms-user-select:none;-webkit-user-select:none;user-select:none}details.dropdown:hover{cursor:pointer}details.dropdown .summary-content{cursor:default}details.dropdown summary{list-style:none;padding:1em}details.dropdown summary .octicon.no-title{vertical-align:middle}details.dropdown[open] summary .octicon.no-title{visibility:hidden}details.dropdown summary::-webkit-details-marker{display:none}details.dropdown summary:focus{outline:none}details.dropdown summary:hover .summary-up svg,details.dropdown summary:hover .summary-down svg{opacity:1}details.dropdown .summary-up svg,details.dropdown .summary-down 
svg{display:block;opacity:.6}details.dropdown .summary-up,details.dropdown .summary-down{pointer-events:none;position:absolute;right:1em;top:.75em}details.dropdown[open] .summary-down{visibility:hidden}details.dropdown:not([open]) .summary-up{visibility:hidden}details.dropdown.fade-in[open] summary~*{-moz-animation:panels-fade-in .5s ease-in-out;-webkit-animation:panels-fade-in .5s ease-in-out;animation:panels-fade-in .5s ease-in-out}details.dropdown.fade-in-slide-down[open] summary~*{-moz-animation:panels-fade-in .5s ease-in-out, panels-slide-down .5s ease-in-out;-webkit-animation:panels-fade-in .5s ease-in-out, panels-slide-down .5s ease-in-out;animation:panels-fade-in .5s ease-in-out, panels-slide-down .5s ease-in-out}@keyframes panels-fade-in{0%{opacity:0}100%{opacity:1}}@keyframes panels-slide-down{0%{transform:translate(0, -10px)}100%{transform:translate(0, 0)}}.octicon{display:inline-block;fill:currentColor;vertical-align:text-top}.tabbed-content{box-shadow:0 -.0625rem var(--tabs-color-overline),0 .0625rem var(--tabs-color-underline);display:none;order:99;padding-bottom:.75rem;padding-top:.75rem;width:100%}.tabbed-content>:first-child{margin-top:0 !important}.tabbed-content>:last-child{margin-bottom:0 !important}.tabbed-content>.tabbed-set{margin:0}.tabbed-set{border-radius:.125rem;display:flex;flex-wrap:wrap;margin:1em 0;position:relative}.tabbed-set>input{opacity:0;position:absolute}.tabbed-set>input:checked+label{border-color:var(--tabs-color-label-active);color:var(--tabs-color-label-active)}.tabbed-set>input:checked+label+.tabbed-content{display:block}.tabbed-set>input:focus+label{outline-style:auto}.tabbed-set>input:not(.focus-visible)+label{outline:none;-webkit-tap-highlight-color:transparent}.tabbed-set>label{border-bottom:.125rem solid transparent;color:var(--tabs-color-label-inactive);cursor:pointer;font-size:var(--tabs-size-label);font-weight:700;padding:1em 1.25em .5em;transition:color 250ms;width:auto;z-index:1}html .tabbed-set>label:hover{color:var(--tabs-color-label-active)} 2 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/panels-variables.06eb56fa6e07937060861dad626602ad.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --tabs-color-label-active: hsla(231, 99%, 66%, 1); 3 | --tabs-color-label-inactive: rgba(178, 206, 245, 0.62); 4 | --tabs-color-overline: rgb(207, 236, 238); 5 | --tabs-color-underline: rgb(207, 236, 238); 6 | --tabs-size-label: 1rem; 7 | } -------------------------------------------------------------------------------- /ds_book/_build/html/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/plus.png -------------------------------------------------------------------------------- /ds_book/_build/html/_static/pygments.css: -------------------------------------------------------------------------------- 1 | pre { line-height: 125%; } 2 | td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 3 | span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 4 | td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 5 | span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; 
padding-right: 5px; } 6 | .highlight .hll { background-color: #ffffcc } 7 | .highlight { background: #eeffcc; } 8 | .highlight .c { color: #408090; font-style: italic } /* Comment */ 9 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 10 | .highlight .k { color: #007020; font-weight: bold } /* Keyword */ 11 | .highlight .o { color: #666666 } /* Operator */ 12 | .highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ 13 | .highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ 14 | .highlight .cp { color: #007020 } /* Comment.Preproc */ 15 | .highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ 16 | .highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ 17 | .highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ 18 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 19 | .highlight .ge { font-style: italic } /* Generic.Emph */ 20 | .highlight .gr { color: #FF0000 } /* Generic.Error */ 21 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 22 | .highlight .gi { color: #00A000 } /* Generic.Inserted */ 23 | .highlight .go { color: #333333 } /* Generic.Output */ 24 | .highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ 25 | .highlight .gs { font-weight: bold } /* Generic.Strong */ 26 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 27 | .highlight .gt { color: #0044DD } /* Generic.Traceback */ 28 | .highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ 29 | .highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ 30 | .highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ 31 | .highlight .kp { color: #007020 } /* Keyword.Pseudo */ 32 | .highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ 33 | .highlight .kt { color: #902000 } /* Keyword.Type */ 34 | .highlight .m { color: #208050 } /* Literal.Number */ 35 | .highlight .s { color: #4070a0 } /* Literal.String */ 36 | .highlight .na { color: #4070a0 } /* Name.Attribute */ 37 | .highlight .nb { color: #007020 } /* Name.Builtin */ 38 | .highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ 39 | .highlight .no { color: #60add5 } /* Name.Constant */ 40 | .highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ 41 | .highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ 42 | .highlight .ne { color: #007020 } /* Name.Exception */ 43 | .highlight .nf { color: #06287e } /* Name.Function */ 44 | .highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ 45 | .highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ 46 | .highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ 47 | .highlight .nv { color: #bb60d5 } /* Name.Variable */ 48 | .highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ 49 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 50 | .highlight .mb { color: #208050 } /* Literal.Number.Bin */ 51 | .highlight .mf { color: #208050 } /* Literal.Number.Float */ 52 | .highlight .mh { color: #208050 } /* Literal.Number.Hex */ 53 | .highlight .mi { color: #208050 } /* Literal.Number.Integer */ 54 | .highlight .mo { color: #208050 } /* Literal.Number.Oct */ 55 | .highlight .sa { color: #4070a0 } /* Literal.String.Affix */ 56 | .highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ 57 | .highlight .sc { color: #4070a0 } /* 
Literal.String.Char */ 58 | .highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ 59 | .highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ 60 | .highlight .s2 { color: #4070a0 } /* Literal.String.Double */ 61 | .highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ 62 | .highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ 63 | .highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ 64 | .highlight .sx { color: #c65d09 } /* Literal.String.Other */ 65 | .highlight .sr { color: #235388 } /* Literal.String.Regex */ 66 | .highlight .s1 { color: #4070a0 } /* Literal.String.Single */ 67 | .highlight .ss { color: #517918 } /* Literal.String.Symbol */ 68 | .highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ 69 | .highlight .fm { color: #06287e } /* Name.Function.Magic */ 70 | .highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ 71 | .highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ 72 | .highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ 73 | .highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ 74 | .highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /ds_book/_build/html/_static/sphinx-book-theme.12a9622fbb08dcb3a2a40b2c02b83a57.js: -------------------------------------------------------------------------------- 1 | var initTriggerNavBar=()=>{if($(window).width()<768){$("#navbar-toggler").trigger("click")}} 2 | var scrollToActive=()=>{var navbar=document.getElementById('site-navigation') 3 | var active_pages=navbar.querySelectorAll(".active") 4 | var active_page=active_pages[active_pages.length-1] 5 | if(active_page!==undefined&&active_page.offsetTop>($(window).height()*.5)){navbar.scrollTop=active_page.offsetTop-($(window).height()*.2)}} 6 | var sbRunWhenDOMLoaded=cb=>{if(document.readyState!='loading'){cb()}else if(document.addEventListener){document.addEventListener('DOMContentLoaded',cb)}else{document.attachEvent('onreadystatechange',function(){if(document.readyState=='complete')cb()})}} 7 | function toggleFullScreen(){var navToggler=$("#navbar-toggler");if(!document.fullscreenElement){document.documentElement.requestFullscreen();if(!navToggler.hasClass("collapsed")){navToggler.click();}}else{if(document.exitFullscreen){document.exitFullscreen();if(navToggler.hasClass("collapsed")){navToggler.click();}}}} 8 | var initTooltips=()=>{$(document).ready(function(){$('[data-toggle="tooltip"]').tooltip();});} 9 | var initTocHide=()=>{var scrollTimeout;var throttle=200;var tocHeight=$("#bd-toc-nav").outerHeight(true)+$(".bd-toc").outerHeight(true);var hideTocAfter=tocHeight+200;var checkTocScroll=function(){var margin_content=$(".margin, .tag_margin, .full-width, .full_width, .tag_full-width, .tag_full_width, .sidebar, .tag_sidebar, .popout, .tag_popout");margin_content.each((index,item)=>{var topOffset=$(item).offset().top-$(window).scrollTop();var bottomOffset=topOffset+$(item).outerHeight(true);var topOverlaps=((topOffset>=0)&&(topOffset=0)&&(bottomOffset20){$("div.bd-toc").removeClass("show") 10 | return false}else{$("div.bd-toc").addClass("show")};})};var manageScrolledClassOnBody=function(){if(window.scrollY>0){document.body.classList.add("scrolled");}else{document.body.classList.remove("scrolled");}} 11 | 
$(window).on('scroll',function(){if(!scrollTimeout){scrollTimeout=setTimeout(function(){checkTocScroll();manageScrolledClassOnBody();scrollTimeout=null;},throttle);}});} 12 | var initThebeSBT=()=>{var title=$("div.section h1")[0] 13 | if(!$(title).next().hasClass("thebe-launch-button")){$("").insertAfter($(title))} 14 | initThebe();} 15 | sbRunWhenDOMLoaded(initTooltips) 16 | sbRunWhenDOMLoaded(initTriggerNavBar) 17 | sbRunWhenDOMLoaded(scrollToActive) 18 | sbRunWhenDOMLoaded(initTocHide) 19 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/sphinx-thebe.css: -------------------------------------------------------------------------------- 1 | /* Thebelab Buttons */ 2 | .thebelab-button { 3 | z-index: 999; 4 | display: inline-block; 5 | padding: 0.35em 1.2em; 6 | margin: 0px 1px; 7 | border-radius: 0.12em; 8 | box-sizing: border-box; 9 | text-decoration: none; 10 | font-family: 'Roboto', sans-serif; 11 | font-weight: 300; 12 | text-align: center; 13 | transition: all 0.2s; 14 | background-color: #dddddd; 15 | border: 0.05em solid white; 16 | color: #000000; 17 | } 18 | 19 | .thebelab-button:hover{ 20 | border: 0.05em solid black; 21 | background-color: #fcfcfc; 22 | } 23 | 24 | .thebe-launch-button { 25 | height: 2.2em; 26 | font-size: .8em; 27 | border: 1px black solid; 28 | } 29 | 30 | /* Thebelab Cell */ 31 | .thebelab-cell pre { 32 | background: none; 33 | } 34 | 35 | .thebelab-cell .thebelab-input { 36 | padding-left: 1em; 37 | margin-bottom: .5em; 38 | margin-top: .5em; 39 | } 40 | 41 | .thebelab-cell .jp-OutputArea { 42 | margin-top: .5em; 43 | margin-left: 1em; 44 | } 45 | 46 | button.thebelab-button.thebelab-run-button { 47 | margin-left: 1.5em; 48 | margin-bottom: .5em; 49 | } 50 | 51 | /* Loading button */ 52 | button.thebe-launch-button div.spinner { 53 | float: left; 54 | margin-right: 1em; 55 | } 56 | 57 | /* Remove the spinner when thebelab is ready */ 58 | .thebe-launch-button.thebe-status-ready .spinner { 59 | display: none; 60 | } 61 | 62 | .thebe-launch-button span.status { 63 | font-family: monospace; 64 | font-weight: bold; 65 | } 66 | 67 | .thebe-launch-button.thebe-status-ready span.status { 68 | color: green; 69 | } 70 | 71 | .spinner { 72 | height: 2em; 73 | text-align: center; 74 | font-size: 0.7em; 75 | } 76 | 77 | .spinner > div { 78 | background-color: #F37726; 79 | height: 100%; 80 | width: 6px; 81 | display: inline-block; 82 | 83 | -webkit-animation: sk-stretchdelay 1.2s infinite ease-in-out; 84 | animation: sk-stretchdelay 1.2s infinite ease-in-out; 85 | } 86 | 87 | .spinner .rect2 { 88 | -webkit-animation-delay: -1.1s; 89 | animation-delay: -1.1s; 90 | } 91 | 92 | .spinner .rect3 { 93 | -webkit-animation-delay: -1.0s; 94 | animation-delay: -1.0s; 95 | } 96 | 97 | .spinner .rect4 { 98 | -webkit-animation-delay: -0.9s; 99 | animation-delay: -0.9s; 100 | } 101 | 102 | .spinner .rect5 { 103 | -webkit-animation-delay: -0.8s; 104 | animation-delay: -0.8s; 105 | } 106 | 107 | @-webkit-keyframes sk-stretchdelay { 108 | 0%, 40%, 100% { -webkit-transform: scaleY(0.4) } 109 | 20% { -webkit-transform: scaleY(1.0) } 110 | } 111 | 112 | @keyframes sk-stretchdelay { 113 | 0%, 40%, 100% { 114 | transform: scaleY(0.4); 115 | -webkit-transform: scaleY(0.4); 116 | } 20% { 117 | transform: scaleY(1.0); 118 | -webkit-transform: scaleY(1.0); 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/sphinx-thebe.js: 
-------------------------------------------------------------------------------- 1 | /** 2 | * Add attributes to Thebe blocks to initialize thebe properly 3 | */ 4 | 5 | var initThebe = () => { 6 | // If Thebelab hasn't loaded, wait a bit and try again. This 7 | // happens because we load ClipboardJS asynchronously. 8 | if (window.thebelab === undefined) { 9 | console.log("thebe not loaded, retrying..."); 10 | setTimeout(initThebe, 500) 11 | return 12 | } 13 | 14 | console.log("Adding thebe to code cells..."); 15 | 16 | // Load thebe config in case we want to update it as some point 17 | thebe_config = $('script[type="text/x-thebe-config"]')[0] 18 | 19 | 20 | // If we already detect a Thebe cell, don't re-run 21 | if (document.querySelectorAll('div.thebe-cell').length > 0) { 22 | return; 23 | } 24 | 25 | // Update thebe buttons with loading message 26 | $(".thebe-launch-button").each((ii, button) => { 27 | button.innerHTML = ` 28 |
29 |
30 |
31 |
32 |
33 |
34 | `; 35 | }) 36 | 37 | // Set thebe event hooks 38 | var thebeStatus; 39 | thebelab.on("status", function (evt, data) { 40 | console.log("Status changed:", data.status, data.message); 41 | 42 | $(".thebe-launch-button ") 43 | .removeClass("thebe-status-" + thebeStatus) 44 | .addClass("thebe-status-" + data.status) 45 | .find(".loading-text").html("Launching from mybinder.org: " + data.status + ""); 46 | 47 | // Now update our thebe status 48 | thebeStatus = data.status; 49 | 50 | // Find any cells with an initialization tag and ask thebe to run them when ready 51 | if (data.status === "ready") { 52 | var thebeInitCells = document.querySelectorAll('.thebe-init, .tag_thebe-init'); 53 | thebeInitCells.forEach((cell) => { 54 | console.log("Initializing Thebe with cell: " + cell.id); 55 | cell.querySelector('.thebelab-run-button').click(); 56 | }); 57 | } 58 | }); 59 | 60 | 61 | // Find all code cells, replace with Thebe interactive code cells 62 | const codeCells = document.querySelectorAll(thebe_selector) 63 | codeCells.forEach((codeCell, index) => { 64 | const codeCellId = index => `codecell${index}`; 65 | codeCell.id = codeCellId(index); 66 | codeCellText = codeCell.querySelector(thebe_selector_input); 67 | codeCellOutput = codeCell.querySelector(thebe_selector_output); 68 | 69 | // Clean up the language to make it work w/ CodeMirror and add it to the cell 70 | dataLanguage = detectLanguage(kernelName); 71 | 72 | if (codeCellText) { 73 | codeCellText.setAttribute('data-language', dataLanguage); 74 | codeCellText.setAttribute('data-executable', 'true'); 75 | 76 | // If we had an output, insert it just after the `pre` cell 77 | if (codeCellOutput) { 78 | $(codeCellOutput).attr("data-output", ""); 79 | $(codeCellOutput).insertAfter(codeCellText); 80 | } 81 | } 82 | }); 83 | 84 | // Init thebe 85 | thebelab.bootstrap(); 86 | } 87 | 88 | // Helper function to munge the language name 89 | var detectLanguage = (language) => { 90 | if (language.indexOf('python') > -1) { 91 | language = "python"; 92 | } else if (language === 'ir') { 93 | language = "r" 94 | } 95 | return language; 96 | } 97 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/togglebutton.css: -------------------------------------------------------------------------------- 1 | /* Visibility of the target */ 2 | .toggle, div.admonition.toggle .admonition-title ~ * { 3 | transition: opacity .5s, height .5s; 4 | } 5 | 6 | .toggle-hidden:not(.admonition) { 7 | visibility: hidden; 8 | opacity: 0; 9 | height: 1.5em; 10 | margin: 0px; 11 | padding: 0px; 12 | } 13 | 14 | /* Overrides for admonition toggles */ 15 | 16 | /* Titles should cut off earlier to avoid overlapping w/ button */ 17 | div.admonition.toggle p.admonition-title { 18 | padding-right: 25%; 19 | } 20 | 21 | /* hides all the content of a page until de-toggled */ 22 | div.admonition.toggle-hidden .admonition-title ~ * { 23 | height: 0; 24 | margin: 0; 25 | float: left; /* so they overlap when hidden */ 26 | opacity: 0; 27 | visibility: hidden; 28 | } 29 | 30 | /* Toggle buttons inside admonitions so we see the title */ 31 | .toggle.admonition { 32 | position: relative; 33 | } 34 | 35 | .toggle.admonition.admonition-title:after { 36 | content: "" !important; 37 | } 38 | 39 | /* Note, we'll over-ride this in sphinx-book-theme */ 40 | .toggle.admonition button.toggle-button { 41 | margin-right: 0.5em; 42 | right: 0em; 43 | position: absolute; 44 | top: .2em; 45 | } 46 | 47 | /* General button style */ 48 | 
button.toggle-button { 49 | background: #999; 50 | border: none; 51 | z-index: 100; 52 | right: -2.5em; 53 | margin-left: -2.5em; /* A hack to keep code blocks from being pushed left */ 54 | position: relative; 55 | float: right; 56 | border-radius: 100%; 57 | width: 1.5em; 58 | height: 1.5em; 59 | padding: 0px; 60 | } 61 | 62 | @media (min-width: 768px) { 63 | button.toggle-button.toggle-button-hidden:before { 64 | content: "Click to show"; 65 | position: absolute; 66 | font-size: .8em; 67 | left: -6.5em; 68 | bottom: .4em; 69 | } 70 | } 71 | 72 | 73 | /* Plus / minus toggles */ 74 | .toggle-button .bar { 75 | background-color: white; 76 | position: absolute; 77 | left: 15%; 78 | top: 43%; 79 | width: 16px; 80 | height: 3px; 81 | } 82 | 83 | .toggle-button .vertical { 84 | transition: all 0.25s ease-in-out; 85 | transform-origin: center; 86 | } 87 | 88 | .toggle-button-hidden .vertical { 89 | transform: rotate(-90deg); 90 | } -------------------------------------------------------------------------------- /ds_book/_build/html/_static/togglebutton.js: -------------------------------------------------------------------------------- 1 | var initToggleItems = () => { 2 | var itemsToToggle = document.querySelectorAll(togglebuttonSelector); 3 | console.log(itemsToToggle, togglebuttonSelector) 4 | // Add the button to each admonition and hook up a callback to toggle visibility 5 | itemsToToggle.forEach((item, index) => { 6 | var toggleID = `toggle-${index}`; 7 | var buttonID = `button-${toggleID}`; 8 | var collapseButton = ` 9 | `; 13 | 14 | item.setAttribute('id', toggleID); 15 | 16 | if (!item.classList.contains("toggle")){ 17 | item.classList.add("toggle"); 18 | } 19 | 20 | // If it's an admonition block, then we'll add the button inside 21 | if (item.classList.contains("admonition")) { 22 | item.insertAdjacentHTML("afterbegin", collapseButton); 23 | } else { 24 | item.insertAdjacentHTML('beforebegin', collapseButton); 25 | } 26 | 27 | thisButton = $(`#${buttonID}`); 28 | thisButton.on('click', toggleClickHandler); 29 | if (!item.classList.contains("toggle-shown")) { 30 | toggleHidden(thisButton[0]); 31 | } 32 | }) 33 | }; 34 | 35 | // This should simply add / remove the collapsed class and change the button text 36 | var toggleHidden = (button) => { 37 | target = button.dataset['target'] 38 | var itemToToggle = document.getElementById(target); 39 | if (itemToToggle.classList.contains("toggle-hidden")) { 40 | itemToToggle.classList.remove("toggle-hidden"); 41 | button.classList.remove("toggle-button-hidden"); 42 | } else { 43 | itemToToggle.classList.add("toggle-hidden"); 44 | button.classList.add("toggle-button-hidden"); 45 | } 46 | } 47 | 48 | var toggleClickHandler = (click) => { 49 | button = document.getElementById(click.target.dataset['button']); 50 | toggleHidden(button); 51 | } 52 | 53 | // If we want to blanket-add toggle classes to certain cells 54 | var addToggleToSelector = () => { 55 | const selector = ""; 56 | if (selector.length > 0) { 57 | document.querySelectorAll(selector).forEach((item) => { 58 | item.classList.add("toggle"); 59 | }) 60 | } 61 | } 62 | 63 | // Helper function to run when the DOM is finished 64 | const sphinxToggleRunWhenDOMLoaded = cb => { 65 | if (document.readyState != 'loading') { 66 | cb() 67 | } else if (document.addEventListener) { 68 | document.addEventListener('DOMContentLoaded', cb) 69 | } else { 70 | document.attachEvent('onreadystatechange', function() { 71 | if (document.readyState == 'complete') cb() 72 | }) 73 | } 74 | } 75 | 
sphinxToggleRunWhenDOMLoaded(addToggleToSelector) 76 | sphinxToggleRunWhenDOMLoaded(initToggleItems) 77 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/LICENSE.txt: -------------------------------------------------------------------------------- 1 | Font Awesome Free License 2 | ------------------------- 3 | 4 | Font Awesome Free is free, open source, and GPL friendly. You can use it for 5 | commercial projects, open source projects, or really almost whatever you want. 6 | Full Font Awesome Free license: https://fontawesome.com/license/free. 7 | 8 | # Icons: CC BY 4.0 License (https://creativecommons.org/licenses/by/4.0/) 9 | In the Font Awesome Free download, the CC BY 4.0 license applies to all icons 10 | packaged as SVG and JS file types. 11 | 12 | # Fonts: SIL OFL 1.1 License (https://scripts.sil.org/OFL) 13 | In the Font Awesome Free download, the SIL OFL license applies to all icons 14 | packaged as web and desktop font files. 15 | 16 | # Code: MIT License (https://opensource.org/licenses/MIT) 17 | In the Font Awesome Free download, the MIT license applies to all non-font and 18 | non-icon files. 19 | 20 | # Attribution 21 | Attribution is required by MIT, SIL OFL, and CC BY licenses. Downloaded Font 22 | Awesome Free files already contain embedded comments with sufficient 23 | attribution, so you shouldn't need to do anything additional when using these 24 | files normally. 25 | 26 | We've kept attribution comments terse, so we ask that you do not actively work 27 | to remove them from files, especially code. They're a great way for folks to 28 | learn about Font Awesome. 29 | 30 | # Brand Icons 31 | All brand icons are trademarks of their respective owners. The use of these 32 | trademarks does not indicate endorsement of the trademark holder by Font 33 | Awesome, nor vice versa. 
**Please do not use brand logos for any purpose except 34 | to represent the company, product, or service to which they refer.** 35 | -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.eot -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.ttf -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.woff -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-brands-400.woff2 -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-regular-400.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-regular-400.eot -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-regular-400.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-regular-400.ttf -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-regular-400.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-regular-400.woff -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-regular-400.woff2: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-regular-400.woff2 -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.eot -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.ttf -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.woff -------------------------------------------------------------------------------- /ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/_static/vendor/fontawesome/5.13.0/webfonts/fa-solid-900.woff2 -------------------------------------------------------------------------------- /ds_book/_build/html/_static/webpack-macros.html: -------------------------------------------------------------------------------- 1 | 2 | {% macro head_pre_icons() %} 3 | 5 | 7 | 9 | {% endmacro %} 10 | 11 | {% macro head_pre_fonts() %} 12 | {% endmacro %} 13 | 14 | {% macro head_pre_bootstrap() %} 15 | 16 | 17 | {% endmacro %} 18 | 19 | {% macro head_js_preload() %} 20 | 21 | {% endmacro %} 22 | 23 | {% macro body_post() %} 24 | 25 | {% endmacro %} -------------------------------------------------------------------------------- /ds_book/_build/html/genindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Index — Deep learning with TensorFlow 9 | 10 | 11 | 12 | 13 | 14 | 16 | 18 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 |
[genindex.html: auto-generated Sphinx index page; the only readable content is the page heading "Index" and the footer "By Development Seed".]
-------------------------------------------------------------------------------- /ds_book/_build/html/index.html: --------------------------------------------------------------------------------
[index.html: two-line auto-generated page with no readable text content.]
-------------------------------------------------------------------------------- /ds_book/_build/html/objects.inv: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/html/objects.inv
-------------------------------------------------------------------------------- /ds_book/_build/html/search.html: --------------------------------------------------------------------------------
[search.html: auto-generated Sphinx search page titled "Search — Deep learning with TensorFlow"; readable content: "Please activate JavaScript to enable the search functionality.", "Searching for multiple words only shows matches that contain all words.", and the footer "By Development Seed".]
251 | 252 | 253 | 254 | 255 | 256 | -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson1a_Intro_ML_NN_DL_4_0.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/jupyter_execute/docs/Lesson1a_Intro_ML_NN_DL_4_0.jpeg -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson1b_Intro_TensorFlow_Keras.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Introduction to TensorFlow and Keras" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Objectives" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "The goal of this notebook is to teach some basics of the TensorFlow framework and the Keras API. " 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "### What is TensorFlow?\n", 29 | "\n", 30 | "[TensorFlow](https://www.tensorflow.org/guide]) is an open-source framework developed in late 2015 by Google for building various machine learning and deep learning models. TensorFlow is free and open-source, thanks to the Apache Open Source license.\n", 31 | "\n", 32 | "The main objective of using TensorFlow is to reduce the complexity of implementing computations on large numerical data sets. In practice, these large computations can manifest as training and inference with machine learning or deep learning models.\n", 33 | "\n", 34 | "TensorFlow was designed to operate with multiple CPUs or GPUs, as well as a growing number of mobile operating systems. The framework includes wrappers in Python, C++, and Java.\n", 35 | "\n", 36 | "#### How does it work?\n", 37 | "TensorFlow accepts inputs as a multi-dimensional array called a Tensor, which allows the programmer to create dataflow graphs and structures specifying how data travels through. The framework is designed to support creation of a flowchart of operations to be applied to input Tensors, which travel in one direction and out the other.\n", 38 | "\n", 39 | "#### TensorFlow's structure\n", 40 | "There are three main components to TensorFlow's structure.\n", 41 | "\n", 42 | "1. preprocessing the data\n", 43 | "2. building the model\n", 44 | "3. training and estimating the model\n", 45 | "\n", 46 | "The name Tensorflow derives from the way in which the framework receives input in the form of a multi-dimensional array, i.e. the tensors. These tensors travel sequentially through the specified flowchart of the operations, entering at one end and culminating as output at the other end. \n", 47 | "\n", 48 | "#### What are the key TensorFlow components?\n", 49 | "**Tensor**\n", 50 | "\n", 51 | "A tensor is an n-dimensional vector or matrix. In theory, a tensor may represent any form of data. The values belonging to a tensor all share the same data type and often the same shape / dimensionality. A tensor can describe the input data and the output of a calculation. The main difference between a tensor and a conventional n-dimensional array is that tensors are immutable.\n", 52 | "\n", 53 | "\n", 54 | "**Graphs**\n", 55 | "\n", 56 | "TensorFlow uses a graph framework. 
Graphs collect and summarize all of the calculations and offer several benefits:\n", 57 | "\n", 58 | "1. They are designed to work on CPUs or GPUs, as well as on mobile devices.\n", 59 | "2. Graphs are portable which enables the computations to be saved for immediate or later usage. Otherwise stated, the graph can be frozen and run at a later time.\n", 60 | "3. Graph calculations are executed by linking tensors together.\n", 61 | "4. For each tensor, there is a node and an edge. The node carries out the mathematical process and produces endpoint outputs. The input/output connections are represented by the edges. \n", 62 | "5. All nodes are linked together, so the graph itself is a depiction of the operations and relationships that exist between the nodes.\n", 63 | "\n", 64 | "In TensorFlow, all operations are carried out within a graph, which in effect is a series of computations that happen in order. Each individual operation is referred to as an op node. \n", 65 | "\n", 66 | ":::{figure-md} TFgraph-fig\n", 67 | "\n", 68 | "\n", 69 | "TensorFlow graph example (from [https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4](https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4)).\n", 70 | ":::\n", 71 | "\n", 72 | "\n", 73 | "#### Why do so many people like TensorFlow?\n", 74 | "TensorFlow is intentionally user-friendly, with helpful plugins to visualize model training and a useful software debugging tool. As well, TensorFlow is highly scalable, with easy deployment on both CPUs and GPUs." 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "metadata": {}, 80 | "source": [ 81 | "### What is Keras?\n", 82 | "\n", 83 | "[Keras](https://keras.io/about/) is an API built on Python which reduces the cognitive load associated with programming models through human readability and simple and consistent structures.\n", 84 | "\n", 85 | "Keras is what some might call a wrapper for TensorFlow. It is intended for rapid experimentation.\n", 86 | "\n", 87 | "Tha main components of Keras include:\n", 88 | "1. A models API, which enables one to construct a model with varying levels of complexity depending on use case. We will use the [Functional API](https://keras.io/guides/functional_api/) subclass.\n", 89 | "2. A layers API, which allows one to define the tensor in/tensor out computation functions.\n", 90 | "3. A callback API, which enables one to program specific actions to occur during training, such as log training metrics, visualize interim/internal states and statistics of the model during training, and perform early stopping when the model converges.\n", 91 | "4. A data preprocessing API, which offers support for prepping raw data from disk to model ready Tensor format.\n", 92 | "5. An optimizer API where all of the state of the art optimizers can be plugged in. Learning rate decay / scheduling can also be implemented as part of this API.\n", 93 | "6. A metrics API which is used for assessing the performance of the model during training. A metric is the target to optimize during training, with specific metrics chosen for specific modeling objectives.\n", 94 | "7. A loss API that informs the model quantitatively how much it should try to minimize during training by providing a measure of error. 
Similar to metrics, specific loss functions are selected for specific modeling objectives.\n", 95 | "\n", 96 | "With the Functional API, our main workflow will follow the diagram below.\n", 97 | "\n", 98 | ":::{figure-md} Keras-fig\n", 99 | "\n", 100 | "\n", 101 | "Keras Functional API diagram (from [https://miro.com/app/board/o9J_lhnKhVE=/](hhttps://miro.com/app/board/o9J_lhnKhVE=/)).\n", 102 | ":::" 103 | ] 104 | } 105 | ], 106 | "metadata": { 107 | "kernelspec": { 108 | "display_name": "Python 3", 109 | "language": "python", 110 | "name": "python3" 111 | }, 112 | "language_info": { 113 | "codemirror_mode": { 114 | "name": "ipython", 115 | "version": 3 116 | }, 117 | "file_extension": ".py", 118 | "mimetype": "text/x-python", 119 | "name": "python", 120 | "nbconvert_exporter": "python", 121 | "pygments_lexer": "ipython3", 122 | "version": "3.8.11" 123 | } 124 | }, 125 | "nbformat": 4, 126 | "nbformat_minor": 2 127 | } -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson1b_Intro_TensorFlow_Keras.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # # Introduction to TensorFlow and Keras 5 | 6 | # ## Objectives 7 | 8 | # The goal of this notebook is to teach some basics of the TensorFlow framework and the Keras API. 9 | 10 | # ### What is TensorFlow? 11 | # 12 | # [TensorFlow](https://www.tensorflow.org/guide]) is an open-source framework developed in late 2015 by Google for building various machine learning and deep learning models. TensorFlow is free and open-source, thanks to the Apache Open Source license. 13 | # 14 | # The main objective of using TensorFlow is to reduce the complexity of implementing computations on large numerical data sets. In practice, these large computations can manifest as training and inference with machine learning or deep learning models. 15 | # 16 | # TensorFlow was designed to operate with multiple CPUs or GPUs, as well as a growing number of mobile operating systems. The framework includes wrappers in Python, C++, and Java. 17 | # 18 | # #### How does it work? 19 | # TensorFlow accepts inputs as a multi-dimensional array called a Tensor, which allows the programmer to create dataflow graphs and structures specifying how data travels through. The framework is designed to support creation of a flowchart of operations to be applied to input Tensors, which travel in one direction and out the other. 20 | # 21 | # #### TensorFlow's structure 22 | # There are three main components to TensorFlow's structure. 23 | # 24 | # 1. preprocessing the data 25 | # 2. building the model 26 | # 3. training and estimating the model 27 | # 28 | # The name Tensorflow derives from the way in which the framework receives input in the form of a multi-dimensional array, i.e. the tensors. These tensors travel sequentially through the specified flowchart of the operations, entering at one end and culminating as output at the other end. 29 | # 30 | # #### What are the key TensorFlow components? 31 | # **Tensor** 32 | # 33 | # A tensor is an n-dimensional vector or matrix. In theory, a tensor may represent any form of data. The values belonging to a tensor all share the same data type and often the same shape / dimensionality. A tensor can describe the input data and the output of a calculation. The main difference between a tensor and a conventional n-dimensional array is that tensors are immutable. 
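# A minimal sketch, assuming a TensorFlow 2.x environment, of the tensor
# properties described above: a single dtype, a fixed shape, and immutability
# (with tf.Variable as the mutable counterpart). The values are arbitrary.

import tensorflow as tf

# A rank-2 tensor (2x3 matrix); every element shares the float32 dtype.
x = tf.constant([[1.0, 2.0, 3.0],
                 [4.0, 5.0, 6.0]])
print(x.shape)   # (2, 3)
print(x.dtype)   # <dtype: 'float32'>

# Operations produce new tensors rather than modifying x in place.
y = x * 2.0 + 1.0
print(y.numpy())

# Tensors themselves do not support item assignment; wrapping the tensor in a
# tf.Variable gives a mutable container that supports sliced assignment.
v = tf.Variable(x)
v[0, 0].assign(10.0)
print(v.numpy())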
34 | # 35 | # 36 | # **Graphs** 37 | # 38 | # TensorFlow uses a graph framework. Graphs collect and summarize all of the calculations and offer several benefits: 39 | # 40 | # 1. They are designed to work on CPUs or GPUs, as well as on mobile devices. 41 | # 2. Graphs are portable which enables the computations to be saved for immediate or later usage. Otherwise stated, the graph can be frozen and run at a later time. 42 | # 3. Graph calculations are executed by linking tensors together. 43 | # 4. For each tensor, there is a node and an edge. The node carries out the mathematical process and produces endpoint outputs. The input/output connections are represented by the edges. 44 | # 5. All nodes are linked together, so the graph itself is a depiction of the operations and relationships that exist between the nodes. 45 | # 46 | # In TensorFlow, all operations are carried out within a graph, which in effect is a series of computations that happen in order. Each individual operation is referred to as an op node. 47 | # 48 | # :::{figure-md} TFgraph-fig 49 | # 50 | # 51 | # TensorFlow graph example (from [https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4](https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4)). 52 | # ::: 53 | # 54 | # 55 | # #### Why do so many people like TensorFlow? 56 | # TensorFlow is intentionally user-friendly, with helpful plugins to visualize model training and a useful software debugging tool. As well, TensorFlow is highly scalable, with easy deployment on both CPUs and GPUs. 57 | 58 | # ### What is Keras? 59 | # 60 | # [Keras](https://keras.io/about/) is an API built on Python which reduces the cognitive load associated with programming models through human readability and simple and consistent structures. 61 | # 62 | # Keras is what some might call a wrapper for TensorFlow. It is intended for rapid experimentation. 63 | # 64 | # Tha main components of Keras include: 65 | # 1. A models API, which enables one to construct a model with varying levels of complexity depending on use case. We will use the [Functional API](https://keras.io/guides/functional_api/) subclass. 66 | # 2. A layers API, which allows one to define the tensor in/tensor out computation functions. 67 | # 3. A callback API, which enables one to program specific actions to occur during training, such as log training metrics, visualize interim/internal states and statistics of the model during training, and perform early stopping when the model converges. 68 | # 4. A data preprocessing API, which offers support for prepping raw data from disk to model ready Tensor format. 69 | # 5. An optimizer API where all of the state of the art optimizers can be plugged in. Learning rate decay / scheduling can also be implemented as part of this API. 70 | # 6. A metrics API which is used for assessing the performance of the model during training. A metric is the target to optimize during training, with specific metrics chosen for specific modeling objectives. 71 | # 7. A loss API that informs the model quantitatively how much it should try to minimize during training by providing a measure of error. Similar to metrics, specific loss functions are selected for specific modeling objectives. 72 | # 73 | # With the Functional API, our main workflow will follow the diagram below. 
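# A minimal sketch of the Functional API workflow just described, assuming a
# TensorFlow 2.x environment: define an input tensor, chain layer calls
# (tensor in / tensor out), build a Model, then compile it with an optimizer,
# a loss, and metrics. The input size, layer widths, and 10-class softmax head
# are placeholder choices for illustration, not values used later in these
# lessons. The workflow diagram referenced above follows after this sketch.

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

inputs = keras.Input(shape=(64, 64, 3))                 # e.g. a small image tile
x = layers.Conv2D(16, 3, activation="relu")(inputs)     # tensor in -> tensor out
x = layers.MaxPooling2D()(x)
x = layers.Flatten()(x)
outputs = layers.Dense(10, activation="softmax")(x)     # per-class probabilities

model = keras.Model(inputs=inputs, outputs=outputs)
model.compile(optimizer=keras.optimizers.Adam(learning_rate=1e-3),
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
model.summary()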
74 | # 75 | # :::{figure-md} Keras-fig 76 | # 77 | # 78 | # Keras Functional API diagram (from [https://miro.com/app/board/o9J_lhnKhVE=/](hhttps://miro.com/app/board/o9J_lhnKhVE=/)). 79 | # ::: 80 | -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson1b_annex.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Introduction to TensorFlow and Keras" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Objectives" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "The goal of this notebook is to teach some basics of the TensorFlow framework and the Keras API." 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "### What is TensorFlow?\n", 29 | "\n", 30 | "[TensorFlow](https://www.tensorflow.org/guide]) is an open-source framework developed by Google for building various machine learning and deep learning models. Although originally released in late 2015, the first stable version arrived in 2017. TensorFlow is free and open-source, thanks to the Apache Open Source license.\n", 31 | "\n", 32 | "The main objective of using TensorFlow is to reduce the complexity of implementing computations on large numerical data sets. In practice, these large computations can manifest as training and inference with machine learning or deep learning models.\n", 33 | "\n", 34 | "TensorFlow was designed to operate with multiple CPUs or GPUs, as well as a growing number of mobile operating systems. The framework includes wrappers in Python, C++, and Java.\n", 35 | "\n", 36 | "#### How does it work?\n", 37 | "TensorFlow accepts inputs as a multi-dimensional array called a Tensor, which allows the programmer to create dataflow graphs and structures specifying how data travels through. The framework is designed to support creation of a flowchart of operations to be applied to input Tensors, which travel in one direction and out the other.\n", 38 | "\n", 39 | "#### TensorFlow’s structure\n", 40 | "There are three main components to TensorFlow's structure.\n", 41 | "\n", 42 | "1. preprocessing the data\n", 43 | "2. building the model\n", 44 | "3. training and estimating the model\n", 45 | "\n", 46 | "The name TensorFlow derives from the way in which the framework receives input in the form of a multi-dimensional array, i.e. the tensors. These tensors travel sequentially through the specified flowchart of the operations, entering at one end and culminate as output at the other end. \n", 47 | "\n", 48 | "#### What are TensorFlow components?\n", 49 | "**Tensor**\n", 50 | "\n", 51 | "Tensors, the basic unit of data in this framework, are involved in every computation of TensorFlow. A tensor is an n-dimensional vector or matrix. In theory, a tensor may represent any form of data. The values belonging to a tensor all share the same data type and often the same shape / dimensionality. A tensor can describe the input data and the output of a calculation. \n", 52 | "\n", 53 | "In TensorFlow, all operations are carried out within a graph, which in effect is a series of computations that happen in order. Each indivisual operation is referred to as an op node. \n", 54 | "\n", 55 | "**Graphs**\n", 56 | "\n", 57 | "TensorFlow uses a graph framework. 
Graphs collect and summarize all of the calculations and offer several benefits:\n", 58 | "\n", 59 | "1. They are designed to work on CPUs or GPUs, as well as on mobile devices.\n", 60 | "2. Graphs are portable which enables the computations to be saved for immediate or later usage. Otherwsie stated, the graph can be frozen and run at a later time.\n", 61 | "3. Graph calculations are executed by linking tensors together.\n", 62 | "4. For each tensor, there is a node and an edge. The node carries out the mathematical process and produces endpoint outputs. The input/output connections are represented by the edges.\n", 63 | "5. All nodes are linked together, so the graph itself is a depiction of the operations and relationships that exist between the nodes.\n", 64 | "\n", 65 | ":::{figure-md} TFgraph-fig\n", 66 | "\n", 67 | "\n", 68 | "TensorFlow graph example (from [https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4](https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4)).\n", 69 | ":::\n", 70 | "\n", 71 | "#### Why do so many people like TensorFlow?\n", 72 | "TensorFlow is intentionally user-friendly, with helpful plugins to visualize model training and a useful software debugging tool. As well, TensorFlow is highly scalable, with easy deployment on both CPUs and GPUs." 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "metadata": {}, 78 | "source": [ 79 | "### What is Keras?\n", 80 | "\n", 81 | "[Keras](https://keras.io/about/) is an API built on Python, with human readability at the forefront of its design. With simple and consistent structures and methods extensible across many machine learning and deep learning applications, Keras reduces the cognitive load associated with programming models. Furthermore, the API seeks to minimize the need for programmer interaction by abstracting many complexities into easily callable functions. Lastly, Keras features clear & actionable error messaging, complemented by comprehensive and digestible documentation and developer guides.\n", 82 | "\n", 83 | "Keras is what some might call a wrapper for TensorFlow. By that, one means to say that Keras simplifies a programmer's interaction with TensorFlow through refinement of key methods and constructs.\n", 84 | "\n", 85 | "Importantly, Keras is intended for rapid experimentation.\n", 86 | "\n", 87 | "Tha main components of Keras include:\n", 88 | "1. A models API, which enables one to construct a model with varying levels of complexity depending on use case. We will use the [Functional API](https://keras.io/guides/functional_api/) subclass.\n", 89 | "2. A layers API, which allows one to define the tensor in/tnesor out computation functions.\n", 90 | "3. A callback API, which enables one to program specific actions to occur during training, such as log training metrics, visualize interim/internal states and statistics of the model during training, and perform early stopping when the model converges.\n", 91 | "4. A data preprocessing API, which offers support for prepping raw data from disk to model ready Tensor format.\n", 92 | "5. An optimizer API where all of the state of the art optimizers can be plugged in. Learning rate decay / scheduling can also be implemented a spart of this API.\n", 93 | "6. A metrics API which is used for assessing the performance of the model during training. A metric is the target to optimize during training, with specific metrics chosen for specific modeling objectives.\n", 94 | "7. 
A loss API that informs the model quantitatively how much it should try to minimize during training by providing a measure of error. Similar to metrics, specific loss functions are selected for specific modelung objectives.\n", 95 | "\n", 96 | "With the Functional API, our main workflow will follow the diagram below.\n", 97 | "\n", 98 | ":::{figure-md} Keras-fig\n", 99 | "\n", 100 | "\n", 101 | "Keras Functional API diagram (from [https://miro.com/app/board/o9J_lhnKhVE=/](hhttps://miro.com/app/board/o9J_lhnKhVE=/)).\n", 102 | ":::" 103 | ] 104 | } 105 | ], 106 | "metadata": { 107 | "kernelspec": { 108 | "display_name": "Python 3", 109 | "language": "python", 110 | "name": "python3" 111 | }, 112 | "language_info": { 113 | "codemirror_mode": { 114 | "name": "ipython", 115 | "version": 3 116 | }, 117 | "file_extension": ".py", 118 | "mimetype": "text/x-python", 119 | "name": "python", 120 | "nbconvert_exporter": "python", 121 | "pygments_lexer": "ipython3", 122 | "version": "3.8.11" 123 | } 124 | }, 125 | "nbformat": 4, 126 | "nbformat_minor": 2 127 | } -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson1b_annex.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # # Introduction to TensorFlow and Keras 5 | 6 | # ## Objectives 7 | 8 | # The goal of this notebook is to teach some basics of the TensorFlow framework and the Keras API. 9 | 10 | # ### What is TensorFlow? 11 | # 12 | # [TensorFlow](https://www.tensorflow.org/guide]) is an open-source framework developed by Google for building various machine learning and deep learning models. Although originally released in late 2015, the first stable version arrived in 2017. TensorFlow is free and open-source, thanks to the Apache Open Source license. 13 | # 14 | # The main objective of using TensorFlow is to reduce the complexity of implementing computations on large numerical data sets. In practice, these large computations can manifest as training and inference with machine learning or deep learning models. 15 | # 16 | # TensorFlow was designed to operate with multiple CPUs or GPUs, as well as a growing number of mobile operating systems. The framework includes wrappers in Python, C++, and Java. 17 | # 18 | # #### How does it work? 19 | # TensorFlow accepts inputs as a multi-dimensional array called a Tensor, which allows the programmer to create dataflow graphs and structures specifying how data travels through. The framework is designed to support creation of a flowchart of operations to be applied to input Tensors, which travel in one direction and out the other. 20 | # 21 | # #### TensorFlow’s structure 22 | # There are three main components to TensorFlow's structure. 23 | # 24 | # 1. preprocessing the data 25 | # 2. building the model 26 | # 3. training and estimating the model 27 | # 28 | # The name TensorFlow derives from the way in which the framework receives input in the form of a multi-dimensional array, i.e. the tensors. These tensors travel sequentially through the specified flowchart of the operations, entering at one end and culminate as output at the other end. 29 | # 30 | # #### What are TensorFlow components? 31 | # **Tensor** 32 | # 33 | # Tensors, the basic unit of data in this framework, are involved in every computation of TensorFlow. A tensor is an n-dimensional vector or matrix. In theory, a tensor may represent any form of data. 
The values belonging to a tensor all share the same data type and often the same shape / dimensionality. A tensor can describe the input data and the output of a calculation. 34 | # 35 | # In TensorFlow, all operations are carried out within a graph, which in effect is a series of computations that happen in order. Each indivisual operation is referred to as an op node. 36 | # 37 | # **Graphs** 38 | # 39 | # TensorFlow uses a graph framework. Graphs collect and summarize all of the calculations and offer several benefits: 40 | # 41 | # 1. They are designed to work on CPUs or GPUs, as well as on mobile devices. 42 | # 2. Graphs are portable which enables the computations to be saved for immediate or later usage. Otherwsie stated, the graph can be frozen and run at a later time. 43 | # 3. Graph calculations are executed by linking tensors together. 44 | # 4. For each tensor, there is a node and an edge. The node carries out the mathematical process and produces endpoint outputs. The input/output connections are represented by the edges. 45 | # 5. All nodes are linked together, so the graph itself is a depiction of the operations and relationships that exist between the nodes. 46 | # 47 | # :::{figure-md} TFgraph-fig 48 | # 49 | # 50 | # TensorFlow graph example (from [https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4](https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4)). 51 | # ::: 52 | # 53 | # #### Why do so many people like TensorFlow? 54 | # TensorFlow is intentionally user-friendly, with helpful plugins to visualize model training and a useful software debugging tool. As well, TensorFlow is highly scalable, with easy deployment on both CPUs and GPUs. 55 | 56 | # ### What is Keras? 57 | # 58 | # [Keras](https://keras.io/about/) is an API built on Python, with human readability at the forefront of its design. With simple and consistent structures and methods extensible across many machine learning and deep learning applications, Keras reduces the cognitive load associated with programming models. Furthermore, the API seeks to minimize the need for programmer interaction by abstracting many complexities into easily callable functions. Lastly, Keras features clear & actionable error messaging, complemented by comprehensive and digestible documentation and developer guides. 59 | # 60 | # Keras is what some might call a wrapper for TensorFlow. By that, one means to say that Keras simplifies a programmer's interaction with TensorFlow through refinement of key methods and constructs. 61 | # 62 | # Importantly, Keras is intended for rapid experimentation. 63 | # 64 | # Tha main components of Keras include: 65 | # 1. A models API, which enables one to construct a model with varying levels of complexity depending on use case. We will use the [Functional API](https://keras.io/guides/functional_api/) subclass. 66 | # 2. A layers API, which allows one to define the tensor in/tnesor out computation functions. 67 | # 3. A callback API, which enables one to program specific actions to occur during training, such as log training metrics, visualize interim/internal states and statistics of the model during training, and perform early stopping when the model converges. 68 | # 4. A data preprocessing API, which offers support for prepping raw data from disk to model ready Tensor format. 69 | # 5. An optimizer API where all of the state of the art optimizers can be plugged in. 
Learning rate decay / scheduling can also be implemented a spart of this API. 70 | # 6. A metrics API which is used for assessing the performance of the model during training. A metric is the target to optimize during training, with specific metrics chosen for specific modeling objectives. 71 | # 7. A loss API that informs the model quantitatively how much it should try to minimize during training by providing a measure of error. Similar to metrics, specific loss functions are selected for specific modelung objectives. 72 | # 73 | # With the Functional API, our main workflow will follow the diagram below. 74 | # 75 | # :::{figure-md} Keras-fig 76 | # 77 | # 78 | # Keras Functional API diagram (from [https://miro.com/app/board/o9J_lhnKhVE=/](hhttps://miro.com/app/board/o9J_lhnKhVE=/)). 79 | # ::: 80 | -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson2b_prep_data_ML_segmentation_35_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/jupyter_execute/docs/Lesson2b_prep_data_ML_segmentation_35_1.png -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson3_deeplearning_crop_segmentation_31_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/jupyter_execute/docs/Lesson3_deeplearning_crop_segmentation_31_0.png -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson3_deeplearning_crop_segmentation_45_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/jupyter_execute/docs/Lesson3_deeplearning_crop_segmentation_45_0.png -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson3_deeplearning_crop_segmentation_47_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/jupyter_execute/docs/Lesson3_deeplearning_crop_segmentation_47_0.png -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson3_deeplearning_crop_segmentation_49_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/jupyter_execute/docs/Lesson3_deeplearning_crop_segmentation_49_0.png -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson4_evaluation.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # # Evaluating Semantic Segmentation Models 5 | # > A guide for understanding the performance of semantic segmentation for land use / land cover in satellite imagery. 
6 | 
7 | # After we have trained a model for segmenting images according to a set of classes, it's time to evaluate how well it has performed. With any supervised machine learning model, we are typically interested in minimizing false positives or false negatives, with a preference for one or the other.
8 | # 
9 | # Additionally, we are also interested in making sure our model generalizes beyond the training dataset to new data it was not trained on. Finally, we'd like our model to make high-confidence, correct predictions, and for higher confidence thresholds to not result in many more false negatives.
10 | # 
11 | # For semantic segmentation, our evaluation unit is an individual pixel, which can fall into one of four categories:
12 | # * true positive: the pixel was classified correctly as a class of interest.
13 | # * true negative: the pixel was classified correctly as the background class.
14 | # * false positive: the pixel was incorrectly assigned a class of interest.
15 | # * false negative: the pixel was incorrectly assigned the background class or a different class.
16 | # 
17 | # The most in-depth and succinct summary we can produce is a confusion matrix. It summarizes the counts of pixels that fall into each of these categories for each of our classes of interest and the background class.
18 | # 
19 | # 
20 | # ![Confusion Matrix Example](https://github.com/developmentseed/tensorflow-eo-training/blob/main/ds_book/docs/images/cm.png?raw=1)
21 | # 
22 | # In this tutorial we will be using data from a reference dataset hosted on Radiant Earth MLHub called ["A Fusion Dataset for Crop Type Classification in Germany"](https://mlhub.earth/data/dlr_fusion_competition_germany), along with our U-Net predictions, to compute a confusion matrix and assess our model performance.
23 | # 
24 | # ## Specific concepts that will be covered
25 | # In the process, we will build practical experience and develop intuition around the following concepts:
26 | # * **[scikit-learn](https://scikit-learn.org/stable/modules/generated/sklearn.metrics.confusion_matrix.html)** - we will use scikit-learn to compute a confusion matrix and discuss how supplying different values to the `normalize` argument can help us interpret our data.
27 | # * **Metrics** - We will cover useful summary metrics that capture much of the information in the confusion matrix, including precision, recall, and F1 Score.
28 | # 
29 | # 
30 | # **Audience:** This post is geared towards intermediate users who are comfortable with basic machine learning concepts. 
31 | # 32 | # **Time Estimated**: 60-120 min 33 | # 34 | # 35 | 36 | # ## Setup Notebook 37 | 38 | # In[ ]: 39 | 40 | 41 | # install required libraries 42 | get_ipython().system('pip install -q rasterio==1.2.10') 43 | get_ipython().system('pip install -q geopandas==0.10.2') 44 | get_ipython().system('pip install -q git+https://github.com/tensorflow/examples.git') 45 | get_ipython().system('pip install -q -U tfds-nightly') 46 | get_ipython().system('pip install -q focal-loss') 47 | get_ipython().system('pip install -q tensorflow-addons==0.8.3') 48 | #!pip install -q matplotlib==3.5 # UNCOMMENT if running on LOCAL 49 | get_ipython().system('pip install -q scikit-learn==1.0.1') 50 | get_ipython().system('pip install -q scikit-image==0.18.3') 51 | 52 | 53 | # In[2]: 54 | 55 | 56 | # import required libraries 57 | import os, glob, functools, fnmatch 58 | from zipfile import ZipFile 59 | from itertools import product 60 | 61 | import numpy as np 62 | import matplotlib.pyplot as plt 63 | import matplotlib as mpl 64 | mpl.rcParams['axes.grid'] = False 65 | mpl.rcParams['figure.figsize'] = (12,12) 66 | import matplotlib.image as mpimg 67 | import pandas as pd 68 | from PIL import Image 69 | import geopandas as gpd 70 | from IPython.display import clear_output 71 | from time import sleep 72 | 73 | import skimage.io as skio # lighter dependency than tensorflow for working with our tensors/arrays 74 | from sklearn.metrics import confusion_matrix, f1_score 75 | 76 | 77 | # #### Getting set up with the data 78 | # 79 | # ```{important} 80 | # The tiled imagery is available at the following path that is accessible with the google.colab `drive` module: `'/content/gdrive/My Drive/tf-eo-devseed/'` 81 | # ``` 82 | # 83 | # We'll be working with the following folders and files in the `tf-eo-devseed` folder: 84 | # ``` 85 | # tf-eo-devseed/ 86 | # ├── stacks/ 87 | # ├── stacks_brightened/ 88 | # ├── indices/ 89 | # ├── labels/ 90 | # ├── background_list_train.txt 91 | # ├── train_list_clean.txt 92 | # └── lulc_classes.csv 93 | # ``` 94 | 95 | # In[ ]: 96 | 97 | 98 | # set your root directory and tiled data folders 99 | if 'google.colab' in str(get_ipython()): 100 | # mount google drive 101 | from google.colab import drive 102 | drive.mount('/content/gdrive') 103 | root_dir = '/content/gdrive/My Drive/tf-eo-devseed/' 104 | workshop_dir = '/content/gdrive/My Drive/tf-eo-devseed-workshop' 105 | dirs = [root_dir, workshop_dir] 106 | for d in dirs: 107 | if not os.path.exists(d): 108 | os.makedirs(d) 109 | print('Running on Colab') 110 | else: 111 | root_dir = os.path.abspath("./data/tf-eo-devseed") 112 | workshop_dir = os.path.abspath('./tf-eo-devseed-workshop') 113 | print(f'Not running on Colab, data needs to be downloaded locally at {os.path.abspath(root_dir)}') 114 | 115 | 116 | # In[ ]: 117 | 118 | 119 | # go to root directory 120 | get_ipython().run_line_magic('cd', '$root_dir') 121 | 122 | 123 | # ### Check out the labels 124 | # Class names and identifiers extracted from the documentation provided here: https://radiantearth.blob.core.windows.net/mlhub/esa-food-security-challenge/Crops_GT_Brandenburg_Doc.pdf 125 | # 126 | # We'll use these labels to label our confusion matrix and table with class specific metrics. 
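# A toy sketch of the summary metrics mentioned in the introduction (precision,
# recall, F1) and how they fall out of a confusion matrix. The label and
# prediction arrays below are made-up values for a 3-class example, not data
# from this dataset; the real confusion matrix for our U-Net predictions is
# computed further down in this notebook.

import numpy as np
from sklearn.metrics import confusion_matrix, precision_score, recall_score, f1_score

truth = np.array([0, 0, 1, 1, 1, 2, 2, 0, 1, 2])   # pretend flattened pixel labels
preds = np.array([0, 1, 1, 1, 0, 2, 2, 0, 1, 1])   # pretend flattened predictions

cm = confusion_matrix(truth, preds, labels=[0, 1, 2])
print(cm)  # rows = true class, columns = predicted class

tp = np.diag(cm)
precision_manual = tp / cm.sum(axis=0)   # per-class precision = TP / (TP + FP)
recall_manual = tp / cm.sum(axis=1)      # per-class recall    = TP / (TP + FN)
print(precision_manual, recall_manual)

# The same numbers via scikit-learn, plus the macro-averaged F1
# (unweighted mean over classes of 2 * P * R / (P + R)).
print(precision_score(truth, preds, average=None))
print(recall_score(truth, preds, average=None))
print(f1_score(truth, preds, average="macro"))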
127 | 128 | # In[7]: 129 | 130 | 131 | # Read the classes 132 | 133 | data = {'class_names': ['Background', 'Wheat', 'Rye', 'Barley', 'Oats', 'Corn', 'Oil Seeds', 'Root Crops', 'Meadows', 'Forage Crops'], 134 | 'class_ids': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] 135 | } 136 | 137 | classes = pd.DataFrame(data) 138 | print(classes) 139 | 140 | 141 | # ### Getting image, prediction, and label filenames 142 | # 143 | # Before creating our confusion matrix, we need to associate labels, images, and prediction files together so that we know what to compare. We'll use our standard python for this. 144 | 145 | # ### Evaluate Model 146 | # 147 | # Compute confusion matrix from all predicted images and their ground truth label masks. 148 | 149 | # First we need to read in our prediction masks that we saved out in the last notebook. To do this, we can use scikit image, and then we can use scikit learn to compute our confusion matrix. No tensorflow needed for this part! 150 | 151 | # In[10]: 152 | 153 | 154 | path_df = pd.read_csv(os.path.join(workshop_dir, "test_file_paths.csv")) 155 | 156 | 157 | # In[ ]: 158 | 159 | 160 | pd.set_option('display.max_colwidth', None) 161 | path_df 162 | 163 | 164 | # In[12]: 165 | 166 | 167 | # reading in preds 168 | label_arr_lst = path_df["label_names"].apply(skio.imread) 169 | pred_arr_lst = path_df["pred_names"].apply(skio.imread) 170 | 171 | 172 | # A few of our labels have an image dimension that doesn't match the prediction dimension! It's possible this image was corrupted. We can skip it and the corresponding prediction when computing our metrics. 173 | 174 | # In[ ]: 175 | 176 | 177 | pred_arr_lst_valid = [] 178 | label_arr_lst_valid = [] 179 | for i in range(0, len(pred_arr_lst)): 180 | if pred_arr_lst[i].shape != label_arr_lst[i].shape: 181 | 182 | print(f"The {i}th label has an incorrect dimension, skipping.") 183 | print(pred_arr_lst[i]) 184 | print(label_arr_lst[i]) 185 | print(pred_arr_lst[i].shape) 186 | print(label_arr_lst[i].shape) 187 | 188 | else: 189 | pred_arr_lst_valid.append(pred_arr_lst[i]) 190 | label_arr_lst_valid.append(label_arr_lst[i]) 191 | 192 | 193 | # With our predictions and labels in lists of tiled arrays, we can then flatten these so that we instead have lists of pixels for predictions and labels. This is the format expected by scikit-learn's `confusion_matrix` function. 194 | 195 | # In[14]: 196 | 197 | 198 | # flatten our tensors and use scikit-learn to create a confusion matrix 199 | flat_preds = np.concatenate(pred_arr_lst_valid).flatten() 200 | flat_truth = np.concatenate(label_arr_lst_valid).flatten() 201 | OUTPUT_CHANNELS = 10 202 | cm = confusion_matrix(flat_truth, flat_preds, labels=list(range(OUTPUT_CHANNELS))) 203 | 204 | 205 | # Finally, we can plot the confusion matrix. We can either use the built-in method from scikit-learn... 206 | 207 | # In[ ]: 208 | 209 | 210 | from sklearn.metrics import ConfusionMatrixDisplay 211 | 212 | ConfusionMatrixDisplay.from_predictions(flat_truth, flat_preds, normalize='true') 213 | 214 | 215 | # ... or matplotlib, which allows us to more easily customize all aspects of our plot. 216 | 217 | # In[ ]: 218 | 219 | 220 | classes = [0,1,2,3,4,5,6,7,8,9] 221 | 222 | get_ipython().run_line_magic('matplotlib', 'inline') 223 | cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis] 224 | fig, ax = plt.subplots(figsize=(10, 10)) 225 | im = ax.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues) 226 | ax.figure.colorbar(im, ax=ax) 227 | # We want to show all ticks... 
228 | ax.set(xticks=np.arange(cm.shape[1]), 229 | yticks=np.arange(cm.shape[0]), 230 | # ... and label them with the respective list entries 231 | xticklabels=list(range(OUTPUT_CHANNELS)), yticklabels=list(range(OUTPUT_CHANNELS)), 232 | title='Normalized Confusion Matrix', 233 | ylabel='True label', 234 | xlabel='Predicted label') 235 | 236 | # Rotate the tick labels and set their alignment. 237 | plt.setp(ax.get_xticklabels(), rotation=45, ha="right", 238 | rotation_mode="anchor") 239 | 240 | # Loop over data dimensions and create text annotations. 241 | fmt = '.2f' #'d' # if normalize else 'd' 242 | thresh = cm.max() / 2. 243 | for i in range(cm.shape[0]): 244 | for j in range(cm.shape[1]): 245 | ax.text(j, i, format(cm[i, j], fmt), 246 | ha="center", va="center", 247 | color="white" if cm[i, j] > thresh else "black") 248 | fig.tight_layout(pad=2.0, h_pad=2.0, w_pad=2.0) 249 | ax.set_ylim(len(classes)-0.5, -0.5) 250 | 251 | 252 | # Now let's compute the F1 score: 253 | # 254 | # F1 = 2 * (precision * recall) / (precision + recall) 255 | # 256 | 257 | # You can view the documentation for a Python function in a Jupyter notebook with "??". scikit-learn docs usually come with detailed descriptions of each argument and examples of usage. 258 | 259 | # In[ ]: 260 | 261 | 262 | get_ipython().run_line_magic('pinfo2', 'f1_score') 263 | 264 | 265 | # In[ ]: 266 | 267 | 268 | # compute f1 score 269 | f1_score(flat_truth, flat_preds, average='macro') 270 | 271 | 272 | # In[ ]: 273 | 274 | 275 | 276 | 277 | -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson4_evaluation_21_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/jupyter_execute/docs/Lesson4_evaluation_21_1.png -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/Lesson4_evaluation_23_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/_build/jupyter_execute/docs/Lesson4_evaluation_23_1.png -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/appendix.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Appendix" 8 | ] 9 | } 10 | ], 11 | "metadata": { 12 | "kernelspec": { 13 | "display_name": "Python 3", 14 | "language": "python", 15 | "name": "python3" 16 | }, 17 | "language_info": { 18 | "codemirror_mode": { 19 | "name": "ipython", 20 | "version": 3 21 | }, 22 | "file_extension": ".py", 23 | "mimetype": "text/x-python", 24 | "name": "python", 25 | "nbconvert_exporter": "python", 26 | "pygments_lexer": "ipython3", 27 | "version": "3.8.11" 28 | } 29 | }, 30 | "nbformat": 4, 31 | "nbformat_minor": 4 32 | } -------------------------------------------------------------------------------- /ds_book/_build/jupyter_execute/docs/appendix.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # # Appendix 5 | -------------------------------------------------------------------------------- /ds_book/_config.yml: 
-------------------------------------------------------------------------------- 1 | title: "Deep learning with TensorFlow" 2 | author: "Development Seed" 3 | email: "lilly@developmentseed.org" 4 | release: "1" 5 | logo: 'ds.png' 6 | 7 | # Short description about the book 8 | description: >- 9 | Notebook tutorials demonstrating advanced techniques for use of deep learning with TensorFlow and earth observation data. 10 | 11 | execute: 12 | # execute_notebooks : auto 13 | execute_notebooks : off 14 | timeout: -1 15 | 16 | # HTML-specific settings 17 | html: 18 | favicon : "ds.png" 19 | home_page_in_navbar : false 20 | use_edit_page_button : true 21 | use_repository_button: true 22 | use_issues_button : true 23 | 24 | # Interact link settings 25 | notebook_interface : "notebook" 26 | 27 | repository: 28 | url : "https://github.com/developmentseed/tensorflow-eo-training/" 29 | branch : main 30 | path_to_book : ds_book 31 | 32 | # Launch button settings 33 | #launch_buttons: 34 | # binder : false 35 | # binderhub : false 36 | # binderhub_url : off 37 | # jupyterhub : false 38 | # thebe : false 39 | # colab : false 40 | 41 | # Launch button settings 42 | launch_buttons: 43 | # notebook_interface: classic #jupyterlab 44 | binderhub_url: https://mybinder.org 45 | colab_url: https://colab.research.google.com 46 | 47 | 48 | # LaTeX settings 49 | bibtex_bibfiles: 50 | - _bibliography/references.bib 51 | 52 | latex: 53 | latex_engine : "pdflatex" 54 | use_jupyterbook_latex : true # use jupyterbook-latex for pdf builds as default 55 | latex_documents: 56 | targetname: ds_book.tex 57 | 58 | sphinx: 59 | config: 60 | html_show_copyright: false 61 | latex_toplevel_sectioning: 'section' 62 | # latex_show_urls: 'footnote' 63 | latex_elements.papersize: a4paper 64 | latex_elements.pointsize: 12pt 65 | 66 | 67 | #google_analytics: 68 | # mytrackingcode: UA-205698170-1 69 | -------------------------------------------------------------------------------- /ds_book/_toc.yml: -------------------------------------------------------------------------------- 1 | format: jb-book 2 | root: docs/index 3 | chapters: 4 | - file: docs/Lesson1a_Intro_ML_NN_DL 5 | - file: docs/Lesson1b_Intro_TensorFlow_Keras 6 | - file: docs/Lesson2a_get_planet_NICFI 7 | - file: docs/Lesson2b_prep_data_ML_segmentation 8 | - file: docs/Lesson3_deeplearning_crop_segmentation 9 | - file: docs/Lesson4_evaluation 10 | - file: docs/Lesson5_dealing_with_limited_data 11 | - file: docs/appendix 12 | -------------------------------------------------------------------------------- /ds_book/covers/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/covers/.DS_Store -------------------------------------------------------------------------------- /ds_book/docs/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/.DS_Store -------------------------------------------------------------------------------- /ds_book/docs/Lesson1b_Intro_TensorFlow_Keras.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Introduction to TensorFlow and Keras" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": 
{}, 13 | "source": [ 14 | "## Objectives" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "The goal of this notebook is to teach some basics of the TensorFlow framework and the Keras API. " 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "### What is TensorFlow?\n", 29 | "\n", 30 | "[TensorFlow](https://www.tensorflow.org/guide) is an open-source framework developed in late 2015 by Google for building various machine learning and deep learning models. TensorFlow is free and open-source, thanks to the Apache Open Source license.\n", 31 | "\n", 32 | "The main objective of using TensorFlow is to reduce the complexity of implementing computations on large numerical data sets. In practice, these large computations can manifest as training and inference with machine learning or deep learning models.\n", 33 | "\n", 34 | "TensorFlow was designed to operate with multiple CPUs or GPUs, as well as a growing number of mobile operating systems. The framework includes wrappers in Python, C++, and Java.\n", 35 | "\n", 36 | "#### How does it work?\n", 37 | "TensorFlow accepts inputs as a multi-dimensional array called a Tensor, which allows the programmer to create dataflow graphs and structures specifying how data travels through. The framework is designed to support creation of a flowchart of operations to be applied to input Tensors, which travel in one direction and out the other.\n", 38 | "\n", 39 | "#### TensorFlow's structure\n", 40 | "There are three main components to TensorFlow's structure.\n", 41 | "\n", 42 | "1. preprocessing the data\n", 43 | "2. building the model\n", 44 | "3. training and estimating the model\n", 45 | "\n", 46 | "The name TensorFlow derives from the way in which the framework receives input in the form of a multi-dimensional array, i.e. the tensors. These tensors travel sequentially through the specified flowchart of the operations, entering at one end and culminating as output at the other end. \n", 47 | "\n", 48 | "#### What are the key TensorFlow components?\n", 49 | "**Tensor**\n", 50 | "\n", 51 | "A tensor is an n-dimensional vector or matrix. In theory, a tensor may represent any form of data. The values belonging to a tensor all share the same data type and often the same shape / dimensionality. A tensor can describe the input data and the output of a calculation. The main difference between a tensor and a conventional n-dimensional array is that tensors are immutable.\n", 52 | "\n", 53 | "\n", 54 | "**Graphs**\n", 55 | "\n", 56 | "TensorFlow uses a graph framework. Graphs collect and summarize all of the calculations and offer several benefits:\n", 57 | "\n", 58 | "1. They are designed to work on CPUs or GPUs, as well as on mobile devices.\n", 59 | "2. Graphs are portable, which enables the computations to be saved for immediate or later usage. Otherwise stated, the graph can be frozen and run at a later time.\n", 60 | "3. Graph calculations are executed by linking tensors together.\n", 61 | "4. For each tensor, there is a node and an edge. The node carries out the mathematical process and produces endpoint outputs. The input/output connections are represented by the edges. \n", 62 | "5. All nodes are linked together, so the graph itself is a depiction of the operations and relationships that exist between the nodes.\n", 63 | "\n", 64 | "In TensorFlow, all operations are carried out within a graph, which in effect is a series of computations that happen in order. 
Each individual operation is referred to as an op node. \n", 65 | "\n", 66 | ":::{figure-md} TFgraph-fig\n", 67 | "\n", 68 | "\n", 69 | "TensorFlow graph example (from [https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4](https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4)).\n", 70 | ":::\n", 71 | "\n", 72 | "\n", 73 | "#### Why do so many people like TensorFlow?\n", 74 | "TensorFlow is intentionally user-friendly, with helpful plugins to visualize model training and a useful software debugging tool. Additionally, TensorFlow is highly scalable, with easy deployment on both CPUs and GPUs." 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "metadata": {}, 80 | "source": [ 81 | "### What is Keras?\n", 82 | "\n", 83 | "[Keras](https://keras.io/about/) is an API built on Python that reduces the cognitive load associated with programming models through human readability and simple and consistent structures.\n", 84 | "\n", 85 | "Keras is what some might call a wrapper for TensorFlow. It is intended for rapid experimentation.\n", 86 | "\n", 87 | "The main components of Keras include:\n", 88 | "1. A models API, which enables one to construct a model with varying levels of complexity depending on use case. We will use the [Functional API](https://keras.io/guides/functional_api/) subclass.\n", 89 | "2. A layers API, which allows one to define the tensor in/tensor out computation functions.\n", 90 | "3. A callback API, which enables one to program specific actions to occur during training, such as logging training metrics, visualizing interim/internal states and statistics of the model during training, and performing early stopping when the model converges.\n", 91 | "4. A data preprocessing API, which offers support for prepping raw data from disk to model-ready Tensor format.\n", 92 | "5. An optimizer API where all of the state-of-the-art optimizers can be plugged in. Learning rate decay / scheduling can also be implemented as part of this API.\n", 93 | "6. A metrics API which is used for assessing the performance of the model during training. A metric is the target to optimize during training, with specific metrics chosen for specific modeling objectives.\n", 94 | "7. A loss API that informs the model quantitatively how much it should try to minimize during training by providing a measure of error. 
Similar to metrics, specific loss functions are selected for specific modeling objectives.\n", 95 | "\n", 96 | "With the Functional API, our main workflow will follow the diagram below.\n", 97 | "\n", 98 | ":::{figure-md} Keras-fig\n", 99 | "\n", 100 | "\n", 101 | "Keras Functional API diagram (from [https://miro.com/app/board/o9J_lhnKhVE=/](https://miro.com/app/board/o9J_lhnKhVE=/)).\n", 102 | ":::" 103 | ] 104 | } 105 | ], 106 | "metadata": { 107 | "kernelspec": { 108 | "display_name": "Python 3", 109 | "language": "python", 110 | "name": "python3" 111 | }, 112 | "language_info": { 113 | "codemirror_mode": { 114 | "name": "ipython", 115 | "version": 3 116 | }, 117 | "file_extension": ".py", 118 | "mimetype": "text/x-python", 119 | "name": "python", 120 | "nbconvert_exporter": "python", 121 | "pygments_lexer": "ipython3", 122 | "version": "3.8.11" 123 | } 124 | }, 125 | "nbformat": 4, 126 | "nbformat_minor": 2 127 | } 128 | -------------------------------------------------------------------------------- /ds_book/docs/Lesson1b_annex.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Introduction to TensorFlow and Keras" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Objectives" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "The goal of this notebook is to teach some basics of the TensorFlow framework and the Keras API." 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "### What is TensorFlow?\n", 29 | "\n", 30 | "[TensorFlow](https://www.tensorflow.org/guide) is an open-source framework developed by Google for building various machine learning and deep learning models. Although originally released in late 2015, the first stable version arrived in 2017. TensorFlow is free and open-source, thanks to the Apache Open Source license.\n", 31 | "\n", 32 | "The main objective of using TensorFlow is to reduce the complexity of implementing computations on large numerical data sets. In practice, these large computations can manifest as training and inference with machine learning or deep learning models.\n", 33 | "\n", 34 | "TensorFlow was designed to operate with multiple CPUs or GPUs, as well as a growing number of mobile operating systems. The framework includes wrappers in Python, C++, and Java.\n", 35 | "\n", 36 | "#### How does it work?\n", 37 | "TensorFlow accepts inputs as a multi-dimensional array called a Tensor, which allows the programmer to create dataflow graphs and structures specifying how data travels through. The framework is designed to support creation of a flowchart of operations to be applied to input Tensors, which travel in one direction and out the other.\n", 38 | "\n", 39 | "#### TensorFlow’s structure\n", 40 | "There are three main components to TensorFlow's structure.\n", 41 | "\n", 42 | "1. preprocessing the data\n", 43 | "2. building the model\n", 44 | "3. training and estimating the model\n", 45 | "\n", 46 | "The name TensorFlow derives from the way in which the framework receives input in the form of a multi-dimensional array, i.e. the tensors. These tensors travel sequentially through the specified flowchart of the operations, entering at one end and culminating as output at the other end. 
\n", 47 | "\n", 48 | "#### What are TensorFlow components?\n", 49 | "**Tensor**\n", 50 | "\n", 51 | "Tensors, the basic unit of data in this framework, are involved in every computation of TensorFlow. A tensor is an n-dimensional vector or matrix. In theory, a tensor may represent any form of data. The values belonging to a tensor all share the same data type and often the same shape / dimensionality. A tensor can describe the input data and the output of a calculation. \n", 52 | "\n", 53 | "In TensorFlow, all operations are carried out within a graph, which in effect is a series of computations that happen in order. Each indivisual operation is referred to as an op node. \n", 54 | "\n", 55 | "**Graphs**\n", 56 | "\n", 57 | "TensorFlow uses a graph framework. Graphs collect and summarize all of the calculations and offer several benefits:\n", 58 | "\n", 59 | "1. They are designed to work on CPUs or GPUs, as well as on mobile devices.\n", 60 | "2. Graphs are portable which enables the computations to be saved for immediate or later usage. Otherwsie stated, the graph can be frozen and run at a later time.\n", 61 | "3. Graph calculations are executed by linking tensors together.\n", 62 | "4. For each tensor, there is a node and an edge. The node carries out the mathematical process and produces endpoint outputs. The input/output connections are represented by the edges.\n", 63 | "5. All nodes are linked together, so the graph itself is a depiction of the operations and relationships that exist between the nodes.\n", 64 | "\n", 65 | ":::{figure-md} TFgraph-fig\n", 66 | "\n", 67 | "\n", 68 | "TensorFlow graph example (from [https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4](https://medium.com/the-artificial-impostor/notes-understanding-tensorflow-part-1-5f0ebb253ad4)).\n", 69 | ":::\n", 70 | "\n", 71 | "#### Why do so many people like TensorFlow?\n", 72 | "TensorFlow is intentionally user-friendly, with helpful plugins to visualize model training and a useful software debugging tool. As well, TensorFlow is highly scalable, with easy deployment on both CPUs and GPUs." 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "metadata": {}, 78 | "source": [ 79 | "### What is Keras?\n", 80 | "\n", 81 | "[Keras](https://keras.io/about/) is an API built on Python, with human readability at the forefront of its design. With simple and consistent structures and methods extensible across many machine learning and deep learning applications, Keras reduces the cognitive load associated with programming models. Furthermore, the API seeks to minimize the need for programmer interaction by abstracting many complexities into easily callable functions. Lastly, Keras features clear & actionable error messaging, complemented by comprehensive and digestible documentation and developer guides.\n", 82 | "\n", 83 | "Keras is what some might call a wrapper for TensorFlow. By that, one means to say that Keras simplifies a programmer's interaction with TensorFlow through refinement of key methods and constructs.\n", 84 | "\n", 85 | "Importantly, Keras is intended for rapid experimentation.\n", 86 | "\n", 87 | "Tha main components of Keras include:\n", 88 | "1. A models API, which enables one to construct a model with varying levels of complexity depending on use case. We will use the [Functional API](https://keras.io/guides/functional_api/) subclass.\n", 89 | "2. A layers API, which allows one to define the tensor in/tnesor out computation functions.\n", 90 | "3. 
A callback API, which enables one to program specific actions to occur during training, such as logging training metrics, visualizing interim/internal states and statistics of the model during training, and performing early stopping when the model converges.\n", 91 | "4. A data preprocessing API, which offers support for prepping raw data from disk to model-ready Tensor format.\n", 92 | "5. An optimizer API where all of the state-of-the-art optimizers can be plugged in. Learning rate decay / scheduling can also be implemented as part of this API.\n", 93 | "6. A metrics API which is used for assessing the performance of the model during training. A metric is the target to optimize during training, with specific metrics chosen for specific modeling objectives.\n", 94 | "7. A loss API that informs the model quantitatively how much it should try to minimize during training by providing a measure of error. Similar to metrics, specific loss functions are selected for specific modeling objectives.\n", 95 | "\n", 96 | "With the Functional API, our main workflow will follow the diagram below.\n", 97 | "\n", 98 | ":::{figure-md} Keras-fig\n", 99 | "\n", 100 | "\n", 101 | "Keras Functional API diagram (from [https://miro.com/app/board/o9J_lhnKhVE=/](https://miro.com/app/board/o9J_lhnKhVE=/)).\n", 102 | ":::" 103 | ] 104 | } 105 | ], 106 | "metadata": { 107 | "kernelspec": { 108 | "display_name": "Python 3", 109 | "language": "python", 110 | "name": "python3" 111 | }, 112 | "language_info": { 113 | "codemirror_mode": { 114 | "name": "ipython", 115 | "version": 3 116 | }, 117 | "file_extension": ".py", 118 | "mimetype": "text/x-python", 119 | "name": "python", 120 | "nbconvert_exporter": "python", 121 | "pygments_lexer": "ipython3", 122 | "version": "3.8.11" 123 | } 124 | }, 125 | "nbformat": 4, 126 | "nbformat_minor": 2 127 | } 128 | -------------------------------------------------------------------------------- /ds_book/docs/appendix.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Appendix" 8 | ] 9 | } 10 | ], 11 | "metadata": { 12 | "kernelspec": { 13 | "display_name": "Python 3", 14 | "language": "python", 15 | "name": "python3" 16 | }, 17 | "language_info": { 18 | "codemirror_mode": { 19 | "name": "ipython", 20 | "version": 3 21 | }, 22 | "file_extension": ".py", 23 | "mimetype": "text/x-python", 24 | "name": "python", 25 | "nbconvert_exporter": "python", 26 | "pygments_lexer": "ipython3", 27 | "version": "3.8.11" 28 | } 29 | }, 30 | "nbformat": 4, 31 | "nbformat_minor": 4 32 | } 33 | -------------------------------------------------------------------------------- /ds_book/docs/images/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/.DS_Store -------------------------------------------------------------------------------- /ds_book/docs/images/Keras_functional_API.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/Keras_functional_API.jpg -------------------------------------------------------------------------------- /ds_book/docs/images/Unet.png: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/Unet.png -------------------------------------------------------------------------------- /ds_book/docs/images/Unet_mobilenetv2_arch_arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/Unet_mobilenetv2_arch_arch.png -------------------------------------------------------------------------------- /ds_book/docs/images/cm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/cm.png -------------------------------------------------------------------------------- /ds_book/docs/images/epoch50_testimage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/epoch50_testimage.png -------------------------------------------------------------------------------- /ds_book/docs/images/epoch50_testimage1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/epoch50_testimage1.png -------------------------------------------------------------------------------- /ds_book/docs/images/kakarla2021.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/kakarla2021.png -------------------------------------------------------------------------------- /ds_book/docs/images/loss_curve.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/loss_curve.png -------------------------------------------------------------------------------- /ds_book/docs/images/lulc_labeling.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/lulc_labeling.gif -------------------------------------------------------------------------------- /ds_book/docs/images/marine_debris.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/marine_debris.png -------------------------------------------------------------------------------- /ds_book/docs/images/neuralnet_basic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/neuralnet_basic.png -------------------------------------------------------------------------------- /ds_book/docs/images/neuron-structure.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/docs/images/neuron-structure.jpg -------------------------------------------------------------------------------- /ds_book/docs/index.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # Deep Learning with TensorFlow:
Tutorials for modeling LULC. 5 | 6 | 7 | Authored by [**Development Seed**](https://developmentseed.org/) engineers [**Lillianne Thomas**](https://github.com/lillythomas) and [**Ryan Avery**](https://github.com/rbavery) 8 | 9 | 10 | 11 | These materials are designed to provide TensorFlow expertise via tutorials and science support, including suggestions for acquiring and processing input data and for training, testing, and evaluating TensorFlow models, as well as demos of different TensorFlow / deep learning techniques in Colab notebooks using real data. 12 | 13 | The content of this workshop assumes general familiarity with geospatial data such as satellite imagery, raster and vector formats, file formats such as [GeoTIFF](https://earthdata.nasa.gov/esdis/eso/standards-and-references/geotiff) and [GeoJSON](https://geojson.org/), the [Python](https://www.python.org/) programming language and [Google Colab](https://research.google.com/colaboratory/). Having knowledge of [numpy](https://numpy.org/), [rasterio](https://rasterio.readthedocs.io/en/latest/), [geopandas](https://geopandas.org/en/stable/) and [scikit-learn](https://scikit-learn.org/stable/) is a plus. 14 | 15 | 16 | ```{admonition} Links 17 | :class: tip 18 | 19 | - Jupyter Book: 20 | [https://developmentseed.github.io/tensorflow-eo-training/docs/index.html](https://developmentseed.github.io/tensorflow-eo-training/docs/index.html) 21 | 22 | ``` 23 | 24 | 25 | ```{admonition} How to run the notebook code 26 | :class: important 27 | 28 | A major advantage of executable books is that the reader may enjoy running the source code, modifying it, and playing around. No downloading, installation, or configuration is required. Simply go to 29 | 30 | [https://developmentseed.github.io/tensorflow-eo-training/docs/index.html](https://developmentseed.github.io/tensorflow-eo-training/docs/index.html), 31 | 32 | and in the left menu select any topic, click the "rocket" icon at the top right of the screen, and choose "Colab." This will launch the page in a virtual runtime environment hosted by Google. From there, the code can be run using a free GPU. 33 | 34 | For local running, the code for each topic in the form of 35 | [Jupyter](https://jupyter.org) notebooks can be downloaded by clicking the "arrow-down" icon at the top right of the screen. 36 | 37 | ``` 38 | 39 | ```{admonition} How to access the data 40 | :class: important 41 | 42 | These tutorials will make use of open source data hosted on [Radiant Earth MLHub](https://mlhub.earth/). Please register an account with MLHub and obtain your unique API key in advance of starting these tutorials. 43 | 44 | ``` 45 | 46 | ```{admonition} $~$ 47 | Built with [Jupyter Book 48 | 2.0](https://beta.jupyterbook.org/intro.html) tool set, as part of the 49 | [ExecutableBookProject](https://ebp.jupyterbook.org/en/latest/). 
50 | ``` 51 | 52 | 53 | 54 | ISBN: *(tbd)* 55 | 56 | 57 | -------------------------------------------------------------------------------- /ds_book/ds.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/developmentseed/tensorflow-eo-training/80ad3f8f8d8031731adc19f36b4f8084f4c3e01b/ds_book/ds.png -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: ds_book 2 | channels: 3 | - default 4 | dependencies: 5 | - python=3.8 6 | - sphinx 7 | - pip 8 | - numpy 9 | - matplotlib 10 | - tqdm 11 | - pip: 12 | - jupyter-book 13 | 14 | --------------------------------------------------------------------------------