├── Brendon_Hall ├── Geochemical Facies Analysis.ipynb ├── README.md ├── XRF_dataset.csv └── well_trajectory.csv ├── Graham_Ganssle ├── LICENSE ├── README.md ├── dat │ ├── .ipynb_checkpoints │ │ └── array_explorer-checkpoint.ipynb │ ├── array_explorer.ipynb │ └── zips │ │ ├── fault_dyke_fold_model.zip │ │ ├── fold_dyke_fault_model.zip │ │ └── gbasin_simplified_model.zip ├── evaluation │ ├── .ipynb_checkpoints │ │ └── loss_eval-checkpoint.ipynb │ ├── loss_comparison.png │ ├── loss_eval.ipynb │ ├── losses.csv │ ├── network_output.png │ ├── real_data_test_one.png │ ├── real_data_test_two.png │ └── real_loss_comparison.png ├── img_frmt │ ├── array_to_image.py │ ├── modeler.py │ └── pair_generator.py ├── pix2pix │ ├── LICENSE │ ├── data │ │ ├── data.lua │ │ ├── dataset.lua │ │ └── donkey_folder.lua │ ├── models.lua │ ├── scripts │ │ ├── combine_A_and_B.py │ │ └── receptive_field_sizes.m │ ├── test.lua │ ├── train.lua │ └── util │ │ ├── cudnn_convert_custom.lua │ │ └── util.lua └── prep_images.sh ├── Matteo_Niccoli ├── 9-Cant&Ethier-1994 3 classes.txt ├── README.md ├── aadm.txt ├── figure1_crossplot_robustness_margin.ipynb ├── figure2_XOR_ABZ.ipynb ├── figure3_C_parameter.ipynb ├── figure4_gamma_parameter.ipynb ├── figure5_validation_curves.ipynb └── figure6_grid_search.ipynb └── README.md /Brendon_Hall/README.md: -------------------------------------------------------------------------------- 1 | # CSEG Geochemical Facies Analysis 2 | 3 | This is the CSEG repo for [Brendon Hall's](https://github.com/brendonhall/) article, *Geochemical Facies Analysis using Unsupervised Machine Learning*, in the January 2018 [CSEG Recorder](https://csegrecorder.com/), focused on Machine Learning. 4 | 5 | This tutorial demonstrates how dimensionality reduction and unsupervised machine learning can be used to analyze X-ray fluorescence measurements of cuttings samples. 6 | 7 | The notebook contains the article and the full code used to generate the results. 
8 | 9 | Please do not hesitate to contact [Brendon](https://github.com/brendonhall/) if you have any comments or questions. 10 | -------------------------------------------------------------------------------- /Brendon_Hall/well_trajectory.csv: -------------------------------------------------------------------------------- 1 | MD,INC,AZ,TVD 2 | 39.80688,0.21,307.66,39.80688 3 | 67.90944,0.21,306.93,67.90944 4 | 96.3168,0.16,296.29,96.3168 5 | 124.72416,0.06,310.33,124.72416 6 | 153.40584,0.22,111.46,153.40584 7 | 183.55056,0.26,114.89,183.55056 8 | 211.25688,0.29,118.58,211.25688 9 | 239.20704,0.26,116.96,239.20704 10 | 267.06576,0.27,120.93,267.062712 11 | 295.13784,0.21,124.88,295.134792 12 | 322.66128,0.16,111.21,322.658232 13 | 350.45904,0.08,86.52,350.455992 14 | 379.20168,0.03,1.64,379.198632 15 | 407.9748,0.11,350.3,407.971752 16 | 436.9308,0.09,24.03,436.927752 17 | 465.94776,0.09,113.42,465.944712 18 | 494.90376,0.09,117.36,494.900712 19 | 523.61592,0.12,110.91,523.612872 20 | 552.54144,0.13,122.7,552.538392 21 | 581.43648,0.17,123.94,581.433432 22 | 610.362,0.25,127.54,610.358952 23 | 639.28752,0.25,130.95,639.284472 24 | 668.03016,0.26,126.63,668.027112 25 | 696.7728,0.24,135.11,696.769752 26 | 725.54592,0.22,135.17,725.542872 27 | 754.31904,0.21,138.34,754.315992 28 | 783.30552,0.2,139.1,783.302472 29 | 812.17008,0.22,163.77,812.167032 30 | 840.97368,0.21,159.98,840.967584 31 | 869.80776,0.07,193.49,869.801664 32 | 898.64184,0.06,199.57,898.635744 33 | 927.38448,0.05,196.44,927.378384 34 | 956.24904,0.02,257.81,956.242944 35 | 985.02216,0.01,241.38,985.016064 36 | 1013.88672,0.16,22.44,1013.880624 37 | 1042.69032,0.29,3.64,1042.684224 38 | 1071.55488,0.32,359.66,1071.548784 39 | 1100.4804,0.64,16.2,1100.474304 40 | 1129.34496,0.97,17.47,1129.335816 41 | 1158.30096,0.96,330.84,1158.288768 42 | 1187.31792,1.1,310.44,1187.299632 43 | 1216.152,1.06,308.99,1216.130664 44 | 1245.07752,1.0,307.4,1245.050088 45 | 1274.03352,0.89,311.43,1274.00304 46 | 
1302.89808,0.88,315.27,1302.864552 47 | 1331.79312,1.17,322.93,1331.753496 48 | 1360.74912,1.2,322.54,1360.7034 49 | 1389.49176,0.97,317.85,1389.439944 50 | 1418.47824,0.95,317.17,1418.423376 51 | 1447.25136,0.82,318.53,1447.193448 52 | 1476.1464,0.85,316.59,1476.08544 53 | 1505.07192,0.77,311.0,1505.007912 54 | 1534.0584,0.69,315.63,1533.991344 55 | 1563.0144,0.71,318.11,1562.944296 56 | 1591.78752,0.7,324.97,1591.717416 57 | 1620.56064,0.56,336.61,1620.487488 58 | 1649.33376,0.48,345.54,1649.260608 59 | 1678.16784,0.54,16.24,1678.09164 60 | 1707.12384,0.73,20.9,1707.04764 61 | 1736.01888,0.76,21.41,1735.939632 62 | 1765.03584,0.61,12.3,1764.953544 63 | 1793.83944,0.58,357.35,1793.757144 64 | 1822.704,0.58,355.76,1822.618656 65 | 1851.72096,0.68,3.37,1851.635616 66 | 1880.58552,0.59,13.32,1880.497128 67 | 1909.48056,0.56,8.18,1909.392168 68 | 1938.25368,1.05,332.22,1938.16224 69 | 1967.0268,1.35,346.11,1966.929264 70 | 1995.9828,1.68,359.17,1995.873072 71 | 2024.84736,2.02,0.98,2024.722392 72 | 2053.80336,2.15,3.65,2053.660104 73 | 2082.66792,1.98,5.33,2082.506376 74 | 2111.502,1.71,5.19,2111.325216 75 | 2140.24464,1.64,352.51,2140.055664 76 | 2169.07872,1.7,355.41,2168.877552 77 | 2197.82136,1.76,1.15,2197.608 78 | 2226.65544,1.99,15.58,2226.42684 79 | 2255.6724,2.05,23.03,2255.425512 80 | 2284.56744,1.25,51.42,2284.30836 81 | 2313.61488,0.99,62.5,2313.349704 82 | 2342.35752,0.93,54.81,2342.089296 83 | 2371.13064,0.85,51.6,2370.859368 84 | 2400.1476,0.75,42.35,2399.87328 85 | 2429.01216,0.36,46.56,2428.734792 86 | 2457.9072,0.26,53.72,2457.629832 87 | 2486.80224,0.25,44.99,2486.524872 88 | 2515.69728,0.27,38.16,2515.419912 89 | 2544.6228,0.33,52.81,2544.345432 90 | 2573.4264,0.37,49.81,2573.149032 91 | 2602.41288,0.44,37.09,2602.135512 92 | 2631.24696,0.53,33.01,2630.966544 93 | 2660.08104,0.52,27.5,2659.800624 94 | 2689.00656,0.56,21.79,2688.723096 95 | 2717.84064,0.69,0.79,2717.557176 96 | 2746.61376,0.97,350.6,2746.327248 97 | 2775.47832,0.85,346.72,2775.18876 
98 | 2804.37336,0.66,28.14,2804.080752 99 | 2833.32936,0.38,351.26,2833.033704 100 | 2862.28536,0.54,301.32,2861.989704 101 | 2891.05848,0.73,327.59,2890.762824 102 | 2919.984,1.0,338.17,2919.685296 103 | 2948.81808,1.07,333.15,2948.51328 104 | 2977.80456,1.39,328.55,2977.493664 105 | 3006.6996,1.39,344.27,3006.37956 106 | 3035.53368,1.29,341.32,3035.204496 107 | 3064.39824,1.02,0.84,3064.066008 108 | 3093.32376,1.03,1.08,3092.985432 109 | 3122.09688,0.87,22.28,3121.755504 110 | 3151.14432,1.13,35.57,3150.796848 111 | 3179.88696,1.54,42.26,3179.533392 112 | 3208.81248,1.87,54.31,3208.44672 113 | 3237.61608,1.89,50.47,3237.232032 114 | 3266.45016,1.86,47.35,3266.050872 115 | 3295.46712,1.9,51.12,3295.052592 116 | 3324.20976,2.02,56.98,3323.779992 117 | 3352.9524,2.1,56.49,3352.504344 118 | 3381.72552,2.06,48.53,3381.259176 119 | 3410.68152,2.15,40.34,3410.19384 120 | 3439.63752,2.24,38.53,3439.128504 121 | 3446.9832,2.22,39.33,3446.468088 122 | 3470.4528,2.23,40.93,3469.922448 123 | 3499.4088,2.2,44.75,3498.854064 124 | 3528.06,9.02,2.89,3527.35896 125 | 3557.016,18.26,354.68,3555.470664 126 | 3586.2768,31.3,351.26,3581.982168 127 | 3614.928,44.78,345.83,3604.500792 128 | 3634.1304,45.83,345.56,3618.00648 129 | 3643.884,49.75,345.38,3624.55968 130 | 3672.5352,63.97,343.14,3640.183728 131 | 3691.7376,72.83,342.82,3647.245944 132 | 3701.4912,73.69,342.85,3650.053152 133 | 3710.94,76.89,343.65,3652.451928 134 | 3720.6936,80.12,344.66,3654.396552 135 | 3730.4472,82.42,345.63,3655.87788 136 | 3739.896,85.84,345.96,3656.844096 137 | 3749.6496,89.11,346.53,3657.273864 138 | 3759.4032,89.73,346.82,3657.3714 139 | 3788.0544,89.48,346.72,3657.56952 140 | 3817.0104,89.76,346.61,3657.761544 141 | 3845.9664,91.06,349.46,3657.55428 142 | 3874.6176,92.16,350.43,3656.749608 143 | 3903.5736,92.71,350.24,3655.518216 144 | 3935.2728,93.23,350.74,3653.875344 145 | 3963.924,92.61,350.86,3652.415352 146 | 3992.88,91.75,350.6,3651.315024 147 | 4021.836,90.82,350.26,3650.6658 148 | 
4055.0592,91.41,350.45,3650.019624 149 | 4084.0152,93.37,353.63,3648.812616 150 | 4112.6664,93.92,356.56,3646.989912 151 | 4141.6224,91.89,358.77,3645.520776 152 | 4170.2736,92.51,359.8,3644.423496 153 | 4199.2296,93.4,0.48,3642.929976 154 | 4227.8808,90.34,1.88,3641.99424 155 | 4256.8368,90.17,2.19,3641.866224 156 | 4285.7928,91.48,2.23,3641.448648 157 | 4314.444,91.75,3.07,3640.640928 158 | 4343.4,91.2,2.58,3639.897216 159 | 4372.0512,90.58,3.03,3639.452208 160 | 4401.0072,90.45,3.33,3639.19008 161 | 4429.9632,92.1,2.49,3638.546952 162 | 4458.9192,92.37,3.1,3637.416144 163 | 4487.5704,92.2,3.6,3636.273144 164 | 4516.5264,92.51,3.54,3635.084424 165 | 4545.4824,92.23,3.61,3633.88656 166 | 4574.4384,92.1,3.76,3632.792328 167 | 4603.3944,91.2,4.38,3631.960224 168 | 4632.0456,89.9,3.83,3631.685904 169 | 4669.536,89.73,4.57,3631.804776 170 | 4698.1872,90.93,1.81,3631.640184 171 | 4727.1432,91.85,359.36,3630.939144 172 | 4755.7944,91.44,359.19,3630.116184 173 | 4784.7504,90.45,359.07,3629.637648 174 | 4813.7064,90.96,359.52,3629.281032 175 | 4842.6624,90.0,359.35,3629.04024 176 | 4871.6184,91.92,359.64,3628.555608 177 | 4900.5744,93.16,359.6,3627.2724 178 | 4929.2256,94.13,359.66,3625.449696 179 | 4958.1816,95.38,359.69,3623.047872 180 | 4986.8328,94.58,0.15,3620.560704 181 | 5015.7888,93.75,0.58,3618.457584 182 | 5044.7448,93.19,359.87,3616.704984 183 | 5073.7008,92.03,359.49,3615.388248 184 | 5102.6568,92.78,359.57,3614.172096 185 | 5131.6128,91.96,359.71,3612.974232 186 | 5160.264,91.92,0.01,3612.004968 187 | 5189.22,92.54,359.84,3610.880256 188 | 5218.176,93.47,359.63,3609.362352 189 | 5246.8272,92.58,0.22,3607.847496 190 | 5268.468,92.51,0.44,3606.887376 191 | 5297.424,92.65,0.53,3605.58588 192 | 5326.0752,91.2,0.37,3604.622712 193 | 5383.6824,91.1,0.22,3603.464472 194 | 5412.6384,91.75,0.3,3602.745144 195 | 5440.9848,92.2,359.88,3601.769784 196 | 5469.3312,91.72,359.54,3600.80052 197 | 5498.592,91.51,359.41,3599.974512 198 | 5527.2432,91.75,359.44,3599.160696 199 
| 5555.2848,91.61,359.19,3598.337736 200 | 5583.3264,91.93,359.46,3597.472104 201 | 5611.6728,90.65,359.99,3596.832024 202 | 5640.0192,89.62,359.91,3596.768016 203 | 5668.6704,88.66,359.05,3597.197784 204 | 5696.712,91.55,357.65,3597.145968 205 | 5725.3632,91.2,357.27,3596.45712 206 | 5754.0144,92.17,359.23,3595.615872 207 | 5782.6656,92.37,0.22,3594.478968 208 | 5811.3168,92.92,0.33,3593.159184 209 | 5839.968,92.85,0.77,3591.71748 210 | 5868.6192,92.24,1.98,3590.443416 211 | 5897.5752,92.99,2.8,3589.123632 212 | 5926.2264,92.61,2.18,3587.721552 213 | 5954.8776,92.96,2.73,3586.331664 214 | 5983.8336,92.48,3.1,3584.957016 215 | 6012.4848,93.2,2.27,3583.536648 216 | 6041.136,92.55,3.97,3582.10104 217 | 6069.4824,92.16,4.11,3580.936704 218 | 6098.4384,91.96,3.42,3579.894288 219 | 6127.6992,91.62,3.66,3578.979888 220 | 6156.3504,90.58,3.67,3578.431248 221 | 6185.0016,89.79,3.44,3578.33676 222 | 6213.9576,89.86,2.58,3578.425152 223 | 6242.6088,91.45,3.56,3578.099016 224 | 6271.26,93.71,3.69,3576.809712 225 | 6299.9112,94.41,1.37,3574.779744 226 | 6328.5624,94.47,358.64,3572.5608 227 | 6357.2136,95.13,356.92,3570.165072 228 | 6385.8648,96.13,355.29,3567.354816 229 | 6414.516,96.23,352.94,3564.267192 230 | 6443.472,94.33,350.73,3561.60324 231 | 6472.428,91.37,352.07,3560.164584 232 | 6500.7744,91.96,354.4,3559.341624 233 | 6529.7304,92.34,357.39,3558.253488 234 | 6558.3816,92.68,359.61,3556.997712 235 | 6587.3376,92.82,2.47,3555.610872 236 | 6615.9888,91.27,4.04,3554.586744 237 | 6644.3352,90.48,5.77,3554.153928 238 | 6655.6128,90.76,7.37,3554.032008 239 | 6676.644,90.76,7.37,3553.751592 240 | -------------------------------------------------------------------------------- /Graham_Ganssle/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017 Graham Ganssle 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), 
to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 20 | 21 | ---------------- Pix2pix license ---------------------------------------------- 22 | 23 | Copyright (c) 2016, Phillip Isola and Jun-Yan Zhu 24 | All rights reserved. 25 | 26 | Redistribution and use in source and binary forms, with or without 27 | modification, are permitted provided that the following conditions are met: 28 | 29 | * Redistributions of source code must retain the above copyright notice, this 30 | list of conditions and the following disclaimer. 31 | 32 | * Redistributions in binary form must reproduce the above copyright notice, 33 | this list of conditions and the following disclaimer in the documentation 34 | and/or other materials provided with the distribution. 35 | 36 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 37 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 38 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 39 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 40 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 41 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 42 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 43 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 44 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 45 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 46 | 47 | 48 | 49 | ----------------------------- LICENSE FOR DCGAN -------------------------------- 50 | BSD License 51 | 52 | For dcgan.torch software 53 | 54 | Copyright (c) 2015, Facebook, Inc. All rights reserved. 55 | 56 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 57 | 58 | Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 59 | 60 | Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 61 | 62 | Neither the name Facebook nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 63 | 64 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 65 | 66 | -------------------------------------------------------------------------------- /Graham_Ganssle/README.md: -------------------------------------------------------------------------------- 1 | # CSEG Image Translation 2 | 3 | Welcome to the repo for [Graham Ganssle's](https://gra.m-gan.sl) article, *Denoising Seismic by Image Translation Networks*, in the CSEG Recorder, January 2018 edition! If you have any questions, please send Graham a message. 4 | 5 | ## Results 6 | It works! 7 | ![results](evaluation/network_output.png "results!") 8 | *The above image is an example output from the denoising network.* 9 | 10 | ![comparison](evaluation/loss_comparison.png "comparison!") 11 | *The above image is a comparison of the denoising network with three common denoising filters: total variation, bilateral, and wavelet.* 12 | 13 | ## Setup 14 | To run this code you'll need [Torch7](http://torch.ch/) running on CUDA, and CuDNN. You'll also need [Agile's](https://agilescientific.com/) bruges, which can be installed simply with `pip install bruges`. 15 | 16 | ## Running this code 17 | I've included the `prep_images.sh` script to prep the data for you. To run it use `bash prep_images.sh`. It'll create a folder, `dat/pairs`, which has the subdirectories `test`, `train`, and `val`. These three folders are the ones you should use to train your model. 18 | 19 | Training will take a considerable amount of compute. 
A full training run of 200 epochs took a whole day on an NVIDIA Tesla K80. 20 | 21 | ## Acknowledgements 22 | 23 | The image translation network used in this project was the wonderful [pix2pix](https://github.com/phillipi/pix2pix). Thanks Phillipi! 24 | 25 | Thanks, also, to Steve Purves for building the Earth models provided (in .zip format) in [dat/zips](./dat/zips)! To use these models simply `cd dat/zips/` and `unzip xxx`, where `xxx` is the file name. 26 | -------------------------------------------------------------------------------- /Graham_Ganssle/dat/.ipynb_checkpoints/array_explorer-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Array Exploration Notebook\n", 8 | "\n", 9 | "This notebook is used simply to examine the contents of `dat/`, so we can build images of the correct dimensionality for the image translation network." 
10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 1, 15 | "metadata": { 16 | "collapsed": true 17 | }, 18 | "outputs": [], 19 | "source": [ 20 | "import numpy as np\n", 21 | "import matplotlib.pyplot as plt\n", 22 | "%matplotlib inline\n", 23 | "import glob" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 2, 29 | "metadata": { 30 | "collapsed": true 31 | }, 32 | "outputs": [], 33 | "source": [ 34 | "fault_dyke_fold_sizes = []\n", 35 | "fold_dyke_fault_sizes = []\n", 36 | "gbasin_simplified_sizes = []\n", 37 | "\n", 38 | "for file in glob.glob('fault_dyke_fold_model/*'):\n", 39 | " fault_dyke_fold_sizes.append(np.load(file).shape)\n", 40 | "\n", 41 | "for file in glob.glob('fold_dyke_fault_model/*'):\n", 42 | " fold_dyke_fault_sizes.append(np.load(file).shape)\n", 43 | " \n", 44 | "for file in glob.glob('gbasin_simplified_model/*'):\n", 45 | " gbasin_simplified_sizes.append(np.load(file).shape)" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 11, 51 | "metadata": { 52 | "collapsed": false 53 | }, 54 | "outputs": [ 55 | { 56 | "name": "stdout", 57 | "output_type": "stream", 58 | "text": [ 59 | "there are 11200 images in fault_dyke_fold_model/\n", 60 | "there are 2800 images in fold_dyke_fault_model/\n", 61 | "there are 3600 images in gbasin_simplified_model/\n" 62 | ] 63 | } 64 | ], 65 | "source": [ 66 | "print('there are %d images in fault_dyke_fold_model/' %len(fault_dyke_fold_sizes))\n", 67 | "print('there are %d images in fold_dyke_fault_model/' %len(fold_dyke_fault_sizes))\n", 68 | "print('there are %d images in gbasin_simplified_model/' %len(gbasin_simplified_sizes))" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": 4, 74 | "metadata": { 75 | "collapsed": false 76 | }, 77 | "outputs": [ 78 | { 79 | "data": { 80 | "text/plain": [ 81 | "{(200, 200)}" 82 | ] 83 | }, 84 | "execution_count": 4, 85 | "metadata": {}, 86 | "output_type": "execute_result" 87 | } 88 | ], 89 | 
"source": [ 90 | "set(fault_dyke_fold_sizes)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 5, 96 | "metadata": { 97 | "collapsed": false 98 | }, 99 | "outputs": [ 100 | { 101 | "data": { 102 | "text/plain": [ 103 | "{(280, 200), (400, 200)}" 104 | ] 105 | }, 106 | "execution_count": 5, 107 | "metadata": {}, 108 | "output_type": "execute_result" 109 | } 110 | ], 111 | "source": [ 112 | "set(fold_dyke_fault_sizes)" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": 6, 118 | "metadata": { 119 | "collapsed": false 120 | }, 121 | "outputs": [ 122 | { 123 | "data": { 124 | "text/plain": [ 125 | "{(376, 200), (496, 200)}" 126 | ] 127 | }, 128 | "execution_count": 6, 129 | "metadata": {}, 130 | "output_type": "execute_result" 131 | } 132 | ], 133 | "source": [ 134 | "set(gbasin_simplified_sizes)" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": 7, 140 | "metadata": { 141 | "collapsed": true 142 | }, 143 | "outputs": [], 144 | "source": [ 145 | "fault_dyke_fold_img = np.load('fault_dyke_fold_model/fdf_model_xz_RF_272.npy')\n", 146 | "fold_dyke_fault_img = np.load('fold_dyke_fault_model/out_0060_xz225.npy')\n", 147 | "gbasin_simplified_img = np.load('gbasin_simplified_model/out_0040_xz175.npy')" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": 8, 153 | "metadata": { 154 | "collapsed": false 155 | }, 156 | "outputs": [ 157 | { 158 | "data": { 159 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQQAAAEACAYAAABVmQgcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADENJREFUeJzt3V2MnOV5gOH7sQ0utgkFio2CAZdaOCa0AaRaoVGVjRoZ\naKUYRZXr9ASEoqJSTnIEPgJUVSEHiauqoj8EIUeCOqZVgntQcBB1JKRUGBFKqH+wotjGDl6cgFwF\nQ2zw04MdHsb2jvdvvv12pvclrZh5d/S97+vB9843M56NzESSAOa1vQBJc4dBkFQMgqRiECQVgyCp\nGARJpbEgRMStEbEnIl6PiPuamkdS/0QT70OIiHnA68AfAT8HdgIbMnNP3yeT1DdNPUJYA+zLzAOZ\neRLYAqxraC5JfdJUEK4A3ui6fqgzJmkO80lFSWVBQ8c9DFzVdX15Z6xEhP+IQmpRZsaZY00FYSew\nMiKuBt4ENgBfOftmnwdGGlrCmIW8z9fYxEJONDrPeP6Raxkdb9tDYQdN33ft2sFw7++hcUcbCUJm\nfhgR9wLbGTsteSwzdzcxl6T+aeoRApn5DLCqqeNL6r+Wn1Rc0e70DVvCb7W9hAataHsBDVvR9gJa\nYRAadKFBGGAr2l5AK3zZUVIxCJKKQZBUDIKkYhAkFYMgqRgEScUgSCoGQVIxCJKKQZBUDIKkYhAk\nFYMgqRgESaWxT0yanE/OwhzvAWd9luQs+U1mZ49Sf7QchEtnYY53aS8IS5idPUr94SmDpGIQJBWD\nIKkYBEnFIEgqBkFSMQiSikGQVAyCpGIQJBWDIKkYBEnFIEgqBkFSMQiSikGQVAyCpGIQJBWDIKkY\nBEnFIEgqBkFSMQiSikGQVAyCpGIQJBWDIKm0/Lsdm7WIY6zhP5jPB20vRRoIMwpCROwHjgGngJOZ\nuSYiLga+C1wN7AfWZ+axGa5zWhZzjM/zr21MLQ2kmZ4ynAJGMvPGzFzTGbsfeC4zVwHPAxtnOIek\nWTLTIMQ4x1gHbO5c3gzcPsM5BtIxLuXXLGp7GdKUzDQICfwgInZGxFc7Y8sycxQgM48AS2c4x0Da\nxl+ym8+2vQxpSmb6pOLnMvPNiLgM2B4RexmLRLczr0uao2YUhMx8s/PfoxHxfWANMBoRyzJzNCIu\nB97qfYTvdl3+NHD9TJYjqafXgP+Z8FbTDkJELALmZeavImIxsBZ4CNgG3Al8A7gDeLr3Uf5sutNL\nmpLrOf0H7lPj3momjxCWAd+LiOwc54nM3B4RLwFbI+Iu4ACwfgZzSJpF0w5CZv4MuGGc8beBL85k\nUZLa4VuXJRWDIKkYBEnFIEgqBkFSMQiSSsufh3Btw8dvs3fLaX5/Un+1HIQlDR//goaPP9HcTe9P\n6i9PGSQVgyCpGARJxSBIKgZBUjEIkopBkFQMgqRiECQVgyCpGARJxSBIKgZBUjEIkopBkFQMgqRi\nECQVgyCpGARJxSBIKq1+yOpv3B2NHn/h29Hrt1437uY/eZnrl7/fzuTSBLb90/jjkZmzu5KPJo7I\ny/JAo3Nc8tpeNvzu2kbn6OUzz67ikrUXtTK3NJGReJHMPOsnsqcMkopBkFQMgqRiECQVgyCpGARJ\nxSBIKgZBUjEIkopBkFQMgqRiECQVgyCpGARJpdXPQxhmW1nPMT7T9jKkHv503FGD0JC9XMshfr/t\nZUhTMrSnDJ/46QGue/Rf2l6GNFAmDEJEPBYRoxHxatfYxRGxPSL2RsSzEXFR1/c2RsS+iNgdEe18\nXBFw0b6f8Xt/93hb00sDaTKPEB4Hbjlj7H7gucxcBTwPbASIiOuA9cBq4DbgkYho9oMTJfXNhEHI\nzBeAd84YXgds7lzeDNzeufwlYEtmfpCZ+4F9wJr+LFVS06b7HMLSzBwFyMwjwNLO+BXAG123O9wZ\n+3/l5KILyPnz216GNGX9epWhnY9unqOe/s8tHL3x+raXIU3Zd
IMwGhHLMnM0Ii4H3uqMHwau7Lrd\n8s7YuN59cFNdPm/ks5w/cvM0lzO3nLhwMXmer+hq7jix40ec3PFfE95usv/XRufrI9uAO4FvAHcA\nT3eNPxERmxg7VVgJvNjroIsf/Nokp5c0E+eP3HzaD9zjD/3tuLebMAgR8SQwAlwaEQeBB4CHgaci\n4i7gAGOvLJCZuyJiK7ALOAnck239JhhJUzZhEDLzz3t864s9bv914OszWZSkdgztOxUlTZ1BkFQM\ngqRiECQVgyCpGARJxSBIKgZBUjEIkkqr/wLn+OYLGzv2iZ8sauzY0rBqNQjvPnxxY8d+71dLGju2\nNKw8ZZBUDIKkYhAkFYMgqRgEScUgSCoGQVIxCJKKQZBUDIKkYhAkFYMgqRgEScUgSCoGQVIxCJKK\nQZBUDIKkYhAkFYMgqRgEScUgSCqtfgz7sDp211J+ufiTbS9DmjKD0IBTows4tdA/Wg0eTxkkFYMg\nqRgEScUgSCoGQVIxCJKKQZBUDIKkYhAkFYMgqRgEScUgSCoTBiEiHouI0Yh4tWvsgYg4FBEvd75u\n7frexojYFxG7I2JtUws/l0tOvM41x59rY2ppoE3mEcLjwC3jjH8rM2/qfD0DEBGrgfXAauA24JGI\niL6tdpJWHP8hf/D2N2d7WmngTRiEzHwBeGecb433F30dsCUzP8jM/cA+YM2MVihp1szkOYR7I+KV\niPh2RFzUGbsCeKPrNoc7Y5IGwHSD8AhwTWbeABwBfHwuDYFpfaxPZh7tuvoo8O+dy4eBK7u+t7wz\nNr6jD358edEILB6ZznLmjF/HEr5z1fO8c941bS9FOt27O+D4jglvNtkgBF3PGUTE5Zl5pHP1y8Br\nncvbgCciYhNjpworgRd7HvWyByc5/WDImMcvzv8UH85b2PZSpNMtHjn9B+4vHxr3ZhMGISKeBEaA\nSyPiIPAA8IWIuAE4BewH7gbIzF0RsRXYBZwE7snM7HXs+/7mryexk6mb/9xL8A+NHFoaanGOv6/N\nThyRO7KZFyB+/uhb7P2L/Y0c+1zen/cJNv3OIU7Mv3DW55amZE+QmWe9Uug7FSUVgyCpGARJxSBI\nKgZBUjEIkopBkFQMgqRiECQVgyCpGARJxSBIKgZBUjEIkopBkFQMgqRiECQVgyCpGARJxSBIKgZB\nUpnWL2rR+ObNS5avOMgHCxa3vRTpnA7uGX/cIPTRBYuO88//dgcLLpzf9lKkcxrp8TvZPWWQVAyC\npGIQJBWDIKkYBEnFIEgqBkFSMQiSikGQVAyCpGIQJBWDIKkYBEnFIEgqBkFSMQiSikGQVAyCpGIQ\nJBWDIKkYBEnFIEgqBkFSMQiSSqtB+PGO/21z+sYN8/6GeW8w/PvrpdUgvNLAH/p7P32fd3e/3/fj\nTkcT+5srhnlvMPz762XoThmOfOcXHNp0pO1lSANp6IIgafoiM9uZOKKdiSUBkJln/crX1oIgae7x\nlEFSMQiSSitBiIhbI2JPRLweEfe1sYZ+i4j9EfHfEfHjiHixM3ZxRGyPiL0R8WxEXNT2OicrIh6L\niNGIeLVrrOd+ImJjROyLiN0RsbadVU9ej/09EBGHIuLlztetXd8bqP1N16wHISLmAX8P3AJ8GvhK\nRHxqttfRgFPASGbemJlrOmP3A89l5irgeWBja6ubuscZu4+6jbufiLgOWA+sBm4DHomIs56wmmPG\n2x/AtzLzps7XMwARsZrB29+0tPEIYQ2wLzMPZOZJYAuwroV19Ftw9p/nOmBz5/Jm4PZZXdEMZOYL\nwDtnDPfaz5eALZn5QWbuB/Yxdj/PWT32B2P345nWMWD7m642gnAF8EbX9UOdsUGXwA8iYmdEfLUz\ntiwzRwEy8wiwtLXV9cfSHvs58z49zODep/dGxCsR8e2uU6Jh2t85+aRi/3wuM28C/hj4q4j4Q8Yi\n0W3YXuMdtv08AlyTmTcAR
4BvtryeWddGEA4DV3VdX94ZG2iZ+Wbnv0eB7zP2kHI0IpYBRMTlwFvt\nrbAveu3nMHBl1+0G8j7NzKP58RtzHuXj04Kh2N9ktBGEncDKiLg6Is4HNgDbWlhH30TEoohY0rm8\nGFgL/ISxfd3ZudkdwNOtLHD6gtPPqXvtZxuwISLOj4jfBlYCL87WImfgtP11IveRLwOvdS4P6v6m\nbMFsT5iZH0bEvcB2xoL0WGbunu119Nky4Hudt2MvAJ7IzO0R8RKwNSLuAg4w9kz1QIiIJ4ER4NKI\nOAg8ADwMPHXmfjJzV0RsBXYBJ4F7co6/BbbH/r4QETcw9orRfuBuGMz9TZdvXZZUfFJRUjEIkopB\nkFQMgqRiECQVgyCpGARJxSBIKv8Hc17GPutNjGAAAAAASUVORK5CYII=\n", 160 | "text/plain": [ 161 | "" 162 | ] 163 | }, 164 | "metadata": {}, 165 | "output_type": "display_data" 166 | } 167 | ], 168 | "source": [ 169 | "plt.imshow(fault_dyke_fold_img)\n", 170 | "plt.show()" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": 9, 176 | "metadata": { 177 | "collapsed": false 178 | }, 179 | "outputs": [ 180 | { 181 | "data": { 182 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAM4AAAEACAYAAAD7ikm0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADmhJREFUeJzt3X+MHPV5x/H3xxin8VHIFTi78WFs1wkYGuQQ11IhVY4E\nUUhbjNKKUIgEQWmpCE2iVi12WslYqgREgiithBQSSk1jREnVYlMJMBa9PyhNwASDjY1xAdv4wGc7\nP6gwKfjH0z92DOtj72732dmb9fF5SSvPfmfmO4/37uOdHe/uo4jAzFozpeoCzI5FDo5ZgoNjluDg\nmCU4OGYJDo5ZQseCI+liSS9IelHSjZ06jlkV1In/x5E0BXgR+BzwGvAUcEVEvFD6wcwq0KlnnMXA\ntojYEREHgPuAJR06ltmE61RwZgGv1t3fVYyZTQq+OGCWMLVD8w4Bs+vu9xdj75LkN8lZpSJC2X07\nFZyngPmSTgdeB64A/vj9m30GGOhQCWPpB3qb2O5fgC/W3T9nxPr5QPqx5zde2sS0ee+k9t1703c5\n9abrUvu+8/o0XvrT30zt27StN8EZN426+ukHz+rs8cfxKW1pa/+OBCciDkm6AVhL7XTwrohor1Kz\nLtKpZxwi4mHgjE7Nb1alii8OzKn28OM6u+oCRjV94FNVlzC2kweqrqCjHJwxdfh1QBt6BhZVXcLY\nThmouoKO8uVoswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcswQH\nxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwc\ns4S2+uNI2g68ARwGDkTEYkm91FqZnQ5sBy6PiDfarNOsq7T7jHMYGIiIT0bE4mJsKbAuIs4AHgOW\ntXkMs67TbnDUYI4lwMpieSVwWZvHMOs67QYngEclPSXpK8XYjIgYBoiI3UBfm8cw6zrt9gA9PyJe\nl3QqsFbSVmphque27DbptBWciHi9+HOvpAeAxcCwpBkRMSxpJrBn9BkG65bn0P2tDe1YtX5wP08P\nvlXafOngSJoOTImINyX1ABcBK4A1wDXArcDVwOrRZxnIHt6sJYsGelg00PPu/TtX7GtrvnaecWYA\n
/y4pinlWRcRaSeuB+yVdC+wALm+rQrMulA5ORLwCLGww/jPgwnaKMut2fueAWYKDY5bg4JglODhm\nCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4Jgl\nODhmCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4JgljBscSXdJGpb0XN1Y\nr6S1krZKekTSSXXrlknaJmmLpIs6VXgVTmJoxG3Hu7ep/LLq8mwCNdNY6m7gH4B76saWAusi4luS\nbgSWAUslnUWtA9sCoB9YJ+ljETEpGuh+gwtGXfcEf8kePtHSfH3/NsRxpx543/hbs3+dvRcsark+\nmzjjBiciHpd0+ojhJcBniuWV1LrgLgUuBe6LiIPAdknbqDXU/XFpFXep87it9Z3+qvHw/tkz2fPZ\n32qvoDq/nNXHpr+7vrT5LN/KsC8ihgEiYrekvmJ8FvDfddsNFWPWgp6du5n7Tw+WNt/h46cye9VD\n7w0cEgf2HZ+e7+8/90oJVR3b2mrXXmdSnIpNVlMOHOSE7a+VNt/y/1Dbc6ypm+K8/5zN9LnT2p4T\nYOqJU5jWe1wpc415nOR+w5JmRMSwpJnAnmJ8CDitbrv+YmwUg3XLc4qbfdA8ccHO0ubqu6SHWV88\n8X3jG7e8zaYX3intOM0GR8XtiDXANcCtwNXA6rrxVZK+Te0UbT7w5OjTDrRSq9m49jy0nz0P7W+4\n7swSjzNucCTdS+03/GRJO4HlwC3ADyVdC+ygdiWNiNgs6X5gM3AAuH6yXFEzq9fMVbUrR1l14Sjb\n3wzc3E5RZt3O7xwwS3BwzBIcHLMEB8cswcExS3BwzBIcHLMEB8cswcExS3BwzBIcHLMEB8cswcEx\nSyjrE6AfCKuPetN3L3D0pxZ/jz9jKuV9WMq6l4PTgg38Yd29WcD0o9a/wgWoyU+R931rFyf+0S/G\n3ObTf/ANTnr+5RartIng4JTojRY++v3hU8Vxc3vG3OaRTT9sOL7/2RPY+bcfb6U0PrHrB3x269+0\ntM9YZpw4xNvDh0qb71jj4HxAbOz/Ehv7v1TKXFOmHOKxWz7Otlt+Wsp8ALvueaO0uSaCg2Mpv7rg\nQ5y78qOlzXfKwPTxN2rSy9/5Gf/77NulzdeIg2NdYfaXP1LaXH2fP4FDbx0ec5sV815q6xgOjk06\nvzKj87/W/n8cswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcs4RK3zlw6bttdWpe46Osp7ze\nl2adUmlwPsmGo+6fzfOcxxOlzb+PU7iXq0qbz+yIZhpL3QX8PjAcEecUY8uBP+G9FobfjIiHi3XL\ngGuBg8DXI2Jts8VM4wDTGPvDXa3o5RcsZ0Vp863gxdLmsmNbM69x7gZ+t8H47RFxbnE7EpoF1Lqz\nLQAuAe6Q1H6nVbMuM25wIuJx4OcNVjUKxBLgvog4GBHbgW3A4rYqNOtC7VxVu0HSBknfl3RSMTYL\neLVum6FizGxSyQbnDmBeRCwEdgO3lVeSWfdLXVWLiL11d78HPFgsDwGn1a3rL8YaGqxbnlPczDph\n/eB+nh58q7T5mg2OqHtNI2lmROwu7n4B2FQsrwFWSfo2tVO0+cCTo0060Gq1ZkmLBnpYNPDetwrd\nuWJfW/M1czn6Xmq/4ydL2gksBy6QtBA4DGwHrgOIiM2S7gc2AweA6yOiuS8aMzuGjBuciLiywfDd\nY2x/Mxz1lZdmk47fq2aW4OCYJTg4ZgkOjlmCg2OW4OCYJTg4ZgkOjlmCg2OW4OCYJTg4ZgmVflnH\nCpY3HL+KH3AK7b179YgP8TYf5v9KmcvsiK5sLLWKcnpVAszlZc7hubqRaYzsFm3Wqq4MTpleYR6v\nMK9upJfa5+vM8vwaxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEh
wcswQHxyzBwTFLcHDMEhwcswQH\nxyzBwTFLcHDMEhwcswQHxyxh3OBI6pf0mKTnJW2U9LVivFfSWklbJT1S10AXScskbZO0RdJFnfwL\nmFWhmWecg8BfRMTZwG8DX5V0JrAUWBcRZwCPAcsAJJ0FXA4sAC4B7pDUqLW72TFr3OBExO6I2FAs\nvwlsofah/SXAymKzlcBlxfKlwH0RcTAitgPbgMUl121WqZZe40iaAywEfgTMiIhhqIUL6Cs2mwW8\nWrfbUDFmNmk0/S03kk4A/hX4ekS8KWlkU9xEk9zBuuU5uGG7dUol7dolTaUWmn+OiNXF8LCkGREx\nLGkmsKcYHwJOq9u9vxhrYCBRslnrym7X3uyp2j8CmyPiO3Vja4BriuWrgdV141dImiZpLjAfeLKt\nKs26zLjPOJLOB64CNkp6htop2TeBW4H7JV0L7KB2JY2I2CzpfmAzcAC4PiISp3Fm3Wvc4ETEfwHH\njbL6wlH2uRm4uY26zLqa3zlgluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJ\nDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4\nOGYJDo5ZgoNjluDgmCVk2rX/eTG+XNIuST8pbhfX7eN27TapNdPK8Ei79g1FH9CnJT1arLs9Im6v\n31jSAt5r194PrJP0MTeXsskk2679SBdpNdhlCW7XbpNctl37j4uhGyRtkPR9SScVY27XbpNe08EZ\n2a4duAOYFxELgd3AbZ0p0az7pNu1R8Teuk2+BzxYLLfQrn2wbnlOcTMr3/rB/Tw9+FZp8zUVHBq0\na5c0MyJ2F3e/AGwqltcAqyR9m9op2hjt2gdar9gsYdFAD4sGet69f+eKfW3N10679islLQQOA9uB\n68Dt2u2DoZ127Q+PsY/btduk5ncOmCU4OGYJDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJ\nDo5ZgoNjluDgmCU4OGYJDo5ZgoNjllBxcLZXe/hxbRp/k6rsG6y6gjGtH9xfdQkd5eCM6fmqCxjd\nTwerrmBMZX4xRjfyqZpZgoNjlqCqvoBGkr/5xioVEY2+wrkplQXH7FjmUzWzBAfHLKGS4Ei6WNIL\nkl6UdGMVNYwkabukZyU9I+nJYqxX0lpJWyU9UteRYSLquUvSsKTn6sZGrWeim3mNUl9XNBtr0Azt\na8V4eY9fREzojVpY/wc4HTge2ACcOdF1NKjrZaB3xNitwF8XyzcCt0xgPZ+m1lLlufHqAc4CnqH2\nzaxzisdXFdS3nFoTspHbLpjI+oCZwMJi+QRgK3BmmY9fFc84i4FtEbEjIg4A91FrRlU18f5n4CXA\nymJ5JXDZRBUTEY8DP2+ynkuZ4GZeo9QHXdBsLBo3Q+unxMeviuCMbDy1i+5oPBXAo5KekvSVYmxG\nRAxD7YcB9FVWXU3fKPV0UzOvrmo2VtcM7UeM/vNsuT5fHHjP+RFxLvB54KuSfodamOp127X7bqun\nq5qNNWiGVtrPs4rgDAGz6+6P0Xhq4kTE68Wfe4EHqD1VD0uaAbV+QMCe6iqEMeppoZlX50TE3ihe\nNFBrNnbkdGfC62vUDI0SH78qgvMUMF/S6ZKmAVdQa0ZVGUnTi3+dkNQDXARsLOq6ptjsamB1wwk6\nWBpHv2YYrZ41wBWSpkmay5jNvDpXX/HLeMTIZmMTXd/7mqFR5uM3UVeJRlz1uJjalY5twNIqahhR\nz1xqV/eeoRaYpcX4rwHrilrXAh+ZwJruBV4D3gZ2Al8GekerB1hG7WrQFuCiiuq7B3iueCwfoPaa\nYsLrA84HDtX9TH9S/M6N+vNstT6/5cYswRcHzBIcHLMEB8cswcExS3BwzBIcHLMEB8cswcExS/h/\nJYCEbZVLIdAAAAAASUVORK
5CYII=\n", 183 | "text/plain": [ 184 | "" 185 | ] 186 | }, 187 | "metadata": {}, 188 | "output_type": "display_data" 189 | } 190 | ], 191 | "source": [ 192 | "plt.imshow(fold_dyke_fault_img)\n", 193 | "plt.show()" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": 10, 199 | "metadata": { 200 | "collapsed": false 201 | }, 202 | "outputs": [ 203 | { 204 | "data": { 205 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAKYAAAEACAYAAAA5ug0wAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAEc5JREFUeJzt3X2UVPV9x/H3Z1FIACVEZde4yOITAo0SbbBVlLVVRGPB\ng6nRtKkP9aFGjw/0nAAmLZrSIqcVkyaHeKLGgx6twfRU8QmB2M0RExUFBAMCsVkEgRWrVTlGUfbb\nP+7d3dnn2blz5/5m5/s6Z8/cuXNnfr+Z/ex9nN93ZWY4F5qqrDvgXFc8mC5IHkwXJA+mC5IH0wXJ\ng+mClFowJU2V9LqkLZJmpdWO65+UxnlMSVXAFuDPgZ3AauBiM3u96I25fimtNeZEYKuZbTOzT4GH\ngekpteX6obSCeQSwPef+jniec3nxgx8XpANSet23gCNz7tfG81pJ8ov0/ZiZKcnz0wrmauAYSaOA\nXcDFwCUdF5oM1Cdo5BG+zkbG97LUzA735wNzErTawfkH57fc5lthzK3MmPUQ4ya9lt9zzBhQdXvB\nXQNooO0z3m9FfN89mKf5iV8jlWCa2X5J1wPLiXYX7jWzTWm05fqntNaYmNkyYExar+/6t0wPfuoy\naXVSJq1ySH0mzdZl0mpyFRjM0zNplUPrM2m2LpNWk/PTRS5IHkwXJA+mC5IH0wXJg+mC5MF0QfJg\nuiB5MF2QPJguSB5MFyQPpguSB9MFyYPpguTBdEHyYLogeTBdkDyYLkgeTBckD6YLUqJRkpIagfeB\nZuBTM5soaTjwc2AU0AhcZGbvJ+ynqzBJ15jNQL2ZfcXMJsbzZgMrzWwM8CxFrS7gKkXSYKqL15gO\nLI6nFwMXJGzDVaCkwTRghaTVkq6M51WbWROAme0GRiRsw1WgpJU4TjOzXZIOA5ZL2kwU1lxePMv1\nWaJgmtmu+HaPpEeJCrY2Sao2syZJNcDb3T2/IWe6jvIdnF/pGhu2sa3hzaK+ZsHBlDQYqDKzvZKG\nAFOA24ClwGXAAuBS4LHuXqO+0MZdUOrqR1FXP6r1/nO3rUr8mknWmNXAf8V1Lg8AHjSz5ZJeBpZI\nugLYBlyUuJeu4hQcTDP7PTChi/nvAmcl6ZRzfuXHBcmD6YLkwXRB8mC6IHkwXZA8mC5IHkwXJA+m\nC5IH0wXJg+mC5MF0QfJguiB5MF2QPJguSB5MFyQPpguSB9MFyYPpguTBdEHyYLogeTBdkHoNpqR7\nJTVJWp8zb7ik5ZI2S3pG0rCcx+ZI2ippk6QpaXXc9W/5rDHvA87pMK/Lim6SxhGNIx8LnAsskqTi\ndddVil6DaWargPc6zO6uots04GEz+8zMGoGtRGVjnOuTQvcxR3RT0e0IYHvOcm/F85zrk6TV3loU\nVNGtIWe6Di+qVa5CKqrVXUW3t4CROcvVxvO6VF9g4y4saRTVyndTrvinRUtFN2hf0W0pcLGkgZJG\nA8cALyXupas4va4xJT1EtHI7RNKbwFzgduCRjhXdzGyjpCXARuBT4Ntm5oVbXZ/1Gkwz+2Y3D3VZ\n0c3M5gPzk3TKOb/y44LkwXRB8mC6
IHkwXZA8mC5IHkwXJA+mC5IH0wXJg+mC5MF0QfJguiB5MF2Q\nPJguSB5MFyQPpguSB9MFyYPpguTBdEEq1vDdgC2Mb8d0mP+1UnfE9UEFBLPF5h7u54b0S8BB6XfH\n9SifUZL3AucDTWZ2QjxvLnAVbePJbzGzZfFjc4ArgM+AG81seRodL64ne3jsppxp3/MplXzWmPcB\nPwLu7zB/oZktzJ0haSxtRbVqgZWSji3vIbw/yJk+Omd6eqk7UlHyGb67StKoLh7qqorbdOKiWkCj\npJaiWi8m62Yo3ug864kPotsTP9c2b+TA0nSnH0uyj3m9pG8BLwN/b2bvExXQ+k3OMpVTVOvVj7ue\nBpias89axtuOUio0mIuA75uZSZoH3AFcWbxu9TPLPmydXLVzImurxwNw+vee58gzdmTVq6AVFEwz\n25Nz927g8Xi6T0W1GnKm66iMam9vrx/ROv37FaPbPXbuT5a1Tg8/ZS9HTdhasn4lkWW1t3ZFtSTV\nxHUxAWYAr8XTS4EHJd1JtAnvsahWfV972889fe3UtjvnHxzfzue43Y8xbf1VrQ8N2beHkKRR7a3Q\nolpnSpoANAONwDXgRbXSsqVmOv9W03YWYOS7bb94mXH5b87IolupKrSo1n09LO9FtVK2/YuT2t2/\n7fzob/97S+egmb9sna8nf4e2dqxSXh4q6MpPBZCwO9uK8OVOlxsPZoWqOrlto2dn12G3n5lhbzrz\nYFYordndfnrBC633m/95crtl7ZZTS9avFh5M10nVd3/VfkZ834DmTVe3zf/SUDh4UCp98GC6vAkY\nMPanrfftiIPg4IE0b7y6+ycVyIPpCqa3Puzh8kky/j0uFyQPZsGezboD/Zpvygu2Lv45GZjcy7Ku\nr3yNmdgrROOKdgJ/yLgv/YevMYvmYWAQcBxwdsZ9KX8ezKL6BNgQ/5xI9E+Jv5xpj8qVBzM1r8a3\nK4ALga5Gp7ju+D5mSfwn8OOsO1FWfI1ZMvuIDpJqiYovnJhtdwLnwSy5HfHPL4kGlQ4FqjPtUYg8\nmJlq+TfvA4Abs+xIcHwfMwj7aaux5MCDGa73Psu6B5nyTXmonv8ouj3vIKjqquhJ/+bBDN1TH0J1\n/Gv66uBs+1JCvW7KJdVKelbSbyVtkHRDPH+4pOWSNkt6RtKwnOfMkbRV0iZJU9J8AxWh6bPo54kP\nYPu+rHtTEvnsY34GzDSz8cCfAtdJOh6YDaw0szFE3wGbAyBpHG0V384FFkmqvG1RWl79OArojn2w\nP/sh+zY0nQJi+Ywr3w3sjqf3StpEdJZ4Om3f91pMVPFlNjCNfl3xLRDrPoaNn8CwKjhlSMmb3//M\nN6KJKUel8vp92seUVAdMAF4Aqs2sCaLwSmopylO5Fd9KbZ/Bnv3RGvTLn+OVuyZw8t+tS6UpO+oL\n2NmjsasmwMk1qbSRK+9gShoK/IKoSvBeSR23I9lvVyrZho95+tqpPH3tVG5u+neGjPgo8UvaoAFw\n8CCa7zkPph1bhE7mL69gSjqAKJQPmFnL5YomSdVm1iSphray13lXfGvIma6jMqq9lcKd1Tcw8rTt\nTP6n56g7s+9V2Axofu6v4dDBcPwhvS6fRrU35VPzStL9wDtmNjNn3gLgXTNbIGkWMNzMZscHPw8C\npxBtwlcAncpdS7K5CTv/CF9nI+MTvkpIZva+SB9Vn9jECZdu4JSbX243f4Dal5cywG76alHKyszT\nfMws0QFvPtXeTgP+CtggaS3Re7gFWAAskXQFsI3oSNwrvgWm6dVqVsysZsXMs7i+6S4EDBvxfwDY\nkVGpw+Zt12XYw67lc1T+PNG3DLrS5Z+XV3wL04//9juAUXP0Tq58eRecfHjWXeqWXyuvOGL3G0cw\n79b7WLVkMu9sPyzrDnXJL0lWsIYHzqHhgXM4+qQtnH3VExxa+07WXWrla0zHG2uO465rb+aub9/U\n
+8Il4mtMFxPvbB/BvL/4FyZd9N8A1H9rRWa98WC6TlYtiYq4/u/OQ7lw1n9k0gfflLsgeTBdkDyY\nrgfZXRfxYLogeTBdkDyYLkgeTNeD7EbEeDBdkDyYrp2hH+/kuKbHufz5SVw0+4TM+uFXflyrGWsu\nYfyuR6iy/Vl3xYNZ0cy4cO0lDPvDdka+9+use9OOB7PSmDFu1yP85ZpvZN2THnkwK4CICnT94xPl\nU3fCg9mPncoPGMmLHM9TWXelzzyY/cyRk99k+K9WcQHXZt2VRDyYZc8YOHQfY2ZsYfriJ4HOQ3PL\nUT7Dd2uB+4kKhTcDPzWzH0maC1xFW6GDW8xsWfycOcAVRAW5bjSz5Wl0vtIdecabTPrurzlqSmPW\nXSm6fNaYLdXe1sVlYl6R1PKd+4Vm1q5Gs6SxtFV7qwVWSupU8MAV5qRr1oKM837Sv//WC6321lIk\nq6vDvOl4tbeiGjBkP3P2/mvW3SipPl2SzKn21hKy6yWtk3RPTuHWI4DtOU/zam+FOnUwnDqY4/5h\ne+/L9jNJqr0tAr5vZiZpHnAHcGVfGm/Ima7Di2oxpApGHAC1B8Kw3OInYZ9/TKOoVsHV3sxsT84i\ndwOPx9N5V3ur72Nn+6UBB8LAwfBHX4PqoVn3piB19aOoq2/7X5nP3bYq8Wvmu8b8GbDRzH7YMkNS\nTbz/CTADeC2eXgo8KOlOok34McBLiXvaH026Gg78PAwZnnVPgpOk2ts3JU0gOoXUCFwDXu2tdxPh\n/PP6+JzK+/iSVHtb1sNzvNpbq5a14eWZ9qLc+JWf1EwmOpzrvSKv68yDWVRjiIL4J1l3pOx5MBM7\nDBgFnJFiG2GfLkqDB7Ngo4E/A4b1tqArgAezT1rWin+caS8qgQezV4cDl2TcBz9d5FpdE9+W/t/h\nOQ9mN4r//3Zc33jBAxckD2ZZqLzTRR5MFyQPpguSB7MsVN7pIg+mC5IH0wXJg+mC5MEsC366yLkg\neDBdkDyYZcFPF3UiaZCkFyWtlbQhLqaFpOGSlkvaLOmZnEocSJojaaukTZKmpPkGXP/UazDN7BPg\nTDP7ClF5mHMlTQRmAyvNbAzwLDAHQNI42opqnQssklR5e+8ukbw25Wb2UTw5iOirckZUPGtxPH8x\ncEE8PY24qJaZNQItRbWcy1tewZRUFRc72A2sMLPVQLWZNUFrRbgR8eJeVKvoKm+Dk+8asznelNcC\nEyWNp/MeeeXtobvU9Okb7Gb2gaQGYCrQJKnazJok1dBWWTjvoloNOdN1eLW3cpVJtTdJhwKfmtn7\nkj4PnA3cTlQ86zJgAXAp8Fj8lLyLatUn7HzlCHtjlFW1t8OBxZKqiDb9PzezpyS9ACyRdAWwjehI\n3ItquaLIp6jWBuCkLua/C5zVzXO8qJZLxK/8uCB5MMuCny5yLggeTBckD2ZZqLyTGh5MFyQPpguS\nB9MFyYNZFvx0kXNB8GC6IHkwy4KfLnIuCB5MFyQPpguSB7Ms+Oki54LgwXRB8mCWBT9d5FwQPJgu\nSEmqvc2VtEPSmvhnas5zvNqbSySf4bufSDrTzD6SNAB4XtLT8cMLzWxh7vKSxtJW7a0WWCnpWB9b\nnoSfLupSN9XeoOtPbDpe7c0llKTaG8D1ktZJuiencKtXe3OJFVrtbRywCDjKzCYQBfaO9LpZ6Spv\nL6jgam8d9i3vBh6Pp73aW4UJqtqbpJq4YCvADOC1eNqrvVWY0Kq93S9pAtAMNALXgFd7c8WRpNrb\n3/TwHK/2VlR+usi5IHgwXZA8mGWh8nbRPZguSB5MFyQPpguSB7Ms+OmikmqsoFbhuUxabcyk1eQ8\nmCWT/DJdIRozaTU535SXBT9d5FwQlNX3KyRV3mqggphZoiO2zILpXE98U+6C5MF0QcokmJKmSnpd\n0hZJs1Juq1HSq/G4+JfiecMlLZe0WdIzOQPpkrRzr6QmSetz5n
XbTjHG3nfTZurj/SXVSnpW0m/j\nWgM3FP39mllJf4j+GH4HjAIOBNYBx6fY3v8AwzvMWwB8J56eBdxehHYmAROA9b21A4wD1hJ9Ubsu\n/jxUpDbnAjO7WHZsMdqMX6sGmBBPDwU2A8cX8/1mscacCGw1s21m9inwMNFY9LSIzluG6cDieHox\ncEHSRsxsFfBenu1Mowhj77tpE1Ie729mu81sXTy9F9hENOiwaO83i2B2HHe+g3THnRuwQtJqSVfG\n86rNrAmiDxkYkVLbI7ppJ+2x9yUb7y+pjmit/QLdf659brsSDn5OM7OTgPOA6ySdTudLKaU6Z1aK\ndko23l/SUOAXwI3xmrNon2sWwXwLODLnfrfjzovBzHbFt3uAR4k2IU2SqgEk1QBvp9R8d+3kPfa+\nr8xsj1nryem7adtkFrVNSQcQhfIBM3ssnl2095tFMFcDx0gaJWkgcDHRWPSikzQ4/qtG0hBgCrAh\nbu+yeLFLgce6fIECmqT9/l137SwFLpY0UNJoehh739c240C06Djev1htAvwM2GhmP8yZV7z3m9bR\ncC9HdVOJjuS2ArNTbGc00VH/WqJAzo7nfxFYGfdhOfCFIrT1ELAT+AR4E7gcGN5dO8AcoqPTTcCU\nIrZ5P7A+ft+PEu33Fa3N+HVOA/bnfLZr4t9pt59rX9v2S5IuSJVw8OPKkAfTBcmD6YLkwXRB8mC6\nIHkwXZA8mC5IHkwXpP8HslxQdz4HHxMAAAAASUVORK5CYII=\n", 206 | "text/plain": [ 207 | "" 208 | ] 209 | }, 210 | "metadata": {}, 211 | "output_type": "display_data" 212 | } 213 | ], 214 | "source": [ 215 | "plt.imshow(gbasin_simplified_img)\n", 216 | "plt.show()" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": null, 222 | "metadata": { 223 | "collapsed": true 224 | }, 225 | "outputs": [], 226 | "source": [] 227 | } 228 | ], 229 | "metadata": { 230 | "kernelspec": { 231 | "display_name": "Python 3", 232 | "language": "python", 233 | "name": "python3" 234 | }, 235 | "language_info": { 236 | "codemirror_mode": { 237 | "name": "ipython", 238 | "version": 3 239 | }, 240 | "file_extension": ".py", 241 | "mimetype": "text/x-python", 242 | "name": "python", 243 | "nbconvert_exporter": "python", 244 | "pygments_lexer": "ipython3", 245 | "version": "3.5.3" 246 | } 247 | }, 248 | "nbformat": 4, 249 | "nbformat_minor": 0 250 | } 251 | -------------------------------------------------------------------------------- /Graham_Ganssle/dat/array_explorer.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Array 
Exploration Notebook\n", 8 | "\n", 9 | "This notebook is used simply to examine the contents of `dat/`, so we can build images of the correct dimensionality for the image translation network." 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 1, 15 | "metadata": { 16 | "collapsed": true 17 | }, 18 | "outputs": [], 19 | "source": [ 20 | "import numpy as np\n", 21 | "import matplotlib.pyplot as plt\n", 22 | "%matplotlib inline\n", 23 | "import glob" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 2, 29 | "metadata": { 30 | "collapsed": true 31 | }, 32 | "outputs": [], 33 | "source": [ 34 | "fault_dyke_fold_sizes = []\n", 35 | "fold_dyke_fault_sizes = []\n", 36 | "gbasin_simplified_sizes = []\n", 37 | "\n", 38 | "for file in glob.glob('fault_dyke_fold_model/*'):\n", 39 | " fault_dyke_fold_sizes.append(np.load(file).shape)\n", 40 | "\n", 41 | "for file in glob.glob('fold_dyke_fault_model/*'):\n", 42 | " fold_dyke_fault_sizes.append(np.load(file).shape)\n", 43 | " \n", 44 | "for file in glob.glob('gbasin_simplified_model/*'):\n", 45 | " gbasin_simplified_sizes.append(np.load(file).shape)" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 11, 51 | "metadata": { 52 | "collapsed": false 53 | }, 54 | "outputs": [ 55 | { 56 | "name": "stdout", 57 | "output_type": "stream", 58 | "text": [ 59 | "there are 11200 images in fault_dyke_fold_model/\n", 60 | "there are 2800 images in fold_dyke_fault_model/\n", 61 | "there are 3600 images in gbasin_simplified_model/\n" 62 | ] 63 | } 64 | ], 65 | "source": [ 66 | "print('there are %d images in fault_dyke_fold_model/' %len(fault_dyke_fold_sizes))\n", 67 | "print('there are %d images in fold_dyke_fault_model/' %len(fold_dyke_fault_sizes))\n", 68 | "print('there are %d images in gbasin_simplified_model/' %len(gbasin_simplified_sizes))" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": 4, 74 | "metadata": { 75 | "collapsed": false 76 | }, 77 | 
"outputs": [ 78 | { 79 | "data": { 80 | "text/plain": [ 81 | "{(200, 200)}" 82 | ] 83 | }, 84 | "execution_count": 4, 85 | "metadata": {}, 86 | "output_type": "execute_result" 87 | } 88 | ], 89 | "source": [ 90 | "set(fault_dyke_fold_sizes)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 5, 96 | "metadata": { 97 | "collapsed": false 98 | }, 99 | "outputs": [ 100 | { 101 | "data": { 102 | "text/plain": [ 103 | "{(280, 200), (400, 200)}" 104 | ] 105 | }, 106 | "execution_count": 5, 107 | "metadata": {}, 108 | "output_type": "execute_result" 109 | } 110 | ], 111 | "source": [ 112 | "set(fold_dyke_fault_sizes)" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": 6, 118 | "metadata": { 119 | "collapsed": false 120 | }, 121 | "outputs": [ 122 | { 123 | "data": { 124 | "text/plain": [ 125 | "{(376, 200), (496, 200)}" 126 | ] 127 | }, 128 | "execution_count": 6, 129 | "metadata": {}, 130 | "output_type": "execute_result" 131 | } 132 | ], 133 | "source": [ 134 | "set(gbasin_simplified_sizes)" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": 7, 140 | "metadata": { 141 | "collapsed": true 142 | }, 143 | "outputs": [], 144 | "source": [ 145 | "fault_dyke_fold_img = np.load('fault_dyke_fold_model/fdf_model_xz_RF_272.npy')\n", 146 | "fold_dyke_fault_img = np.load('fold_dyke_fault_model/out_0060_xz225.npy')\n", 147 | "gbasin_simplified_img = np.load('gbasin_simplified_model/out_0040_xz175.npy')" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": 8, 153 | "metadata": { 154 | "collapsed": false 155 | }, 156 | "outputs": [ 157 | { 158 | "data": { 159 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQQAAAEACAYAAABVmQgcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADENJREFUeJzt3V2MnOV5gOH7sQ0utgkFio2CAZdaOCa0AaRaoVGVjRoZ\naKUYRZXr9ASEoqJSTnIEPgJUVSEHiauqoj8EIUeCOqZVgntQcBB1JKRUGBFKqH+wotjGDl6cgFwF\nQ2zw04MdHsb2jvdvvv12pvclrZh5d/S97+vB9843M56NzESSAOa1vQBJc4dBkFQMgqRiECQVgyCp\nGARJpbEgRMStEbEnIl6PiPuamkdS/0QT70OIiHnA68AfAT8HdgIbMnNP3yeT1DdNPUJYA+zLzAOZ\neRLYAqxraC5JfdJUEK4A3ui6fqgzJmkO80lFSWVBQ8c9DFzVdX15Z6xEhP+IQmpRZsaZY00FYSew\nMiKuBt4ENgBfOftmnwdGGlrCmIW8z9fYxEJONDrPeP6Raxkdb9tDYQdN33ft2sFw7++hcUcbCUJm\nfhgR9wLbGTsteSwzdzcxl6T+aeoRApn5DLCqqeNL6r+Wn1Rc0e70DVvCb7W9hAataHsBDVvR9gJa\nYRAadKFBGGAr2l5AK3zZUVIxCJKKQZBUDIKkYhAkFYMgqRgEScUgSCoGQVIxCJKKQZBUDIKkYhAk\nFYMgqRgESaWxT0yanE/OwhzvAWd9luQs+U1mZ49Sf7QchEtnYY53aS8IS5idPUr94SmDpGIQJBWD\nIKkYBEnFIEgqBkFSMQiSikGQVAyCpGIQJBWDIKkYBEnFIEgqBkFSMQiSikGQVAyCpGIQJBWDIKkY\nBEnFIEgqBkFSMQiSikGQVAyCpGIQJBWDIKm0/Lsdm7WIY6zhP5jPB20vRRoIMwpCROwHjgGngJOZ\nuSYiLga+C1wN7AfWZ+axGa5zWhZzjM/zr21MLQ2kmZ4ynAJGMvPGzFzTGbsfeC4zVwHPAxtnOIek\nWTLTIMQ4x1gHbO5c3gzcPsM5BtIxLuXXLGp7GdKUzDQICfwgInZGxFc7Y8sycxQgM48AS2c4x0Da\nxl+ym8+2vQxpSmb6pOLnMvPNiLgM2B4RexmLRLczr0uao2YUhMx8s/PfoxHxfWANMBoRyzJzNCIu\nB97qfYTvdl3+NHD9TJYjqafXgP+Z8FbTDkJELALmZeavImIxsBZ4CNgG3Al8A7gDeLr3Uf5sutNL\nmpLrOf0H7lPj3momjxCWAd+LiOwc54nM3B4RLwFbI+Iu4ACwfgZzSJpF0w5CZv4MuGGc8beBL85k\nUZLa4VuXJRWDIKkYBEnFIEgqBkFSMQiSSsufh3Btw8dvs3fLaX5/Un+1HIQlDR//goaPP9HcTe9P\n6i9PGSQVgyCpGARJxSBIKgZBUjEIkopBkFQMgqRiECQVgyCpGARJxSBIKgZBUjEIkopBkFQMgqRi\nECQVgyCpGARJxSBIKq1+yOpv3B2NHn/h29Hrt1437uY/eZnrl7/fzuTSBLb90/jjkZmzu5KPJo7I\ny/JAo3Nc8tpeNvzu2kbn6OUzz67ikrUXtTK3NJGReJHMPOsnsqcMkopBkFQMgqRiECQVgyCpGARJ\nxSBIKgZBUjEIkopBkFQMgqRiECQVgyCpGARJpdXPQxhmW1nPMT7T9jKkHv503FGD0JC9XMshfr/t\nZUhTMrSnDJ/46QGue/Rf2l6GNFAmDEJEPBYRoxHxatfYxRGxPSL2RsSzEXFR1/c2RsS+iNgdEe18\nXBFw0b6f8Xt/93hb00sDaTKPEB4Hbjlj7H7gucxcBTwPbASIiOuA9cBq4DbgkYho9oMTJfXNhEHI\nzBeAd84YXgds7lzeDNzeufwlYEtmfpCZ+4F9wJr+LFVS06b7HMLSzBwFyMwjwNLO+BXAG123O9wZ\n+3/l5KILyPnz216GNGX9epWhnY9unqOe/s8tHL3x+raXIU3Zd
IMwGhHLMnM0Ii4H3uqMHwau7Lrd\n8s7YuN59cFNdPm/ks5w/cvM0lzO3nLhwMXmer+hq7jix40ec3PFfE95usv/XRufrI9uAO4FvAHcA\nT3eNPxERmxg7VVgJvNjroIsf/Nokp5c0E+eP3HzaD9zjD/3tuLebMAgR8SQwAlwaEQeBB4CHgaci\n4i7gAGOvLJCZuyJiK7ALOAnck239JhhJUzZhEDLzz3t864s9bv914OszWZSkdgztOxUlTZ1BkFQM\ngqRiECQVgyCpGARJxSBIKgZBUjEIkkqr/wLn+OYLGzv2iZ8sauzY0rBqNQjvPnxxY8d+71dLGju2\nNKw8ZZBUDIKkYhAkFYMgqRgEScUgSCoGQVIxCJKKQZBUDIKkYhAkFYMgqRgEScUgSCoGQVIxCJKK\nQZBUDIKkYhAkFYMgqRgEScUgSCqtfgz7sDp211J+ufiTbS9DmjKD0IBTows4tdA/Wg0eTxkkFYMg\nqRgEScUgSCoGQVIxCJKKQZBUDIKkYhAkFYMgqRgEScUgSCoTBiEiHouI0Yh4tWvsgYg4FBEvd75u\n7frexojYFxG7I2JtUws/l0tOvM41x59rY2ppoE3mEcLjwC3jjH8rM2/qfD0DEBGrgfXAauA24JGI\niL6tdpJWHP8hf/D2N2d7WmngTRiEzHwBeGecb433F30dsCUzP8jM/cA+YM2MVihp1szkOYR7I+KV\niPh2RFzUGbsCeKPrNoc7Y5IGwHSD8AhwTWbeABwBfHwuDYFpfaxPZh7tuvoo8O+dy4eBK7u+t7wz\nNr6jD358edEILB6ZznLmjF/HEr5z1fO8c941bS9FOt27O+D4jglvNtkgBF3PGUTE5Zl5pHP1y8Br\nncvbgCciYhNjpworgRd7HvWyByc5/WDImMcvzv8UH85b2PZSpNMtHjn9B+4vHxr3ZhMGISKeBEaA\nSyPiIPAA8IWIuAE4BewH7gbIzF0RsRXYBZwE7snM7HXs+/7mryexk6mb/9xL8A+NHFoaanGOv6/N\nThyRO7KZFyB+/uhb7P2L/Y0c+1zen/cJNv3OIU7Mv3DW55amZE+QmWe9Uug7FSUVgyCpGARJxSBI\nKgZBUjEIkopBkFQMgqRiECQVgyCpGARJxSBIKgZBUjEIkopBkFQMgqRiECQVgyCpGARJxSBIKgZB\nUpnWL2rR+ObNS5avOMgHCxa3vRTpnA7uGX/cIPTRBYuO88//dgcLLpzf9lKkcxrp8TvZPWWQVAyC\npGIQJBWDIKkYBEnFIEgqBkFSMQiSikGQVAyCpGIQJBWDIKkYBEnFIEgqBkFSMQiSikGQVAyCpGIQ\nJBWDIKkYBEnFIEgqBkFSMQiSSqtB+PGO/21z+sYN8/6GeW8w/PvrpdUgvNLAH/p7P32fd3e/3/fj\nTkcT+5srhnlvMPz762XoThmOfOcXHNp0pO1lSANp6IIgafoiM9uZOKKdiSUBkJln/crX1oIgae7x\nlEFSMQiSSitBiIhbI2JPRLweEfe1sYZ+i4j9EfHfEfHjiHixM3ZxRGyPiL0R8WxEXNT2OicrIh6L\niNGIeLVrrOd+ImJjROyLiN0RsbadVU9ej/09EBGHIuLlztetXd8bqP1N16wHISLmAX8P3AJ8GvhK\nRHxqttfRgFPASGbemJlrOmP3A89l5irgeWBja6ubuscZu4+6jbufiLgOWA+sBm4DHomIs56wmmPG\n2x/AtzLzps7XMwARsZrB29+0tPEIYQ2wLzMPZOZJYAuwroV19Ftw9p/nOmBz5/Jm4PZZXdEMZOYL\nwDtnDPfaz5eALZn5QWbuB/Yxdj/PWT32B2P345nWMWD7m642gnAF8EbX9UOdsUGXwA8iYmdEfLUz\ntiwzRwEy8wiwtLXV9cfSHvs58z49zODep/dGxCsR8e2uU6Jh2t85+aRi/3wuM28C/hj4q4j4Q8Yi\n0W3YXuMdtv08AlyTmTcAR
4BvtryeWddGEA4DV3VdX94ZG2iZ+Wbnv0eB7zP2kHI0IpYBRMTlwFvt\nrbAveu3nMHBl1+0G8j7NzKP58RtzHuXj04Kh2N9ktBGEncDKiLg6Is4HNgDbWlhH30TEoohY0rm8\nGFgL/ISxfd3ZudkdwNOtLHD6gtPPqXvtZxuwISLOj4jfBlYCL87WImfgtP11IveRLwOvdS4P6v6m\nbMFsT5iZH0bEvcB2xoL0WGbunu119Nky4Hudt2MvAJ7IzO0R8RKwNSLuAg4w9kz1QIiIJ4ER4NKI\nOAg8ADwMPHXmfjJzV0RsBXYBJ4F7co6/BbbH/r4QETcw9orRfuBuGMz9TZdvXZZUfFJRUjEIkopB\nkFQMgqRiECQVgyCpGARJxSBIKv8Hc17GPutNjGAAAAAASUVORK5CYII=\n", 160 | "text/plain": [ 161 | "" 162 | ] 163 | }, 164 | "metadata": {}, 165 | "output_type": "display_data" 166 | } 167 | ], 168 | "source": [ 169 | "plt.imshow(fault_dyke_fold_img)\n", 170 | "plt.show()" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": 9, 176 | "metadata": { 177 | "collapsed": false 178 | }, 179 | "outputs": [ 180 | { 181 | "data": { 182 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAM4AAAEACAYAAAD7ikm0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADmhJREFUeJzt3X+MHPV5x/H3xxin8VHIFTi78WFs1wkYGuQQ11IhVY4E\nUUhbjNKKUIgEQWmpCE2iVi12WslYqgREgiithBQSSk1jREnVYlMJMBa9PyhNwASDjY1xAdv4wGc7\nP6gwKfjH0z92DOtj72732dmb9fF5SSvPfmfmO4/37uOdHe/uo4jAzFozpeoCzI5FDo5ZgoNjluDg\nmCU4OGYJDo5ZQseCI+liSS9IelHSjZ06jlkV1In/x5E0BXgR+BzwGvAUcEVEvFD6wcwq0KlnnMXA\ntojYEREHgPuAJR06ltmE61RwZgGv1t3fVYyZTQq+OGCWMLVD8w4Bs+vu9xdj75LkN8lZpSJC2X07\nFZyngPmSTgdeB64A/vj9m30GGOhQCWPpB3qb2O5fgC/W3T9nxPr5QPqx5zde2sS0ee+k9t1703c5\n9abrUvu+8/o0XvrT30zt27StN8EZN426+ukHz+rs8cfxKW1pa/+OBCciDkm6AVhL7XTwrohor1Kz\nLtKpZxwi4mHgjE7Nb1alii8OzKn28OM6u+oCRjV94FNVlzC2kweqrqCjHJwxdfh1QBt6BhZVXcLY\nThmouoKO8uVoswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcswQH\nxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwc\ns4S2+uNI2g68ARwGDkTEYkm91FqZnQ5sBy6PiDfarNOsq7T7jHMYGIiIT0bE4mJsKbAuIs4AHgOW\ntXkMs67TbnDUYI4lwMpieSVwWZvHMOs67QYngEclPSXpK8XYjIgYBoiI3UBfm8cw6zrt9gA9PyJe\nl3QqsFbSVmphque27DbptBWciHi9+HOvpAeAxcCwpBkRMSxpJrBn9BkG65bn0P2tDe1YtX5wP08P\nvlXafOngSJoOTImINyX1ABcBK4A1wDXArcDVwOrRZxnIHt6sJYsGelg00PPu/TtX7GtrvnaecWYA\n
/y4pinlWRcRaSeuB+yVdC+wALm+rQrMulA5ORLwCLGww/jPgwnaKMut2fueAWYKDY5bg4JglODhm\nCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4Jgl\nODhmCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4JglODhmCQ6OWYKDY5bg4JgljBscSXdJGpb0XN1Y\nr6S1krZKekTSSXXrlknaJmmLpIs6VXgVTmJoxG3Hu7ep/LLq8mwCNdNY6m7gH4B76saWAusi4luS\nbgSWAUslnUWtA9sCoB9YJ+ljETEpGuh+gwtGXfcEf8kePtHSfH3/NsRxpx543/hbs3+dvRcsark+\nmzjjBiciHpd0+ojhJcBniuWV1LrgLgUuBe6LiIPAdknbqDXU/XFpFXep87it9Z3+qvHw/tkz2fPZ\n32qvoDq/nNXHpr+7vrT5LN/KsC8ihgEiYrekvmJ8FvDfddsNFWPWgp6du5n7Tw+WNt/h46cye9VD\n7w0cEgf2HZ+e7+8/90oJVR3b2mrXXmdSnIpNVlMOHOSE7a+VNt/y/1Dbc6ypm+K8/5zN9LnT2p4T\nYOqJU5jWe1wpc415nOR+w5JmRMSwpJnAnmJ8CDitbrv+YmwUg3XLc4qbfdA8ccHO0ubqu6SHWV88\n8X3jG7e8zaYX3intOM0GR8XtiDXANcCtwNXA6rrxVZK+Te0UbT7w5OjTDrRSq9m49jy0nz0P7W+4\n7swSjzNucCTdS+03/GRJO4HlwC3ADyVdC+ygdiWNiNgs6X5gM3AAuH6yXFEzq9fMVbUrR1l14Sjb\n3wzc3E5RZt3O7xwwS3BwzBIcHLMEB8cswcExS3BwzBIcHLMEB8cswcExS3BwzBIcHLMEB8cswcEx\nSyjrE6AfCKuPetN3L3D0pxZ/jz9jKuV9WMq6l4PTgg38Yd29WcD0o9a/wgWoyU+R931rFyf+0S/G\n3ObTf/ANTnr+5RartIng4JTojRY++v3hU8Vxc3vG3OaRTT9sOL7/2RPY+bcfb6U0PrHrB3x269+0\ntM9YZpw4xNvDh0qb71jj4HxAbOz/Ehv7v1TKXFOmHOKxWz7Otlt+Wsp8ALvueaO0uSaCg2Mpv7rg\nQ5y78qOlzXfKwPTxN2rSy9/5Gf/77NulzdeIg2NdYfaXP1LaXH2fP4FDbx0ec5sV815q6xgOjk06\nvzKj87/W/n8cswQHxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEhwcs4RK3zlw6bttdWpe46Osp7ze\nl2adUmlwPsmGo+6fzfOcxxOlzb+PU7iXq0qbz+yIZhpL3QX8PjAcEecUY8uBP+G9FobfjIiHi3XL\ngGuBg8DXI2Jts8VM4wDTGPvDXa3o5RcsZ0Vp863gxdLmsmNbM69x7gZ+t8H47RFxbnE7EpoF1Lqz\nLQAuAe6Q1H6nVbMuM25wIuJx4OcNVjUKxBLgvog4GBHbgW3A4rYqNOtC7VxVu0HSBknfl3RSMTYL\neLVum6FizGxSyQbnDmBeRCwEdgO3lVeSWfdLXVWLiL11d78HPFgsDwGn1a3rL8YaGqxbnlPczDph\n/eB+nh58q7T5mg2OqHtNI2lmROwu7n4B2FQsrwFWSfo2tVO0+cCTo0060Gq1ZkmLBnpYNPDetwrd\nuWJfW/M1czn6Xmq/4ydL2gksBy6QtBA4DGwHrgOIiM2S7gc2AweA6yOiuS8aMzuGjBuciLiywfDd\nY2x/Mxz1lZdmk47fq2aW4OCYJTg4ZgkOjlmCg2OW4OCYJTg4ZgkOjlmCg2OW4OCYJTg4ZgmVflnH\nCpY3HL+KH3AK7b179YgP8TYf5v9KmcvsiK5sLLWKcnpVAszlZc7hubqRaYzsFm3Wqq4MTpleYR6v\nMK9upJfa5+vM8vwaxyzBwTFLcHDMEhwcswQHxyzBwTFLcHDMEh
wcswQHxyzBwTFLcHDMEhwcswQH\nxyzBwTFLcHDMEhwcswQHxyxh3OBI6pf0mKTnJW2U9LVivFfSWklbJT1S10AXScskbZO0RdJFnfwL\nmFWhmWecg8BfRMTZwG8DX5V0JrAUWBcRZwCPAcsAJJ0FXA4sAC4B7pDUqLW72TFr3OBExO6I2FAs\nvwlsofah/SXAymKzlcBlxfKlwH0RcTAitgPbgMUl121WqZZe40iaAywEfgTMiIhhqIUL6Cs2mwW8\nWrfbUDFmNmk0/S03kk4A/hX4ekS8KWlkU9xEk9zBuuU5uGG7dUol7dolTaUWmn+OiNXF8LCkGREx\nLGkmsKcYHwJOq9u9vxhrYCBRslnrym7X3uyp2j8CmyPiO3Vja4BriuWrgdV141dImiZpLjAfeLKt\nKs26zLjPOJLOB64CNkp6htop2TeBW4H7JV0L7KB2JY2I2CzpfmAzcAC4PiISp3Fm3Wvc4ETEfwHH\njbL6wlH2uRm4uY26zLqa3zlgluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJ\nDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4\nOGYJDo5ZgoNjluDgmCVk2rX/eTG+XNIuST8pbhfX7eN27TapNdPK8Ei79g1FH9CnJT1arLs9Im6v\n31jSAt5r194PrJP0MTeXsskk2679SBdpNdhlCW7XbpNctl37j4uhGyRtkPR9SScVY27XbpNe08EZ\n2a4duAOYFxELgd3AbZ0p0az7pNu1R8Teuk2+BzxYLLfQrn2wbnlOcTMr3/rB/Tw9+FZp8zUVHBq0\na5c0MyJ2F3e/AGwqltcAqyR9m9op2hjt2gdar9gsYdFAD4sGet69f+eKfW3N10679islLQQOA9uB\n68Dt2u2DoZ127Q+PsY/btduk5ncOmCU4OGYJDo5ZgoNjluDgmCU4OGYJDo5ZgoNjluDgmCU4OGYJ\nDo5ZgoNjluDgmCU4OGYJDo5ZgoNjllBxcLZXe/hxbRp/k6rsG6y6gjGtH9xfdQkd5eCM6fmqCxjd\nTwerrmBMZX4xRjfyqZpZgoNjlqCqvoBGkr/5xioVEY2+wrkplQXH7FjmUzWzBAfHLKGS4Ei6WNIL\nkl6UdGMVNYwkabukZyU9I+nJYqxX0lpJWyU9UteRYSLquUvSsKTn6sZGrWeim3mNUl9XNBtr0Azt\na8V4eY9fREzojVpY/wc4HTge2ACcOdF1NKjrZaB3xNitwF8XyzcCt0xgPZ+m1lLlufHqAc4CnqH2\nzaxzisdXFdS3nFoTspHbLpjI+oCZwMJi+QRgK3BmmY9fFc84i4FtEbEjIg4A91FrRlU18f5n4CXA\nymJ5JXDZRBUTEY8DP2+ynkuZ4GZeo9QHXdBsLBo3Q+unxMeviuCMbDy1i+5oPBXAo5KekvSVYmxG\nRAxD7YcB9FVWXU3fKPV0UzOvrmo2VtcM7UeM/vNsuT5fHHjP+RFxLvB54KuSfodamOp127X7bqun\nq5qNNWiGVtrPs4rgDAGz6+6P0Xhq4kTE68Wfe4EHqD1VD0uaAbV+QMCe6iqEMeppoZlX50TE3ihe\nNFBrNnbkdGfC62vUDI0SH78qgvMUMF/S6ZKmAVdQa0ZVGUnTi3+dkNQDXARsLOq6ptjsamB1wwk6\nWBpHv2YYrZ41wBWSpkmay5jNvDpXX/HLeMTIZmMTXd/7mqFR5uM3UVeJRlz1uJjalY5twNIqahhR\nz1xqV/eeoRaYpcX4rwHrilrXAh+ZwJruBV4D3gZ2Al8GekerB1hG7WrQFuCiiuq7B3iueCwfoPaa\nYsLrA84HDtX9TH9S/M6N+vNstT6/5cYswRcHzBIcHLMEB8cswcExS3BwzBIcHLMEB8cswcExS/h/\nJYCEbZVLIdAAAAAASUVORK
5CYII=\n", 183 | "text/plain": [ 184 | "" 185 | ] 186 | }, 187 | "metadata": {}, 188 | "output_type": "display_data" 189 | } 190 | ], 191 | "source": [ 192 | "plt.imshow(fold_dyke_fault_img)\n", 193 | "plt.show()" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": 10, 199 | "metadata": { 200 | "collapsed": false 201 | }, 202 | "outputs": [ 203 | { 204 | "data": { 205 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAKYAAAEACAYAAAA5ug0wAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAEc5JREFUeJzt3X2UVPV9x/H3Z1FIACVEZde4yOITAo0SbbBVlLVVRGPB\ng6nRtKkP9aFGjw/0nAAmLZrSIqcVkyaHeKLGgx6twfRU8QmB2M0RExUFBAMCsVkEgRWrVTlGUfbb\nP+7d3dnn2blz5/5m5/s6Z8/cuXNnfr+Z/ex9nN93ZWY4F5qqrDvgXFc8mC5IHkwXJA+mC5IH0wXJ\ng+mClFowJU2V9LqkLZJmpdWO65+UxnlMSVXAFuDPgZ3AauBiM3u96I25fimtNeZEYKuZbTOzT4GH\ngekpteX6obSCeQSwPef+jniec3nxgx8XpANSet23gCNz7tfG81pJ8ov0/ZiZKcnz0wrmauAYSaOA\nXcDFwCUdF5oM1Cdo5BG+zkbG97LUzA735wNzErTawfkH57fc5lthzK3MmPUQ4ya9lt9zzBhQdXvB\nXQNooO0z3m9FfN89mKf5iV8jlWCa2X5J1wPLiXYX7jWzTWm05fqntNaYmNkyYExar+/6t0wPfuoy\naXVSJq1ySH0mzdZl0mpyFRjM0zNplUPrM2m2LpNWk/PTRS5IHkwXJA+mC5IH0wXJg+mC5MF0QfJg\nuiB5MF2QPJguSB5MFyQPpguSB9MFyYPpguTBdEHyYLogeTBdkDyYLkgeTBckD6YLUqJRkpIagfeB\nZuBTM5soaTjwc2AU0AhcZGbvJ+ynqzBJ15jNQL2ZfcXMJsbzZgMrzWwM8CxFrS7gKkXSYKqL15gO\nLI6nFwMXJGzDVaCkwTRghaTVkq6M51WbWROAme0GRiRsw1WgpJU4TjOzXZIOA5ZL2kwU1lxePMv1\nWaJgmtmu+HaPpEeJCrY2Sao2syZJNcDb3T2/IWe6jvIdnF/pGhu2sa3hzaK+ZsHBlDQYqDKzvZKG\nAFOA24ClwGXAAuBS4LHuXqO+0MZdUOrqR1FXP6r1/nO3rUr8mknWmNXAf8V1Lg8AHjSz5ZJeBpZI\nugLYBlyUuJeu4hQcTDP7PTChi/nvAmcl6ZRzfuXHBcmD6YLkwXRB8mC6IHkwXZA8mC5IHkwXJA+m\nC5IH0wXJg+mC5MF0QfJguiB5MF2QPJguSB5MFyQPpguSB9MFyYPpguTBdEHyYLogeTBdkHoNpqR7\nJTVJWp8zb7ik5ZI2S3pG0rCcx+ZI2ippk6QpaXXc9W/5rDHvA87pMK/Lim6SxhGNIx8LnAsskqTi\ndddVil6DaWargPc6zO6uots04GEz+8zMGoGtRGVjnOuTQvcxR3RT0e0IYHvOcm/F85zrk6TV3loU\nVNGtIWe6Di+qVa5CKqrVXUW3t4CROcvVxvO6VF9g4y4saRTVyndTrvinRUtFN2hf0W0pcLGkgZJG\nA8cALyXupas4va4xJT1EtHI7RNKbwFzgduCRjhXdzGyjpCXARuBT4Ntm5oVbXZ/1Gkwz+2Y3D3VZ\n0c3M5gPzk3TKOb/y44LkwXRB8mC6
IHkwXZA8mC5IHkwXJA+mC5IH0wXJg+mC5MF0QfJguiB5MF2Q\nPJguSB5MFyQPpguSB9MFyYPpguTBdEEq1vDdgC2Mb8d0mP+1UnfE9UEFBLPF5h7u54b0S8BB6XfH\n9SifUZL3AucDTWZ2QjxvLnAVbePJbzGzZfFjc4ArgM+AG81seRodL64ne3jsppxp3/MplXzWmPcB\nPwLu7zB/oZktzJ0haSxtRbVqgZWSji3vIbw/yJk+Omd6eqk7UlHyGb67StKoLh7qqorbdOKiWkCj\npJaiWi8m62Yo3ug864kPotsTP9c2b+TA0nSnH0uyj3m9pG8BLwN/b2bvExXQ+k3OMpVTVOvVj7ue\nBpias89axtuOUio0mIuA75uZSZoH3AFcWbxu9TPLPmydXLVzImurxwNw+vee58gzdmTVq6AVFEwz\n25Nz927g8Xi6T0W1GnKm66iMam9vrx/ROv37FaPbPXbuT5a1Tg8/ZS9HTdhasn4lkWW1t3ZFtSTV\nxHUxAWYAr8XTS4EHJd1JtAnvsahWfV972889fe3UtjvnHxzfzue43Y8xbf1VrQ8N2beHkKRR7a3Q\nolpnSpoANAONwDXgRbXSsqVmOv9W03YWYOS7bb94mXH5b87IolupKrSo1n09LO9FtVK2/YuT2t2/\n7fzob/97S+egmb9sna8nf4e2dqxSXh4q6MpPBZCwO9uK8OVOlxsPZoWqOrlto2dn12G3n5lhbzrz\nYFYordndfnrBC633m/95crtl7ZZTS9avFh5M10nVd3/VfkZ834DmTVe3zf/SUDh4UCp98GC6vAkY\nMPanrfftiIPg4IE0b7y6+ycVyIPpCqa3Puzh8kky/j0uFyQPZsGezboD/Zpvygu2Lv45GZjcy7Ku\nr3yNmdgrROOKdgJ/yLgv/YevMYvmYWAQcBxwdsZ9KX8ezKL6BNgQ/5xI9E+Jv5xpj8qVBzM1r8a3\nK4ALga5Gp7ju+D5mSfwn8OOsO1FWfI1ZMvuIDpJqiYovnJhtdwLnwSy5HfHPL4kGlQ4FqjPtUYg8\nmJlq+TfvA4Abs+xIcHwfMwj7aaux5MCDGa73Psu6B5nyTXmonv8ouj3vIKjqquhJ/+bBDN1TH0J1\n/Gv66uBs+1JCvW7KJdVKelbSbyVtkHRDPH+4pOWSNkt6RtKwnOfMkbRV0iZJU9J8AxWh6bPo54kP\nYPu+rHtTEvnsY34GzDSz8cCfAtdJOh6YDaw0szFE3wGbAyBpHG0V384FFkmqvG1RWl79OArojn2w\nP/sh+zY0nQJi+Ywr3w3sjqf3StpEdJZ4Om3f91pMVPFlNjCNfl3xLRDrPoaNn8CwKjhlSMmb3//M\nN6KJKUel8vp92seUVAdMAF4Aqs2sCaLwSmopylO5Fd9KbZ/Bnv3RGvTLn+OVuyZw8t+tS6UpO+oL\n2NmjsasmwMk1qbSRK+9gShoK/IKoSvBeSR23I9lvVyrZho95+tqpPH3tVG5u+neGjPgo8UvaoAFw\n8CCa7zkPph1bhE7mL69gSjqAKJQPmFnL5YomSdVm1iSphray13lXfGvIma6jMqq9lcKd1Tcw8rTt\nTP6n56g7s+9V2Axofu6v4dDBcPwhvS6fRrU35VPzStL9wDtmNjNn3gLgXTNbIGkWMNzMZscHPw8C\npxBtwlcAncpdS7K5CTv/CF9nI+MTvkpIZva+SB9Vn9jECZdu4JSbX243f4Dal5cywG76alHKyszT\nfMws0QFvPtXeTgP+CtggaS3Re7gFWAAskXQFsI3oSNwrvgWm6dVqVsysZsXMs7i+6S4EDBvxfwDY\nkVGpw+Zt12XYw67lc1T+PNG3DLrS5Z+XV3wL04//9juAUXP0Tq58eRecfHjWXeqWXyuvOGL3G0cw\n79b7WLVkMu9sPyzrDnXJL0lWsIYHzqHhgXM4+qQtnH3VExxa+07WXWrla0zHG2uO465rb+aub9/U\n
+8Il4mtMFxPvbB/BvL/4FyZd9N8A1H9rRWa98WC6TlYtiYq4/u/OQ7lw1n9k0gfflLsgeTBdkDyY\nrgfZXRfxYLogeTBdkDyYLkgeTNeD7EbEeDBdkDyYrp2hH+/kuKbHufz5SVw0+4TM+uFXflyrGWsu\nYfyuR6iy/Vl3xYNZ0cy4cO0lDPvDdka+9+use9OOB7PSmDFu1yP85ZpvZN2THnkwK4CICnT94xPl\nU3fCg9mPncoPGMmLHM9TWXelzzyY/cyRk99k+K9WcQHXZt2VRDyYZc8YOHQfY2ZsYfriJ4HOQ3PL\nUT7Dd2uB+4kKhTcDPzWzH0maC1xFW6GDW8xsWfycOcAVRAW5bjSz5Wl0vtIdecabTPrurzlqSmPW\nXSm6fNaYLdXe1sVlYl6R1PKd+4Vm1q5Gs6SxtFV7qwVWSupU8MAV5qRr1oKM837Sv//WC6321lIk\nq6vDvOl4tbeiGjBkP3P2/mvW3SipPl2SzKn21hKy6yWtk3RPTuHWI4DtOU/zam+FOnUwnDqY4/5h\ne+/L9jNJqr0tAr5vZiZpHnAHcGVfGm/Ima7Di2oxpApGHAC1B8Kw3OInYZ9/TKOoVsHV3sxsT84i\ndwOPx9N5V3ur72Nn+6UBB8LAwfBHX4PqoVn3piB19aOoq2/7X5nP3bYq8Wvmu8b8GbDRzH7YMkNS\nTbz/CTADeC2eXgo8KOlOok34McBLiXvaH026Gg78PAwZnnVPgpOk2ts3JU0gOoXUCFwDXu2tdxPh\n/PP6+JzK+/iSVHtb1sNzvNpbq5a14eWZ9qLc+JWf1EwmOpzrvSKv68yDWVRjiIL4J1l3pOx5MBM7\nDBgFnJFiG2GfLkqDB7Ngo4E/A4b1tqArgAezT1rWin+caS8qgQezV4cDl2TcBz9d5FpdE9+W/t/h\nOQ9mN4r//3Zc33jBAxckD2ZZqLzTRR5MFyQPpguSB7MsVN7pIg+mC5IH0wXJg+mC5MEsC366yLkg\neDBdkDyYZcFPF3UiaZCkFyWtlbQhLqaFpOGSlkvaLOmZnEocSJojaaukTZKmpPkGXP/UazDN7BPg\nTDP7ClF5mHMlTQRmAyvNbAzwLDAHQNI42opqnQssklR5e+8ukbw25Wb2UTw5iOirckZUPGtxPH8x\ncEE8PY24qJaZNQItRbWcy1tewZRUFRc72A2sMLPVQLWZNUFrRbgR8eJeVKvoKm+Dk+8asznelNcC\nEyWNp/MeeeXtobvU9Okb7Gb2gaQGYCrQJKnazJok1dBWWTjvoloNOdN1eLW3cpVJtTdJhwKfmtn7\nkj4PnA3cTlQ86zJgAXAp8Fj8lLyLatUn7HzlCHtjlFW1t8OBxZKqiDb9PzezpyS9ACyRdAWwjehI\n3ItquaLIp6jWBuCkLua/C5zVzXO8qJZLxK/8uCB5MMuCny5yLggeTBckD2ZZqLyTGh5MFyQPpguS\nB9MFyYNZFvx0kXNB8GC6IHkwy4KfLnIuCB5MFyQPpguSB7Ms+Oki54LgwXRB8mCWBT9d5FwQPJgu\nSEmqvc2VtEPSmvhnas5zvNqbSySf4bufSDrTzD6SNAB4XtLT8cMLzWxh7vKSxtJW7a0WWCnpWB9b\nnoSfLupSN9XeoOtPbDpe7c0llKTaG8D1ktZJuiencKtXe3OJFVrtbRywCDjKzCYQBfaO9LpZ6Spv\nL6jgam8d9i3vBh6Pp73aW4UJqtqbpJq4YCvADOC1eNqrvVWY0Kq93S9pAtAMNALXgFd7c8WRpNrb\n3/TwHK/2VlR+usi5IHgwXZA8mGWh8nbRPZguSB5MFyQPpguSB7Ms+OmikmqsoFbhuUxabcyk1eQ8\nmCWT/DJdIRozaTU535SXBT9d5FwQlNX3KyRV3mqggphZoiO2zILpXE98U+6C5MF0QcokmJKmSnpd\n0hZJs1Juq1HSq/G4+JfiecMlLZe0WdIzOQPpkrRzr6QmSetz5n
XbTjHG3nfTZurj/SXVSnpW0m/j\nWgM3FP39mllJf4j+GH4HjAIOBNYBx6fY3v8AwzvMWwB8J56eBdxehHYmAROA9b21A4wD1hJ9Ubsu\n/jxUpDbnAjO7WHZsMdqMX6sGmBBPDwU2A8cX8/1mscacCGw1s21m9inwMNFY9LSIzluG6cDieHox\ncEHSRsxsFfBenu1Mowhj77tpE1Ie729mu81sXTy9F9hENOiwaO83i2B2HHe+g3THnRuwQtJqSVfG\n86rNrAmiDxkYkVLbI7ppJ+2x9yUb7y+pjmit/QLdf659brsSDn5OM7OTgPOA6ySdTudLKaU6Z1aK\ndko23l/SUOAXwI3xmrNon2sWwXwLODLnfrfjzovBzHbFt3uAR4k2IU2SqgEk1QBvp9R8d+3kPfa+\nr8xsj1nryem7adtkFrVNSQcQhfIBM3ssnl2095tFMFcDx0gaJWkgcDHRWPSikzQ4/qtG0hBgCrAh\nbu+yeLFLgce6fIECmqT9/l137SwFLpY0UNJoehh739c240C06Djev1htAvwM2GhmP8yZV7z3m9bR\ncC9HdVOJjuS2ArNTbGc00VH/WqJAzo7nfxFYGfdhOfCFIrT1ELAT+AR4E7gcGN5dO8AcoqPTTcCU\nIrZ5P7A+ft+PEu33Fa3N+HVOA/bnfLZr4t9pt59rX9v2S5IuSJVw8OPKkAfTBcmD6YLkwXRB8mC6\nIHkwXZA8mC5IHkwXpP8HslxQdz4HHxMAAAAASUVORK5CYII=\n", 206 | "text/plain": [ 207 | "" 208 | ] 209 | }, 210 | "metadata": {}, 211 | "output_type": "display_data" 212 | } 213 | ], 214 | "source": [ 215 | "plt.imshow(gbasin_simplified_img)\n", 216 | "plt.show()" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": null, 222 | "metadata": { 223 | "collapsed": true 224 | }, 225 | "outputs": [], 226 | "source": [] 227 | } 228 | ], 229 | "metadata": { 230 | "kernelspec": { 231 | "display_name": "Python 3", 232 | "language": "python", 233 | "name": "python3" 234 | }, 235 | "language_info": { 236 | "codemirror_mode": { 237 | "name": "ipython", 238 | "version": 3 239 | }, 240 | "file_extension": ".py", 241 | "mimetype": "text/x-python", 242 | "name": "python", 243 | "nbconvert_exporter": "python", 244 | "pygments_lexer": "ipython3", 245 | "version": "3.5.3" 246 | } 247 | }, 248 | "nbformat": 4, 249 | "nbformat_minor": 0 250 | } 251 | -------------------------------------------------------------------------------- /Graham_Ganssle/dat/zips/fault_dyke_fold_model.zip: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/CSEG/Machine-Learning-CSEG-special-issue/5cfad97e8a8c4284c78af12d3f1a670110cec067/Graham_Ganssle/dat/zips/fault_dyke_fold_model.zip -------------------------------------------------------------------------------- /Graham_Ganssle/dat/zips/fold_dyke_fault_model.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSEG/Machine-Learning-CSEG-special-issue/5cfad97e8a8c4284c78af12d3f1a670110cec067/Graham_Ganssle/dat/zips/fold_dyke_fault_model.zip -------------------------------------------------------------------------------- /Graham_Ganssle/dat/zips/gbasin_simplified_model.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSEG/Machine-Learning-CSEG-special-issue/5cfad97e8a8c4284c78af12d3f1a670110cec067/Graham_Ganssle/dat/zips/gbasin_simplified_model.zip -------------------------------------------------------------------------------- /Graham_Ganssle/evaluation/loss_comparison.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSEG/Machine-Learning-CSEG-special-issue/5cfad97e8a8c4284c78af12d3f1a670110cec067/Graham_Ganssle/evaluation/loss_comparison.png -------------------------------------------------------------------------------- /Graham_Ganssle/evaluation/losses.csv: -------------------------------------------------------------------------------- 1 | pix2pix, tv, bilat, wavelet 2 | 1236.91407653, 3522.66079072, 4520.10627024, 5018.15317813, 3 | 984.298333592, 2940.58262711, 4064.51519965, 4894.31814595, 4 | 1460.81051793, 6963.77854448, 7307.60788624, 7527.78705875, 5 | 955.38994732, 7644.89908448, 7921.73327517, 7959.95183859, 6 | 2158.03101504, 3035.68075902, 4242.04521264, 5153.03084284, 7 | 1117.02534658, 7511.20195978, 7804.03659437, 7808.72902584, 8 | 2121.82750191, 8682.08582731, 8783.63092219, 8910.61888157, 9 | 
1227.60736668, 2820.29965628, 4088.13549777, 4795.59523864, 10 | 1551.2703849, 2422.6364914, 3925.48909637, 4631.42789456, 11 | 1364.24287465, 3746.5305193, 4660.69390975, 5252.70445179, 12 | 1338.24705581, 3310.9402142, 4370.54443146, 5094.65303559, 13 | 1148.44276946, 6970.15285546, 7303.18499098, 7541.01985023, 14 | 7978.57934641, 3175.9730817, 4264.29255953, 4976.70968213, 15 | 738.656756757, 3605.69273939, 4530.8534115, 5227.80377309, 16 | 2076.55492344, 6958.09189215, 7399.24269416, 7590.86069762, 17 | 1035.83915944, 7859.37726975, 8104.05727199, 8183.9196696, 18 | 1968.58461268, 3959.01434989, 4914.07101678, 5560.95571391, 19 | 1713.35835936, 3033.15763947, 4169.41892917, 4769.00003273, 20 | 1042.01272016, 3075.46691348, 4236.99834727, 4978.29197827, 21 | 1072.86812644, 2597.03765421, 3809.45194312, 4546.38247631, 22 | 3045.15664488, 6034.87043592, 6448.78785268, 6822.72080123, 23 | 1493.04838822, 7259.87814321, 7674.62027904, 7838.58121251, 24 | 653.906493506, 4071.869432, 4788.26859777, 5445.65301659, 25 | 4275.89410094, 2798.84783232, 3883.30452931, 4696.79743334, 26 | 1005.27450467, 7469.27426983, 7774.59260731, 7784.2081725, 27 | 4303.57385621, 3624.43408596, 4602.81627854, 5264.79214505, 28 | 1648.52351339, 5780.4618204, 6567.94682308, 7204.49528297, 29 | 899.440722012, 2709.32492251, 3977.84907667, 4768.82206216, 30 | 931.475984947, 2954.3032121, 4206.16772409, 4984.60461221, 31 | 1552.90453465, 3407.06990904, 4476.92223203, 5159.1260401, 32 | 8670.77990196, 3548.18947015, 4313.0612302, 5098.89424074, 33 | 1190.75203819, 7662.8858297, 7941.66931269, 7948.45803009, 34 | 1114.37965435, 6976.81961951, 7320.25249675, 7555.71134382, 35 | 1222.30348364, 7366.61448556, 7777.71137078, 7906.08325263, 36 | 815.392561983, 4395.26188402, 4976.06182935, 5549.55568596, 37 | 947.53737037, 7484.24468058, 7779.08275906, 7787.5746284, 38 | 1703.36162988, 4252.81868505, 4915.64373258, 5501.34292841, 39 | 5885.01662433, 4176.29615102, 4925.78448504, 5535.39430303, 40 | 
2573.36733199, 5293.42027183, 5659.89593533, 6253.04058821, 41 | 1872.15544431, 4892.40007108, 5391.20446002, 5948.42204156, 42 | 1485.67748121, 7627.49103546, 7947.11391348, 8111.87123729, 43 | 1609.8801364, 3775.08532412, 4657.63630543, 5325.64068785, 44 | 1542.5488491, 3050.47361139, 4075.30566292, 4895.5664544, 45 | 4549.24582786, 9834.5149494, 9876.47961511, 10020.7934716, 46 | 2057.85620711, 6619.71361673, 6984.32624496, 7205.80084673, 47 | 880.378148574, 3604.07145177, 4389.16556699, 5142.40443994, 48 | 1968.59584972, 2689.28999204, 3990.37743366, 4759.37149549, 49 | 1265.21071041, 7045.5514141, 7378.38390418, 7599.92614878, 50 | 1855.44650724, 6970.03301368, 7419.45052588, 7604.46174491, 51 | 1103.74124706, 7019.84839452, 7361.2820261, 7574.95983311, 52 | 1778.41218116, 3764.91978586, 4580.68144517, 5289.06791158, 53 | 1136.6230897, 7728.22309825, 7997.33302127, 8044.43306794, 54 | 1203.02681611, 4351.32900559, 5040.75554064, 5694.6564028, 55 | 1377.00230359, 3656.08888018, 4670.90005881, 5353.70150099, 56 | 4121.68529032, 6843.61053618, 7001.15166173, 7281.70090915, 57 | 896.996437407, 2592.59748472, 4059.35393192, 4749.31588166, 58 | 1446.16740254, 3077.28073811, 4327.91974343, 5140.73224807, 59 | 1018.33475, 7513.00074702, 7809.6414832, 7834.24370082, 60 | 804.177489177, 6189.84980749, 6408.03650339, 6874.17245475, 61 | 4248.75089715, 3139.63209209, 4133.92514211, 4951.32766816, 62 | 2795.39821849, 8406.63930513, 8731.24685935, 8804.27597685, 63 | 2121.83794245, 5084.45652569, 5671.99730393, 6065.06991264, 64 | 984.721218487, 6146.57275102, 6527.02821834, 6913.01867835, 65 | 1597.69146825, 6932.35501369, 7259.14520464, 7480.72919734, 66 | 1806.81642512, 7190.4778831, 7499.18987225, 7713.46538384, 67 | 935.680288958, 9799.89777433, 9923.17276669, 9957.99799582, 68 | 2360.06450855, 2779.48300947, 3996.1107322, 4766.83665231, 69 | 1970.80058338, 6320.50111398, 6740.57163985, 7016.46733111, 70 | 4761.38848833, 3348.25295898, 4139.2500281, 5005.54000306, 71 | 
2274.24452275, 4830.71903523, 5311.94035819, 5897.2447628, 72 | 1092.13470134, 7062.14251374, 7381.47406736, 7595.83010218, 73 | 4406.38070728, 6445.83321499, 6876.94097094, 7096.16950297, 74 | 2123.94405986, 7004.45502146, 7447.69099043, 7639.29127425, 75 | 1118.54418131, 8680.14730625, 8778.75037323, 8905.85070195, 76 | 2665.3802862, 2633.6082515, 3921.95273919, 4732.3944141, 77 | 3385.82924837, 3364.45618227, 4303.17151801, 5045.80985147, 78 | 2147.84176587, 7077.63272895, 7403.00297653, 7621.2992401, 79 | 799.269873331, 3641.38127891, 4732.69201247, 5543.31028187, 80 | 940.509560538, 7610.58859837, 7889.80281182, 7937.45059506, 81 | 1458.169443, 4822.98240022, 5249.05680992, 5893.44955137, 82 | 1566.17147252, 3381.3907576, 4427.62251815, 4920.17032291, 83 | 1969.87567303, 7105.95516245, 7531.03259966, 7710.06177445, 84 | 959.790196078, 4550.80735914, 5203.10079661, 5846.11518246, 85 | 2073.21471888, 2825.26641902, 4062.54614275, 4904.73989643, 86 | 739.544510859, 3810.90506533, 4611.52020197, 5335.03534213, 87 | 4832.32736322, 4053.72322316, 4691.56727693, 5474.17138234, 88 | 1663.19824017, 7831.07186703, 8101.92997103, 8241.30439074, 89 | 1307.61890619, 6956.91267734, 7290.98946756, 7502.85867055, 90 | 1871.6585396, 3460.33428132, 4456.90924232, 4931.04699997, 91 | 2156.1606443, 5092.22291789, 5602.27766269, 5930.70780403, 92 | 5375.04089862, 3566.87185385, 4422.69537918, 5257.57098926, 93 | 2492.91675988, 2641.80310625, 3850.59148451, 4735.82088487, 94 | 1504.11230937, 8089.89402821, 8342.64053458, 8449.52437163, 95 | 2606.23912724, 5371.78098828, 5728.44923298, 6218.77286375, 96 | 2247.44210302, 2788.51610003, 4005.92897028, 4803.82657066, 97 | 3881.249519, 2759.60739399, 4158.36582724, 4910.12237919, 98 | 1392.28784857, 6429.48972007, 6835.98291017, 7097.45191189, 99 | 5101.69755739, 2484.89536068, 3878.2303451, 4671.63519276, 100 | 1645.18793911, 2830.98976781, 4082.71374811, 4894.10121952, 101 | 1004.76323459, 2855.23109009, 4043.3534696, 4871.3757453, 
102 | 1721.87730891, 3262.09722765, 4259.76743078, 5072.34380542, 103 | 1936.10539896, 7020.63070307, 7357.37204189, 7571.41222067, 104 | 1156.74926122, 5164.69736055, 5542.11671859, 6009.98584938, 105 | 1336.19266055, 10162.2651511, 10305.845705, 10370.3760687, 106 | 3000.4020712, 3314.10614879, 4413.1561947, 5256.08143326, 107 | 2368.20647744, 5069.69676248, 5640.01983278, 6083.7285766, 108 | 3218.87528537, 2886.69300707, 4188.78953885, 4950.02493077, 109 | 1226.71273862, 7047.61959629, 7379.27383793, 7604.33116625, 110 | 9009.96106577, 2658.55468294, 4042.44449311, 4767.22654928, 111 | 1529.95779221, 7154.20484959, 7461.0431811, 7681.48282922, 112 | 1153.84644309, 8436.83714497, 8635.38519625, 8661.09276526, 113 | 2209.55702811, 2991.92029782, 4167.94535769, 4958.2134868, 114 | 2731.10075415, 4112.61659011, 4952.80305088, 5584.68937257, 115 | 1708.68184955, 7130.12299851, 7568.97831256, 7750.73179025, 116 | 2449.71985786, 3625.54601373, 4457.09540212, 5136.07267327, 117 | 2359.31927598, 4771.34538765, 5211.98713262, 5859.10379023, 118 | 2704.42349206, 5083.99331118, 6163.43717865, 6853.07561077, 119 | 2034.62892607, 3305.91272982, 4386.57967476, 5142.5689938, 120 | 1805.99079284, 5071.71176223, 5705.78259998, 6216.10585138, 121 | 998.603413655, 2599.12900579, 3800.51818529, 4697.55947779, 122 | 1264.447597, 6377.76019182, 7032.08631385, 7306.42321212, 123 | 1870.8630865, 6970.03301368, 7419.45052588, 7604.46174491, 124 | 1997.78376525, 3590.74239851, 4470.95314098, 5134.42623998, 125 | 1206.88358602, 6270.63465168, 6691.55004104, 6971.06974293, 126 | 1455.95423138, 7016.14268371, 7364.01074763, 7575.34381186, 127 | 3039.1351634, 4076.5847974, 4929.21324936, 5589.11842075, 128 | 1426.16885007, 6983.55459836, 7428.89847173, 7606.01344513, 129 | 1497.46549801, 6207.74629501, 6899.8826084, 7199.26912086, 130 | 777.196223931, 7553.228821, 7849.48922574, 7866.71406131, 131 | 1281.64288789, 7735.64361682, 8015.33228305, 8160.66577384, 132 | 768.85688821, 3325.71513275, 
4548.96275747, 5344.01815294, 133 | 2344.60478737, 2753.60883434, 3959.22952549, 4758.57375649, 134 | 2766.94292929, 3891.91447017, 4681.98034217, 5305.20088734, 135 | 827.473630081, 2946.07733833, 4016.46088925, 4710.60914868, 136 | 1286.59593883, 7044.81103085, 7390.03672812, 7601.28680152, 137 | 1711.60543478, 5648.648782, 5977.9443841, 6395.94916611, 138 | 3117.36954741, 6405.04457796, 6789.0633861, 7031.41830683, 139 | 1218.94783282, 3629.56439203, 4747.55696919, 5461.824915, 140 | 1156.26336097, 2779.42100832, 4068.78139821, 4832.03375124, 141 | 2392.73906161, 6847.85049255, 6983.49929317, 7296.07500551, 142 | 1071.55358527, 4049.3365468, 4753.6455519, 5345.01816495, 143 | 1467.65856998, 3721.30417253, 4840.7849114, 5586.75979689, 144 | 1000.02766841, 3141.82672103, 4139.54255131, 4918.82056376, 145 | 959.979039892, 3498.66338867, 4511.30862694, 5315.26839724, 146 | 7534.17984957, 5307.85690645, 5710.67114563, 6172.41836851, 147 | 1103.89192998, 7013.39210639, 7338.33868641, 7548.89496123, 148 | 3325.91469979, 5738.40773634, 6053.91694508, 6499.26918221, 149 | 2023.02725668, 4012.80250092, 4782.54087293, 5448.58751708, 150 | 1185.62821622, 7725.65464522, 8004.21927689, 8157.43685343, 151 | 2220.12580816, 3088.06579816, 4279.97339347, 4998.53937473, 152 | 3312.78956816, 3020.64292775, 4208.51821431, 4977.12927559, 153 | 2331.86634921, 6606.45302398, 6974.83702774, 7188.88395406, 154 | 971.342629482, 7544.14260951, 7832.86205464, 7845.43896754, 155 | 1016.91608521, 2049.64370629, 3614.79695568, 4357.60040287, 156 | 1118.19517256, 6985.98109452, 7323.78003697, 7548.17674974, 157 | 1702.18546366, 7130.12299851, 7568.97831256, 7750.73179025, 158 | 6841.34268227, 7890.12258003, 8125.94259931, 8316.70778171, 159 | 990.678499872, 6079.60310448, 6477.86323288, 6838.27264757, 160 | 3098.00309027, 7098.58013526, 7350.65454761, 7634.14204245, 161 | 2219.20178047, 4155.15196074, 4953.04173082, 5571.23755782, 162 | 3108.98957989, 3157.32203807, 4348.61624803, 5152.2172592, 
163 | 1604.6626419, 7333.06645617, 7829.47534947, 7966.50592966, 164 | 1668.52752687, 3486.74162323, 4375.18727612, 5109.83935777, 165 | 1294.2195071, 7366.61448556, 7777.71137078, 7906.08325263, 166 | 769.578611872, 7466.72633007, 7765.58746016, 7770.15991909, 167 | 2891.47322727, 3666.72676754, 4571.72286698, 5306.37889151, 168 | 2445.87289003, 4153.61499408, 4918.75423025, 5647.69444391, 169 | 1585.01260504, 3524.76852043, 4422.63896859, 5213.62595813, 170 | 1555.02992736, 2764.51662994, 4076.78173405, 4863.9906013, 171 | 8919.20198155, 5303.32256903, 5744.9524052, 6207.55542736, 172 | 2913.05681115, 8117.95627054, 8395.78952232, 8517.51488524, 173 | 934.638539509, 3986.90758271, 4755.28990984, 5353.24646186, 174 | 1470.67323511, 3445.4896454, 4311.35638971, 5107.44470717, 175 | 1796.59398083, 7558.94320619, 7855.536865, 7873.7905017, 176 | 3192.42841154, 3167.48739031, 4286.5896499, 5065.26719911, 177 | 2015.34962293, 2991.75297695, 4215.5486958, 4807.49025903, 178 | 2403.32732203, 8406.63930513, 8731.24685935, 8804.27597685, 179 | 1813.79943978, 2929.46315393, 4164.29159885, 4967.39051722, 180 | 3135.85682508, 2884.61948617, 4118.13086957, 4833.83559537, 181 | 1491.62648649, 6118.10994027, 6756.2252282, 7189.79918277, 182 | 4131.87793914, 3385.27048998, 4219.87067353, 4945.71990506, 183 | 1599.68510288, 2312.88051347, 3727.48717398, 4588.06054316, 184 | 804.680126523, 2563.19593725, 3942.32258466, 4692.38051505, 185 | 4789.78635815, 3820.50800075, 4653.80872013, 5433.19741033, 186 | 734.622787493, 3218.14786775, 4407.96295735, 5214.92675353, 187 | 2544.61091618, 2950.80406593, 4070.46996041, 4929.01900266, 188 | 1948.72324395, 2607.96841742, 3979.24704046, 4816.32350894, 189 | 1078.0296146, 2771.237099, 4062.64417514, 4824.03707846, 190 | 1385.33266904, 7161.02589274, 7599.87368811, 7793.3301241, 191 | 897.802413273, 6032.47662859, 6442.76214677, 6795.28351067, 192 | 1256.96751732, 4504.29699566, 5277.15803459, 5820.88531436, 193 | 966.079963801, 
4187.79539192, 5062.89422824, 5672.46181707, 194 | 2152.36804992, 6133.24827378, 6744.28988959, 7164.59405014, 195 | 949.380058803, 2553.00884241, 3920.39892996, 4704.04691158, 196 | 756.531269841, 4013.2987408, 4682.8583101, 5323.45550573, 197 | 1655.12830568, 3934.25786566, 4924.2186987, 5578.62485487, 198 | 1307.01404695, 7482.56377762, 7796.0483946, 7820.64747002, 199 | 764.904561932, 5213.57035732, 5624.75428383, 6123.75943478, 200 | 4574.62430242, 5114.30535676, 5545.50309705, 6058.18010116, 201 | 4622.37521091, 7791.43004717, 8007.79902993, 8234.3912075, 202 | 1366.14780341, 6931.08883655, 7281.96609589, 7510.58576597, 203 | 1271.7568439, 3770.36269615, 4506.6394843, 5316.91975124, 204 | 3559.064, 4588.44214119, 5115.79022489, 5624.95364313, 205 | 722.794220846, 2433.81176159, 3693.60294914, 4622.15988029, 206 | 874.322128852, 3180.78930237, 4415.88154267, 5203.59935007, 207 | 1117.99163449, 7567.81523358, 7864.3552657, 7887.38362297, 208 | 2553.6648026, 5913.51574015, 6665.06648834, 7272.40762783, 209 | 946.061916836, 2750.69699271, 3980.63288978, 4827.01926444, 210 | 1688.54347826, 3945.27974466, 4900.75763199, 5610.63235688, 211 | 1423.84061625, 2373.23747683, 3795.2880414, 4618.25461124, 212 | 1395.54166667, 7545.27706913, 7844.30031646, 7857.47656112, 213 | 3002.18039216, 3149.55740166, 4378.61374963, 5125.67582818, 214 | 8596.58582784, 2799.87924096, 4035.83932721, 4781.02748808, 215 | 2682.70698107, 2785.1886093, 3988.91463393, 4881.30119918, 216 | 1974.85188156, 2814.87525211, 4147.39071391, 4922.33991806, 217 | 3045.47216362, 2052.31514512, 3555.53413396, 4306.27165992, 218 | 2417.03600887, 6617.78366315, 6996.86620069, 7194.79921257, 219 | 1518.99256911, 3522.66079072, 4520.10627024, 5018.15317813, 220 | 1222.95444947, 2404.20757909, 3825.40918895, 4651.71264735, 221 | 3279.57830622, 3725.35321248, 4593.4089163, 5382.74239664, 222 | 3102.29828431, 3114.60740988, 4359.37150671, 5100.80294553, 223 | 1601.41870426, 3828.43416425, 4674.67981421, 
5308.46914007, 224 | 583.590518739, 3563.50307333, 4504.03904478, 5108.86988286, 225 | 3729.06829268, 4481.31057318, 5048.74733275, 5766.68654365, 226 | 1001.15155556, 7470.83532874, 7773.4278697, 7783.06839111, 227 | 1200.62470849, 7880.35656158, 8137.26051112, 8309.08555, 228 | 1858.40186982, 4519.25045097, 5055.06133622, 5689.46081302, 229 | 1410.29295557, 3327.67549306, 4460.09993514, 5159.43553343, 230 | 1562.79567862, 7297.31842722, 7713.51437079, 7882.4450542, 231 | 1956.04345546, 3282.85808191, 4379.7954353, 5218.66964091, 232 | 1833.9384745, 6176.10501032, 6583.26531357, 6885.74528019, 233 | 1064.35080989, 3983.13187676, 4811.53217796, 5452.14723598, 234 | 876.105202603, 7663.04196534, 7941.520481, 7948.4493226, 235 | 2596.06881055, 4780.71180174, 5239.50256409, 5843.8063473, 236 | 952.939541952, 3549.4251015, 4550.32012191, 5304.58075527, 237 | 4041.21543272, 3022.83698275, 4143.93814634, 5065.0345767, 238 | 2274.06099973, 3006.2298587, 4204.79831377, 5047.99121747, 239 | 1019.4709067, 3686.44721603, 4539.39877476, 5278.70698525, 240 | 1621.38930481, 3950.15661967, 4818.24475468, 5458.5033767, 241 | 1125.39457014, 3841.37320413, 4745.43793921, 5415.65354214, 242 | 1438.40626172, 5709.01374159, 5990.48751876, 6381.39857042, 243 | 2116.21469106, 3995.80323455, 4687.63657694, 5225.90445693, 244 | 817.033238636, 3144.24691929, 4230.06320797, 5016.06577592, 245 | 1407.83829365, 7156.23274013, 7467.48800228, 7690.09910796, 246 | 3435.61160714, 3370.56668001, 4493.12511809, 5263.72511744, 247 | 1950.58709273, 7171.8533171, 7571.63465444, 7759.68122412, 248 | 833.722825446, 3584.56434191, 4463.40126885, 5188.28188336, 249 | 715.714779661, 3179.79271362, 4381.37395849, 5208.52189084, 250 | 940.857577384, 3024.79193163, 4192.47115714, 4986.41140565, 251 | 3844.53237045, 3955.89174579, 4718.19763455, 5427.64253832, 252 | 7044.71946478, 3690.69734639, 4535.72978826, 5202.49471399, 253 | 4837.39329016, 5089.50473495, 5689.31967126, 6225.31698309, 254 | 1134.53940492, 
3855.08928699, 4856.73770157, 5656.6167571, 255 | 835.942098832, 3314.58441972, 4445.19924618, 5151.87937064, 256 | 1271.33447353, 2976.80934908, 4179.11940241, 4907.33589519, 257 | 1124.36960784, 7052.74070203, 7384.13486212, 7598.11473042, 258 | 1727.15639658, 2636.13625723, 3979.5686989, 4758.97454676, 259 | 1551.97228894, 3183.83574088, 4254.13830288, 4814.88712835, 260 | 888.949833995, 3152.58229465, 4174.92796826, 4905.01923635, 261 | 5022.06188269, 2688.8069673, 3925.29865722, 4772.24009668, 262 | 2893.02910053, 3120.70194511, 4193.22175057, 5012.60804126, 263 | 1725.92830948, 2702.8343418, 4046.79087746, 4827.60402305, 264 | 1683.40696847, 6606.45302398, 6974.83702774, 7188.88395406, 265 | 1478.36828547, 3195.27350964, 4179.09599782, 4973.70304275, 266 | 993.081713945, 3231.50986226, 4231.92617333, 4953.50295495, 267 | 1656.68180825, 3300.03449173, 4181.04777438, 4888.93675241, 268 | 873.376687449, 5380.38855074, 5734.25639694, 6226.49535764, 269 | 1426.20134956, 6980.29242095, 7420.39080396, 7602.07216421, 270 | 1678.84621942, 2098.62775207, 3680.11710941, 4352.49298129, 271 | 965.668866359, 7558.64909818, 7849.68005464, 7887.22725962, 272 | 1057.30045351, 3991.78709141, 4675.78721491, 5313.64619296, 273 | 1473.46735822, 5102.80029673, 5545.79919846, 6067.45580393, 274 | 1062.99746357, 3804.73719675, 4751.92324325, 5450.43726941, 275 | 1159.97178378, 7736.38114529, 8014.47825635, 8161.24668255, 276 | 2635.47241514, 2803.32991084, 4039.60001787, 4855.79613278, 277 | 2585.3282554, 8017.2720122, 8266.81288122, 8412.9426377, 278 | 884.56160945, 7830.8273618, 8103.09659318, 8242.24886575, 279 | 1293.62467011, 3475.66955687, 4411.73721071, 5090.05143237, 280 | 2077.57289238, 9883.27636196, 10062.0647786, 10107.8381746, 281 | 866.060142857, 5637.38086938, 5946.19268201, 6383.56695081, 282 | 1238.42887145, 7682.8135702, 7968.56260609, 8116.96563539, 283 | 1525.29302423, 7292.61580643, 7708.6181452, 7867.04677719, 284 | 809.596710479, 7836.29964703, 8105.09590132, 
8245.86746966, 285 | 1243.63515748, 7649.84110056, 7925.79276218, 7964.77316045, 286 | 2168.49873959, 2651.63158344, 3875.04765232, 4741.4807383, 287 | 784.150083084, 2373.48561035, 3794.48511506, 4629.99069712, 288 | 854.844540626, 2888.68746075, 4127.61750828, 4849.45599863, 289 | 1047.69263428, 2929.93476353, 4045.56841293, 4813.9917014, 290 | 1032.26726727, 7633.9386921, 7919.07284714, 7941.7408782, 291 | 3219.1042069, 3542.70234665, 4450.99672461, 5219.08039714, 292 | 3200.49900716, 5207.19114865, 5679.37229873, 6112.43266814, 293 | 1524.65864684, 8169.04658839, 8479.21487002, 8571.31782647, 294 | 2403.95677966, 8406.63930513, 8731.24685935, 8804.27597685, 295 | 1528.7042577, 2801.97379147, 4053.38425874, 4885.27609339, 296 | 2745.97779917, 2569.65119274, 3842.84328663, 4645.39067728, 297 | 3028.03706499, 4288.3919876, 4954.5381489, 5569.05794541, 298 | 1789.91755793, 3366.1354256, 4588.76210354, 5381.28249859, 299 | 3106.70223547, 2690.57180717, 4053.51778547, 4803.21389508, 300 | 2039.16766767, 2341.90455785, 3738.17063395, 4603.80680639, 301 | 1598.37802306, 3460.33428132, 4456.90924232, 4931.04699997, 302 | 1025.77290837, 7551.19763969, 7848.02846516, 7869.40861601, 303 | 3730.19647696, 5024.60073274, 5435.68844313, 5964.58060409, 304 | 1640.25132857, 4956.12776059, 5483.47585039, 5963.3897663, 305 | 3103.01856209, 3337.57996225, 4428.66983358, 5113.86922197, 306 | 1338.80639386, 2467.16057649, 3862.03240887, 4669.18424177, 307 | 2266.70545037, 3489.12890193, 4349.40851239, 5125.24487333, 308 | 2197.70009003, 2709.05539777, 4052.13008349, 4887.13911995, 309 | 896.939849624, 7346.56634426, 7523.53984063, 7736.9074211, 310 | 3338.55340925, 2224.25486784, 3621.51151155, 4470.12204722, 311 | 1155.59376165, 7177.9285613, 7484.54039294, 7701.66014255, 312 | 3919.39687334, 3109.24516355, 4238.63349992, 4869.51014739, 313 | 3833.31047514, 3041.80464299, 4149.76159709, 4919.688533, 314 | 1246.21704486, 6952.92587499, 7294.6961335, 7538.97355178, 315 | 
989.465287318, 3482.06167299, 4471.04978557, 4976.11238773, 316 | 1369.86058435, 6983.55459836, 7428.89847173, 7606.01344513, 317 | 1552.23392146, 6326.61728583, 6960.02397843, 7268.37466199, 318 | 2064.2400498, 3032.49007849, 4172.63547024, 4963.72556311, 319 | 5386.48419121, 5287.12551994, 5702.37727247, 6180.71996078, 320 | 861.411661507, 3668.52629325, 4630.49686148, 5299.44829068, 321 | 2384.86114551, 3804.34997994, 4722.33519581, 5411.12618998, 322 | 2764.69519421, 3721.28585151, 4546.21952628, 5199.73449242, 323 | 1726.54626942, 3012.01531567, 4255.05478737, 4930.79050667, 324 | 1109.27495108, 7011.59467317, 7337.7780178, 7547.85475004, 325 | 2211.06998637, 3175.30647468, 4230.58142313, 5007.89643197, 326 | 3127.22916239, 10059.4242447, 10317.9835616, 10377.5455998, 327 | 1894.43532182, 5327.040692, 5667.74737682, 6199.32879131, 328 | 1268.03648753, 7563.68149576, 7849.2177134, 7866.01232335, 329 | 1454.17961131, 4074.32004656, 5044.96239734, 5656.87915321, 330 | 878.283464567, 7777.62920224, 8048.33523625, 8077.32543795, 331 | 960.054261553, 4675.02090647, 5196.01604464, 5787.1649254, 332 | 1737.93902524, 6945.6235123, 7388.7102664, 7584.99644937, 333 | 1596.86293141, 6180.2701551, 6589.39868942, 6892.2904735, 334 | 5620.50866463, 4113.66796628, 4759.21812231, 5403.19206377, 335 | 2296.91333333, 7171.8533171, 7571.63465444, 7759.68122412, 336 | 1437.46629779, 7102.705959, 7410.73019551, 7626.39755086, 337 | 3022.79885612, 4075.56732877, 4873.39515963, 5481.01856924, 338 | 906.029667377, 7541.81477131, 7853.19215063, 7880.65155069, 339 | 1347.90699219, 2378.85419058, 3771.64058305, 4577.12444131, 340 | 4123.94747326, 7980.21013155, 8331.90731745, 8526.98645106, 341 | 2225.77107692, 6162.62676869, 6389.32274223, 6802.56854645, 342 | 905.314229178, 7946.48286871, 8214.06378398, 8285.19317916, 343 | 1413.78927029, 2222.8511735, 3709.9421013, 4492.05252508, 344 | 7254.71295263, 5417.47575817, 5815.1028889, 6240.37897688, 345 | 1135.62633306, 3247.81953002, 
4422.04089237, 5154.6103787, 346 | 1464.50328054, 4480.65712246, 4986.42271434, 5625.84501873, 347 | 1069.72526462, 3717.69767852, 4801.42676971, 5582.95933308, 348 | 7120.19915209, 3474.13448209, 4510.47095502, 5181.57024048, 349 | 6603.06576194, 6975.57123725, 7076.71171948, 7446.0382134, 350 | 2741.06459083, 2486.24940594, 3885.12800379, 4667.56786996, 351 | 1436.75396963, 7503.60312238, 7798.59092031, 7797.77078535, 352 | 1174.97641295, 3692.69388593, 4653.99356584, 5323.69042956, 353 | 3666.38524971, 5770.60723106, 6014.87797222, 6457.54749308, 354 | 710.86381982, 2656.00234123, 4054.11450778, 4795.80222637, 355 | 1261.56431024, 8304.15313249, 8396.22083677, 8587.15963593, 356 | 850.494716939, 1961.26061961, 3566.15999828, 4283.43329808, 357 | -------------------------------------------------------------------------------- /Graham_Ganssle/evaluation/network_output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSEG/Machine-Learning-CSEG-special-issue/5cfad97e8a8c4284c78af12d3f1a670110cec067/Graham_Ganssle/evaluation/network_output.png -------------------------------------------------------------------------------- /Graham_Ganssle/evaluation/real_data_test_one.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSEG/Machine-Learning-CSEG-special-issue/5cfad97e8a8c4284c78af12d3f1a670110cec067/Graham_Ganssle/evaluation/real_data_test_one.png -------------------------------------------------------------------------------- /Graham_Ganssle/evaluation/real_data_test_two.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSEG/Machine-Learning-CSEG-special-issue/5cfad97e8a8c4284c78af12d3f1a670110cec067/Graham_Ganssle/evaluation/real_data_test_two.png -------------------------------------------------------------------------------- 
/Graham_Ganssle/evaluation/real_loss_comparison.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CSEG/Machine-Learning-CSEG-special-issue/5cfad97e8a8c4284c78af12d3f1a670110cec067/Graham_Ganssle/evaluation/real_loss_comparison.png -------------------------------------------------------------------------------- /Graham_Ganssle/img_frmt/array_to_image.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | 3 | ''' 4 | This script simply crops and rotates the input numpy arrays, 5 | and outputs them as images for QC. These ideas are all also 6 | written into the modeling algorithm script. 7 | ''' 8 | 9 | # deps 10 | import numpy as np 11 | import glob 12 | import scipy.misc 13 | 14 | # folder names 15 | in1 = '../dat/fold_dyke_fault_model/*' 16 | in2 = '../dat/fault_dyke_fold_model/*' 17 | in3 = '../dat/gbasin_simplified_model/*' 18 | 19 | out = '../dat/clean_imgs/' 20 | 21 | # initialize 22 | idx = 1 23 | 24 | ''' 25 | # all files in first folder 26 | for name in glob.glob(in1): 27 | # read 28 | pic = np.load(name) 29 | 30 | # crop 31 | pic = pic[:200, :200] 32 | 33 | # flip 34 | pic = np.transpose(pic) 35 | 36 | # write out 37 | scipy.misc.imsave(''.join((out, str(idx), '.jpg')), pic) 38 | 39 | # count 40 | idx += 1 41 | 42 | 43 | # all files in second folder 44 | for name in glob.glob(in2): 45 | # read 46 | pic = np.load(name) 47 | 48 | # crop 49 | pic = pic[:200, :200] 50 | 51 | # write out 52 | scipy.misc.imsave(''.join((out, str(idx), '.jpg')), pic) 53 | 54 | # count 55 | idx += 1 56 | ''' 57 | 58 | # all files in third folder 59 | for name in glob.glob(in3): 60 | # read 61 | pic = np.load(name) 62 | 63 | # crop 64 | pic = pic[:200, :200] 65 | 66 | # flip 67 | pic = np.transpose(pic) 68 | pic = np.flip(pic, axis=0) 69 | 70 | # write out 71 | scipy.misc.imsave(''.join((out, str(idx), '.jpg')), pic) 72 | 73 | # count 74 | idx += 1 75 | 76 | 
77 | -------------------------------------------------------------------------------- /Graham_Ganssle/img_frmt/modeler.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | 3 | ''' 4 | This script aligns the numpy arrays, calculates reflection coefficients, 5 | and models (convolves w a wavelet) them. 6 | ''' 7 | 8 | # deps 9 | import numpy as np 10 | import glob 11 | import scipy.misc 12 | import bruges 13 | 14 | # folder names 15 | in1 = '../dat/fold_dyke_fault_model/*' 16 | in2 = '../dat/fault_dyke_fold_model/*' 17 | in3 = '../dat/gbasin_simplified_model/*' 18 | 19 | out = '../dat/output_seismic/' 20 | 21 | # initialize 22 | idx = 1 23 | ricker = bruges.filters.ricker(duration=1, dt=0.002, f=30) 24 | 25 | # all files in first folder 26 | for name in glob.glob(in1): 27 | # read 28 | pic = np.load(name) 29 | 30 | # crop 31 | pic = pic[:200, :200] 32 | 33 | # flip 34 | pic = np.transpose(pic) 35 | 36 | # calc refl coef 37 | rc = (pic[1:,:] - pic[:-1,:]) / (pic[1:,:] + pic[:-1,:]) 38 | 39 | # convolve a ricker wavelet with the refl coef 40 | pic = np.array(np.apply_along_axis( 41 | lambda t: np.convolve(t, ricker, mode='valid'), axis=0, arr=pic)) 42 | 43 | # clip off ends of convolved data 44 | pic = pic[50:250, :] 45 | 46 | # write out 47 | scipy.misc.imsave(''.join((out, str(idx), '.jpg')), pic) 48 | 49 | # count 50 | idx += 1 51 | 52 | # all files in second folder 53 | for name in glob.glob(in2): 54 | # read 55 | pic = np.load(name) 56 | 57 | # crop 58 | pic = pic[:200, :200] 59 | 60 | # calc refl coef 61 | rc = (pic[1:,:] - pic[:-1,:]) / (pic[1:,:] + pic[:-1,:]) 62 | 63 | # convolve a ricker wavelet with the refl coef 64 | pic = np.array(np.apply_along_axis( 65 | lambda t: np.convolve(t, ricker, mode='valid'), axis=0, arr=pic)) 66 | 67 | # clip off ends of convolved data 68 | pic = pic[50:250, :] 69 | 70 | # write out 71 | scipy.misc.imsave(''.join((out, str(idx), '.jpg')), pic) 72 | 73 | # count 74 | idx 
+= 1 75 | 76 | 77 | # all files in third folder 78 | for name in glob.glob(in3): 79 | # read 80 | pic = np.load(name) 81 | 82 | # crop 83 | pic = pic[:200, :200] 84 | 85 | # flip 86 | pic = np.transpose(pic) 87 | pic = np.flip(pic, axis=0) 88 | 89 | # calc refl coef 90 | rc = (pic[1:,:] - pic[:-1,:]) / (pic[1:,:] + pic[:-1,:]) 91 | 92 | # convolve a ricker wavelet with the refl coef 93 | pic = np.array(np.apply_along_axis( 94 | lambda t: np.convolve(t, ricker, mode='valid'), axis=0, arr=pic)) 95 | 96 | # clip off ends of convolved data 97 | pic = pic[50:250, :] 98 | 99 | # write out 100 | scipy.misc.imsave(''.join((out, str(idx), '.jpg')), pic) 101 | 102 | # count 103 | idx += 1 104 | 105 | 106 | -------------------------------------------------------------------------------- /Graham_Ganssle/img_frmt/pair_generator.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | 3 | ''' 4 | This script takes clean seismic images, adds noise, and sorts them 5 | into folders specified in the pix2pix framework. The step after 6 | this is to use the pix2pix script "combine_A_and_B.py". 
7 | ''' 8 | 9 | # deps 10 | import numpy as np 11 | import matplotlib.image as img 12 | import glob 13 | import scipy.misc 14 | 15 | # folder specs 16 | inputs = '../dat/output_seismic/*' 17 | outclean = '../dat/pairs/A/train/' 18 | outnoisy = '../dat/pairs/B/train/' 19 | 20 | # initialize 21 | idx = 1 22 | 23 | # add noise and write out image pairs 24 | for name in glob.glob(inputs): 25 | # read in image 26 | pic = img.imread(name) 27 | pic = np.asarray(pic) 28 | 29 | # define white noise section 30 | errPerc = np.random.rand() 31 | noise = np.random.rand(pic.shape[0], pic.shape[1]) 32 | 33 | # add noise to image 34 | noisy = pic + noise * np.max(pic) * errPerc 35 | 36 | # write out images 37 | scipy.misc.imsave(''.join((outclean, str(idx), '.jpg')), pic) 38 | scipy.misc.imsave(''.join((outnoisy, str(idx), '.jpg')), noisy) 39 | 40 | # count 41 | idx += 1 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /Graham_Ganssle/pix2pix/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Phillip Isola and Jun-Yan Zhu 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 15 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 16 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 17 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 18 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 19 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 20 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 21 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 22 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 23 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 24 | 25 | 26 | 27 | ----------------------------- LICENSE FOR DCGAN -------------------------------- 28 | BSD License 29 | 30 | For dcgan.torch software 31 | 32 | Copyright (c) 2015, Facebook, Inc. All rights reserved. 33 | 34 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 35 | 36 | Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 37 | 38 | Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 39 | 40 | Neither the name Facebook nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 41 | 42 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 43 | -------------------------------------------------------------------------------- /Graham_Ganssle/pix2pix/data/data.lua: -------------------------------------------------------------------------------- 1 | --[[ 2 | This data loader is a modified version of the one from dcgan.torch 3 | (see https://github.com/soumith/dcgan.torch/blob/master/data/data.lua). 4 | 5 | Copyright (c) 2016, Deepak Pathak [See LICENSE file for details] 6 | ]]-- 7 | 8 | local Threads = require 'threads' 9 | Threads.serialization('threads.sharedserialize') 10 | 11 | local data = {} 12 | 13 | local result = {} 14 | local unpack = unpack and unpack or table.unpack 15 | 16 | function data.new(n, opt_) 17 | opt_ = opt_ or {} 18 | local self = {} 19 | for k,v in pairs(data) do 20 | self[k] = v 21 | end 22 | 23 | local donkey_file = 'donkey_folder.lua' 24 | -- print('n..' .. 
n) 25 | if n > 0 then 26 | local options = opt_ 27 | self.threads = Threads(n, 28 | function() require 'torch' end, 29 | function(idx) 30 | opt = options 31 | tid = idx 32 | local seed = (opt.manualSeed and opt.manualSeed or 0) + idx 33 | torch.manualSeed(seed) 34 | torch.setnumthreads(1) 35 | print(string.format('Starting donkey with id: %d seed: %d', tid, seed)) 36 | assert(options, 'options not found') 37 | assert(opt, 'opt not given') 38 | print(opt) 39 | paths.dofile(donkey_file) 40 | end 41 | 42 | ) 43 | else 44 | if donkey_file then paths.dofile(donkey_file) end 45 | -- print('empty threads') 46 | self.threads = {} 47 | function self.threads:addjob(f1, f2) f2(f1()) end 48 | function self.threads:dojob() end 49 | function self.threads:synchronize() end 50 | end 51 | 52 | local nSamples = 0 53 | self.threads:addjob(function() return trainLoader:size() end, 54 | function(c) nSamples = c end) 55 | self.threads:synchronize() 56 | self._size = nSamples 57 | 58 | for i = 1, n do 59 | self.threads:addjob(self._getFromThreads, 60 | self._pushResult) 61 | end 62 | -- print(self.threads) 63 | return self 64 | end 65 | 66 | function data._getFromThreads() 67 | assert(opt.batchSize, 'opt.batchSize not found') 68 | return trainLoader:sample(opt.batchSize) 69 | end 70 | 71 | function data._pushResult(...) 
72 | local res = {...} 73 | if res == nil then 74 | self.threads:synchronize() 75 | end 76 | result[1] = res 77 | end 78 | 79 | 80 | 81 | function data:getBatch() 82 | -- queue another job 83 | -- print(self.threads) 84 | self.threads:addjob(self._getFromThreads, self._pushResult) 85 | self.threads:dojob() 86 | local res = result[1] 87 | -- print(res) 88 | -- print('result') 89 | -- print(res) 90 | -- os.exit() 91 | -- paths = results[3] 92 | -- print(paths) 93 | 94 | img_data = res[1] 95 | img_paths = res[3] 96 | -- print(img_data:size()) 97 | -- print(type(img_data)) 98 | -- print(img_paths) 99 | -- print(type(img_paths)) 100 | -- result[3] = nil 101 | -- print(type(res)) 102 | 103 | result[1] = nil 104 | if torch.type(img_data) == 'table' then 105 | img_data = unpack(img_data) 106 | end 107 | 108 | 109 | return img_data, img_paths 110 | end 111 | 112 | function data:size() 113 | return self._size 114 | end 115 | 116 | return data 117 | -------------------------------------------------------------------------------- /Graham_Ganssle/pix2pix/data/dataset.lua: -------------------------------------------------------------------------------- 1 | --[[ 2 | Copyright (c) 2015-present, Facebook, Inc. 3 | All rights reserved. 4 | 5 | This source code is licensed under the BSD-style license found in the 6 | LICENSE file in the root directory of this source tree. An additional grant 7 | of patent rights can be found in the PATENTS file in the same directory. 
8 | ]]-- 9 | 10 | require 'torch' 11 | torch.setdefaulttensortype('torch.FloatTensor') 12 | local ffi = require 'ffi' 13 | local class = require('pl.class') 14 | local dir = require 'pl.dir' 15 | local tablex = require 'pl.tablex' 16 | local argcheck = require 'argcheck' 17 | require 'sys' 18 | require 'xlua' 19 | require 'image' 20 | 21 | local dataset = torch.class('dataLoader') 22 | 23 | local initcheck = argcheck{ 24 | pack=true, 25 | help=[[ 26 | A dataset class for images in a flat folder structure (folder-name is class-name). 27 | Optimized for extremely large datasets (upwards of 14 million images). 28 | Tested only on Linux (as it uses command-line linux utilities to scale up) 29 | ]], 30 | {check=function(paths) 31 | local out = true; 32 | for k,v in ipairs(paths) do 33 | if type(v) ~= 'string' then 34 | print('paths can only be of string input'); 35 | out = false 36 | end 37 | end 38 | return out 39 | end, 40 | name="paths", 41 | type="table", 42 | help="Multiple paths of directories with images"}, 43 | 44 | {name="sampleSize", 45 | type="table", 46 | help="a consistent sample size to resize the images"}, 47 | 48 | {name="split", 49 | type="number", 50 | help="Percentage of split to go to Training" 51 | }, 52 | {name="serial_batches", 53 | type="number", 54 | help="if randomly sample training images"}, 55 | 56 | {name="samplingMode", 57 | type="string", 58 | help="Sampling mode: random | balanced ", 59 | default = "balanced"}, 60 | 61 | {name="verbose", 62 | type="boolean", 63 | help="Verbose mode during initialization", 64 | default = false}, 65 | 66 | {name="loadSize", 67 | type="table", 68 | help="a size to load the images to, initially", 69 | opt = true}, 70 | 71 | {name="forceClasses", 72 | type="table", 73 | help="If you want this loader to map certain classes to certain indices, " 74 | .. "pass a classes table that has {classname : classindex} pairs." 75 | .. " For example: {3 : 'dog', 5 : 'cat'}" 76 | .. 
"This function is very useful when you want two loaders to have the same " 77 | .. "class indices (trainLoader/testLoader for example)", 78 | opt = true}, 79 | 80 | {name="sampleHookTrain", 81 | type="function", 82 | help="applied to sample during training(ex: for lighting jitter). " 83 | .. "It takes the image path as input", 84 | opt = true}, 85 | 86 | {name="sampleHookTest", 87 | type="function", 88 | help="applied to sample during testing", 89 | opt = true}, 90 | } 91 | 92 | function dataset:__init(...) 93 | 94 | -- argcheck 95 | local args = initcheck(...) 96 | print(args) 97 | for k,v in pairs(args) do self[k] = v end 98 | 99 | if not self.loadSize then self.loadSize = self.sampleSize; end 100 | 101 | if not self.sampleHookTrain then self.sampleHookTrain = self.defaultSampleHook end 102 | if not self.sampleHookTest then self.sampleHookTest = self.defaultSampleHook end 103 | self.image_count = 1 104 | -- print('image_count_init', self.image_count) 105 | -- find class names 106 | self.classes = {} 107 | local classPaths = {} 108 | if self.forceClasses then 109 | for k,v in pairs(self.forceClasses) do 110 | self.classes[k] = v 111 | classPaths[k] = {} 112 | end 113 | end 114 | local function tableFind(t, o) for k,v in pairs(t) do if v == o then return k end end end 115 | -- loop over each paths folder, get list of unique class names, 116 | -- also store the directory paths per class 117 | -- for each class, 118 | for k,path in ipairs(self.paths) do 119 | -- print('path', path) 120 | local dirs = {} -- hack 121 | dirs[1] = path 122 | -- local dirs = dir.getdirectories(path); 123 | for k,dirpath in ipairs(dirs) do 124 | local class = paths.basename(dirpath) 125 | local idx = tableFind(self.classes, class) 126 | -- print(class) 127 | -- print(idx) 128 | if not idx then 129 | table.insert(self.classes, class) 130 | idx = #self.classes 131 | classPaths[idx] = {} 132 | end 133 | if not tableFind(classPaths[idx], dirpath) then 134 | table.insert(classPaths[idx], 
dirpath); 135 | end 136 | end 137 | end 138 | 139 | self.classIndices = {} 140 | for k,v in ipairs(self.classes) do 141 | self.classIndices[v] = k 142 | end 143 | 144 | -- define command-line tools, try your best to maintain OSX compatibility 145 | local wc = 'wc' 146 | local cut = 'cut' 147 | local find = 'find -H' -- if folder name is symlink, do find inside it after dereferencing 148 | if jit.os == 'OSX' then 149 | wc = 'gwc' 150 | cut = 'gcut' 151 | find = 'gfind' 152 | end 153 | ---------------------------------------------------------------------- 154 | -- Options for the GNU find command 155 | local extensionList = {'jpg', 'png','JPG','PNG','JPEG', 'ppm', 'PPM', 'bmp', 'BMP'} 156 | local findOptions = ' -iname "*.' .. extensionList[1] .. '"' 157 | for i=2,#extensionList do 158 | findOptions = findOptions .. ' -o -iname "*.' .. extensionList[i] .. '"' 159 | end 160 | 161 | -- find the image path names 162 | self.imagePath = torch.CharTensor() -- path to each image in dataset 163 | self.imageClass = torch.LongTensor() -- class index of each image (class index in self.classes) 164 | self.classList = {} -- index of imageList to each image of a particular class 165 | self.classListSample = self.classList -- the main list used when sampling data 166 | 167 | print('running "find" on each class directory, and concatenate all' 168 | .. ' those filenames into a single file containing all image paths for a given class') 169 | -- so, generates one file per class 170 | local classFindFiles = {} 171 | for i=1,#self.classes do 172 | classFindFiles[i] = os.tmpname() 173 | end 174 | local combinedFindList = os.tmpname(); 175 | 176 | local tmpfile = os.tmpname() 177 | local tmphandle = assert(io.open(tmpfile, 'w')) 178 | -- iterate over classes 179 | for i, class in ipairs(self.classes) do 180 | -- iterate over classPaths 181 | for j,path in ipairs(classPaths[i]) do 182 | local command = find .. ' "' .. path .. '" ' .. findOptions 183 | .. ' >>"' .. classFindFiles[i] .. 
'" \n' 184 | tmphandle:write(command) 185 | end 186 | end 187 | io.close(tmphandle) 188 | os.execute('bash ' .. tmpfile) 189 | os.execute('rm -f ' .. tmpfile) 190 | 191 | print('now combine all the files to a single large file') 192 | local tmpfile = os.tmpname() 193 | local tmphandle = assert(io.open(tmpfile, 'w')) 194 | -- concat all finds to a single large file in the order of self.classes 195 | for i=1,#self.classes do 196 | local command = 'cat "' .. classFindFiles[i] .. '" >>' .. combinedFindList .. ' \n' 197 | tmphandle:write(command) 198 | end 199 | io.close(tmphandle) 200 | os.execute('bash ' .. tmpfile) 201 | os.execute('rm -f ' .. tmpfile) 202 | 203 | --========================================================================== 204 | print('load the large concatenated list of sample paths to self.imagePath') 205 | local cmd = wc .. " -L '" 206 | .. combinedFindList .. "' |" 207 | .. cut .. " -f1 -d' '" 208 | print('cmd..' .. cmd) 209 | local maxPathLength = tonumber(sys.fexecute(wc .. " -L '" 210 | .. combinedFindList .. "' |" 211 | .. cut .. " -f1 -d' '")) + 1 212 | local length = tonumber(sys.fexecute(wc .. " -l '" 213 | .. combinedFindList .. "' |" 214 | .. cut .. " -f1 -d' '")) 215 | assert(length > 0, "Could not find any image file in the given input paths") 216 | assert(maxPathLength > 0, "paths of files are length 0?") 217 | self.imagePath:resize(length, maxPathLength):fill(0) 218 | local s_data = self.imagePath:data() 219 | local count = 0 220 | for line in io.lines(combinedFindList) do 221 | ffi.copy(s_data, line) 222 | s_data = s_data + maxPathLength 223 | if self.verbose and count % 10000 == 0 then 224 | xlua.progress(count, length) 225 | end; 226 | count = count + 1 227 | end 228 | 229 | self.numSamples = self.imagePath:size(1) 230 | if self.verbose then print(self.numSamples .. 
' samples found.') end 231 | --========================================================================== 232 | print('Updating classList and imageClass appropriately') 233 | self.imageClass:resize(self.numSamples) 234 | local runningIndex = 0 235 | for i=1,#self.classes do 236 | if self.verbose then xlua.progress(i, #(self.classes)) end 237 | local length = tonumber(sys.fexecute(wc .. " -l '" 238 | .. classFindFiles[i] .. "' |" 239 | .. cut .. " -f1 -d' '")) 240 | if length == 0 then 241 | error('Class has zero samples') 242 | else 243 | self.classList[i] = torch.range(runningIndex + 1, runningIndex + length):long() 244 | self.imageClass[{{runningIndex + 1, runningIndex + length}}]:fill(i) 245 | end 246 | runningIndex = runningIndex + length 247 | end 248 | 249 | --========================================================================== 250 | -- clean up temporary files 251 | print('Cleaning up temporary files') 252 | local tmpfilelistall = '' 253 | for i=1,#(classFindFiles) do 254 | tmpfilelistall = tmpfilelistall .. ' "' .. classFindFiles[i] .. '"' 255 | if i % 1000 == 0 then 256 | os.execute('rm -f ' .. tmpfilelistall) 257 | tmpfilelistall = '' 258 | end 259 | end 260 | os.execute('rm -f ' .. tmpfilelistall) 261 | os.execute('rm -f "' .. combinedFindList .. '"') 262 | --========================================================================== 263 | 264 | if self.split == 100 then 265 | self.testIndicesSize = 0 266 | else 267 | print('Splitting training and test sets to a ratio of ' 268 | .. self.split .. '/' .. 
(100-self.split)) 269 | self.classListTrain = {} 270 | self.classListTest = {} 271 | self.classListSample = self.classListTrain 272 | local totalTestSamples = 0 273 | -- split the classList into classListTrain and classListTest 274 | for i=1,#self.classes do 275 | local list = self.classList[i] 276 | local count = self.classList[i]:size(1) 277 | local splitidx = math.floor((count * self.split / 100) + 0.5) -- +round 278 | local perm = torch.randperm(count) 279 | self.classListTrain[i] = torch.LongTensor(splitidx) 280 | for j=1,splitidx do 281 | self.classListTrain[i][j] = list[perm[j]] 282 | end 283 | if splitidx == count then -- all samples were allocated to train set 284 | self.classListTest[i] = torch.LongTensor() 285 | else 286 | self.classListTest[i] = torch.LongTensor(count-splitidx) 287 | totalTestSamples = totalTestSamples + self.classListTest[i]:size(1) 288 | local idx = 1 289 | for j=splitidx+1,count do 290 | self.classListTest[i][idx] = list[perm[j]] 291 | idx = idx + 1 292 | end 293 | end 294 | end 295 | -- Now combine classListTest into a single tensor 296 | self.testIndices = torch.LongTensor(totalTestSamples) 297 | self.testIndicesSize = totalTestSamples 298 | local tdata = self.testIndices:data() 299 | local tidx = 0 300 | for i=1,#self.classes do 301 | local list = self.classListTest[i] 302 | if list:dim() ~= 0 then 303 | local ldata = list:data() 304 | for j=0,list:size(1)-1 do 305 | tdata[tidx] = ldata[j] 306 | tidx = tidx + 1 307 | end 308 | end 309 | end 310 | end 311 | end 312 | 313 | -- size(), size(class) 314 | function dataset:size(class, list) 315 | list = list or self.classList 316 | if not class then 317 | return self.numSamples 318 | elseif type(class) == 'string' then 319 | return list[self.classIndices[class]]:size(1) 320 | elseif type(class) == 'number' then 321 | return list[class]:size(1) 322 | end 323 | end 324 | 325 | -- getByClass 326 | function dataset:getByClass(class) 327 | local index = 0 328 | if self.serial_batches == 1 
then 329 | index = math.fmod(self.image_count-1, self.classListSample[class]:nElement())+1 330 | self.image_count = self.image_count +1 331 | else 332 | index = math.ceil(torch.uniform() * self.classListSample[class]:nElement()) 333 | end 334 | -- print('serial_batches: ', self.serial_batches) 335 | -- print('max_index:, ', self.classListSample[class]:nElement()) 336 | -- print('index: ', index) 337 | -- print('image_count', 338 | local imgpath = ffi.string(torch.data(self.imagePath[self.classListSample[class][index]])) 339 | return self:sampleHookTrain(imgpath), imgpath 340 | end 341 | 342 | -- converts a table of samples (and corresponding labels) to a clean tensor 343 | local function tableToOutput(self, dataTable, scalarTable) 344 | local data, scalarLabels, labels 345 | local quantity = #scalarTable 346 | -- print(dataTable[1]:()) 347 | assert(dataTable[1]:dim() == 3) 348 | -- print(quantity) 349 | -- print(self.sampleSize[1]) 350 | -- print(self.sampleSize[2]) 351 | -- print(self.sampleSize[3]) 352 | data = torch.Tensor(quantity, 353 | self.sampleSize[1], self.sampleSize[2], self.sampleSize[3]) 354 | -- print(data:size()) 355 | scalarLabels = torch.LongTensor(quantity):fill(-1111) 356 | for i=1,#dataTable do 357 | data[i]:copy(dataTable[i]) 358 | scalarLabels[i] = scalarTable[i] 359 | end 360 | return data, scalarLabels 361 | end 362 | 363 | -- sampler, samples from the training set. 
364 | function dataset:sample(quantity) 365 | assert(quantity) 366 | local dataTable = {} 367 | local scalarTable = {} 368 | local samplePaths = {} 369 | for i=1,quantity do 370 | local class = torch.random(1, #self.classes) 371 | -- print(class) 372 | local out, imgpath = self:getByClass(class) 373 | table.insert(dataTable, out) 374 | table.insert(scalarTable, class) 375 | samplePaths[i] = imgpath 376 | -- print(imgpath) 377 | -- table.insert(pathTable, imgpath) 378 | -- table.insert() 379 | -- print('out', out:size()) 380 | end 381 | -- print('table') 382 | -- print(table) 383 | local data, scalarLabels = tableToOutput(self, dataTable, scalarTable) 384 | return data, scalarLabels, samplePaths-- filePaths 385 | end 386 | 387 | function dataset:get(i1, i2) 388 | local indices = torch.range(i1, i2); 389 | local quantity = i2 - i1 + 1; 390 | assert(quantity > 0) 391 | -- now that indices has been initialized, get the samples 392 | local dataTable = {} 393 | local scalarTable = {} 394 | for i=1,quantity do 395 | -- load the sample 396 | local imgpath = ffi.string(torch.data(self.imagePath[indices[i]])) 397 | local out = self:sampleHookTest(imgpath) 398 | table.insert(dataTable, out) 399 | table.insert(scalarTable, self.imageClass[indices[i]]) 400 | end 401 | local data, scalarLabels = tableToOutput(self, dataTable, scalarTable) 402 | return data, scalarLabels 403 | end 404 | 405 | return dataset 406 | -------------------------------------------------------------------------------- /Graham_Ganssle/pix2pix/data/donkey_folder.lua: -------------------------------------------------------------------------------- 1 | 2 | --[[ 3 | This data loader is a modified version of the one from dcgan.torch 4 | (see https://github.com/soumith/dcgan.torch/blob/master/data/donkey_folder.lua). 5 | Copyright (c) 2016, Deepak Pathak [See LICENSE file for details] 6 | Copyright (c) 2015-present, Facebook, Inc. 7 | All rights reserved. 
8 | This source code is licensed under the BSD-style license found in the 9 | LICENSE file in the root directory of this source tree. An additional grant 10 | of patent rights can be found in the PATENTS file in the same directory. 11 | ]]-- 12 | 13 | require 'image' 14 | paths.dofile('dataset.lua') 15 | -- This file contains the data-loading logic and details. 16 | -- It is run by each data-loader thread. 17 | ------------------------------------------ 18 | -------- COMMON CACHES and PATHS 19 | -- Check for existence of opt.data 20 | print(os.getenv('DATA_ROOT')) 21 | opt.data = paths.concat(os.getenv('DATA_ROOT'), opt.phase) 22 | 23 | if not paths.dirp(opt.data) then 24 | error('Did not find directory: ' .. opt.data) 25 | end 26 | 27 | -- a cache file of the training metadata (if doesnt exist, will be created) 28 | local cache = "cache" 29 | local cache_prefix = opt.data:gsub('/', '_') 30 | os.execute('mkdir -p cache') 31 | local trainCache = paths.concat(cache, cache_prefix .. '_trainCache.t7') 32 | 33 | -------------------------------------------------------------------------------------------- 34 | local input_nc = opt.input_nc -- input channels 35 | local output_nc = opt.output_nc 36 | local loadSize = {input_nc, opt.loadSize} 37 | local sampleSize = {input_nc, opt.fineSize} 38 | 39 | local preprocessAandB = function(imA, imB) 40 | imA = image.scale(imA, loadSize[2], loadSize[2]) 41 | imB = image.scale(imB, loadSize[2], loadSize[2]) 42 | local perm = torch.LongTensor{3, 2, 1} 43 | imA = imA:index(1, perm)--:mul(256.0): brg, rgb 44 | imA = imA:mul(2):add(-1) 45 | imB = imB:index(1, perm) 46 | imB = imB:mul(2):add(-1) 47 | -- print(img:size()) 48 | assert(imA:max()<=1,"A: badly scaled inputs") 49 | assert(imA:min()>=-1,"A: badly scaled inputs") 50 | assert(imB:max()<=1,"B: badly scaled inputs") 51 | assert(imB:min()>=-1,"B: badly scaled inputs") 52 | 53 | 54 | local oW = sampleSize[2] 55 | local oH = sampleSize[2] 56 | local iH = imA:size(2) 57 | local iW = 
imA:size(3) 58 | 59 | if iH~=oH then 60 | h1 = math.ceil(torch.uniform(1e-2, iH-oH)) 61 | end 62 | 63 | if iW~=oW then 64 | w1 = math.ceil(torch.uniform(1e-2, iW-oW)) 65 | end 66 | if iH ~= oH or iW ~= oW then 67 | imA = image.crop(imA, w1, h1, w1 + oW, h1 + oH) 68 | imB = image.crop(imB, w1, h1, w1 + oW, h1 + oH) 69 | end 70 | 71 | if opt.flip == 1 and torch.uniform() > 0.5 then 72 | imA = image.hflip(imA) 73 | imB = image.hflip(imB) 74 | end 75 | 76 | return imA, imB 77 | end 78 | 79 | 80 | 81 | local function loadImageChannel(path) 82 | local input = image.load(path, 3, 'float') 83 | input = image.scale(input, loadSize[2], loadSize[2]) 84 | 85 | local oW = sampleSize[2] 86 | local oH = sampleSize[2] 87 | local iH = input:size(2) 88 | local iW = input:size(3) 89 | 90 | if iH~=oH then 91 | h1 = math.ceil(torch.uniform(1e-2, iH-oH)) 92 | end 93 | 94 | if iW~=oW then 95 | w1 = math.ceil(torch.uniform(1e-2, iW-oW)) 96 | end 97 | if iH ~= oH or iW ~= oW then 98 | input = image.crop(input, w1, h1, w1 + oW, h1 + oH) 99 | end 100 | 101 | 102 | if opt.flip == 1 and torch.uniform() > 0.5 then 103 | input = image.hflip(input) 104 | end 105 | 106 | -- print(input:mean(), input:min(), input:max()) 107 | local input_lab = image.rgb2lab(input) 108 | -- print(input_lab:size()) 109 | -- os.exit() 110 | local imA = input_lab[{{1}, {}, {} }]:div(50.0) - 1.0 111 | local imB = input_lab[{{2,3},{},{}}]:div(110.0) 112 | local imAB = torch.cat(imA, imB, 1) 113 | assert(imAB:max()<=1,"A: badly scaled inputs") 114 | assert(imAB:min()>=-1,"A: badly scaled inputs") 115 | 116 | return imAB 117 | end 118 | 119 | --local function loadImage 120 | 121 | local function loadImage(path) 122 | local input = image.load(path, 3, 'float') 123 | local h = input:size(2) 124 | local w = input:size(3) 125 | 126 | local imA = image.crop(input, 0, 0, w/2, h) 127 | local imB = image.crop(input, w/2, 0, w, h) 128 | 129 | return imA, imB 130 | end 131 | 132 | local function loadImageInpaint(path) 133 | local 
imB = image.load(path, 3, 'float') 134 | imB = image.scale(imB, loadSize[2], loadSize[2]) 135 | local perm = torch.LongTensor{3, 2, 1} 136 | imB = imB:index(1, perm)--:mul(256.0): brg, rgb 137 | imB = imB:mul(2):add(-1) 138 | assert(imB:max()<=1,"A: badly scaled inputs") 139 | assert(imB:min()>=-1,"A: badly scaled inputs") 140 | local oW = sampleSize[2] 141 | local oH = sampleSize[2] 142 | local iH = imB:size(2) 143 | local iW = imB:size(3) 144 | if iH~=oH then 145 | h1 = math.ceil(torch.uniform(1e-2, iH-oH)) 146 | end 147 | 148 | if iW~=oW then 149 | w1 = math.ceil(torch.uniform(1e-2, iW-oW)) 150 | end 151 | if iH ~= oH or iW ~= oW then 152 | imB = image.crop(imB, w1, h1, w1 + oW, h1 + oH) 153 | end 154 | local imA = imB:clone() 155 | imA[{{},{1 + oH/4, oH/2 + oH/4},{1 + oW/4, oW/2 + oW/4}}] = 1.0 156 | if opt.flip == 1 and torch.uniform() > 0.5 then 157 | imA = image.hflip(imA) 158 | imB = image.hflip(imB) 159 | end 160 | imAB = torch.cat(imA, imB, 1) 161 | return imAB 162 | end 163 | 164 | -- channel-wise mean and std. Calculate or load them from disk later in the script. 165 | local mean,std 166 | -------------------------------------------------------------------------------- 167 | -- Hooks that are used for each image that is loaded 168 | 169 | -- function to load the image, jitter it appropriately (random crops etc.) 
170 | local trainHook = function(self, path) 171 | collectgarbage() 172 | if opt.preprocess == 'regular' then 173 | -- print('process regular') 174 | local imA, imB = loadImage(path) 175 | imA, imB = preprocessAandB(imA, imB) 176 | imAB = torch.cat(imA, imB, 1) 177 | end 178 | 179 | if opt.preprocess == 'colorization' then 180 | -- print('process colorization') 181 | imAB = loadImageChannel(path) 182 | end 183 | 184 | if opt.preprocess == 'inpaint' then 185 | -- print('process inpaint') 186 | imAB = loadImageInpaint(path) 187 | end 188 | -- print('image AB size') 189 | -- print(imAB:size()) 190 | return imAB 191 | end 192 | 193 | -------------------------------------- 194 | -- trainLoader 195 | print('trainCache', trainCache) 196 | --if paths.filep(trainCache) then 197 | -- print('Loading train metadata from cache') 198 | -- trainLoader = torch.load(trainCache) 199 | -- trainLoader.sampleHookTrain = trainHook 200 | -- trainLoader.loadSize = {input_nc, opt.loadSize, opt.loadSize} 201 | -- trainLoader.sampleSize = {input_nc+output_nc, sampleSize[2], sampleSize[2]} 202 | -- trainLoader.serial_batches = opt.serial_batches 203 | -- trainLoader.split = 100 204 | --else 205 | print('Creating train metadata') 206 | -- print(opt.data) 207 | print('serial batch:, ', opt.serial_batches) 208 | trainLoader = dataLoader{ 209 | paths = {opt.data}, 210 | loadSize = {input_nc, loadSize[2], loadSize[2]}, 211 | sampleSize = {input_nc+output_nc, sampleSize[2], sampleSize[2]}, 212 | split = 100, 213 | serial_batches = opt.serial_batches, 214 | verbose = true 215 | } 216 | -- print('finish') 217 | --torch.save(trainCache, trainLoader) 218 | --print('saved metadata cache at', trainCache) 219 | trainLoader.sampleHookTrain = trainHook 220 | --end 221 | collectgarbage() 222 | 223 | -- do some sanity checks on trainLoader 224 | do 225 | local class = trainLoader.imageClass 226 | local nClasses = #trainLoader.classes 227 | assert(class:max() <= nClasses, "class logic has error") 228 | 
assert(class:min() >= 1, "class logic has error") 229 | end -------------------------------------------------------------------------------- /Graham_Ganssle/pix2pix/models.lua: -------------------------------------------------------------------------------- 1 | require 'nngraph' 2 | 3 | function defineG_encoder_decoder(input_nc, output_nc, ngf) 4 | local netG = nil 5 | -- input is (nc) x 256 x 256 6 | local e1 = - nn.SpatialConvolution(input_nc, ngf, 4, 4, 2, 2, 1, 1) 7 | -- input is (ngf) x 128 x 128 8 | local e2 = e1 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf, ngf * 2, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 2) 9 | -- input is (ngf * 2) x 64 x 64 10 | local e3 = e2 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 2, ngf * 4, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 4) 11 | -- input is (ngf * 4) x 32 x 32 12 | local e4 = e3 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 4, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 13 | -- input is (ngf * 8) x 16 x 16 14 | local e5 = e4 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 15 | -- input is (ngf * 8) x 8 x 8 16 | local e6 = e5 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 17 | -- input is (ngf * 8) x 4 x 4 18 | local e7 = e6 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 19 | -- input is (ngf * 8) x 2 x 2 20 | local e8 = e7 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) -- nn.SpatialBatchNormalization(ngf * 8) 21 | -- input is (ngf * 8) x 1 x 1 22 | 23 | local d1 = e8 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) - nn.Dropout(0.5) 24 | -- input is (ngf * 8) x 2 x 2 25 | local d2 = d1 - nn.ReLU(true) 
- nn.SpatialFullConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) - nn.Dropout(0.5) 26 | -- input is (ngf * 8) x 4 x 4 27 | local d3 = d2 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) - nn.Dropout(0.5) 28 | -- input is (ngf * 8) x 8 x 8 29 | local d4 = d3 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 30 | -- input is (ngf * 8) x 16 x 16 31 | local d5 = d4 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8, ngf * 4, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 4) 32 | -- input is (ngf * 4) x 32 x 32 33 | local d6 = d5 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 4, ngf * 2, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 2) 34 | -- input is (ngf * 2) x 64 x 64 35 | local d7 = d6 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 2, ngf, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf) 36 | -- input is (ngf) x128 x 128 37 | local d8 = d7 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf, output_nc, 4, 4, 2, 2, 1, 1) 38 | -- input is (nc) x 256 x 256 39 | 40 | local o1 = d8 - nn.Tanh() 41 | 42 | netG = nn.gModule({e1},{o1}) 43 | 44 | return netG 45 | end 46 | 47 | function defineG_unet(input_nc, output_nc, ngf) 48 | local netG = nil 49 | -- input is (nc) x 256 x 256 50 | local e1 = - nn.SpatialConvolution(input_nc, ngf, 4, 4, 2, 2, 1, 1) 51 | -- input is (ngf) x 128 x 128 52 | local e2 = e1 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf, ngf * 2, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 2) 53 | -- input is (ngf * 2) x 64 x 64 54 | local e3 = e2 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 2, ngf * 4, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 4) 55 | -- input is (ngf * 4) x 32 x 32 56 | local e4 = e3 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 4, ngf * 8, 4, 4, 2, 2, 1, 1) - 
nn.SpatialBatchNormalization(ngf * 8) 57 | -- input is (ngf * 8) x 16 x 16 58 | local e5 = e4 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 59 | -- input is (ngf * 8) x 8 x 8 60 | local e6 = e5 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 61 | -- input is (ngf * 8) x 4 x 4 62 | local e7 = e6 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 63 | -- input is (ngf * 8) x 2 x 2 64 | local e8 = e7 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) -- nn.SpatialBatchNormalization(ngf * 8) 65 | -- input is (ngf * 8) x 1 x 1 66 | 67 | local d1_ = e8 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) - nn.Dropout(0.5) 68 | -- input is (ngf * 8) x 2 x 2 69 | local d1 = {d1_,e7} - nn.JoinTable(2) 70 | local d2_ = d1 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8 * 2, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) - nn.Dropout(0.5) 71 | -- input is (ngf * 8) x 4 x 4 72 | local d2 = {d2_,e6} - nn.JoinTable(2) 73 | local d3_ = d2 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8 * 2, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) - nn.Dropout(0.5) 74 | -- input is (ngf * 8) x 8 x 8 75 | local d3 = {d3_,e5} - nn.JoinTable(2) 76 | local d4_ = d3 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8 * 2, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 77 | -- input is (ngf * 8) x 16 x 16 78 | local d4 = {d4_,e4} - nn.JoinTable(2) 79 | local d5_ = d4 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8 * 2, ngf * 4, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 4) 80 | -- input is (ngf * 4) x 32 x 32 81 | local d5 = {d5_,e3} - nn.JoinTable(2) 82 | local d6_ = d5 - 
nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 4 * 2, ngf * 2, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 2) 83 | -- input is (ngf * 2) x 64 x 64 84 | local d6 = {d6_,e2} - nn.JoinTable(2) 85 | local d7_ = d6 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 2 * 2, ngf, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf) 86 | -- input is (ngf) x128 x 128 87 | local d7 = {d7_,e1} - nn.JoinTable(2) 88 | local d8 = d7 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 2, output_nc, 4, 4, 2, 2, 1, 1) 89 | -- input is (nc) x 256 x 256 90 | 91 | local o1 = d8 - nn.Tanh() 92 | 93 | netG = nn.gModule({e1},{o1}) 94 | 95 | --graph.dot(netG.fg,'netG') 96 | 97 | return netG 98 | end 99 | 100 | function defineG_unet_128(input_nc, output_nc, ngf) 101 | -- Two layer less than the default unet to handle 128x128 input 102 | local netG = nil 103 | -- input is (nc) x 128 x 128 104 | local e1 = - nn.SpatialConvolution(input_nc, ngf, 4, 4, 2, 2, 1, 1) 105 | -- input is (ngf) x 64 x 64 106 | local e2 = e1 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf, ngf * 2, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 2) 107 | -- input is (ngf * 2) x 32 x 32 108 | local e3 = e2 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 2, ngf * 4, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 4) 109 | -- input is (ngf * 4) x 16 x 16 110 | local e4 = e3 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 4, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 111 | -- input is (ngf * 8) x 8 x 8 112 | local e5 = e4 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 113 | -- input is (ngf * 8) x 4 x 4 114 | local e6 = e5 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) 115 | -- input is (ngf * 8) x 2 x 2 116 | local e7 = e6 - nn.LeakyReLU(0.2, true) - nn.SpatialConvolution(ngf * 8, ngf * 8, 4, 4, 2, 
2, 1, 1) -- nn.SpatialBatchNormalization(ngf * 8) 117 | -- input is (ngf * 8) x 1 x 1 118 | 119 | local d1_ = e7 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) - nn.Dropout(0.5) 120 | -- input is (ngf * 8) x 2 x 2 121 | local d1 = {d1_,e6} - nn.JoinTable(2) 122 | local d2_ = d1 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8 * 2, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) - nn.Dropout(0.5) 123 | -- input is (ngf * 8) x 4 x 4 124 | local d2 = {d2_,e5} - nn.JoinTable(2) 125 | local d3_ = d2 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8 * 2, ngf * 8, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 8) - nn.Dropout(0.5) 126 | -- input is (ngf * 8) x 8 x 8 127 | local d3 = {d3_,e4} - nn.JoinTable(2) 128 | local d4_ = d3 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 8 * 2, ngf * 4, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 4) 129 | -- input is (ngf * 8) x 16 x 16 130 | local d4 = {d4_,e3} - nn.JoinTable(2) 131 | local d5_ = d4 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 4 * 2, ngf * 2, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf * 2) 132 | -- input is (ngf * 4) x 32 x 32 133 | local d5 = {d5_,e2} - nn.JoinTable(2) 134 | local d6_ = d5 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 2 * 2, ngf, 4, 4, 2, 2, 1, 1) - nn.SpatialBatchNormalization(ngf) 135 | -- input is (ngf * 2) x 64 x 64 136 | local d6 = {d6_,e1} - nn.JoinTable(2) 137 | local d7 = d6 - nn.ReLU(true) - nn.SpatialFullConvolution(ngf * 2, output_nc, 4, 4, 2, 2, 1, 1) 138 | -- input is (ngf) x128 x 128 139 | 140 | local o1 = d7 - nn.Tanh() 141 | 142 | netG = nn.gModule({e1},{o1}) 143 | 144 | --graph.dot(netG.fg,'netG') 145 | 146 | return netG 147 | end 148 | -- Default discriminator: a 3-layer PatchGAN (70x70 receptive field, see table above defineD_n_layers). 149 | function defineD_basic(input_nc, output_nc, ndf) 150 | local n_layers = 3 -- 'local' keeps this from leaking into the global environment 151 | return defineD_n_layers(input_nc, output_nc, ndf, n_layers) 152 | end 153 | 154 | -- rf=1 155 | function defineD_pixelGAN(input_nc, 
output_nc, ndf) 156 | local netD = nn.Sequential() 157 | 158 | -- input is (nc) x 256 x 256 159 | netD:add(nn.SpatialConvolution(input_nc+output_nc, ndf, 1, 1, 1, 1, 0, 0)) 160 | netD:add(nn.LeakyReLU(0.2, true)) 161 | -- state size: (ndf) x 256 x 256 162 | netD:add(nn.SpatialConvolution(ndf, ndf * 2, 1, 1, 1, 1, 0, 0)) 163 | netD:add(nn.SpatialBatchNormalization(ndf * 2)):add(nn.LeakyReLU(0.2, true)) 164 | -- state size: (ndf*2) x 256 x 256 165 | netD:add(nn.SpatialConvolution(ndf * 2, 1, 1, 1, 1, 1, 0, 0)) 166 | -- state size: 1 x 256 x 256 167 | netD:add(nn.Sigmoid()) 168 | -- state size: 1 x 256 x 256 169 | 170 | return netD 171 | end 172 | 173 | -- if n=0, then use pixelGAN (rf=1) 174 | -- else rf is 16 if n=1 175 | -- 34 if n=2 176 | -- 70 if n=3 177 | -- 142 if n=4 178 | -- 286 if n=5 179 | -- 574 if n=6 180 | function defineD_n_layers(input_nc, output_nc, ndf, n_layers) 181 | if n_layers==0 then 182 | return defineD_pixelGAN(input_nc, output_nc, ndf) 183 | else 184 | 185 | local netD = nn.Sequential() 186 | 187 | -- input is (nc) x 256 x 256 188 | netD:add(nn.SpatialConvolution(input_nc+output_nc, ndf, 4, 4, 2, 2, 1, 1)) 189 | netD:add(nn.LeakyReLU(0.2, true)) 190 | 191 | local nf_mult = 1 192 | local nf_mult_prev = 1 193 | for n = 1, n_layers-1 do 194 | nf_mult_prev = nf_mult 195 | nf_mult = math.min(2^n,8) 196 | netD:add(nn.SpatialConvolution(ndf * nf_mult_prev, ndf * nf_mult, 4, 4, 2, 2, 1, 1)) 197 | netD:add(nn.SpatialBatchNormalization(ndf * nf_mult)):add(nn.LeakyReLU(0.2, true)) 198 | end 199 | 200 | -- state size: (ndf*M) x N x N 201 | nf_mult_prev = nf_mult 202 | nf_mult = math.min(2^n_layers,8) 203 | netD:add(nn.SpatialConvolution(ndf * nf_mult_prev, ndf * nf_mult, 4, 4, 1, 1, 1, 1)) 204 | netD:add(nn.SpatialBatchNormalization(ndf * nf_mult)):add(nn.LeakyReLU(0.2, true)) 205 | -- state size: (ndf*M*2) x (N-1) x (N-1) 206 | netD:add(nn.SpatialConvolution(ndf * nf_mult, 1, 4, 4, 1, 1, 1, 1)) 207 | -- state size: 1 x (N-2) x (N-2) 208 | 209 | 
netD:add(nn.Sigmoid()) 210 | -- state size: 1 x (N-2) x (N-2) 211 | 212 | return netD 213 | end 214 | end 215 | -------------------------------------------------------------------------------- /Graham_Ganssle/pix2pix/scripts/combine_A_and_B.py: -------------------------------------------------------------------------------- 1 | from pdb import set_trace as st 2 | import os 3 | import numpy as np 4 | import cv2 5 | import argparse 6 | 7 | parser = argparse.ArgumentParser('create image pairs') 8 | parser.add_argument('--fold_A', dest='fold_A', help='input directory for image A', type=str, default='../dataset/50kshoes_edges') 9 | parser.add_argument('--fold_B', dest='fold_B', help='input directory for image B', type=str, default='../dataset/50kshoes_jpg') 10 | parser.add_argument('--fold_AB', dest='fold_AB', help='output directory', type=str, default='../dataset/test_AB') 11 | parser.add_argument('--num_imgs', dest='num_imgs', help='number of images',type=int, default=1000000) 12 | parser.add_argument('--use_AB', dest='use_AB', help='if true: (0001_A, 0001_B) to (0001_AB)',action='store_true') 13 | args = parser.parse_args() 14 | 15 | for arg in vars(args): 16 | print('[%s] = ' % arg, getattr(args, arg)) 17 | 18 | splits = filter( lambda f: not f.startswith('.'), os.listdir(args.fold_A)) # ignore hidden folders like .DS_Store 19 | 20 | for sp in splits: 21 | img_fold_A = os.path.join(args.fold_A, sp) 22 | img_fold_B = os.path.join(args.fold_B, sp) 23 | img_list = filter( lambda f: not f.startswith('.'), os.listdir(img_fold_A)) # ignore hidden folders like .DS_Store 24 | if args.use_AB: 25 | img_list = [img_path for img_path in img_list if '_A.' 
in img_path] 26 | 27 | num_imgs = min(args.num_imgs, len(img_list)) 28 | print('split = %s, use %d/%d images' % (sp, num_imgs, len(img_list))) 29 | img_fold_AB = os.path.join(args.fold_AB, sp) 30 | if not os.path.isdir(img_fold_AB): 31 | os.makedirs(img_fold_AB) 32 | print('split = %s, number of images = %d' % (sp, num_imgs)) 33 | for n in range(num_imgs): 34 | name_A = img_list[n] 35 | path_A = os.path.join(img_fold_A, name_A) 36 | if args.use_AB: 37 | name_B = name_A.replace('_A.', '_B.') 38 | else: 39 | name_B = name_A 40 | path_B = os.path.join(img_fold_B, name_B) 41 | if os.path.isfile(path_A) and os.path.isfile(path_B): 42 | name_AB = name_A 43 | if args.use_AB: 44 | name_AB = name_AB.replace('_A.', '.') # remove _A 45 | path_AB = os.path.join(img_fold_AB, name_AB) 46 | im_A = cv2.imread(path_A, cv2.IMREAD_COLOR) 47 | im_B = cv2.imread(path_B, cv2.IMREAD_COLOR) 48 | im_AB = np.concatenate([im_A, im_B], 1) 49 | cv2.imwrite(path_AB, im_AB) 50 | 51 | -------------------------------------------------------------------------------- /Graham_Ganssle/pix2pix/scripts/receptive_field_sizes.m: -------------------------------------------------------------------------------- 1 | % modified from: https://github.com/rbgirshick/rcnn/blob/master/utils/receptive_field_sizes.m 2 | % 3 | % RCNN LICENSE: 4 | % 5 | % Copyright (c) 2014, The Regents of the University of California (Regents) 6 | % All rights reserved. 7 | % 8 | % Redistribution and use in source and binary forms, with or without 9 | % modification, are permitted provided that the following conditions are met: 10 | % 11 | % 1. Redistributions of source code must retain the above copyright notice, this 12 | % list of conditions and the following disclaimer. 13 | % 2. Redistributions in binary form must reproduce the above copyright notice, 14 | % this list of conditions and the following disclaimer in the documentation 15 | % and/or other materials provided with the distribution. 
16 | % 17 | % THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 18 | % ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 19 | % WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 20 | % DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 21 | % ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 22 | % (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 23 | % LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 24 | % ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 | % (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 26 | % SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 | 28 | function receptive_field_sizes() 29 | 30 | 31 | % compute input size from a given output size 32 | f = @(output_size, ksize, stride) (output_size - 1) * stride + ksize; 33 | 34 | 35 | %% n=1 discriminator 36 | 37 | % fix the output size to 1 and derive the receptive field in the input 38 | out = ... 39 | f(f(f(1, 4, 1), ... % conv2 -> conv3 40 | 4, 1), ... % conv1 -> conv2 41 | 4, 2); % input -> conv1 42 | 43 | fprintf('n=1 discriminator receptive field size: %d\n', out); 44 | 45 | 46 | %% n=2 discriminator 47 | 48 | % fix the output size to 1 and derive the receptive field in the input 49 | out = ... 50 | f(f(f(f(1, 4, 1), ... % conv3 -> conv4 51 | 4, 1), ... % conv2 -> conv3 52 | 4, 2), ... % conv1 -> conv2 53 | 4, 2); % input -> conv1 54 | 55 | fprintf('n=2 discriminator receptive field size: %d\n', out); 56 | 57 | 58 | %% n=3 discriminator 59 | 60 | % fix the output size to 1 and derive the receptive field in the input 61 | out = ... 62 | f(f(f(f(f(1, 4, 1), ... % conv4 -> conv5 63 | 4, 1), ... % conv3 -> conv4 64 | 4, 2), ... % conv2 -> conv3 65 | 4, 2), ... 
% conv1 -> conv2 66 | 4, 2); % input -> conv1 67 | 68 | fprintf('n=3 discriminator receptive field size: %d\n', out); 69 | 70 | 71 | %% n=4 discriminator 72 | 73 | % fix the output size to 1 and derive the receptive field in the input 74 | out = ... 75 | f(f(f(f(f(f(1, 4, 1), ... % conv5 -> conv6 76 | 4, 1), ... % conv4 -> conv5 77 | 4, 2), ... % conv3 -> conv4 78 | 4, 2), ... % conv2 -> conv3 79 | 4, 2), ... % conv1 -> conv2 80 | 4, 2); % input -> conv1 81 | 82 | fprintf('n=4 discriminator receptive field size: %d\n', out); 83 | 84 | 85 | %% n=5 discriminator 86 | 87 | % fix the output size to 1 and derive the receptive field in the input 88 | out = ... 89 | f(f(f(f(f(f(f(1, 4, 1), ... % conv6 -> conv7 90 | 4, 1), ... % conv5 -> conv6 91 | 4, 2), ... % conv4 -> conv5 92 | 4, 2), ... % conv3 -> conv4 93 | 4, 2), ... % conv2 -> conv3 94 | 4, 2), ... % conv1 -> conv2 95 | 4, 2); % input -> conv1 96 | 97 | fprintf('n=5 discriminator receptive field size: %d\n', out); -------------------------------------------------------------------------------- /Graham_Ganssle/pix2pix/test.lua: -------------------------------------------------------------------------------- 1 | -- usage: DATA_ROOT=/path/to/data/ name=expt1 which_direction=BtoA th test.lua 2 | -- 3 | -- code derived from https://github.com/soumith/dcgan.torch 4 | -- 5 | 6 | require 'image' 7 | require 'nn' 8 | require 'nngraph' 9 | util = paths.dofile('util/util.lua') 10 | torch.setdefaulttensortype('torch.FloatTensor') 11 | 12 | opt = { 13 | DATA_ROOT = '', -- path to images (should have subfolders 'train', 'val', etc) 14 | batchSize = 1, -- # images in batch 15 | loadSize = 256, -- scale images to this size 16 | fineSize = 256, -- then crop to this size 17 | flip=0, -- horizontal mirroring data augmentation 18 | display = 1, -- display samples while training. 0 = false 19 | display_id = 200, -- display window id. 20 | gpu = 1, -- gpu = 0 is CPU mode. 
gpu=X is GPU mode on GPU X 21 | how_many = 'all', -- how many test images to run (set to all to run on every image found in the data/phase folder) 22 | which_direction = 'AtoB', -- AtoB or BtoA 23 | phase = 'val', -- train, val, test ,etc 24 | preprocess = 'regular', -- for special purpose preprocessing, e.g., for colorization, change this (selects preprocessing functions in util.lua) 25 | aspect_ratio = 1.0, -- aspect ratio of result images 26 | name = '', -- name of experiment, selects which model to run, should generally should be passed on command line 27 | input_nc = 3, -- # of input image channels 28 | output_nc = 3, -- # of output image channels 29 | serial_batches = 1, -- if 1, takes images in order to make batches, otherwise takes them randomly 30 | serial_batch_iter = 1, -- iter into serial image list 31 | cudnn = 1, -- set to 0 to not use cudnn (untested) 32 | checkpoints_dir = './checkpoints', -- loads models from here 33 | results_dir='./results/', -- saves results here 34 | which_epoch = 'latest', -- which epoch to test? set to 'latest' to use latest cached model 35 | } 36 | 37 | 38 | -- one-line argument parser. parses enviroment variables to override the defaults 39 | for k,v in pairs(opt) do opt[k] = tonumber(os.getenv(k)) or os.getenv(k) or opt[k] end 40 | opt.nThreads = 1 -- test only works with 1 thread... 41 | print(opt) 42 | if opt.display == 0 then opt.display = false end 43 | 44 | opt.manualSeed = torch.random(1, 10000) -- set seed 45 | print("Random Seed: " .. opt.manualSeed) 46 | torch.manualSeed(opt.manualSeed) 47 | torch.setdefaulttensortype('torch.FloatTensor') 48 | 49 | opt.netG_name = opt.name .. '/' .. opt.which_epoch .. '_net_G' 50 | 51 | local data_loader = paths.dofile('data/data.lua') 52 | print('#threads...' .. 
opt.nThreads) 53 | local data = data_loader.new(opt.nThreads, opt) 54 | print("Dataset Size: ", data:size()) 55 | 56 | -- translation direction 57 | local idx_A = nil 58 | local idx_B = nil 59 | local input_nc = opt.input_nc 60 | local output_nc = opt.output_nc 61 | if opt.which_direction=='AtoB' then 62 | idx_A = {1, input_nc} 63 | idx_B = {input_nc+1, input_nc+output_nc} 64 | elseif opt.which_direction=='BtoA' then 65 | idx_A = {input_nc+1, input_nc+output_nc} 66 | idx_B = {1, input_nc} 67 | else 68 | error(string.format('bad direction %s',opt.which_direction)) 69 | end 70 | ---------------------------------------------------------------------------- 71 | 72 | local input = torch.FloatTensor(opt.batchSize,3,opt.fineSize,opt.fineSize) 73 | local target = torch.FloatTensor(opt.batchSize,3,opt.fineSize,opt.fineSize) 74 | 75 | print('checkpoints_dir', opt.checkpoints_dir) 76 | local netG = util.load(paths.concat(opt.checkpoints_dir, opt.netG_name .. '.t7'), opt) 77 | --netG:evaluate() 78 | 79 | print(netG) 80 | 81 | 82 | function TableConcat(t1,t2) 83 | for i=1,#t2 do 84 | t1[#t1+1] = t2[i] 85 | end 86 | return t1 87 | end 88 | 89 | if opt.how_many=='all' then 90 | opt.how_many=data:size() 91 | end 92 | opt.how_many=math.min(opt.how_many, data:size()) 93 | 94 | local filepaths = {} -- paths to images tested on 95 | for n=1,math.floor(opt.how_many/opt.batchSize) do 96 | print('processing batch ' .. 
n) 97 | 98 | local data_curr, filepaths_curr = data:getBatch() 99 | filepaths_curr = util.basename_batch(filepaths_curr) 100 | print('filepaths_curr: ', filepaths_curr) 101 | 102 | input = data_curr[{ {}, idx_A, {}, {} }] 103 | target = data_curr[{ {}, idx_B, {}, {} }] 104 | 105 | if opt.gpu > 0 then 106 | input = input:cuda() 107 | end 108 | 109 | if opt.preprocess == 'colorization' then 110 | local output_AB = netG:forward(input):float() 111 | local input_L = input:float() 112 | output = util.deprocessLAB_batch(input_L, output_AB) 113 | local target_AB = target:float() 114 | target = util.deprocessLAB_batch(input_L, target_AB) 115 | input = util.deprocessL_batch(input_L) 116 | else 117 | output = util.deprocess_batch(netG:forward(input)) 118 | input = util.deprocess_batch(input):float() 119 | output = output:float() 120 | target = util.deprocess_batch(target):float() 121 | end 122 | paths.mkdir(paths.concat(opt.results_dir, opt.netG_name .. '_' .. opt.phase)) 123 | local image_dir = paths.concat(opt.results_dir, opt.netG_name .. '_' .. 
opt.phase, 'images') 124 | paths.mkdir(image_dir) 125 | paths.mkdir(paths.concat(image_dir,'input')) 126 | paths.mkdir(paths.concat(image_dir,'output')) 127 | paths.mkdir(paths.concat(image_dir,'target')) 128 | -- print(input:size()) 129 | -- print(output:size()) 130 | -- print(target:size()) 131 | for i=1, opt.batchSize do 132 | image.save(paths.concat(image_dir,'input',filepaths_curr[i]), image.scale(input[i],input[i]:size(2),input[i]:size(3)/opt.aspect_ratio)) 133 | image.save(paths.concat(image_dir,'output',filepaths_curr[i]), image.scale(output[i],output[i]:size(2),output[i]:size(3)/opt.aspect_ratio)) 134 | image.save(paths.concat(image_dir,'target',filepaths_curr[i]), image.scale(target[i],target[i]:size(2),target[i]:size(3)/opt.aspect_ratio)) 135 | end 136 | print('Saved images to: ', image_dir) 137 | 138 | if opt.display then 139 | if opt.preprocess == 'regular' then 140 | disp = require 'display' 141 | disp.image(util.scaleBatch(input,100,100),{win=opt.display_id, title='input'}) 142 | disp.image(util.scaleBatch(output,100,100),{win=opt.display_id+1, title='output'}) 143 | disp.image(util.scaleBatch(target,100,100),{win=opt.display_id+2, title='target'}) 144 | 145 | print('Displayed images') 146 | end 147 | end 148 | 149 | filepaths = TableConcat(filepaths, filepaths_curr) 150 | end 151 | 152 | -- make webpage 153 | io.output(paths.concat(opt.results_dir,opt.netG_name .. '_' .. opt.phase, 'index.html')) 154 | 155 | io.write('') 156 | 157 | io.write('') 158 | for i=1, #filepaths do 159 | io.write('') 160 | io.write('') 161 | io.write('') 162 | io.write('') 163 | io.write('') 164 | io.write('') 165 | end 166 | 167 | io.write('
Image #InputOutputGround Truth
' .. filepaths[i] .. '
') -------------------------------------------------------------------------------- /Graham_Ganssle/pix2pix/train.lua: -------------------------------------------------------------------------------- 1 | -- usage example: DATA_ROOT=/path/to/data/ which_direction=BtoA name=expt1 th train.lua 2 | -- 3 | -- code derived from https://github.com/soumith/dcgan.torch 4 | -- 5 | 6 | require 'torch' 7 | require 'nn' 8 | require 'optim' 9 | util = paths.dofile('util/util.lua') 10 | require 'image' 11 | require 'models' 12 | 13 | 14 | opt = { 15 | DATA_ROOT = '', -- path to images (should have subfolders 'train', 'val', etc) 16 | batchSize = 1, -- # images in batch 17 | loadSize = 286, -- scale images to this size 18 | fineSize = 256, -- then crop to this size 19 | ngf = 64, -- # of gen filters in first conv layer 20 | ndf = 64, -- # of discrim filters in first conv layer 21 | input_nc = 3, -- # of input image channels 22 | output_nc = 3, -- # of output image channels 23 | niter = 200, -- # of iter at starting learning rate 24 | lr = 0.0002, -- initial learning rate for adam 25 | beta1 = 0.5, -- momentum term of adam 26 | ntrain = math.huge, -- # of examples per epoch. math.huge for full dataset 27 | flip = 1, -- if flip the images for data argumentation 28 | display = 1, -- display samples while training. 0 = false 29 | display_id = 10, -- display window id. 30 | display_plot = 'errL1', -- which loss values to plot over time. Accepted values include a comma seperated list of: errL1, errG, and errD 31 | gpu = 1, -- gpu = 0 is CPU mode. 
gpu=X is GPU mode on GPU X 32 | name = '', -- name of the experiment, should generally be passed on the command line 33 | which_direction = 'AtoB', -- AtoB or BtoA 34 | phase = 'train', -- train, val, test, etc 35 | preprocess = 'regular', -- for special purpose preprocessing, e.g., for colorization, change this (selects preprocessing functions in util.lua) 36 | nThreads = 2, -- # threads for loading data 37 | save_epoch_freq = 50, -- save a model every save_epoch_freq epochs (does not overwrite previously saved models) 38 | save_latest_freq = 5000, -- save the latest model every latest_freq sgd iterations (overwrites the previous latest model) 39 | print_freq = 50, -- print the debug information every print_freq iterations 40 | display_freq = 100, -- display the current results every display_freq iterations 41 | save_display_freq = 5000, -- save the current display of results every save_display_freq_iterations 42 | continue_train=0, -- if continue training, load the latest model: 1: true, 0: false 43 | serial_batches = 0, -- if 1, takes images in order to make batches, otherwise takes them randomly 44 | serial_batch_iter = 1, -- iter into serial image list 45 | checkpoints_dir = './checkpoints', -- models are saved here 46 | cudnn = 1, -- set to 0 to not use cudnn 47 | condition_GAN = 1, -- set to 0 to use unconditional discriminator 48 | use_GAN = 1, -- set to 0 to turn off GAN term 49 | use_L1 = 1, -- set to 0 to turn off L1 term 50 | which_model_netD = 'basic', -- selects model to use for netD 51 | which_model_netG = 'unet', -- selects model to use for netG 52 | n_layers_D = 0, -- only used if which_model_netD=='n_layers' 53 | lambda = 100, -- weight on L1 term in objective 54 | } 55 | 56 | -- one-line argument parser. 
parses enviroment variables to override the defaults 57 | for k,v in pairs(opt) do opt[k] = tonumber(os.getenv(k)) or os.getenv(k) or opt[k] end 58 | print(opt) 59 | 60 | local input_nc = opt.input_nc 61 | local output_nc = opt.output_nc 62 | -- translation direction 63 | local idx_A = nil 64 | local idx_B = nil 65 | 66 | if opt.which_direction=='AtoB' then 67 | idx_A = {1, input_nc} 68 | idx_B = {input_nc+1, input_nc+output_nc} 69 | elseif opt.which_direction=='BtoA' then 70 | idx_A = {input_nc+1, input_nc+output_nc} 71 | idx_B = {1, input_nc} 72 | else 73 | error(string.format('bad direction %s',opt.which_direction)) 74 | end 75 | 76 | if opt.display == 0 then opt.display = false end 77 | 78 | opt.manualSeed = torch.random(1, 10000) -- fix seed 79 | print("Random Seed: " .. opt.manualSeed) 80 | torch.manualSeed(opt.manualSeed) 81 | torch.setdefaulttensortype('torch.FloatTensor') 82 | 83 | -- create data loader 84 | local data_loader = paths.dofile('data/data.lua') 85 | print('#threads...' .. 
opt.nThreads) 86 | local data = data_loader.new(opt.nThreads, opt) 87 | print("Dataset Size: ", data:size()) 88 | tmp_d, tmp_paths = data:getBatch() 89 | 90 | ---------------------------------------------------------------------------- 91 | local function weights_init(m) 92 | local name = torch.type(m) 93 | if name:find('Convolution') then 94 | m.weight:normal(0.0, 0.02) 95 | m.bias:fill(0) 96 | elseif name:find('BatchNormalization') then 97 | if m.weight then m.weight:normal(1.0, 0.02) end 98 | if m.bias then m.bias:fill(0) end 99 | end 100 | end 101 | 102 | 103 | local ndf = opt.ndf 104 | local ngf = opt.ngf 105 | local real_label = 1 106 | local fake_label = 0 107 | 108 | function defineG(input_nc, output_nc, ngf) 109 | local netG = nil 110 | if opt.which_model_netG == "encoder_decoder" then netG = defineG_encoder_decoder(input_nc, output_nc, ngf) 111 | elseif opt.which_model_netG == "unet" then netG = defineG_unet(input_nc, output_nc, ngf) 112 | elseif opt.which_model_netG == "unet_128" then netG = defineG_unet_128(input_nc, output_nc, ngf) 113 | else error("unsupported netG model") 114 | end 115 | 116 | netG:apply(weights_init) 117 | 118 | return netG 119 | end 120 | 121 | function defineD(input_nc, output_nc, ndf) 122 | local netD = nil 123 | if opt.condition_GAN==1 then 124 | input_nc_tmp = input_nc 125 | else 126 | input_nc_tmp = 0 -- only penalizes structure in output channels 127 | end 128 | 129 | if opt.which_model_netD == "basic" then netD = defineD_basic(input_nc_tmp, output_nc, ndf) 130 | elseif opt.which_model_netD == "n_layers" then netD = defineD_n_layers(input_nc_tmp, output_nc, ndf, opt.n_layers_D) 131 | else error("unsupported netD model") 132 | end 133 | 134 | netD:apply(weights_init) 135 | 136 | return netD 137 | end 138 | 139 | 140 | -- load saved models and finetune 141 | if opt.continue_train == 1 then 142 | print('loading previously trained netG...') 143 | netG = util.load(paths.concat(opt.checkpoints_dir, opt.name, 'latest_net_G.t7'), 
opt) 144 | print('loading previously trained netD...') 145 | netD = util.load(paths.concat(opt.checkpoints_dir, opt.name, 'latest_net_D.t7'), opt) 146 | else 147 | print('define model netG...') 148 | netG = defineG(input_nc, output_nc, ngf) 149 | print('define model netD...') 150 | netD = defineD(input_nc, output_nc, ndf) 151 | end 152 | 153 | print(netG) 154 | print(netD) 155 | 156 | 157 | local criterion = nn.BCECriterion() 158 | local criterionAE = nn.AbsCriterion() 159 | --------------------------------------------------------------------------- 160 | optimStateG = { 161 | learningRate = opt.lr, 162 | beta1 = opt.beta1, 163 | } 164 | optimStateD = { 165 | learningRate = opt.lr, 166 | beta1 = opt.beta1, 167 | } 168 | ---------------------------------------------------------------------------- 169 | local real_A = torch.Tensor(opt.batchSize, input_nc, opt.fineSize, opt.fineSize) 170 | local real_B = torch.Tensor(opt.batchSize, output_nc, opt.fineSize, opt.fineSize) 171 | local fake_B = torch.Tensor(opt.batchSize, output_nc, opt.fineSize, opt.fineSize) 172 | local real_AB = torch.Tensor(opt.batchSize, output_nc + input_nc*opt.condition_GAN, opt.fineSize, opt.fineSize) 173 | local fake_AB = torch.Tensor(opt.batchSize, output_nc + input_nc*opt.condition_GAN, opt.fineSize, opt.fineSize) 174 | local errD, errG, errL1 = 0, 0, 0 175 | local epoch_tm = torch.Timer() 176 | local tm = torch.Timer() 177 | local data_tm = torch.Timer() 178 | ---------------------------------------------------------------------------- 179 | 180 | if opt.gpu > 0 then 181 | print('transferring to gpu...') 182 | require 'cunn' 183 | cutorch.setDevice(opt.gpu) 184 | real_A = real_A:cuda(); 185 | real_B = real_B:cuda(); fake_B = fake_B:cuda(); 186 | real_AB = real_AB:cuda(); fake_AB = fake_AB:cuda(); 187 | if opt.cudnn==1 then 188 | netG = util.cudnn(netG); netD = util.cudnn(netD); 189 | end 190 | netD:cuda(); netG:cuda(); criterion:cuda(); criterionAE:cuda(); 191 | print('done') 192 | else 193 | 
print('running model on CPU') 194 | end 195 | 196 | 197 | local parametersD, gradParametersD = netD:getParameters() 198 | local parametersG, gradParametersG = netG:getParameters() 199 | 200 | 201 | 202 | if opt.display then disp = require 'display' end 203 | 204 | 205 | function createRealFake() 206 | -- load real 207 | data_tm:reset(); data_tm:resume() 208 | local real_data, data_path = data:getBatch() 209 | data_tm:stop() 210 | 211 | real_A:copy(real_data[{ {}, idx_A, {}, {} }]) 212 | real_B:copy(real_data[{ {}, idx_B, {}, {} }]) 213 | 214 | if opt.condition_GAN==1 then 215 | real_AB = torch.cat(real_A,real_B,2) 216 | else 217 | real_AB = real_B -- unconditional GAN, only penalizes structure in B 218 | end 219 | 220 | -- create fake 221 | fake_B = netG:forward(real_A) 222 | 223 | if opt.condition_GAN==1 then 224 | fake_AB = torch.cat(real_A,fake_B,2) 225 | else 226 | fake_AB = fake_B -- unconditional GAN, only penalizes structure in B 227 | end 228 | end 229 | 230 | -- create closure to evaluate f(X) and df/dX of discriminator 231 | local fDx = function(x) 232 | netD:apply(function(m) if torch.type(m):find('Convolution') then m.bias:zero() end end) 233 | netG:apply(function(m) if torch.type(m):find('Convolution') then m.bias:zero() end end) 234 | 235 | gradParametersD:zero() 236 | 237 | -- Real 238 | local output = netD:forward(real_AB) 239 | local label = torch.FloatTensor(output:size()):fill(real_label) 240 | if opt.gpu>0 then 241 | label = label:cuda() 242 | end 243 | 244 | local errD_real = criterion:forward(output, label) 245 | local df_do = criterion:backward(output, label) 246 | netD:backward(real_AB, df_do) 247 | 248 | -- Fake 249 | local output = netD:forward(fake_AB) 250 | label:fill(fake_label) 251 | local errD_fake = criterion:forward(output, label) 252 | local df_do = criterion:backward(output, label) 253 | netD:backward(fake_AB, df_do) 254 | 255 | errD = (errD_real + errD_fake)/2 256 | 257 | return errD, gradParametersD 258 | end 259 | 260 | -- 
create closure to evaluate f(X) and df/dX of generator 261 | local fGx = function(x) 262 | netD:apply(function(m) if torch.type(m):find('Convolution') then m.bias:zero() end end) 263 | netG:apply(function(m) if torch.type(m):find('Convolution') then m.bias:zero() end end) 264 | 265 | gradParametersG:zero() 266 | 267 | -- GAN loss 268 | local df_dg = torch.zeros(fake_B:size()) 269 | if opt.gpu>0 then 270 | df_dg = df_dg:cuda(); 271 | end 272 | 273 | if opt.use_GAN==1 then 274 | local output = netD.output -- netD:forward{input_A,input_B} was already executed in fDx, so save computation 275 | local label = torch.FloatTensor(output:size()):fill(real_label) -- fake labels are real for generator cost 276 | if opt.gpu>0 then 277 | label = label:cuda(); 278 | end 279 | errG = criterion:forward(output, label) 280 | local df_do = criterion:backward(output, label) 281 | df_dg = netD:updateGradInput(fake_AB, df_do):narrow(2,fake_AB:size(2)-output_nc+1, output_nc) 282 | else 283 | errG = 0 284 | end 285 | 286 | -- unary loss 287 | local df_do_AE = torch.zeros(fake_B:size()) 288 | if opt.gpu>0 then 289 | df_do_AE = df_do_AE:cuda(); 290 | end 291 | if opt.use_L1==1 then 292 | errL1 = criterionAE:forward(fake_B, real_B) 293 | df_do_AE = criterionAE:backward(fake_B, real_B) 294 | else 295 | errL1 = 0 296 | end 297 | 298 | netG:backward(real_A, df_dg + df_do_AE:mul(opt.lambda)) 299 | 300 | return errG, gradParametersG 301 | end 302 | 303 | 304 | 305 | 306 | -- train 307 | local best_err = nil 308 | paths.mkdir(opt.checkpoints_dir) 309 | paths.mkdir(opt.checkpoints_dir .. '/' .. 
opt.name) 310 | 311 | -- save opt 312 | file = torch.DiskFile(paths.concat(opt.checkpoints_dir, opt.name, 'opt.txt'), 'w') 313 | file:writeObject(opt) 314 | file:close() 315 | 316 | -- parse diplay_plot string into table 317 | opt.display_plot = string.split(string.gsub(opt.display_plot, "%s+", ""), ",") 318 | for k, v in ipairs(opt.display_plot) do 319 | if not util.containsValue({"errG", "errD", "errL1"}, v) then 320 | error(string.format('bad display_plot value "%s"', v)) 321 | end 322 | end 323 | 324 | -- display plot config 325 | local plot_config = { 326 | title = "Loss over time", 327 | labels = {"epoch", unpack(opt.display_plot)}, 328 | ylabel = "loss", 329 | } 330 | 331 | -- display plot vars 332 | local plot_data = {} 333 | local plot_win 334 | 335 | local counter = 0 336 | for epoch = 1, opt.niter do 337 | epoch_tm:reset() 338 | for i = 1, math.min(data:size(), opt.ntrain), opt.batchSize do 339 | tm:reset() 340 | 341 | -- load a batch and run G on that batch 342 | createRealFake() 343 | 344 | -- (1) Update D network: maximize log(D(x,y)) + log(1 - D(x,G(x))) 345 | if opt.use_GAN==1 then optim.adam(fDx, parametersD, optimStateD) end 346 | 347 | -- (2) Update G network: maximize log(D(x,G(x))) + L1(y,G(x)) 348 | optim.adam(fGx, parametersG, optimStateG) 349 | 350 | -- display 351 | counter = counter + 1 352 | if counter % opt.display_freq == 0 and opt.display then 353 | createRealFake() 354 | if opt.preprocess == 'colorization' then 355 | local real_A_s = util.scaleBatch(real_A:float(),100,100) 356 | local fake_B_s = util.scaleBatch(fake_B:float(),100,100) 357 | local real_B_s = util.scaleBatch(real_B:float(),100,100) 358 | disp.image(util.deprocessL_batch(real_A_s), {win=opt.display_id, title=opt.name .. ' input'}) 359 | disp.image(util.deprocessLAB_batch(real_A_s, fake_B_s), {win=opt.display_id+1, title=opt.name .. ' output'}) 360 | disp.image(util.deprocessLAB_batch(real_A_s, real_B_s), {win=opt.display_id+2, title=opt.name .. 
' target'}) 361 | else 362 | disp.image(util.deprocess_batch(util.scaleBatch(real_A:float(),100,100)), {win=opt.display_id, title=opt.name .. ' input'}) 363 | disp.image(util.deprocess_batch(util.scaleBatch(fake_B:float(),100,100)), {win=opt.display_id+1, title=opt.name .. ' output'}) 364 | disp.image(util.deprocess_batch(util.scaleBatch(real_B:float(),100,100)), {win=opt.display_id+2, title=opt.name .. ' target'}) 365 | end 366 | end 367 | 368 | -- write display visualization to disk 369 | -- runs on the first batchSize images in the opt.phase set 370 | if counter % opt.save_display_freq == 0 and opt.display then 371 | local serial_batches=opt.serial_batches 372 | opt.serial_batches=1 373 | opt.serial_batch_iter=1 374 | 375 | local image_out = nil 376 | local N_save_display = 10 377 | local N_save_iter = torch.max(torch.Tensor({1, torch.floor(N_save_display/opt.batchSize)})) 378 | for i3=1, N_save_iter do 379 | 380 | createRealFake() 381 | print('save to the disk') 382 | if opt.preprocess == 'colorization' then 383 | for i2=1, fake_B:size(1) do 384 | if image_out==nil then image_out = torch.cat(util.deprocessL(real_A[i2]:float()),util.deprocessLAB(real_A[i2]:float(), fake_B[i2]:float()),3)/255.0 385 | else image_out = torch.cat(image_out, torch.cat(util.deprocessL(real_A[i2]:float()),util.deprocessLAB(real_A[i2]:float(), fake_B[i2]:float()),3)/255.0, 2) end 386 | end 387 | else 388 | for i2=1, fake_B:size(1) do 389 | if image_out==nil then image_out = torch.cat(util.deprocess(real_A[i2]:float()),util.deprocess(fake_B[i2]:float()),3) 390 | else image_out = torch.cat(image_out, torch.cat(util.deprocess(real_A[i2]:float()),util.deprocess(fake_B[i2]:float()),3), 2) end 391 | end 392 | end 393 | end 394 | image.save(paths.concat(opt.checkpoints_dir, opt.name , counter .. 
'_train_res.png'), image_out) 395 | 396 | opt.serial_batches=serial_batches 397 | end 398 | 399 | -- logging and display plot 400 | if counter % opt.print_freq == 0 then 401 | local loss = {errG=errG and errG or -1, errD=errD and errD or -1, errL1=errL1 and errL1 or -1} 402 | local curItInBatch = ((i-1) / opt.batchSize) 403 | local totalItInBatch = math.floor(math.min(data:size(), opt.ntrain) / opt.batchSize) 404 | print(('Epoch: [%d][%8d / %8d]\t Time: %.3f DataTime: %.3f ' 405 | .. ' Err_G: %.4f Err_D: %.4f ErrL1: %.4f'):format( 406 | epoch, curItInBatch, totalItInBatch, 407 | tm:time().real / opt.batchSize, data_tm:time().real / opt.batchSize, 408 | errG, errD, errL1)) 409 | 410 | local plot_vals = { epoch + curItInBatch / totalItInBatch } 411 | for k, v in ipairs(opt.display_plot) do 412 | if loss[v] ~= nil then 413 | plot_vals[#plot_vals + 1] = loss[v] 414 | end 415 | end 416 | 417 | -- update display plot 418 | if opt.display then 419 | table.insert(plot_data, plot_vals) 420 | plot_config.win = plot_win 421 | plot_win = disp.plot(plot_data, plot_config) 422 | end 423 | end 424 | 425 | -- save latest model 426 | if counter % opt.save_latest_freq == 0 then 427 | print(('saving the latest model (epoch %d, iters %d)'):format(epoch, counter)) 428 | torch.save(paths.concat(opt.checkpoints_dir, opt.name, 'latest_net_G.t7'), netG:clearState()) 429 | torch.save(paths.concat(opt.checkpoints_dir, opt.name, 'latest_net_D.t7'), netD:clearState()) 430 | end 431 | 432 | end 433 | 434 | 435 | parametersD, gradParametersD = nil, nil -- nil them to avoid spiking memory 436 | parametersG, gradParametersG = nil, nil 437 | 438 | if epoch % opt.save_epoch_freq == 0 then 439 | torch.save(paths.concat(opt.checkpoints_dir, opt.name, epoch .. '_net_G.t7'), netG:clearState()) 440 | torch.save(paths.concat(opt.checkpoints_dir, opt.name, epoch .. 
-- modified from https://github.com/NVIDIA/torch-cudnn/blob/master/convert.lua
-- removed error on nngraph

-- modules that can be converted to nn seamlessly
local layer_list = {
  'BatchNormalization',
  'SpatialBatchNormalization',
  'SpatialConvolution',
  'SpatialCrossMapLRN',
  'SpatialFullConvolution',
  'SpatialMaxPooling',
  'SpatialAveragePooling',
  'ReLU',
  'Tanh',
  'Sigmoid',
  'SoftMax',
  'LogSoftMax',
  'VolumetricBatchNormalization',
  'VolumetricConvolution',
  'VolumetricFullConvolution',
  'VolumetricMaxPooling',
  'VolumetricAveragePooling',
}

-- Walks `net` and converts every convertible layer to the `dst` backend
-- (`nn` or `cudnn`), e.g.: net = cudnn_convert_custom(net, cudnn)
-- Same as cudnn.convert except the nn.gModule (nngraph) guard is
-- commented out, so graph modules are converted as well.
-- `exclusion_fn(x)`, when given, may return true to leave module x untouched.
function cudnn_convert_custom(net, dst, exclusion_fn)
  return net:replace(function(x)
    --if torch.type(x) == 'nn.gModule' then
    --  io.stderr:write('Warning: cudnn.convert does not work with nngraph yet. Ignoring nn.gModule')
    --  return x
    --end
    local y = 0
    local src = dst == nn and cudnn or nn
    local src_prefix = src == nn and 'nn.' or 'cudnn.'
    local dst_prefix = dst == nn and 'nn.' or 'cudnn.'

    -- Build a dst-backend twin of module x for layer name v by copying
    -- every field of x onto a fresh object carrying dst's metatable.
    local function convert(v)
      local y = {}
      torch.setmetatable(y, dst_prefix..v)
      if v == 'ReLU' then y = dst.ReLU() end -- because parameters
      for k,u in pairs(x) do y[k] = u end
      if src == cudnn and x.clearDesc then x.clearDesc(y) end
      if src == cudnn and v == 'SpatialAveragePooling' then
        y.divide = true
        -- BUG FIX: the original read v.mode, but v is the layer-name
        -- string; the cuDNN pooling mode is a field of the module being
        -- converted (x), as in upstream NVIDIA convert.lua.
        y.count_include_pad = x.mode == 'CUDNN_POOLING_AVERAGE_COUNT_INCLUDE_PADDING'
      end
      return y
    end

    if exclusion_fn and exclusion_fn(x) then
      return x
    end
    local t = torch.typename(x)
    if t == 'nn.SpatialConvolutionMM' then
      y = convert('SpatialConvolution')
    elseif t == 'inn.SpatialCrossResponseNormalization' then
      y = convert('SpatialCrossMapLRN')
    else
      for i,v in ipairs(layer_list) do
        if torch.typename(x) == src_prefix..v then
          y = convert(v)
        end
      end
    end
    -- y stays 0 when no conversion applied; return the module unchanged then
    return y == 0 and x or y
  end)
end
-- default preprocessing
--
-- Prepares an image for the network: channels are flipped from RGB to
-- BGR and pixel values are rescaled from [0,1] to [-1,1].
function util.preprocess(img)
    -- RGB to BGR
    local perm = torch.LongTensor{3, 2, 1}
    img = img:index(1, perm)

    -- [0,1] to [-1,1]
    img = img:mul(2):add(-1)

    -- check that input is in expected range
    assert(img:max() <= 1, "badly scaled inputs")
    assert(img:min() >= -1, "badly scaled inputs")

    return img
end

-- Undo util.preprocess: BGR to RGB and [-1,1] back to [0,1].
function util.deprocess(img)
    -- BGR to RGB
    local perm = torch.LongTensor{3, 2, 1}
    img = img:index(1, perm)

    -- [-1,1] to [0,1]
    img = img:add(1):div(2)

    return img
end

-- Apply util.preprocess to every image of a 4-D batch (in place).
function util.preprocess_batch(batch)
    for i = 1, batch:size(1) do
        batch[i] = util.preprocess(batch[i]:squeeze())
    end
    return batch
end

-- Apply util.deprocess to every image of a 4-D batch (in place).
function util.deprocess_batch(batch)
    for i = 1, batch:size(1) do
        batch[i] = util.deprocess(batch[i]:squeeze())
    end
    return batch
end



-- preprocessing specific to colorization

-- Convert a network-range (L, AB) pair back to an RGB image in [0,1].
-- L is mapped from [-1,1] to Lab lightness [0,100]; AB is clamped to
-- [-1,1] and mapped to Lab chroma [-110,110].
function util.deprocessLAB(L, AB)
    local L2 = torch.Tensor(L:size()):copy(L)
    if L2:dim() == 3 then
        L2 = L2[{1, {}, {} }]
    end
    local AB2 = torch.Tensor(AB:size()):copy(AB)
    AB2 = torch.clamp(AB2, -1.0, 1.0)
    L2 = L2:add(1):mul(50.0)
    AB2 = AB2:mul(110.0)

    L2 = L2:reshape(1, L2:size(1), L2:size(2))

    -- FIX: im_lab/im_rgb were accidental globals in the original; declare
    -- them local so repeated calls do not pollute the global environment.
    local im_lab = torch.cat(L2, AB2, 1)
    local im_rgb = torch.clamp(image.lab2rgb(im_lab):mul(255.0), 0.0, 255.0) / 255.0

    return im_rgb
end

-- Convert a network-range L channel ([-1,1]) into a 3-channel greyscale
-- RGB image in [0,1].
function util.deprocessL(L)
    local L2 = torch.Tensor(L:size()):copy(L)
    L2 = L2:add(1):mul(255.0 / 2.0)

    if L2:dim() == 2 then
        L2 = L2:reshape(1, L2:size(1), L2:size(2))
    end
    -- result-tensor calling form: replicates L2 three times along dim 1
    L2 = L2:repeatTensor(L2, 3, 1, 1) / 255.0

    return L2
end

-- Deprocess every L image in a batch; returns a Lua table of tensors.
function util.deprocessL_batch(batch)
    local batch_new = {}
    for i = 1, batch:size(1) do
        batch_new[i] = util.deprocessL(batch[i]:squeeze())
    end
    return batch_new
end

-- Deprocess matching (L, AB) batches; returns a Lua table of RGB tensors.
function util.deprocessLAB_batch(batchL, batchAB)
    local batch = {}
    for i = 1, batchL:size(1) do
        batch[i] = util.deprocessLAB(batchL[i]:squeeze(), batchAB[i]:squeeze())
    end
    return batch
end


-- Rescale every image of a 4-D batch to s1 x s2.
function util.scaleBatch(batch, s1, s2)
    local scaled_batch = torch.Tensor(batch:size(1), batch:size(2), s1, s2)
    for i = 1, batch:size(1) do
        scaled_batch[i] = image.scale(batch[i], s1, s2):squeeze()
    end
    return scaled_batch
end



-- Wrap a single 3-D image as a batch of one.
function util.toTrivialBatch(input)
    return input:reshape(1, input:size(1), input:size(2), input:size(3))
end

-- Inverse of util.toTrivialBatch: unwrap the single image.
function util.fromTrivialBatch(input)
    return input[1]
end



-- Scale an image to loadSize x loadSize, replicating a single grey
-- channel to 3 channels first.
function util.scaleImage(input, loadSize)
    -- replicate bw images to 3 channels
    if input:size(1) == 1 then
        input = torch.repeatTensor(input, 3, 1, 1)
    end

    input = image.scale(input, loadSize, loadSize)

    return input
end

-- Width/height aspect ratio of the image stored at `path`.
function util.getAspectRatio(path)
    local input = image.load(path, 3, 'float')
    local ar = input:size(3) / input:size(2)
    return ar
end

-- Load the image at `path`, scale it to loadSize and preprocess it;
-- keeps only the first channel when nc == 1.
function util.loadImage(path, loadSize, nc)
    local input = image.load(path, 3, 'float')
    input = util.preprocess(util.scaleImage(input, loadSize))

    if nc == 1 then
        input = input[{{1}, {}, {}}]
    end

    return input
end
#!/bin/bash
# Unpack the geologic models, build the A/B pair directory tree, render
# the training-image pairs, then split them into train/val/test sets.

cd dat/zips

unzip fault_dyke_fold_model.zip
mv model/ ../fault_dyke_fold_model
unzip fold_dyke_fault_model.zip
mv model/ ../fold_dyke_fault_model
unzip gbasin_simplified_model.zip
mv model/ ../gbasin_simplified_model

rm -rf __MACOSX/

cd ..

mkdir output_seismic
mkdir pairs
cd pairs
mkdir A B
cd A
mkdir train test val
cd ../B
mkdir train test val

cd ../../../img_frmt

python modeler.py
python pair_generator.py

cd ../pix2pix

python2 scripts/combine_A_and_B.py --fold_A ../dat/pairs/A/ --fold_B ../dat/pairs/B --fold_AB ../dat/pairs/

cd ../dat/pairs/train

# Image indices run from FLOOR+1 up to RANGE-1.
FLOOR=1
RANGE=17600

# BUG FIX: the original drew indices with $RANDOM *with* replacement, so
# duplicate draws re-selected files already moved and the val/test splits
# came out smaller than 880/440.  shuf samples without replacement.
# NOTE(review): the two shuf calls are independent, so a val index can
# still collide with a test index (mv then fails on the missing file),
# matching the original script's tolerance of such failures.
for number in $(shuf -i "$((FLOOR + 1))-$((RANGE - 1))" -n 880)
do
    mv "${number}.jpg" ../val/
    echo
done

for number in $(shuf -i "$((FLOOR + 1))-$((RANGE - 1))" -n 440)
do
    mv "${number}.jpg" ../test/
    echo
done
Porosity_percent Permeability_log Permeability_md label lithology 6.4 1.3 19.95 1 SS 5.5 0.71 5.13 1 SS 5.4 0.22 1.66 1 SS 3.5 0.03 1.07 1 SS 3.6 -0.18 0.66 1 SS 4.05 -0.4 0.4 1 SS 4.5 -0.48 0.33 1 SS 3.3 -0.57 0.27 1 SS 3.7 -0.61 0.25 1 SS 4.1 -0.85 0.14 1 SS 4 -0.96 0.11 1 SS 2.8 -0.71 0.19 1 SS 1.75 -0.7 0.2 1 SS 1.5 -0.5 0.32 1 SS 8 -0.31 0.49 1 SS 8.8 -0.61 0.25 1 SS 8.6 -0.55 0.28 1 SS 7.8 -0.61 0.25 1 SS 8.2 -0.69 0.21 1 SS 8.3 -0.63 0.23 1 SS 7.6 -0.8 0.16 1 SS 7.1 -0.82 0.15 1 SS 7 -0.77 0.17 1 SS 6.7 -0.8 0.16 1 SS 7.25 -0.96 0.11 1 SS 6.45 -0.95 0.11 1 SS 7.5 2.21 162.18 2 CBP 8.75 3.3 2000 2 CBP 4 3.3 2000 2 CBP 6.5 1.4 25.12 2 CBP 5.7 1.48 30.2 2 CBP 4.1 1.41 25.7 2 CBP 4.3 0.76 5.75 2 CBP 3.3 0.49 3.09 2 CBP 3.3 0.18 1.51 2 CBP 3.1 0.22 1.66 2 CBP 2.3 0.19 1.55 2 CBP 3.6 -0.02 0.95 2 CBP 2.2 -0.31 0.49 2 CBP 0.02 -0.13 0.74 2 CBP 0.9 -0.5 0.32 2 CBP 2.2 -0.54 0.29 2 CBP 3 -0.55 0.28 2 CBP 11.30 2.32 208.93 3 CU 11.55 3.30 2000.00 3 CU 8.80 1.91 81.28 3 CU 9.45 2.70 501.19 3 CU 9.20 2.76 575.44 3 CU 8.00 2.65 446.68 3 CU 7.25 2.17 147.91 3 CU 9.00 3.30 2000.00 3 CU 8.35 3.30 2000.00 3 CU 8.45 -0.60 0.25 3 CU -------------------------------------------------------------------------------- /Matteo_Niccoli/README.md: -------------------------------------------------------------------------------- 1 | # Machine Learning in Geoscience V: Introduction to Classification with SVMs 2 | Repo for the January 2018 ML focus issue published on the [CSEG Recorder](https://csegrecorder.com/editions/issue/2018-01) 3 | 4 | Open access article [here](https://csegrecorder.com/articles/view/machine-learning-in-geoscience-v-introduction-to-classification-with-svms) 5 | 6 | -------------------------------------------------------------------------------- /Matteo_Niccoli/aadm.txt: -------------------------------------------------------------------------------- 1 | 5522.3322421974 1.9868048293256 1986.8048293256 1 "Sh" 2 | 5587.187152432 2.2489267581903 2248.9267581903 1 "Sh" 3 
| 5862.8205209289 2.1273238015005 2127.3238015005 1 "Sh" 4 | 5968.20975006 1.9138430553117 1913.8430553117 1 "Sh" 5 | 5838.4999295909 2.4110640337767 2411.0640337767 1 "Sh" 6 | 7435.5520941169 2.3624228511008 2362.4228511008 1 "Sh" 7 | 6924.8196760197 2.1435375290591 2143.5375290591 1 "Sh" 8 | 6608.6519886263 2.0732780429717 2073.2780429717 1 "Sh" 9 | 6422.1941217019 2.17326269625 2173.26269625 1 "Sh" 10 | 7103.1706791648 1.8868201760473 1886.8201760473 1 "Sh" 11 | 7200.4530445166 1.970591101767 1970.591101767 1 "Sh" 12 | 6965.3539949163 1.9678888138405 1967.8888138405 1 "Sh" 13 | 7459.8726854548 1.8003469624013 1800.3469624013 1 "Sh" 14 | 5376.4086941697 2.462407504379 2462.407504379 2 "SS" 15 | 4849.4625485139 2.4570029285262 2457.0029285262 2 "SS" 16 | 5627.7214713286 2.597521900701 2597.521900701 2 "SS" 17 | 5271.0194650385 2.7488500245817 2748.8500245817 2 "SS" 18 | 5044.0272792176 2.6137356282597 2613.7356282597 2 "SS" 19 | 4825.1419571759 2.892071284683 2892.071284683 2 "SS" 20 | 5522.3322421974 2.8704529812715 2870.4529812715 2 "SS" 21 | 5076.4547343349 2.7623614642139 2762.3614642139 2 "SS" 22 | 5279.1263288178 2.6596745230091 2659.6745230091 2 "SS" 23 | -------------------------------------------------------------------------------- /Matteo_Niccoli/figure1_crossplot_robustness_margin.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import matplotlib.pyplot as plt\n", 12 | "from matplotlib.colors import ListedColormap\n", 13 | "from matplotlib.colors import LinearSegmentedColormap\n", 14 | "import pylab as pl\n", 15 | "import numpy as np \n", 16 | "%matplotlib inline" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "#### Import data for the figure\n", 24 | "A subset from Figure 2 in:\n", 25 | "\n", 26 | 
"Alessandro Amato del Monte (2015), [Seismic petrophysics: Part 2](https://doi.org/10.1190/tle34060700.1), The Leading Edge, 34 (6), pages 700-704, and [SEG wiki](https://wiki.seg.org/wiki/Seismic_petrophysics:_Part_2)." 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": 2, 32 | "metadata": { 33 | "collapsed": true 34 | }, 35 | "outputs": [], 36 | "source": [ 37 | "data = np.loadtxt('aadm.txt', usecols = (0,1,3))" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 3, 43 | "metadata": { 44 | "collapsed": true 45 | }, 46 | "outputs": [], 47 | "source": [ 48 | "X = [[vpvs, ip] for vpvs, ip in zip(data[:,0], data[:,1])]\n", 49 | "Y = data[:,2]" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 4, 55 | "metadata": { 56 | "collapsed": true 57 | }, 58 | "outputs": [], 59 | "source": [ 60 | "plt.rcParams.update({'font.size': 17})" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "#### Crossplot Vp/Vs ratio versus Ip" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 5, 73 | "metadata": {}, 74 | "outputs": [ 75 | { 76 | "data": { 77 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAhMAAAH3CAYAAAABnt0tAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3X+cXndd5/3Xx6ahM5NaEyhukjIpS4ECe680CawFkqGu\nJbE0qwnL3VUrd7No4m5cCk12uXdFBRZ18e4oi6ZKgncUlVLFVmhaBlHIRCRbTKYqLiwlQH5IApYm\nbZnJlDTrZ/8419DpZDJzzXWu3/N6Ph7zuJhzzve6Pqcn5HrnnO+PyEwkSZJq9V2tLkCSJHU2w4Qk\nSSrFMCFJkkoxTEiSpFIME5IkqRTDhCRJKsUwIUmSSmlomIiIF0XEXRHx5YgYi4jTEfFARLwhIqLK\n9/ihSpvxiPh6RLw3IhY1sm5JklS9BQ1+/+cA3wP8PvD3wDOA1wC/C/wz4D/N1Dgirgf2Ag8Abwau\nBN4CXF15H0mS1GLRihkwI+Je4AeB78nMb89w3OeAhcA/nzguIn4S2A1syMy9zahXkiRdWKv6TBwF\nLqn8TCsiXkRx92L3lMDxAWAUuKmhFUqSpKo0+jEHABHRC/QClwLXAZuBv8rMx2ZotrLy+tnJGzPz\nbET89aT9kiSphZp1Z+KdwMPAV4DfBg4Ar5+lzdLK68lp9p0EltWtOkmSVLOm3JkA3gcMAZcD64Ar\ngNlGZPRUXqfrU/HEpP3niYgtwBaAvr6+VVdfffVc65UkqWMdOnTom5l5ebM+rylhIjO/BHyp8uud\nEfFLwP6IeGFmfvMCzcYrr8+YZt8lk/ZP93m7gF0Aq1evzoMHD9ZWuCRJHSgijjbz81rVAfNDwBJg\n4wzHTDzemO5xxlLgRL2LkiRJc9eqMDHxiGLxDMeMVF5fNnljRCwEXjppvyRJaqFGz4D57Avs+unK\n619VjuuNiKsj4lkTB2TmF4DPAz8VEZMfdbyBor/FHzWgZEmSNEeN7jPxvohYAgwDx4BnAv8KeAXw\nx5n5qcpxLwc+BbwDePuk9tuB+4BPRsTvUsyAeRvw58C9Da5dkiRVodGPOT5E0VHyJ4E7gJ+tfOY2\nqph0KjOHgA0Us2C+F3gj8H5gY7Zi6k5JknSeht6ZyMy7gLuqOG4fMO3CX5l5P3B/fSuTJEn14hLk\nkiSpFMOEJEkqxTAhSZJKMUxIkqRSDBOSJKkUw4QkSSrFMCFJkkoxTEiSpFIME5IkqRTDhCRJKsUw\nIUmSSjFMSJKkUgwTkiSpFMOEJEkqxTAhSZJKMUy0Sibs2wcbNsDy5bB4cfG6YQMMDxf7JUnqAAta\nXcC8NDQEW7bA6dMwNvZUcHj0UTh5sggZS5bArl2wbl1LS5UkaTbemWi2PXtg0yY4fhxGR8+/A5FZ\nbD92DDZuLI6XJKmNGSaaaWgItm2D8fHqjh8fL44fGmpsXZIklWCYaJbM4tFGtUFiwvg4bN1qHwpJ\nUtsyTDTL8HDRR6IWp07B/v31rUeSpDoxTDTL4GDR2bIWY2NFe82NI2YkqSkczdEsIyO1f3llwqFD\n9a2n2zliRpKaxjsTzXLmTLn2c+1rMZ85YkaSmsow0Sy9veXa9/TUp45u54gZSWo6w0SzrFwJEbW1\njYBVq+pbTzdyxIwktYRholm2b4e+vtra9vUV7TUzR8xIUksYJpplYKAYTVCLJUtg7dr61tONHDEj\nSS1hmGiWCNi9e+59H3p6ihEHtT4imU8cMSNJLWGYaKZ162DnzuoDRU8P3HGHQxer5YgZSWoJw0Sz\nbd4Md98N/f2waNH5dxwiiu39/XDPPXDLLS0psyM5YkaSWsIw0Qrr18ORI7B3L7z2tbBsWdGfYtky\nuPFGuO++Yr93JObGETOS1BLOgNkqEUWnzIGBVlfSPbZvL2a2HB2de1tHzEhSzbwzoe7hiBlJagnD\nhLqHI2YkqSUME+oujpiRpKYzTKj7OGJGkprKMKHu5IgZSWoaR
3OoezliRpKawjsTkiSpFMOEJEkq\nxTAhSZJKMUxIkqRSDBOSJKmUhoaJiHhZRLw3Ij4XEaMRcSIi9kbE6irbXxQRPx0Rfx0R34qIf4iI\nT0bE+kbWLUmSqtfoOxNvBW4C9gNvAd4DvAh4ICJuqKL9e4DfBD4P7ADeDXwv8LGI2NiQiiVJ0pxE\nZjbuzSNeARzMzLOTtj2TIhx8LTNXztD2EuAxYG9mvm7S9iXACeDjmfnDs9WwevXqPHjwYImzkCSp\ns0TEocys6ilAPTT0zkRmfmZykKhsewTYB7x4luaXAAuBk1O2nwbGgTN1KlOSJJXQqhkwlwGPzHRA\nZj4aEX8DbI6Iz1IEkEXAduBi4NcaXaQkSZpd08NERKwBXkl1YeAm4E7gdydt+zpwXWb+VQPKkyRJ\nc9TUoaERsZQiHBwD3llFk1Hg7yiCx0ZgK0U/io9GxAtm+JwtEXEwIg4+/PDD5QuXJEkX1LQwERGX\nAfdTPKrYkJmPzXL8IuAzwJcz87bM/JPM3AWsoehP8e4Ltc3MXZm5OjNXX3755fU7CUmSdJ6mPOaI\niF5gL/BC4DWZ+bkqmr0O6AfumbwxMx+OiL+gCBWSJKnFGn5nIiIWAncD3w+8PjM/XWXTZZXXi6bZ\ntwCXTy8nE/btgw0bYPlyWLy4eN2wAYaHi/2SJFWhoV/IEXER8EHgeuDmzLzvAsf1UtyF+GZmfrOy\n+YuV15uBBycd2w+sBR5oVN1db2gItmyB06dhbOyp4PDoo3DyZBEyliyBXbtg3bqWlipJan+N/tf9\n7RSPKz4BXBQRN0/Zf09mjgEvBz4FvAN4e2XfvcDfArdFxBWV/ZcD/56iz8S7Glx7d9qzB7Ztg/Hx\n6fdnwuho8bNxI+zcCZs3N7dGSVJHaXSYuKbyen3lZ6rnAmPTNczMJyvDSN9KMZJjA/Ak8FngXZk5\nXP9yu9zQ0MxBYqrx8eL4pUthvcuhSJKm19DptNuB02lXZMKKFXD8+Nzb9vfDkSMQMbfPGx6GwUEY\nGYEzZ6C3F1auhB07YO3aub2fJKlqzZ5O206M88XwcNFHohanTsH+/TAwUN3x9smQpHmlqZNWqYUG\nB4sv9lqMjRXtq7FnD2zaVNwBGR09f1TIRJ+MY8eKPhl79tRWkySpbRgm5ouRkdqHe2bCoUOzH1dr\nn4yhodrqkiS1BcPEfHGm5CKrswWEzOLRRrVBYvL7bt3qvBaS1MEME/NFb2+59j09M++vR58MSVJH\nMkzMFytX1j56IgJWrZr5mGb1yZAktR3DxHyxfTv09dXWtq+vaD+TZvTJkCS1JcPEfDEwUKy/UYsl\nS4p5IWbS6D4ZkqS2ZZiYLyJg9+7Z+z5M1dNTzAcx2yOSRvfJkCS1LcPEfLJuXbHWRrVf3D09cMcd\n1U0s1eg+GZKktmWYmG82b4a77y6myF606PwAEFFs7++He+6BW26p7n0b3SdDktS2DBPz0fr1xVob\ne/fCa18Ly5YV/SmWLYMbb4T77iv2z2Wq60b3yZAktS3X5pivIooAUO16G9W83+7dxRTZc+lMWW2f\nDElS2/LOhOqnkX0yJEltyzCh+mpUnwxJUtsyTKj+GtEnQ5LUtuwzocaod58MSVLb8s6EJEkqxTAh\nSZJKMUxIkqRSDBOSJKkUw4QkSSrFMCFJkkoxTEiSpFIME2qsTNi3DzZsgOXLi8mrli8vfh8eLvZL\nkjqak1apcYaGYMsWOH0axsaeCg6PPgonTxYhY8mSYqEvZ8OUpI7lnQk1xp49sGkTHD8Oo6Pn34HI\nLLYfO1asNLpnT2vqlCSVZphQ/Q0NwbZt1S9FPj5eHD801Ni6JEkNYZhQfWUWjzaqDRITxsdh61b7\nUEhSBzJMqL6Gh4s+ErU4dQr2769vPZKkhjNMqL4GB4vOlrUYGyvaS5I6imFC9TUyUvujikw4dKi+\n9UiSGs4wofo6c6Zc+7n2t
ZAktZxhQvXV21uufU9PfeqQJDWNYUL1tXIlRNTWNgJWrapvPZKkhjNM\nqL62b4e+vtra9vUV7SVJHcUwofoaGCjW36jFkiWwdm1965EkNZxhQvUVAbt3z73vQ09PsUZHrY9I\nJEktY5iYL5q5eue6dbBzZ/WBoqcH7rjDxb4kqUO5auh80IrVOzdvhqVLiymyT516+udCcQeir89V\nQyWpC3hnotu1cvXO9evhyBHYuxde+1pYtqy4I7JsGdx4I9x3X7HfICFJHc07E92s1tU7ly4tgkA9\nRBSdMgcG6vN+kqS2452JbuXqnbVpZt8SSeoS3pnoVvVYvXO+3U1oRd8SSeoCDb0zEREvi4j3RsTn\nImI0Ik5ExN6IWD2H9+iNiHdGxEMR8UREfCMi7o+IKxtXeRdw9c65aWXfEknqcI2+M/FWYA3wYeC9\nwGJgK/BARGzIzPtnahwRi4BPAs8HdgP/q/IeLweWAEcaVnmnc/XO6rVD3xJJ6mCNDhO/CvxYZp6d\n2BARvw18HngXMGOYAH4ReAGwOjMPN6zKbuTqndUp27fkyBEn2pI07zX0MUdmfmZykKhsewTYB7x4\nprYR8d3ATwG7M/NwRFwcES4pWS1X76xOPfqWSNI816rRHMuAR2Y5Zg3QA3wxIj4EnAHORMSDEXFd\nowvseK7eWR37lkhSaU0PExGxBngl8KFZDn1+5fWXgauAzcAbgUuBj0fEPPm2q5Grd1bHviWSVFpT\nh4ZGxFLgTuAY8M5ZDl800Qy4LjO/VXmPTwCHgZ8FNl3gc7YAWwD6+/vLF96JJlbvHB2de9v5tHqn\nfUskqbSm3ZmIiMsoOlwuAjZk5mOzNJn4W/qjE0ECIDOPA/uBV12oYWbuyszVmbn68ssvL1l5h3L1\nzurYt0SSSmtKmIiIXmAv8ELgxsz8XBXNTlRevzHNvm9QDBHVTFy9c3b2LZGk0hoeJiJiIXA38P3A\n6zPz01U2nXgYfcU0+64AHq5Ded1v82a4+27o74dFi87/4owotvf3wz33wC23tKTMlrFviSSV1ugZ\nMC8CPghcD7whM++7wHG9EXF1RDxrYltmPgSMAD8cEc+edOzVFI84Pt7I2ruKq3de2ETfklrMp74l\nkjSDRnfAvB14HfAJ4KKIuHnK/nsyc4xiRstPAe8A3j5p/1uAPwM+ExG/BSwE3gQ8VjlW1XL1zulN\n9C3ZuHFunSnnW98SSZpBo8PENZXX6ys/Uz0XuOAg/8zcHxE/SDFb5juAcxSh462ZeaS+pWremuhb\nUu2U2vOxb4kkzaChYSIzX13lcfsohoBOt28/4L1kNdbmzcVaG1u3FjNbTl41FIo7EH19rhoqSdNo\n1QyYUvuxb4kk1aSpk1ZJbc++JZI0Z96ZkCRJpRgmJElSKYYJSZJUimFCkiSVYpiQJEmlGCYkSVIp\nhglJklSKYUKSJJVimJAkSaUYJiRJUimGCUmSVIphQpIklWKYkCRJpRgmJElSKYYJSZJUimFCkiSV\nYpiQJEmlGCYkSVIphglJklSKYUKSJJVimJAkSaUYJiRJUimGCbWXTNi3DzZsgOXLYfHi4nXDBhge\nLvZLktqKYaId+AVaGBqCFSuK877vPjhxAh59tHi97z648Ua48kr4+MdbXakkaRLDRKv5BVrYswc2\nbYLjx2F09PwAlVlsP3YMNm4sjpcktQXDRCv5BVoYGoJt22B8vLrjx8eL44eGGluXJKkqholW8Qu0\nkAlbtlT/32HC+Dhs3Tp/HgFJUhszTLSCX6BPGR6G06dra3vqFOzfX996JElzZphoBb9AnzI4CGNj\ntbUdGyvaS5JayjDRCn6BPmVkpPY7LZlw6FB965EkzZlhohX8An3KmTPl2s/1UZEkqe4ME63gF+hT\nenvLte/pqU8dkqSaGSZawS/Qp6xcCRG1tY2AVavqW48kac4ME63gF+hTtm+Hvr7a2vb1Fe0
lSS1l\nmGgFv0CfMjBQTB9eiyVLYO3a+tYjSZozw0Qr+AX6lAjYvXvuj256emDXrtrv8EiS6sYw0Qp+gT7d\nunWwc2f1/z16euCOO4p2HSwz2XdkHxvu3MDyweUsfvdilg8uZ8OdGxg+Mkx20+RkkrpadPtfWKtX\nr86DBw+2uozp7dlT/ZTaE1+gt9zS8LJaZmiomOHz1KliPo3JfzYjikc8S5YUgarDg8TQ4SG23LuF\n00+cZuzsGMlT5xoEfQv7WNKzhF037mLdVZ19rpKaLyIOZebqZn2edyZaafNmuPtu6O+HRYvOv+MQ\nUWzv74d77unuIAGwfj0cOQJ798JrXwvLlhWPg5YtK1ZPve++Yn+HB4k9D+5h012bOP74cUbPjj4t\nSAAkyejZUY49doyNd21kz4NdusCbpK7hnYl2kFlMkX377cWEVuPjxZ2IVatgxw5Ys6b7Hm3MU0OH\nh9h01ybGz1U/V0jPgh7uvulu1l+1voGVSeomzb4zYZiQmiQzWfGeFRx//Pic2/Zf1s+RW48QhkpJ\nVfAxh9Slho8Oc/qJ2hZ4OzV+iv1Hu2iBN0ldpaFhIiJeFhHvjYjPRcRoRJyIiL0RMee0FBEXR8QX\nIiIj4m2NqFdqpMEDg4ydrW2Bt7GzYwwe6KIF3iR1lUbfmXgrcBOwH3gL8B7gRcADEXHDHN/rNuA5\n9S1Pap6REyPndbasVpIcOtlFC7xJ6iqNDhO/CjwnM7dl5u7M/BXg5cA3gXdV+yYRcQXwc8AvNaZM\nqfHOnCu3wNv4k120wJukrtLQMJGZn8nMs1O2PQLsA148h7f6NeBB4IP1q05qrt4F5RZ467m4ixZ4\nk9RVWtUBcxnwSDUHRsT1wCbgTQ2tSGqwlctWEtQ2GiMIVi3togXeJHWVpoeJiFgDvBL4UBXHLgR+\nHXh/Zj7Y6NqkRtp+7Xb6Fta2wFvfwj62X9tFC7xJ6ipNDRMRsRS4EzgGvLOKJtuBZwM/O8fP2RIR\nByPi4MMPPzz3QqUGGFgxwOJLalvgbUnPEtau6KIF3iR1laaFiYi4DLgfWARsyMzHZjn+OcDbgF/I\nzG/O5bMyc1dmrs7M1ZdffnnNNUv1FBHs3rCbngVz6/vQs6CHXTfucsIqSW2rKWEiInqBvcALgRsz\n83NVNPtFilEfH4uIKyPiSuCKyr7vqWy7pBH1So2y7qp17LxhZ9WBomdBD3fccIeLfUlqawsa/QGV\nfg93A98P/EhmfrrKpv2Vny9Ns2975ec6ipEhUsfYfM1mll66lK17t3Jq/JSrhkpVyEyGjw4zeGCQ\nkRMjnDl3ht4FvaxctpId1+5g7Yq13r1roYauzRERFwF3ARuBmzPzzgsc10sRHL458UgjIl4FPGvK\noc8G3kcxRPSPgE/P9gjEtTnUrjKT/Uf3c/uB2xk5OcL4k+P0XNzDqqWr2PGKHazpX+NfjhLFAnlb\n7t3C6SdOG76r1FULfUXErwFvBj4BfGCaQ+7JzLGIeDXwKeAdmfn2Gd7vSuCrwM9lZlWTXhkmJKlz\n7XlwD9vu31bVSrs9C3rYecNONl+zuQmVtbdmh4lGP+a4pvJ6feVnqucCtS1WIEnqakOHh6oOEgDj\n58bZdv82ll66lPVXrW9wdZrMJcglSW0nM1nxnhUcf/z4nNv2X9bPkVuPzOvHhC5BLkma94aPDnP6\nidM1tT01for9R/fXuSLNxDAhSWo7gwcGGTtb21PwsbNjDB4YrHNFmolhQpLUdkZOjDxt1MZcJMmh\nk4fqXJFmYpiQJLWdM+fOlGo//mR1nTZVH4YJSVLb6V3QW6p9z8Vzm7Ze5RgmJEltZ+WylQS1jcYI\nglVLV9W5Is3EMCFJajvbr91O38K+mtr2Lexj+7Xb61yRZmKYkCS1nYEVAyy+ZHFNbZf0LGHtirV1\nrkgzMUxIktpORLB7w+6qV9id0LOgh1037prXE1a1gmF
CktSW1l21jp037Kw6UPQs6OGOG+5wsa8W\naPgS5JIk1WrzNZtZeulStu7dyqnxU64a2qYME5Kktrb+qvUcufUI+4/u5/YDtzNycoTxJ8fpubiH\nVUtXseMVO1jTv8ZHGy1kmJAktb2IYODKAQauHGh1KZqGfSYkSVIphglJklRK1WEiIl4SEZumbLsu\nIv48Ig5FhDOESJI0D82lz8SvAAHcDRARy4GPAk8ADwO/EhEPZ+YH6l6lJElqW3N5zLESGJ70+48D\nFwEvzcwXAx8DttWxNkmS1AHmEiYWA9+Y9PsPAcOZ+bXK7/cCL6hXYZIkqTPMJUx8E7gCICL6gGuB\nP5u0/2IcaipJ0rwzly//vwD+XUR8nuKuxMUUfSYmvAD42nQNJUlS95pLmPgvwCeAD1d+f3dmfgkg\nIi4C/jXFow5JkjSPVB0mMvOrEXE18GLgscw8Oml3L/DvgL+pc32SJKnNzdhnIiK2RcQzJ37PzHOZ\n+bdTggSZ+a3M/EhmHmlQnZIkqU3N1gHz14ETEfEnEfG6iFjYjKIkSVLnmC1M/AhFJ8vXAH8IfD0i\n3hcRr2p4ZZIkqSPMGCYy86OZ+XrgnwBbKPpE/CQwHBFfiYh3RoRzS0iSNI9VNc9EZj6emb+dmdcB\n/RQjO0aBtwFfiIgHKv0rntXAWqW6ykz2HdnHhjs3sHxwOYvfvZjlg8vZcOcGho8Mk5mtLlGSOkKU\n+QszIl5KMa32jwJLgScz85I61VYXq1evzoMHD7a6DLWZocNDbLl3C6efOM3Y2TGSp/5/EAR9C/tY\n0rOEXTfuYt1V61pYqSTNXUQcyszVzfq8skuQHwO+Avw9xSJgF5euSGqwPQ/uYdNdmzj++HFGz44+\nLUgAJMno2VGOPXaMjXdtZM+De1pUqSR1hjmHiYh4RkT83xHxUeAk8BvACuA9QNNSkFSLocNDbLt/\nG+Pnxqs6fvzcONvu38bQ4aEGVyZJnavqSasi4l9SPNLYBFxKsfT4h4HfA/40M/+xIRVKdZKZbLl3\nS9VBYsL4uXG27t3KkVuPEBENqk6SOteMYSIivg+4maf6RCSwjyJA/HFmjja6QKleho8Oc/qJ0zW1\nPTV+iv1H9zNw5UCdq5KkzjfbnYkHK6//E3gv8AeTlhyXOsrggUHGzo7V1Hbs7BiDBwYNE5I0jdnC\nxHuA381M19xQxxs5MXJeZ8tqJcmhk4fqXJEkdYfZwsRm4J9ExL3AUGbWdo9YagNnzp0p1X78ybn1\ntZCk+WK20Ry/DFwBfAD4RkQMR8SOiHhR40uT6qt3QW+p9j0X99SpEknqLrNNp/0rmbkWeDbFXYqv\nAf8Z+LuIOBwR74mIH4iIqkeFSK2yctlKgtpGYwTBqqWr6lyRJHWHaqfTPp2Zf5CZP0YRLH4AuAdY\nB/wZ8M2I+KOI+Amn1Fa72n7tdvoW9tXUtm9hH9uv3V7niiSpO8x50qrM/N+ZOZyZ/zEzXwQ8H3g7\n8D3A+4GTEbGtvmVK5Q2sGGDxJYtrarukZwlrV6ytc0WS1B3KTqdNZn45M9+TmdcDzwT+DfBQ6cqk\nOosIdm/YTc+CufV96FnQw64bdzlhlSRdQC3TaX93RGyMiNsqPxsj4jKAzBzNzD/OzE/Uv1SpvHVX\nrWPnDTurDhQ9C3q444Y7XOxLkmYwp46TEfEfgV8AeuBpPdnGI+Idmfkr9SxOaoTN12xm6aVL2bp3\nK6fGT7lqqCSVNJe1Od4CvBv4C+DXgS9Wdl0N/AfglyPiycz8tbpXKdXZ+qvWc+TWI+w/up/bD9zO\nyMkRxp8cp+fiHlYtXcWOV+xgTf8aH21IUhUis7oZASPiqxTLjf9gTmkUEd9FMarjuZn53EnbXwb8\nBHAd8FzgcWAEeHtmHpzl85ZQDEfdALyI4m7IYWA3sCsz/3c1da9evToPHpzxoyRJ6ioRcSgzm7aS\n91z6THwvcPfUIAF
QWTH0jyvHTPZW4CZgP/AWium5XwQ8EBE3zPJ5rwD+G/Ctyut/Ao4CdwC/P4e6\nJUlSA82lz8TngOfNsP95lWMm+1XgxzLz7MSGiPht4PPAu4D7Z3i//wk8PzOPTNr2WxHxfuCNEfHL\nmfm3c6hfkiQ1wFzuTOwAbomIN1QeawDFI46IuAW4BXjarD6Z+ZnJQaKy7RGKZcxfPNOHZeZXpwSJ\nCX9ceZ2xvSRJao653Jn4OeCbwB7g9oj4SmX7P6WYX+Iw8PNTOqxlZk7XFX4Z8Mjcy/1OW0q0lyRJ\ndTSXMPECIIFjld8n+keMVX4WUsyGOaOIWAO8EpjzqI+IeAZwG3CCoh+GpC6XmQwfHWbwwCAjJ0Y4\nc+4MvQt6WblsJTuu3cHaFWsddSO12IyjOSLitRRLj1c1cmLWD4tYCvwVcA74vsx8bI7tfxv4t8DG\nzPyTGY7bAmwB6O/vX3X06NHai5bUMkOHh9hy7xZOP3Ha+UCkOWi30Rz3Aicqq4OWWjKxMkvm/cAi\nYEMNQeIXKILEz88UJAAyc1dmrs7M1ZdffnnNNUtqnT0P7mHTXZs4/vhxRs+OPi1IACTJ6NlRjj12\njI13bWTPg3taVKmk2cLEmykea7wJ+GxEfCEi/nNE9M/lQyKiF9gLvBC4MTOnjvqYrf1/oFhM7Dcy\n87/Opa2kzjN0eIht929j/Nx4VcePnxtn2/3bGDo81ODKJE1nxjCRme/NzJdRzHL5SxT9In4R+EpE\nfCoi/m1EfPdM7xERC4G7ge8HXp+Zn55LgZWRIv8d+AOKUCOpi2UmW+7dUnWQmDB+bpyte7dS7UR8\nkuqnqqGhmflQZv5cZj4PWEMxC+X/RbHk+Ncj4kMRcWNEXDS5XeX3DwLXA2/IzPume/+I6I2IqyPi\nWVO2v67yGfcBt0w3YZak7jJ8dJjTT5yuqe2p8VPsP2rfbKnZ5rTQF0Bm/iXwlxHxJuC1wM3ADwOv\npxg6OnkWzNuB1wGfAC6KiJunvN09mTkGvBz4FPAOiscZE1Nxf5BiBsyPAP9mSo/tv3XSKqn7DB4Y\nZOzsWE1tx86OMXhgkIErB+pclaSZzDlMTMjMJyPio8BZinUzfgh41pTDrqm8Xl/5meq5FMNKp/MS\niscqCynuhEz1DsAwIXWZkRMj53W2rFaSHDp5qM4VSZpNTWEiIl5OcUfiJooA8STwJ8DvTT4uM19d\nzftl5j6evqQ5mfk7wO/UUp+kznXm3JlS7cefnFtfC0nlzWUJ8ucBP175uYriy/8A8AvAXZlZ20NO\nSZqkd0Gpja5AAAAcXklEQVQvj/Joze17Lu6pYzWt56Rd6gQzhomIeCbwbyjuQrycIkB8GXgn8PuZ\n+eWGVyhpXlm5bCUnHzpZ06OOIFi1tNSUOG3lQpN2PcqjnHzoJPuO7HPSLrWF2UZznATeSzFN9vuA\nV2bm8zPzHQYJSY2w/drt9C3sq6lt38I+tl+7ffYDO4CTdqmTzPaY416KfhD3ZeaTTahH0jw3sGKA\nxZcsZvTs6JzbLulZwtoVaxtQVXPVOmnX0kuXsv6q9Q2uTjrfbJNWvS4z/8QgIalZIoLdG3bTs2Bu\nfR96FvSw68ZdHd9/wEm71ImqmrRKUmNlJvuO7GPDnRtYPricxe9ezPLB5Wy4cwPDR4bn3RfEuqvW\nsfOGnVUHip4FPdxxwx1d0W/ASbvUiWqeZ0JSfdjJbnqbr9nM0kuXsnXvVk6Nn5o3q4Y6aZc60YxL\nkHeD1atX58GDB1tdhjStPQ/uqfrZeM+CHnbesJPN12xuQmXtIzPZf3Q/tx+4nZGTI4w/OU7PxT2s\nWrqKHa/YwZr+NR3/aGOy5YPLOTF6oub2yy5dxtdu+1odK1InavYS5N6ZkFrETnbViQgGrhyYN//a\ndtIudSL7TEgtYCc7XUjvgt5S7btt0i51BsOE1AJ2stOFrFy2kqC2xzbdNmmXOodhQ
mqBenSyU3dy\n0i51IsOE1AKujKkLmZi0qxbdMmmXOo9hQmoBO9npQub7pF3qTIYJqQXsZKeZzOdJu9SZDBNSC9jJ\nTrPZfM1m7r7pbvov62fRwkXn/XkJgkULF9F/WT/33HQPt1xzS2sKlXCeCakltl+7nX1H9tW0mJWd\n7OaP9Vet58itR+bVpF3qTIYJqQVcGVPVmm+Tdqkz+ZhDagE72UnqJoYJqUXsZCepW/iYQ2qh+boy\nZjvJTIaPDjN4YJCREyOcOXeG3gW9rFy2kh3X7mDtirXeCZJm4aqhUhuYbytjtosLLf8OBjl1tmav\nGmqYkDQvufy7ulmzw4R9JiTNO7Uu/z50eKjBlUmdyTAhaV5x+Xep/gwTkuYVl3+X6s8wIWlecfl3\nqf4ME5LmFZd/l+rPMCFpXnH5d6n+DBOS5hWXf5fqzzAhaV5x+Xep/gwTkuaV7ddup29hX01tXf5d\nmp5hQtK8MrH8ey1c/l2anmFC0rzi8u9S/RkmJM07Lv8u1ZdLkEual1z+Xaofw4SkeWv9Ves5cusR\nl3+XSjJMSJrXIoKBKwcYuHKg1aVIHcs+E5IkqRTDhCRJKsUwIUmSSjFMSJKkUgwTkiSpFMOEJEkq\npaFhIiJeFhHvjYjPRcRoRJyIiL0RsXoO7/FDEfFARIxHxNcr77eokXVLkqTqNXqeibcCa4APA+8F\nFgNbgQciYkNm3j9T44i4HtgLPAC8GbgSeAtwNfCaxpWtbpCZDB8dZvDAICMnRjhz7gy9C3pZuWwl\nO67dwdoVa52MSJLqIDJz9qNqffOIVwAHM/PspG3PBD4PfC0zV87S/nPAQuCfZ+a3K9t+EtgNbMjM\nvbPVsHr16jx48GCJs1AnGjo8xJZ7t3D6idNOkyxp3omIQ5lZ9VOAshr6mCMzPzM5SFS2PQLsA148\nU9uIeBHwz4DdE0Gi4gPAKHBTfatVt9jz4B423bWJ448fZ/Ts6NOCBECSjJ4d5dhjx9h410b2PLin\nRZVKUndoVQfMZcAjsxwzcdfis5M3VsLJX0/aL33H0OEhtt2/jfFz41UdP35unG33b2Po8FCDK5Ok\n7tX0MBERa4BXAh+a5dClldeT0+w7SRFIpO/ITLbcu6XqIDFh/Nw4W/dupZGP/CSpmzU1TETEUuBO\n4BjwzlkO76m8fnuafU9M2j/d52yJiIMRcfDhhx+uqVZ1nuGjw5x+4nRNbU+Nn2L/0f11rkjqPpnJ\nviP72HDnBpYPLmfxuxezfHA5G+7cwPCRYUP5PNW0VUMj4jLgfmARsCYzH5ulycQ/L58xzb5LJu0/\nT2buAnZB0QFz7tWqEw0eGGTs7FhNbcfOjjF4YNCVI9UxWjFa6UIdmx/lUU4+dJJ9R/bZsXmeakqY\niIheiiGeLwRek5mfq6LZxOONZcCXpuxbCpyoX4XqBiMnRs7rbFmtJDl08lCdK5IaoxVf6nse3DNj\nf6SJjs2jZ0fZeNdGdt6wk83XbK7LZ6v9NfwxR0QsBO4Gvh94fWZ+usqmI5XXl03zfi+dtF8C4My5\nM6Xajz85t74WUiu0YrSSHZs1m0bPgHkR8EHgeuANmXnfBY7rjYirI+JZE9sy8wsU81H8VERMftTx\nBopHJX/UuMrViXoX9JZq33PxBbvhSG2hFV/qdmxWNRp9Z+J24HXAnwMXRcTNU376Kse9HPgC8DNT\n2m8HrgI+WelU+UvAb1Te794G164Os3LZSoLanhEHwaqlq+pckVQ/rfpSt2OzqtHoMHFN5fV64Pem\n+bl8psaZOQRsoJgF873AG4H3AxvTuKsptl+7nb6FfbMfOI2+hX1sv3Z7nSuS6qdVX+r16Nis7tfQ\nDpiZ+eoqj9sH0/+TsrJ+x4xreEgAAysGWHzJYkbPjs657ZKeJaxdsbYBVUn10arRSnZsVjVcglxd\nIyLYvWE3PQvm1vehZ0EPu27c5aJfamut+lK3Y
7OqYZhQV1l31Tp23rCz6kDRs6CHO264wzHxanut\n+lK3Y7OqYZhQ19l8zWbuvulu+i/rZ9HCRed1ygyCRQsX0X9ZP/fcdA+3XHNLawqV5qBVX+p2bFY1\nDBPqSuuvWs+RW4+w90f38toXvJZlly5j8SWLWXbpMm58wY3c92P3ceTWI96RUMdo1Ze6HZtVjaZN\npy01W0QwcOWAU2SrK2y/djv7juyrqYNxmS91OzarGt6ZkKQOMPGlXosyX+p2bFY1DBOS1AFa+aVu\nx2bNxjAhSR2ilV/qdmzWTKLbJ5JcvXp1Hjx4sNVlSFLdDB0eYuverZwaP/W0VUOh+FLvW9jXsKXA\nM5P9R/dz+4HbGTk5wviT4/Rc3MOqpavY8YodrOlf46ONNhARhzJzddM+zzAhdabMZPjoMIMHBhk5\nMcKZc2foXdDLymUr2XHtDtauWOtf6l3ML3XNxDBRZ4YJdaOhw0NsuXcLp5843fR/mUpqf80OE/aZ\nkDrMngf3sOmuTRx//DijZ0fPm2I5SUbPjnLssWNsvGsjex7c06JKJc0XhgmpgwwdHmLb/duqXoZ6\n/Nw42+7fxtDhoQZXJmk+M0xIHSIz2XLvlqqDxITxc+Ns3buVbn+kKal1DBNShxg+OszpJ07X1PbU\n+Cn2H91f54okqWCYkDrE4IFBxs6O1dR27OwYgwcG61yRJBVcm0PqECMnRs7rbFmtJDl08lCdK1Kn\nc3ix6sUwIXWIM+fOlGo//uTc+lqou11oePGjPMrJh06y78g+hxeraj7mkDpE74LeUu17Lp7bmg7q\nXg4vVr0ZJqQOsXLZyvPWQ6hWEKxauqrOFakTObxYjWCYkDrE9mu307ewr6a2fQv72H7t9jpXpE7j\n8GI1imFC6hADKwZYfMnimtou6VnC2hVr61yROo3Di9UohgmpQ0QEuzfsrnr56Qk9C3rYdeMue+XL\n4cVqGMOE1EHWXbWOnTfsrDpQ9Czo4Y4b7rA3vgCHF6txHBoqdZjN12xm6aVL2bp3K6fGT7lqqKrm\n8GI1imFC6kDrr1rPkVuPsP/ofm4/cDsjJ0cYf3Kcnot7WLV0FTtesYM1/Wt8tKGn6V3Qy6M8WnN7\nhxfrQgwTUoeKCAauHGDgyoFWl6IOsXLZSk4+dLKmRx0OL9ZM7DMhSfOEw4vVKIYJSZonHF6sRjFM\nSNI84fBiNYphQpLmEYcXqxHsgClJ84zDi1VvhglJmoccXqx6MkxI0jzl8GLVi30mJElSKYYJSZJU\nimFCkiSVYpiQJEmlGCYkSVIphglJklSKYUKSJJVimJAkSaU4aZUkqaNkJsNHhxk8MMjIiRHOnDtD\n74JeVi5byY5rd7B2xVpn7mwyw4QkqWMMHR5iy71bOP3E6aetKfIoj3LyoZPsO7LPNUVaoOGPOSJi\nUUS8IyLuj4iHIyIj4u1zaH9RRPx0RPx1RHwrIv4hIj4ZEesbWLYkqc3seXAPm+7axPHHjzN6dvRp\ni5MBJMno2VGOPXaMjXdtZM+De1pU6fzTjD4TzwJ+HvjnwEgN7d8D/CbweWAH8G7ge4GPRcTGehUp\nSWpfQ4eH2Hb/NsbPjVd1/Pi5cbbdv42hw0MNrkzQnDBxEliemVcAb5xLw4i4BNgC3J2ZP5aZ78vM\nQWAN8G3glnoXK0lqL5nJlnu3VB0kJoyfG2fr3q1k5uwHq5SGh4nM/HZmnqix+SXAQopAMtlpYBw4\nU6Y2SVL7Gz46zOknTtfU9tT4KfYf3V/nijRVWw8NzcxHgb8BNkfEGyKiPyJeDLwfuBj4tZYWKElq\nuMEDg4ydHaup7djZMQYPDNa5Ik3VCaM5bgLuBH530ravA9dl5l+1piRJUrOMnBg5r7NltZLk0MlD\nda5IU7X1nYmKUeDvKO5CbAS2Ao8BH42IF0zXICK2RMTBiDj48MMPN69SSVLdnTlX7on2+JNz62uh\nuWvrMBERi
4DPAF/OzNsy808ycxdFB8xLKEZ2nCczd2Xm6sxcffnllzexYklSvfUu6C3VvufinjpV\nogtp6zABvA7oB+6ZvDEzHwb+giJUSJK62MplKwlqm9EyCFYtXVXnijRVu4eJZZXXi6bZt4DO6PMh\nSSph+7Xb6VvYV1PbvoV9bL92e50r0lRtEyYiojciro6IZ03a/MXK681Tju0H1gL2qpGkLjewYoDF\nlyyuqe2SniWsXbG2zhVpqqaEiYj4mYh4G/Cmyqa1EfG2ys+KyraXA18AfmZS03uBvwVui4i7KtNq\n/xzwAEWfiXc1o35JUutEBLs37KZnwdz6PvQs6GHXjbtc9KsJmvWYYAewYtLv11V+AD4NHJ2uUWY+\nGRFrgLdSjOTYADwJfBZ4V2YON6xiSVLbWHfVOnbesLPqKbV7FvRwxw13uNhXk0S3TzO6evXqPHjw\nYKvLkCTVwdDhIbbu3cqp8VNPWzUUis6WfQv7XDUUiIhDmbm6WZ9nB0ZJUsdYf9V6jtx6hP1H93P7\ngdsZOTnC+JPj9Fzcw6qlq9jxih2s6V/jo40mM0xIkjpKRDBw5QADVw60uhRVtM1oDkmS1JkME5Ik\nqRTDhCRJKsUwIUmSSjFMSJKkUgwTkiSpFMOEJEkqxTAhSZJKMUxIkqRSDBOSJKkUw4QkSSrFMCFJ\nkkoxTEiSpFIME5IkqRTDhCRJKsUwIUmSSjFMSJKkUgwTkiSpFMOEJEkqxTAhSZJKMUxIkqRSDBOS\nJKkUw4QkSSrFMCFJkkoxTEiSpFIME5IkqRTDhCRJKsUwIUmSSjFMSJKkUgwTkiSpFMOEJEkqxTAh\nSZJKMUxIkqRSDBOSJKkUw4QkSSrFMCFJkkoxTEiSpFIME5IkqRTDhCRJKsUwIUmSSjFMSJKkUgwT\nkiSplIaHiYhYFBHviIj7I+LhiMiIePsc36M3It4ZEQ9FxBMR8Y3K+13ZkKIlSVLVFjThM54F/Dzw\nNWAEeM1cGkfEIuCTwPOB3cD/AhYDLweWAEfqWKskSZqjZoSJk8DyzDwREVcAx+fY/heBFwCrM/Nw\n3auTJEmlNPwxR2Z+OzNP1NI2Ir4b+Clgd2YejoiLI6KnvhVKkqQy2r0D5hqgB/hiRHwIOAOciYgH\nI+K61pYmSZKg/cPE8yuvvwxcBWwG3ghcCnw8Ila1qjBJklRoRp+JMhZVXgO4LjO/BRARnwAOAz8L\nbJraKCK2AFsA+vv7m1OpJEnzVLvfmRivvH50IkgAZOZxYD/wqukaZeauzFydmasvv/zyJpQpSdL8\n1e5hYqLj5jem2fcNiiGikiSphdo9TByqvF4xzb4rgIebWIskSZpG24SJyiyXV0fEsya2ZeZDFBNd\n/XBEPHvSsVdTPOL4ePMrlSRJkzWlA2ZE/AzwPcB3VzatjYi3Vf7372XmUYoZLT8FvAN4+6TmbwH+\nDPhMRPwWsBB4E/BY5VhJktRCzRrNsQNYMen36yo/AJ8Gjl6oYWbuj4gfBN5FER7OUYSOt2bmkYZU\nK0mSqtaUMJGZV1ZxzD6KIaDT7dsPrK1vVZIkqR7aps+EJEnqTIYJSZJUimFCkiSVYpiQJEmlGCYk\nSVIphglJklSKYUKSJJVimJAkSaUYJiRJUimGCUmSVIphQpIklWKYkCRJpRgmJElSKYYJSZJUimFC\nkiSVYpiQJEmlGCYkSVIphglJklSKYUKSJJVimJAkSaUYJiRJUimRma2uoaEi4lvAF1tdRws9C/hm\nq4toofl8/vP53MHz9/zn9/m/MDMvbdaHLWjWB7XQFzNzdauLaJWIOOj5z8/zn8/nDp6/5+/5N/Pz\nfMwhSZJKMUxIkqRS5kOY2NXqAlrM85+/5vO5g+fv+c9vTT3/ru+AKUmSGms+3JmQJEkNZJiQJEml\ntH2YiIg1EZGVnysmbb9l0vapP6+a5n3+SUT8fkQ8EhGjEfHJiFh1gc98QUR
8NCIer/x8JCKe18jz\nrHzuq2c4p5unHPvdEfEbEfH1iBiPiP8REddf4H3b/twrn13V+XfjtZ9Sw0si4sMR8XBEPBERX4qI\nX5lyTNdd/0k1zHj+3Xr9I+J3ZjivjIgfn3RsV13/as+9W6995fOXRcSuiPhK5Zp+JSLeFxHPmXJc\nW177tu4zERELgBHgnwJ9wHMy8+8r+24B9gD/FXhoStOPZ+bDk96nDzgIfC8wCDwGbAOWAy/PzP81\n6dhlwIPAt4H3AAG8pfL60snvW28R8WrgU8BvAp+ZsvsvM/OrleMC2Ae8HPhV4BiwGVgF/GBmDk96\nz44498rnv5rqzv8WuuzaT6rh1cD9wOeBDwGPAv3A8zJz4i/Urrz+lRpezeznfwtdeP0j4lpgur+8\nfx54LnBFZn6jG6//HM79Frrz2l8G/B3QQ/H331HgRcBPA48AL8nMb7X1tc/Mtv2pnMw/AL8GJMUf\nqIl9t1S2vaqK99leOfa6SdsuB04DH55y7K8DZ4EXTNp2NXAOuL3B5/vqSp03z3Lc6yrH3TJp2yXA\nYeBgJ577HM+/66595bMWAV8DPgpcNA+vf7Xn35XX/wL1fy/wJHBft1//Ks+9K6898G8rtW6Ysv3f\nV7ZvbPdr3/T/c8zhP+5SiiT1k8DbmSFMAJcCC2Z4rweAv5tm+/uAJ4DeSdu+Duyd5tiPA8cbfM6v\nrpzTzRR/sV58gePuqvyBWDBl+3+utH9ep537HM+/66595XN+qnJeL6n83sc0X6pdfP2rPf+uvP4X\nqP/NlXO9qduvf5Xn3pXXftK5rp6yfWNl+7p2v/bt3GfiduBLwP8/y3H3A48D4xHx51OfB0XEdwHf\nB3x2mrafBZ4BvKRy7HKKNHyhY6+IiMvnchI1ugP4FvDtiHhgmudhK4EHM/PcNDVO7O/Uc4fZz39C\nt13711Ccz+UR8XlgFBiNiA9GxDMnHdet17/a85/Qbdd/Om+g+EfVRyZt69brP9V05z6h2679MEUY\n+PWIeEVELI+IHwR+GfgfwJ9Xjmvba9+WYSIiBoAfBd6Umf94gcPOAB8A3gT8CMXdi2uAv5jyB2sJ\nxX+4k9O8x8S2ZZXXpVO2z3RsIzwJ3APcBvwrittUy4ChiNgw6bilVdbYSecO1Z9/N157gOdTrJdz\nH8Vz0U0UzzpfD3wsIi6qHNet17/a8+/W6/80EfESivP6cGY+MWlXt17/75jh3Lvy2mfmg8C/o3i0\n8JfA3wOfoOgX8i8nhYe2vfZtt9BXpdPlTuAPMnNqJ7zvyMw/BP5w0qaPRMQfAX8D/H/AD1S291Re\nvz3N2zwx5Zi5HFt3mfmXFH+QviMiPgB8gaJTzL2Taqj3+bT03KH68+/Ga1+xCOgFdmfmv69suyci\nHgfeDbyWoj9BV15/qjz/Lr7+U72h8vqBKdu79fpPNu25d/m1Pwl8GvhTio6VL6f4h9UHIuL1WTx3\naNtr3453Jm4FVgBvnWvDzHyI4pbYmoh4RmXzeOX1GdM0uWTKMXM5tiky8xGKRz3/dNIwnXHqfz5t\nd+5wwfOf7rhuuPYT7//7U7b/QeX1VZOO68brX+35n6dLrv93VG5T/zjwVeAvpuzu1usPzHru5+mG\nax8RP0wRkm7LzF/PzI9k5s8C/4Gi0+W/mlRHW177tgoTleExv0Dx5bEwIq6MiCuB76kcckVMmmvi\nAo5R3HG5rPL7KYrENd1tmonbOycqrzPd0pl6bDMdq7xOPDc+SXU1dsO5w/nnP9NxnXztJ97/G1O2\nT/y+uPLarde/2vO/kE6//pP9AMUQvt+v/It0sm69/hNmOvcL6fRr/2bg85n5pSnb7668rqm8tu21\nb6swQfGXxaUUz8O+Ounn1sr+AxS3gWbyPIpn748CVPpc/A3wsmmO/RcU/8E/Xzn2axRDUS907N9n\nE8baT2PiX+QTnz0CvLTySGiyf1F5fRC
65tzh/POf6bhOvvaHKq9TA/PE791+/as9/wvp9Os/2U9U\nXqc+4oDuvf4TZjr3C+n0a78MuGia7QumvLbvtW/0kJc5Do/ppehUM/XnQxQ9Xd8IvKZy7LOnab+S\n4g/Ux6Zs/4+V9q+etG1ivO3dU47dSTHe9vmTtk2Mt/3VBp//dOf0HIr/g3xh0rbXc+GxxiOdeO5z\nPP+uu/aVz/o+4B+BD03Z/ouVcxjo8utf7fl35fWf9Jl9FKOZPnOB/V15/as896689hR9oc4B10zZ\nflvlHH6i3a99U/7PUYf/0G/n/HkmDgMfBv5fYAvw3yl6+j4KvHhK+0XAFylu/fwXilnAPl/5Qzv1\n2OUUKe0YxaRZt1H0rD0BfG+Dz/OTwMeAt1GMuf9vPHW7avLEI99F8SxxHHgXsJXirs25ycd10rnP\n8fy77tpPquG3Kn/W76bo3f3+id+7/frP4fy79vpX6ri5cs4/fYH93Xz9Zzv3rrz2wCspAtHpSdf0\nt4H/DfxP4JJ2v/ZN+T9HHf5Dv53zw8R/pbjlc7pyEb4G/A6TJu2Y8h7LgA9W/sOOUUzbvPoCx74Q\n2EsxjvlxitR4VRPO802VPxjfrJzTP1T+j3PNNMdeRjEfwzcqf7A+S2Vik04897mcfzde+0mfv4Bi\nApovU/xL4WjlL42F3X79qz3/br7+lRo+ThGgF89wTLde/xnPvZuvPfBSiqHxxyp/9v+eYmrtZ3bC\ntW/rtTkkSVL7a7cOmJIkqcMYJiRJUimGCUmSVIphQpIklWKYkCRJpRgmJElSKYYJSZJUimFCkiSV\nYpiQNKuIuCUiMiIuuAy4pPnLMCFJkkoxTEiSpFIME5LmLCJ+JyLORcQVEfGRiPhWRDwSEb8ZEYta\nXZ+k5jJMSKpVUCwZfw54K/AR4KeBP2xlUZKab0GrC5DUsb4LGMnM/2diQ0ScBP5LRKzLzI+3rjRJ\nzeSdCUllvOcCv9/Y7EIktY5hQlIZX5z8S2Y+DJwGntuaciS1gmFCkiSVYpiQVMYLJ/8SEZcDi4Gv\ntqYcSa1gmJBUxpsv8Pt9zS5EUus4mkNSrf4RWBkRfwz8ObAa2Az8aWYOtbQySU3lnQlJtUrghyj+\nUfJu4EeAXcC/bmVRkpovMrPVNUjqMBHxO8DNmendTUnemZAkSeUYJiRJUimGCUmSVIp9JiRJUine\nmZAkSaUYJiRJUimGCUmSVIphQpIklWKYkCRJpRgmJElSKf8Hq2MU5kD9kMMAAAAASUVORK5CYII=\n", 78 | "text/plain": [ 79 | "" 80 | ] 81 | }, 82 | "metadata": {}, 83 | "output_type": "display_data" 84 | } 85 | ], 86 | "source": [ 87 | "SSX = [X[i][0] for i in range(0, len(Y)) if Y[i]==2]\n", 88 | "SSY = [X[i][1] for i in range(0, len(Y)) if Y[i]==2]\n", 89 | "ShX = [X[i][0] for i in range(0, len(Y)) if Y[i]==1]\n", 90 | "ShY = [X[i][1] for i in range(0, len(Y)) if Y[i]==1]\n", 91 | "\n", 92 | "\n", 93 | "fig = plt.figure(figsize=(8, 8))\n", 94 | "\n", 95 | "ax3 = fig.add_subplot(1, 1, 1)\n", 96 | "\n", 97 | "\n", 98 | "ax3.scatter(SSX, SSY, c = 'r', s = 250)\n", 99 | "ax3.scatter(ShX, ShY, c = 'g', s = 250)\n", 100 | "plt.xlim(4500, 8000)\n", 101 | "plt.ylim(1.5, 3)\n", 102 | "\n", 103 | 
"plt.tick_params(axis='both', which='major')\n", 104 | "\n", 105 | "fig.text(0.5, 0.05, 'Ip', ha='center')\n", 106 | "fig.text(0.04, 0.5, 'Vp/Vs', va='center', rotation='vertical')\n", 107 | "\n", 108 | "fig.savefig('aadm.png', dpi=500, bbox_inches='tight', pad_inches=0.2)\n", 109 | "plt.show()" 110 | ] 111 | } 112 | ], 113 | "metadata": { 114 | "anaconda-cloud": {}, 115 | "kernelspec": { 116 | "display_name": "Python [default]", 117 | "language": "python", 118 | "name": "python2" 119 | }, 120 | "language_info": { 121 | "codemirror_mode": { 122 | "name": "ipython", 123 | "version": 2 124 | }, 125 | "file_extension": ".py", 126 | "mimetype": "text/x-python", 127 | "name": "python", 128 | "nbconvert_exporter": "python", 129 | "pygments_lexer": "ipython2", 130 | "version": "2.7.13" 131 | } 132 | }, 133 | "nbformat": 4, 134 | "nbformat_minor": 1 135 | } 136 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ML 2 | Repo for the January 2018 ML focus issue published on the [CSEG Recorder](https://csegrecorder.com/) 3 | 4 | ## Contributors 5 | - [Matteo Niccoli](https://github.com/mycarta) - Support Vector Machines article - see `/Matteo_Niccoli/` above. 6 | - [Graham Ganssle](https://gra.m-gan.sl) - Seismic Denoising by Image Translation Network - see `/Graham_Ganssle/` above. 7 | - [Brendon Hall](https://github.com/brendonhall/) - Geochemical Facies Analysis using Unsupervised Machine Learning - see `/Brendon_Hall/` above. 8 | --------------------------------------------------------------------------------