├── .flake8 ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── datasets └── cement_strength.csv ├── images └── app-screenshot.png ├── requirements.txt └── src ├── cement_strength └── lightning_logs │ └── corn-mlp-cement │ └── version_0 │ ├── hparams.yaml │ └── metrics.csv ├── helper_code ├── __init__.py ├── dataset.py └── model.py └── main_mlp.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | exclude = imgproc.py,model.py -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | cement_strength 2 | .DS_Store 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | pip-wheel-metadata/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | target/ 79 | 80 | # Jupyter Notebook 81 | .ipynb_checkpoints 82 | 83 | # IPython 84 | profile_default/ 85 | ipython_config.py 86 | 87 | # pyenv 88 | .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ 133 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/PyCQA/isort 3 | rev: 5.10.1 4 | hooks: 5 | - id: isort 6 | name: Format imports 7 | args: ["--profile", "black"] 8 | 9 | - repo: https://github.com/psf/black 10 | rev: 22.3.0 11 | hooks: 12 | - id: black 13 | name: Format code 14 | 15 | - repo: https://github.com/PyCQA/flake8 16 | rev: 4.0.1 17 | hooks: 18 | - id: flake8 19 | name: Check PEP8 -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2022, Sebastian Raschka 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. 
Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # scipy2022-talk 2 | 3 | 4 | 5 | **Slides: [https://sebastianraschka.com/pdf/slides/2022-07-scipy-corn.pdf](https://sebastianraschka.com/pdf/slides/2022-07-scipy-corn.pdf)** 6 | 7 | 8 | 9 | # Using the Code 10 | 11 | 12 | 13 | ### Step 1: Install the requirements 14 | 15 | ```bash 16 | git clone https://github.com/rasbt/scipy2022-talk.git 17 | cd scipy2022-talk 18 | conda create -n coral-pytorch python=3.8 19 | conda activate coral-pytorch 20 | pip install -r requirements.txt 21 | python -m spacy download en_core_web_sm 22 | ``` 23 | 24 | ### Step 2: Run the code 25 | 26 | MLP with CORN loss 27 | 28 | ```bash 29 | cd src 30 | ``` 31 | 32 | ```bash 33 | python main_mlp.py \ 34 | --batch_size 16 \ 35 | --data_path ../datasets/ \ 36 | --learning_rate 0.01 \ 37 | --mixed_precision true \ 38 | --num_epochs 40 \ 39 | --num_workers 3 \ 40 | --output_path ./cement_strength \ 41 | --loss_mode corn 42 | ``` 43 | 44 | MLP with cross entropy loss 45 | 46 | ```bash 47 | python main_mlp.py \ 48 | ...
49 | --loss_mode crossentropy 50 | ``` 51 | 52 | 53 | 54 | ## More examples 55 | 56 | - PyTorch Hub for loading pre-trained models: [https://github.com/rasbt/ord-torchhub](https://github.com/rasbt/ord-torchhub) 57 | - Tutorials for using the various ordinal regression models with CNNs, RNNs, and MLPs: [https://github.com/Raschka-research-group/coral-pytorch](https://github.com/Raschka-research-group/coral-pytorch) 58 | - The CORN paper repository with detailed experiment logs: [https://github.com/Raschka-research-group/corn-ordinal-neuralnet](https://github.com/Raschka-research-group/corn-ordinal-neuralnet) 59 | 60 | 61 | 62 | ## Interactive Demo 63 | 64 | [![](images/app-screenshot.png)](https://bit.ly/3aCgSeG) 65 | 66 | You can try an interactive version at [https://bit.ly/3aCgSeG](https://bit.ly/3aCgSeG). 67 | 68 | (The source code for this interactive demo is available at [https://github.com/rasbt/ord-torchhub/tree/main/app](https://github.com/rasbt/ord-torchhub/tree/main/app).) 69 | 70 | 71 | 72 | ## References 73 | 74 | - Xintong Shi, Wenzhi Cao, and Sebastian Raschka 75 | *Deep Neural Networks for Rank-Consistent Ordinal Regression Based On Conditional Probabilities.* 76 | https://arxiv.org/abs/2111.08851 -------------------------------------------------------------------------------- /datasets/cement_strength.csv: -------------------------------------------------------------------------------- 1 | "response","V1","V2","V3","V4","V5","V6","V7","V8" 2 | 5,540,0,0,162,2.5,1040,676,28 3 | 5,540,0,0,162,2.5,1055,676,28 4 | 3,332.5,142.5,0,228,0,932,594,270 5 | 3,332.5,142.5,0,228,0,932,594,365 6 | 3,198.6,132.4,0,192,0,978.4,825.5,360 7 | 4,266,114,0,228,0,932,670,90 8 | 3,380,95,0,228,0,932,594,365 9 | 3,380,95,0,228,0,932,594,28 10 | 4,266,114,0,228,0,932,670,28 11 | 3,475,0,0,228,0,932,594,28 12 | 3,198.6,132.4,0,192,0,978.4,825.5,90 13 | 2,198.6,132.4,0,192,0,978.4,825.5,28 14 | 3,427.5,47.5,0,228,0,932,594,270 15 | 3,190,190,0,228,0,932,670,90 16 | 
4,304,76,0,228,0,932,670,28 17 | 4,380,0,0,228,0,932,670,90 18 | 3,139.6,209.4,0,192,0,1047,806.9,90 19 | 4,342,38,0,228,0,932,670,365 20 | 3,380,95,0,228,0,932,594,90 21 | 3,475,0,0,228,0,932,594,180 22 | 3,427.5,47.5,0,228,0,932,594,180 23 | 2,139.6,209.4,0,192,0,1047,806.9,28 24 | 1,139.6,209.4,0,192,0,1047,806.9,3 25 | 3,139.6,209.4,0,192,0,1047,806.9,180 26 | 4,380,0,0,228,0,932,670,365 27 | 4,380,0,0,228,0,932,670,270 28 | 3,380,95,0,228,0,932,594,270 29 | 4,342,38,0,228,0,932,670,180 30 | 3,427.5,47.5,0,228,0,932,594,28 31 | 3,475,0,0,228,0,932,594,7 32 | 4,304,76,0,228,0,932,670,365 33 | 4,266,114,0,228,0,932,670,365 34 | 3,198.6,132.4,0,192,0,978.4,825.5,180 35 | 3,475,0,0,228,0,932,594,270 36 | 4,190,190,0,228,0,932,670,365 37 | 3,237.5,237.5,0,228,0,932,594,270 38 | 2,237.5,237.5,0,228,0,932,594,28 39 | 3,332.5,142.5,0,228,0,932,594,90 40 | 3,475,0,0,228,0,932,594,90 41 | 3,237.5,237.5,0,228,0,932,594,180 42 | 4,342,38,0,228,0,932,670,90 43 | 3,427.5,47.5,0,228,0,932,594,365 44 | 3,237.5,237.5,0,228,0,932,594,365 45 | 4,380,0,0,228,0,932,670,180 46 | 3,427.5,47.5,0,228,0,932,594,90 47 | 3,427.5,47.5,0,228,0,932,594,7 48 | 1,349,0,0,192,0,1047,806.9,3 49 | 3,380,95,0,228,0,932,594,180 50 | 2,237.5,237.5,0,228,0,932,594,7 51 | 2,380,95,0,228,0,932,594,7 52 | 3,332.5,142.5,0,228,0,932,594,180 53 | 4,190,190,0,228,0,932,670,180 54 | 2,237.5,237.5,0,228,0,932,594,90 55 | 4,304,76,0,228,0,932,670,90 56 | 1,139.6,209.4,0,192,0,1047,806.9,7 57 | 1,198.6,132.4,0,192,0,978.4,825.5,7 58 | 3,475,0,0,228,0,932,594,365 59 | 1,198.6,132.4,0,192,0,978.4,825.5,3 60 | 4,304,76,0,228,0,932,670,180 61 | 2,332.5,142.5,0,228,0,932,594,28 62 | 4,304,76,0,228,0,932,670,270 63 | 4,266,114,0,228,0,932,670,270 64 | 1,310,0,0,192,0,971,850.6,3 65 | 4,190,190,0,228,0,932,670,270 66 | 4,266,114,0,228,0,932,670,180 67 | 4,342,38,0,228,0,932,670,270 68 | 3,139.6,209.4,0,192,0,1047,806.9,360 69 | 2,332.5,142.5,0,228,0,932,594,7 70 | 3,190,190,0,228,0,932,670,28 71 | 
5,485,0,0,146,0,1120,800,28 72 | 3,374,189.2,0,170.1,10.1,926.1,756.7,3 73 | 2,313.3,262.2,0,175.5,8.6,1046.9,611.8,3 74 | 2,425,106.3,0,153.5,16.5,852.1,887.1,3 75 | 3,425,106.3,0,151.4,18.6,936,803.7,3 76 | 2,375,93.8,0,126.6,23.4,852.1,992.6,3 77 | 3,475,118.8,0,181.1,8.9,852.1,781.5,3 78 | 3,469,117.2,0,137.8,32.2,852.1,840.5,3 79 | 2,388.6,97.1,0,157.9,12.1,852.1,925.7,3 80 | 3,531.3,0,0,141.8,28.2,852.1,893.7,3 81 | 2,318.8,212.5,0,155.7,14.3,852.1,880.4,3 82 | 3,401.8,94.7,0,147.4,11.4,946.8,852.1,3 83 | 3,362.6,189,0,164.9,11.6,944.7,755.8,3 84 | 2,323.7,282.8,0,183.8,10.3,942.7,659.9,3 85 | 2,379.5,151.2,0,153.9,15.9,1134.3,605,3 86 | 2,286.3,200.9,0,144.7,11.2,1004.6,803.7,3 87 | 3,439,177,0,186,11.1,884.9,707.9,3 88 | 3,389.9,189,0,145.9,22,944.7,755.8,3 89 | 2,337.9,189,0,174.9,9.5,944.7,755.8,3 90 | 4,374,189.2,0,170.1,10.1,926.1,756.7,7 91 | 3,313.3,262.2,0,175.5,8.6,1046.9,611.8,7 92 | 4,425,106.3,0,153.5,16.5,852.1,887.1,7 93 | 4,425,106.3,0,151.4,18.6,936,803.7,7 94 | 4,375,93.8,0,126.6,23.4,852.1,992.6,7 95 | 4,475,118.8,0,181.1,8.9,852.1,781.5,7 96 | 4,469,117.2,0,137.8,32.2,852.1,840.5,7 97 | 3,388.6,97.1,0,157.9,12.1,852.1,925.7,7 98 | 4,531.3,0,0,141.8,28.2,852.1,893.7,7 99 | 2,318.8,212.5,0,155.7,14.3,852.1,880.4,7 100 | 4,401.8,94.7,0,147.4,11.4,946.8,852.1,7 101 | 4,362.6,189,0,164.9,11.6,944.7,755.8,7 102 | 4,323.7,282.8,0,183.8,10.3,942.7,659.9,7 103 | 4,379.5,151.2,0,153.9,15.9,1134.3,605,7 104 | 3,286.3,200.9,0,144.7,11.2,1004.6,803.7,7 105 | 4,439,177,0,186,11.1,884.9,707.9,7 106 | 5,389.9,189,0,145.9,22,944.7,755.8,7 107 | 2,362.6,189,0,164.9,11.6,944.7,755.8,7 108 | 3,337.9,189,0,174.9,9.5,944.7,755.8,7 109 | 5,374,189.2,0,170.1,10.1,926.1,756.7,28 110 | 5,313.3,262.2,0,175.5,8.6,1046.9,611.8,28 111 | 5,425,106.3,0,153.5,16.5,852.1,887.1,28 112 | 5,425,106.3,0,151.4,18.6,936,803.7,28 113 | 4,375,93.8,0,126.6,23.4,852.1,992.6,28 114 | 5,475,118.8,0,181.1,8.9,852.1,781.5,28 115 | 5,469,117.2,0,137.8,32.2,852.1,840.5,28 116 | 
4,388.6,97.1,0,157.9,12.1,852.1,925.7,28 117 | 4,531.3,0,0,141.8,28.2,852.1,893.7,28 118 | 4,318.8,212.5,0,155.7,14.3,852.1,880.4,28 119 | 5,401.8,94.7,0,147.4,11.4,946.8,852.1,28 120 | 5,362.6,189,0,164.9,11.6,944.7,755.8,28 121 | 5,323.7,282.8,0,183.8,10.3,942.7,659.9,28 122 | 4,379.5,151.2,0,153.9,15.9,1134.3,605,28 123 | 5,286.3,200.9,0,144.7,11.2,1004.6,803.7,28 124 | 5,439,177,0,186,11.1,884.9,707.9,28 125 | 5,389.9,189,0,145.9,22,944.7,755.8,28 126 | 4,337.9,189,0,174.9,9.5,944.7,755.8,28 127 | 5,374,189.2,0,170.1,10.1,926.1,756.7,56 128 | 5,313.3,262.2,0,175.5,8.6,1046.9,611.8,56 129 | 5,425,106.3,0,153.5,16.5,852.1,887.1,56 130 | 5,425,106.3,0,151.4,18.6,936,803.7,56 131 | 5,375,93.8,0,126.6,23.4,852.1,992.6,56 132 | 5,475,118.8,0,181.1,8.9,852.1,781.5,56 133 | 5,469,117.2,0,137.8,32.2,852.1,840.5,56 134 | 4,388.6,97.1,0,157.9,12.1,852.1,925.7,56 135 | 5,531.3,0,0,141.8,28.2,852.1,893.7,56 136 | 5,318.8,212.5,0,155.7,14.3,852.1,880.4,56 137 | 5,401.8,94.7,0,147.4,11.4,946.8,852.1,56 138 | 5,362.6,189,0,164.9,11.6,944.7,755.8,56 139 | 5,323.7,282.8,0,183.8,10.3,942.7,659.9,56 140 | 4,379.5,151.2,0,153.9,15.9,1134.3,605,56 141 | 5,286.3,200.9,0,144.7,11.2,1004.6,803.7,56 142 | 5,439,177,0,186,11.1,884.9,707.9,56 143 | 5,389.9,189,0,145.9,22,944.7,755.8,56 144 | 5,337.9,189,0,174.9,9.5,944.7,755.8,56 145 | 5,374,189.2,0,170.1,10.1,926.1,756.7,91 146 | 5,313.3,262.2,0,175.5,8.6,1046.9,611.8,91 147 | 5,425,106.3,0,153.5,16.5,852.1,887.1,91 148 | 5,425,106.3,0,151.4,18.6,936,803.7,91 149 | 5,375,93.8,0,126.6,23.4,852.1,992.6,91 150 | 5,475,118.8,0,181.1,8.9,852.1,781.5,91 151 | 5,469,117.2,0,137.8,32.2,852.1,840.5,91 152 | 5,388.6,97.1,0,157.9,12.1,852.1,925.7,91 153 | 5,531.3,0,0,141.8,28.2,852.1,893.7,91 154 | 5,318.8,212.5,0,155.7,14.3,852.1,880.4,91 155 | 5,401.8,94.7,0,147.4,11.4,946.8,852.1,91 156 | 5,362.6,189,0,164.9,11.6,944.7,755.8,91 157 | 4,379.5,151.2,0,153.9,15.9,1134.3,605,91 158 | 5,286.3,200.9,0,144.7,11.2,1004.6,803.7,91 159 | 
5,439,177,0,186,11.1,884.9,707.9,91 160 | 5,389.9,189,0,145.9,22,944.7,755.8,91 161 | 5,337.9,189,0,174.9,9.5,944.7,755.8,91 162 | 1,222.4,0,96.7,189.3,4.5,967.1,870.3,3 163 | 2,222.4,0,96.7,189.3,4.5,967.1,870.3,14 164 | 2,222.4,0,96.7,189.3,4.5,967.1,870.3,28 165 | 2,222.4,0,96.7,189.3,4.5,967.1,870.3,56 166 | 3,222.4,0,96.7,189.3,4.5,967.1,870.3,100 167 | 1,233.8,0,94.6,197.9,4.6,947,852.2,3 168 | 2,233.8,0,94.6,197.9,4.6,947,852.2,14 169 | 2,233.8,0,94.6,197.9,4.6,947,852.2,28 170 | 2,233.8,0,94.6,197.9,4.6,947,852.2,56 171 | 3,233.8,0,94.6,197.9,4.6,947,852.2,100 172 | 1,194.7,0,100.5,165.6,7.5,1006.4,905.9,3 173 | 2,194.7,0,100.5,165.6,7.5,1006.4,905.9,14 174 | 2,194.7,0,100.5,165.6,7.5,1006.4,905.9,28 175 | 2,194.7,0,100.5,165.6,7.5,1006.4,905.9,56 176 | 3,194.7,0,100.5,165.6,7.5,1006.4,905.9,100 177 | 1,190.7,0,125.4,162.1,7.8,1090,804,3 178 | 2,190.7,0,125.4,162.1,7.8,1090,804,14 179 | 2,190.7,0,125.4,162.1,7.8,1090,804,28 180 | 3,190.7,0,125.4,162.1,7.8,1090,804,56 181 | 3,190.7,0,125.4,162.1,7.8,1090,804,100 182 | 1,212.1,0,121.6,180.3,5.7,1057.6,779.3,3 183 | 2,212.1,0,121.6,180.3,5.7,1057.6,779.3,14 184 | 2,212.1,0,121.6,180.3,5.7,1057.6,779.3,28 185 | 3,212.1,0,121.6,180.3,5.7,1057.6,779.3,56 186 | 3,212.1,0,121.6,180.3,5.7,1057.6,779.3,100 187 | 1,230,0,118.3,195.5,4.6,1029.4,758.6,3 188 | 2,230,0,118.3,195.5,4.6,1029.4,758.6,14 189 | 2,230,0,118.3,195.5,4.6,1029.4,758.6,28 190 | 2,230,0,118.3,195.5,4.6,1029.4,758.6,56 191 | 3,230,0,118.3,195.5,4.6,1029.4,758.6,100 192 | 1,190.3,0,125.2,161.9,9.9,1088.1,802.6,3 193 | 2,190.3,0,125.2,161.9,9.9,1088.1,802.6,14 194 | 2,190.3,0,125.2,161.9,9.9,1088.1,802.6,28 195 | 3,190.3,0,125.2,161.9,9.9,1088.1,802.6,56 196 | 3,190.3,0,125.2,161.9,9.9,1088.1,802.6,100 197 | 1,166.1,0,163.3,176.5,4.5,1058.6,780.1,3 198 | 2,166.1,0,163.3,176.5,4.5,1058.6,780.1,14 199 | 2,166.1,0,163.3,176.5,4.5,1058.6,780.1,28 200 | 2,166.1,0,163.3,176.5,4.5,1058.6,780.1,56 201 | 2,166.1,0,163.3,176.5,4.5,1058.6,780.1,100 202 | 
1,168,42.1,163.8,121.8,5.7,1058.7,780.1,3 203 | 1,168,42.1,163.8,121.8,5.7,1058.7,780.1,14 204 | 2,168,42.1,163.8,121.8,5.7,1058.7,780.1,28 205 | 2,168,42.1,163.8,121.8,5.7,1058.7,780.1,56 206 | 3,168,42.1,163.8,121.8,5.7,1058.7,780.1,100 207 | 1,213.7,98.1,24.5,181.7,6.9,1065.8,785.4,3 208 | 2,213.7,98.1,24.5,181.7,6.9,1065.8,785.4,14 209 | 4,213.7,98.1,24.5,181.7,6.9,1065.8,785.4,28 210 | 4,213.7,98.1,24.5,181.7,6.9,1065.8,785.4,56 211 | 4,213.7,98.1,24.5,181.7,6.9,1065.8,785.4,100 212 | 1,213.8,98.1,24.5,181.7,6.7,1066,785.5,3 213 | 1,213.8,98.1,24.5,181.7,6.7,1066,785.5,14 214 | 3,213.8,98.1,24.5,181.7,6.7,1066,785.5,28 215 | 4,213.8,98.1,24.5,181.7,6.7,1066,785.5,56 216 | 4,213.8,98.1,24.5,181.7,6.7,1066,785.5,100 217 | 1,229.7,0,118.2,195.2,6.1,1028.1,757.6,3 218 | 2,229.7,0,118.2,195.2,6.1,1028.1,757.6,14 219 | 2,229.7,0,118.2,195.2,6.1,1028.1,757.6,28 220 | 2,229.7,0,118.2,195.2,6.1,1028.1,757.6,56 221 | 3,229.7,0,118.2,195.2,6.1,1028.1,757.6,100 222 | 1,238.1,0,94.1,186.7,7,949.9,847,3 223 | 2,238.1,0,94.1,186.7,7,949.9,847,14 224 | 2,238.1,0,94.1,186.7,7,949.9,847,28 225 | 3,238.1,0,94.1,186.7,7,949.9,847,56 226 | 3,238.1,0,94.1,186.7,7,949.9,847,100 227 | 1,250,0,95.7,187.4,5.5,956.9,861.2,3 228 | 2,250,0,95.7,187.4,5.5,956.9,861.2,14 229 | 2,250,0,95.7,187.4,5.5,956.9,861.2,28 230 | 3,250,0,95.7,187.4,5.5,956.9,861.2,56 231 | 3,250,0,95.7,187.4,5.5,956.9,861.2,100 232 | 1,212.5,0,100.4,159.3,8.7,1007.8,903.6,3 233 | 2,212.5,0,100.4,159.3,8.7,1007.8,903.6,14 234 | 2,212.5,0,100.4,159.3,8.7,1007.8,903.6,28 235 | 3,212.5,0,100.4,159.3,8.7,1007.8,903.6,56 236 | 3,212.5,0,100.4,159.3,8.7,1007.8,903.6,100 237 | 1,212.6,0,100.4,159.4,10.4,1003.8,903.8,3 238 | 2,212.6,0,100.4,159.4,10.4,1003.8,903.8,14 239 | 3,212.6,0,100.4,159.4,10.4,1003.8,903.8,28 240 | 3,212.6,0,100.4,159.4,10.4,1003.8,903.8,56 241 | 4,212.6,0,100.4,159.4,10.4,1003.8,903.8,100 242 | 1,212,0,124.8,159,7.8,1085.4,799.5,3 243 | 2,212,0,124.8,159,7.8,1085.4,799.5,14 244 | 
3,212,0,124.8,159,7.8,1085.4,799.5,28 245 | 4,212,0,124.8,159,7.8,1085.4,799.5,56 246 | 4,212,0,124.8,159,7.8,1085.4,799.5,100 247 | 1,231.8,0,121.6,174,6.7,1056.4,778.5,3 248 | 2,231.8,0,121.6,174,6.7,1056.4,778.5,14 249 | 2,231.8,0,121.6,174,6.7,1056.4,778.5,28 250 | 3,231.8,0,121.6,174,6.7,1056.4,778.5,56 251 | 4,231.8,0,121.6,174,6.7,1056.4,778.5,100 252 | 1,251.4,0,118.3,188.5,5.8,1028.4,757.7,3 253 | 2,251.4,0,118.3,188.5,5.8,1028.4,757.7,14 254 | 2,251.4,0,118.3,188.5,5.8,1028.4,757.7,28 255 | 3,251.4,0,118.3,188.5,5.8,1028.4,757.7,56 256 | 3,251.4,0,118.3,188.5,5.8,1028.4,757.7,100 257 | 1,251.4,0,118.3,188.5,6.4,1028.4,757.7,3 258 | 2,251.4,0,118.3,188.5,6.4,1028.4,757.7,14 259 | 2,251.4,0,118.3,188.5,6.4,1028.4,757.7,28 260 | 3,251.4,0,118.3,188.5,6.4,1028.4,757.7,56 261 | 3,251.4,0,118.3,188.5,6.4,1028.4,757.7,100 262 | 1,181.4,0,167,169.6,7.6,1055.6,777.8,3 263 | 2,181.4,0,167,169.6,7.6,1055.6,777.8,14 264 | 2,181.4,0,167,169.6,7.6,1055.6,777.8,28 265 | 3,181.4,0,167,169.6,7.6,1055.6,777.8,56 266 | 4,181.4,0,167,169.6,7.6,1055.6,777.8,100 267 | 1,182,45.2,122,170.2,8.2,1059.4,780.7,3 268 | 2,182,45.2,122,170.2,8.2,1059.4,780.7,14 269 | 2,182,45.2,122,170.2,8.2,1059.4,780.7,28 270 | 3,182,45.2,122,170.2,8.2,1059.4,780.7,56 271 | 4,182,45.2,122,170.2,8.2,1059.4,780.7,100 272 | 1,168.9,42.2,124.3,158.3,10.8,1080.8,796.2,3 273 | 2,168.9,42.2,124.3,158.3,10.8,1080.8,796.2,14 274 | 2,168.9,42.2,124.3,158.3,10.8,1080.8,796.2,28 275 | 3,168.9,42.2,124.3,158.3,10.8,1080.8,796.2,56 276 | 4,168.9,42.2,124.3,158.3,10.8,1080.8,796.2,100 277 | 2,290.4,0,96.2,168.1,9.4,961.2,865,3 278 | 3,290.4,0,96.2,168.1,9.4,961.2,865,14 279 | 3,290.4,0,96.2,168.1,9.4,961.2,865,28 280 | 4,290.4,0,96.2,168.1,9.4,961.2,865,56 281 | 4,290.4,0,96.2,168.1,9.4,961.2,865,100 282 | 2,277.1,0,97.4,160.6,11.8,973.9,875.6,3 283 | 3,277.1,0,97.4,160.6,11.8,973.9,875.6,14 284 | 4,277.1,0,97.4,160.6,11.8,973.9,875.6,28 285 | 4,277.1,0,97.4,160.6,11.8,973.9,875.6,56 286 | 
4,277.1,0,97.4,160.6,11.8,973.9,875.6,100 287 | 2,295.7,0,95.6,171.5,8.9,955.1,859.2,3 288 | 3,295.7,0,95.6,171.5,8.9,955.1,859.2,14 289 | 3,295.7,0,95.6,171.5,8.9,955.1,859.2,28 290 | 4,295.7,0,95.6,171.5,8.9,955.1,859.2,56 291 | 4,295.7,0,95.6,171.5,8.9,955.1,859.2,100 292 | 2,251.8,0,99.9,146.1,12.4,1006,899.8,3 293 | 2,251.8,0,99.9,146.1,12.4,1006,899.8,14 294 | 2,251.8,0,99.9,146.1,12.4,1006,899.8,28 295 | 3,251.8,0,99.9,146.1,12.4,1006,899.8,56 296 | 4,251.8,0,99.9,146.1,12.4,1006,899.8,100 297 | 1,249.1,0,98.8,158.1,12.8,987.8,889,3 298 | 2,249.1,0,98.8,158.1,12.8,987.8,889,14 299 | 2,249.1,0,98.8,158.1,12.8,987.8,889,28 300 | 3,249.1,0,98.8,158.1,12.8,987.8,889,56 301 | 4,249.1,0,98.8,158.1,12.8,987.8,889,100 302 | 2,252.3,0,98.8,146.3,14.2,987.8,889,3 303 | 3,252.3,0,98.8,146.3,14.2,987.8,889,14 304 | 4,252.3,0,98.8,146.3,14.2,987.8,889,28 305 | 4,252.3,0,98.8,146.3,14.2,987.8,889,56 306 | 5,252.3,0,98.8,146.3,14.2,987.8,889,100 307 | 2,246.8,0,125.1,143.3,12,1086.8,800.9,3 308 | 3,246.8,0,125.1,143.3,12,1086.8,800.9,14 309 | 4,246.8,0,125.1,143.3,12,1086.8,800.9,28 310 | 5,246.8,0,125.1,143.3,12,1086.8,800.9,56 311 | 5,246.8,0,125.1,143.3,12,1086.8,800.9,100 312 | 2,275.1,0,121.4,159.5,9.9,1053.6,777.5,3 313 | 3,275.1,0,121.4,159.5,9.9,1053.6,777.5,14 314 | 4,275.1,0,121.4,159.5,9.9,1053.6,777.5,28 315 | 4,275.1,0,121.4,159.5,9.9,1053.6,777.5,56 316 | 5,275.1,0,121.4,159.5,9.9,1053.6,777.5,100 317 | 2,297.2,0,117.5,174.8,9.5,1022.8,753.5,3 318 | 3,297.2,0,117.5,174.8,9.5,1022.8,753.5,14 319 | 4,297.2,0,117.5,174.8,9.5,1022.8,753.5,28 320 | 4,297.2,0,117.5,174.8,9.5,1022.8,753.5,56 321 | 4,297.2,0,117.5,174.8,9.5,1022.8,753.5,100 322 | 1,213.7,0,174.7,154.8,10.2,1053.5,776.4,3 323 | 2,213.7,0,174.7,154.8,10.2,1053.5,776.4,14 324 | 3,213.7,0,174.7,154.8,10.2,1053.5,776.4,28 325 | 4,213.7,0,174.7,154.8,10.2,1053.5,776.4,56 326 | 4,213.7,0,174.7,154.8,10.2,1053.5,776.4,100 327 | 1,213.5,0,174.2,154.6,11.7,1052.3,775.5,3 328 | 
2,213.5,0,174.2,154.6,11.7,1052.3,775.5,14 329 | 4,213.5,0,174.2,154.6,11.7,1052.3,775.5,28 330 | 4,213.5,0,174.2,154.6,11.7,1052.3,775.5,56 331 | 5,213.5,0,174.2,154.6,11.7,1052.3,775.5,100 332 | 2,277.2,97.8,24.5,160.7,11.2,1061.7,782.5,3 333 | 4,277.2,97.8,24.5,160.7,11.2,1061.7,782.5,14 334 | 5,277.2,97.8,24.5,160.7,11.2,1061.7,782.5,28 335 | 5,277.2,97.8,24.5,160.7,11.2,1061.7,782.5,56 336 | 5,277.2,97.8,24.5,160.7,11.2,1061.7,782.5,100 337 | 2,218.2,54.6,123.8,140.8,11.9,1075.7,792.7,3 338 | 3,218.2,54.6,123.8,140.8,11.9,1075.7,792.7,14 339 | 4,218.2,54.6,123.8,140.8,11.9,1075.7,792.7,28 340 | 5,218.2,54.6,123.8,140.8,11.9,1075.7,792.7,56 341 | 5,218.2,54.6,123.8,140.8,11.9,1075.7,792.7,100 342 | 1,214.9,53.8,121.9,155.6,9.6,1014.3,780.6,3 343 | 3,214.9,53.8,121.9,155.6,9.6,1014.3,780.6,14 344 | 4,214.9,53.8,121.9,155.6,9.6,1014.3,780.6,28 345 | 4,214.9,53.8,121.9,155.6,9.6,1014.3,780.6,56 346 | 4,214.9,53.8,121.9,155.6,9.6,1014.3,780.6,100 347 | 1,218.9,0,124.1,158.5,11.3,1078.7,794.9,3 348 | 2,218.9,0,124.1,158.5,11.3,1078.7,794.9,14 349 | 2,218.9,0,124.1,158.5,11.3,1078.7,794.9,28 350 | 3,218.9,0,124.1,158.5,11.3,1078.7,794.9,56 351 | 4,218.9,0,124.1,158.5,11.3,1078.7,794.9,100 352 | 1,376,0,0,214.6,0,1003.5,762.4,3 353 | 2,376,0,0,214.6,0,1003.5,762.4,14 354 | 2,376,0,0,214.6,0,1003.5,762.4,28 355 | 3,376,0,0,214.6,0,1003.5,762.4,56 356 | 3,376,0,0,214.6,0,1003.5,762.4,100 357 | 5,500,0,0,140,4,966,853,28 358 | 5,475,0,59,142,1.9,1098,641,28 359 | 5,315,137,0,145,5.9,1130,745,28 360 | 5,505,0,60,195,0,1030,630,28 361 | 5,451,0,0,165,11.3,1030,745,28 362 | 3,516,0,0,162,8.2,801,802,28 363 | 5,520,0,0,170,5.2,855,855,28 364 | 4,528,0,0,185,6.9,920,720,28 365 | 4,520,0,0,175,5.2,870,805,28 366 | 4,385,0,136,158,20,903,768,28 367 | 3,500.1,0,0,200,3,1124.4,613.2,28 368 | 3,450.1,50,0,200,3,1124.4,613.2,28 369 | 4,397,17.2,158,167,20.8,967,633,28 370 | 4,333,17.5,163,167,17.9,996,652,28 371 | 3,334,17.6,158,189,15.3,967,633,28 372 | 4,405,0,0,175,0,1120,695,28 
373 | 4,200,200,0,190,0,1145,660,28 374 | 3,516,0,0,162,8.3,801,802,28 375 | 2,145,116,119,184,5.7,833,880,28 376 | 3,160,128,122,182,6.4,824,879,28 377 | 3,234,156,0,189,5.9,981,760,28 378 | 5,250,180,95,159,9.5,860,800,28 379 | 5,475,0,0,162,9.5,1044,662,28 380 | 4,285,190,0,163,7.6,1031,685,28 381 | 5,356,119,0,160,9,1061,657,28 382 | 5,275,180,120,162,10.4,830,765,28 383 | 5,500,0,0,151,9,1033,655,28 384 | 1,165,0,143.6,163.8,0,1005.6,900.9,3 385 | 1,165,128.5,132.1,175.1,8.1,1005.8,746.6,3 386 | 2,178,129.8,118.6,179.9,3.6,1007.3,746.8,3 387 | 1,167.4,129.9,128.6,175.5,7.8,1006.3,746.6,3 388 | 2,172.4,13.6,172.4,156.8,4.1,1006.3,856.4,3 389 | 2,173.5,50.1,173.5,164.8,6.5,1006.2,793.5,3 390 | 1,167,75.4,167,164,7.9,1007.3,770.1,3 391 | 1,173.8,93.4,159.9,172.3,9.7,1007.2,746.6,3 392 | 1,190.3,0,125.2,166.6,9.9,1079,798.9,3 393 | 1,250,0,95.7,191.8,5.3,948.9,857.2,3 394 | 1,213.5,0,174.2,159.2,11.7,1043.6,771.9,3 395 | 1,194.7,0,100.5,170.2,7.5,998,901.8,3 396 | 1,251.4,0,118.3,192.9,5.8,1043.6,754.3,3 397 | 1,165,0,143.6,163.8,0,1005.6,900.9,14 398 | 2,165,128.5,132.1,175.1,8.1,1005.8,746.6,14 399 | 3,178,129.8,118.6,179.9,3.6,1007.3,746.8,14 400 | 2,167.4,129.9,128.6,175.5,7.8,1006.3,746.6,14 401 | 2,172.4,13.6,172.4,156.8,4.1,1006.3,856.4,14 402 | 2,173.5,50.1,173.5,164.8,6.5,1006.2,793.5,14 403 | 2,167,75.4,167,164,7.9,1007.3,770.1,14 404 | 2,173.8,93.4,159.9,172.3,9.7,1007.2,746.6,14 405 | 1,190.3,0,125.2,166.6,9.9,1079,798.9,14 406 | 2,250,0,95.7,191.8,5.3,948.9,857.2,14 407 | 2,213.5,0,174.2,159.2,11.7,1043.6,771.9,14 408 | 2,194.7,0,100.5,170.2,7.5,998,901.8,14 409 | 2,251.4,0,118.3,192.9,5.8,1043.6,754.3,14 410 | 2,165,0,143.6,163.8,0,1005.6,900.9,28 411 | 4,165,128.5,132.1,175.1,8.1,1005.8,746.6,28 412 | 3,178,129.8,118.6,179.9,3.6,1007.3,746.8,28 413 | 3,167.4,129.9,128.6,175.5,7.8,1006.3,746.6,28 414 | 2,172.4,13.6,172.4,156.8,4.1,1006.3,856.4,28 415 | 3,173.5,50.1,173.5,164.8,6.5,1006.2,793.5,28 416 | 3,167,75.4,167,164,7.9,1007.3,770.1,28 417 | 
3,173.8,93.4,159.9,172.3,9.7,1007.2,746.6,28 418 | 2,190.3,0,125.2,166.6,9.9,1079,798.9,28 419 | 2,250,0,95.7,191.8,5.3,948.9,857.2,28 420 | 3,213.5,0,174.2,159.2,11.7,1043.6,771.9,28 421 | 3,194.7,0,100.5,170.2,7.5,998,901.8,28 422 | 2,251.4,0,118.3,192.9,5.8,1043.6,754.3,28 423 | 3,165,0,143.6,163.8,0,1005.6,900.9,56 424 | 4,165,128.5,132.1,175.1,8.1,1005.8,746.6,56 425 | 4,178,129.8,118.6,179.9,3.6,1007.3,746.8,56 426 | 4,167.4,129.9,128.6,175.5,7.8,1006.3,746.6,56 427 | 3,172.4,13.6,172.4,156.8,4.1,1006.3,856.4,56 428 | 4,173.5,50.1,173.5,164.8,6.5,1006.2,793.5,56 429 | 4,167,75.4,167,164,7.9,1007.3,770.1,56 430 | 4,173.8,93.4,159.9,172.3,9.7,1007.2,746.6,56 431 | 2,190.3,0,125.2,166.6,9.9,1079,798.9,56 432 | 3,250,0,95.7,191.8,5.3,948.9,857.2,56 433 | 4,213.5,0,174.2,159.2,11.7,1043.6,771.9,56 434 | 3,194.7,0,100.5,170.2,7.5,998,901.8,56 435 | 3,251.4,0,118.3,192.9,5.8,1043.6,754.3,56 436 | 3,165,0,143.6,163.8,0,1005.6,900.9,100 437 | 4,165,128.5,132.1,175.1,8.1,1005.8,746.6,100 438 | 4,178,129.8,118.6,179.9,3.6,1007.3,746.8,100 439 | 4,167.4,129.9,128.6,175.5,7.8,1006.3,746.6,100 440 | 3,172.4,13.6,172.4,156.8,4.1,1006.3,856.4,100 441 | 4,173.5,50.1,173.5,164.8,6.5,1006.2,793.5,100 442 | 4,167,75.4,167,164,7.9,1007.3,770.1,100 443 | 4,173.8,93.4,159.9,172.3,9.7,1007.2,746.6,100 444 | 2,190.3,0,125.2,166.6,9.9,1079,798.9,100 445 | 3,250,0,95.7,191.8,5.3,948.9,857.2,100 446 | 4,213.5,0,174.2,159.2,11.7,1043.6,771.9,100 447 | 3,194.7,0,100.5,170.2,7.5,998,901.8,100 448 | 3,251.4,0,118.3,192.9,5.8,1043.6,754.3,100 449 | 5,446,24,79,162,11.6,967,712,28 450 | 3,446,24,79,162,11.6,967,712,28 451 | 4,446,24,79,162,11.6,967,712,28 452 | 4,446,24,79,162,10.3,967,712,28 453 | 3,446,24,79,162,11.6,967,712,3 454 | 2,446,24,79,162,11.6,967,712,3 455 | 4,446,24,79,162,11.6,967,712,7 456 | 3,446,24,79,162,11.6,967,712,7 457 | 5,446,24,79,162,11.6,967,712,56 458 | 4,446,24,79,162,11.6,967,712,56 459 | 4,446,24,79,162,10.3,967,712,56 460 | 4,387,20,94,157,14.3,938,845,28 461 | 
4,387,20,94,157,13.9,938,845,28 462 | 4,387,20,94,157,11.6,938,845,28 463 | 2,387,20,94,157,14.3,938,845,3 464 | 2,387,20,94,157,13.9,938,845,3 465 | 3,387,20,94,157,11.6,938,845,3 466 | 3,387,20,94,157,14.3,938,845,7 467 | 4,387,20,94,157,13.9,938,845,7 468 | 3,387,20,94,157,11.6,938,845,7 469 | 4,387,20,94,157,14.3,938,845,56 470 | 4,387,20,94,157,13.9,938,845,56 471 | 5,387,20,94,157,11.6,938,845,56 472 | 3,355,19,97,145,13.1,967,871,28 473 | 4,355,19,97,145,12.3,967,871,28 474 | 4,491,26,123,210,3.9,882,699,28 475 | 5,491,26,123,201,3.9,822,699,28 476 | 2,491,26,123,210,3.9,882,699,3 477 | 2,491,26,123,210,3.9,882,699,7 478 | 5,491,26,123,210,3.9,882,699,56 479 | 2,491,26,123,201,3.9,822,699,3 480 | 3,491,26,123,201,3.9,822,699,7 481 | 5,491,26,123,201,3.9,822,699,56 482 | 5,424,22,132,178,8.5,822,750,28 483 | 2,424,22,132,178,8.5,882,750,3 484 | 5,424,22,132,168,8.9,822,750,28 485 | 3,424,22,132,178,8.5,822,750,7 486 | 5,424,22,132,178,8.5,822,750,56 487 | 2,424,22,132,168,8.9,822,750,3 488 | 3,424,22,132,168,8.9,822,750,7 489 | 5,424,22,132,168,8.9,822,750,56 490 | 2,202,11,141,206,1.7,942,801,28 491 | 1,202,11,141,206,1.7,942,801,3 492 | 1,202,11,141,206,1.7,942,801,7 493 | 2,202,11,141,206,1.7,942,801,56 494 | 3,284,15,141,179,5.5,842,801,28 495 | 1,284,15,141,179,5.5,842,801,3 496 | 2,284,15,141,179,5.5,842,801,7 497 | 3,284,15,141,179,5.5,842,801,56 498 | 5,359,19,141,154,10.9,942,801,28 499 | 2,359,19,141,154,10.9,942,801,3 500 | 3,359,19,141,154,10.9,942,801,7 501 | 5,359,19,141,154,10.9,942,801,56 502 | 2,436,0,0,218,0,838.4,719.7,28 503 | 2,289,0,0,192,0,913.2,895.3,90 504 | 1,289,0,0,192,0,913.2,895.3,3 505 | 1,393,0,0,192,0,940.6,785.6,3 506 | 4,393,0,0,192,0,940.6,785.6,90 507 | 3,393,0,0,192,0,940.6,785.6,28 508 | 3,480,0,0,192,0,936.2,712.2,28 509 | 3,480,0,0,192,0,936.2,712.2,7 510 | 4,480,0,0,192,0,936.2,712.2,90 511 | 2,480,0,0,192,0,936.2,712.2,3 512 | 1,333,0,0,192,0,931.2,842.6,3 513 | 2,255,0,0,192,0,889.8,945,90 514 | 
1,255,0,0,192,0,889.8,945,7 515 | 1,289,0,0,192,0,913.2,895.3,7 516 | 1,255,0,0,192,0,889.8,945,28 517 | 2,333,0,0,192,0,931.2,842.6,28 518 | 2,333,0,0,192,0,931.2,842.6,7 519 | 2,289,0,0,192,0,913.2,895.3,28 520 | 3,333,0,0,192,0,931.2,842.6,90 521 | 2,393,0,0,192,0,940.6,785.6,7 522 | 1,255,0,0,192,0,889.8,945,3 523 | 1,158.8,238.2,0,185.7,0,1040.6,734.3,7 524 | 2,239.6,359.4,0,185.7,0,941.6,664.3,7 525 | 1,238.2,158.8,0,185.7,0,1040.6,734.3,7 526 | 2,181.9,272.8,0,185.7,0,1012.4,714.3,28 527 | 2,193.5,290.2,0,185.7,0,998.2,704.3,28 528 | 1,255.5,170.3,0,185.7,0,1026.6,724.3,7 529 | 1,272.8,181.9,0,185.7,0,1012.4,714.3,7 530 | 3,239.6,359.4,0,185.7,0,941.6,664.3,28 531 | 2,220.8,147.2,0,185.7,0,1055,744.3,28 532 | 2,397,0,0,185.7,0,1040.6,734.3,28 533 | 2,382.5,0,0,185.7,0,1047.8,739.3,7 534 | 2,210.7,316.1,0,185.7,0,977,689.3,7 535 | 2,158.8,238.2,0,185.7,0,1040.6,734.3,28 536 | 1,295.8,0,0,185.7,0,1091.4,769.3,7 537 | 2,255.5,170.3,0,185.7,0,1026.6,724.3,28 538 | 1,203.5,135.7,0,185.7,0,1076.2,759.3,7 539 | 2,397,0,0,185.7,0,1040.6,734.3,7 540 | 2,381.4,0,0,185.7,0,1104.6,784.3,28 541 | 2,295.8,0,0,185.7,0,1091.4,769.3,28 542 | 3,228,342.1,0,185.7,0,955.8,674.3,28 543 | 1,220.8,147.2,0,185.7,0,1055,744.3,7 544 | 3,316.1,210.7,0,185.7,0,977,689.3,28 545 | 1,135.7,203.5,0,185.7,0,1076.2,759.3,7 546 | 1,238.1,0,0,185.7,0,1118.8,789.3,28 547 | 2,339.2,0,0,185.7,0,1069.2,754.3,7 548 | 1,135.7,203.5,0,185.7,0,1076.2,759.3,28 549 | 1,193.5,290.2,0,185.7,0,998.2,704.3,7 550 | 2,203.5,135.7,0,185.7,0,1076.2,759.3,28 551 | 2,290.2,193.5,0,185.7,0,998.2,704.3,7 552 | 1,181.9,272.8,0,185.7,0,1012.4,714.3,7 553 | 2,170.3,155.5,0,185.7,0,1026.6,724.3,28 554 | 3,210.7,316.1,0,185.7,0,977,689.3,28 555 | 2,228,342.1,0,185.7,0,955.8,674.3,7 556 | 2,290.2,193.5,0,185.7,0,998.2,704.3,28 557 | 1,381.4,0,0,185.7,0,1104.6,784.3,7 558 | 2,238.2,158.8,0,185.7,0,1040.6,734.3,28 559 | 1,186.2,124.1,0,185.7,0,1083.4,764.3,7 560 | 2,339.2,0,0,185.7,0,1069.2,754.3,28 561 | 
1,238.1,0,0,185.7,0,1118.8,789.3,7 562 | 1,252.5,0,0,185.7,0,1111.6,784.3,28 563 | 3,382.5,0,0,185.7,0,1047.8,739.3,28 564 | 1,252.5,0,0,185.7,0,1111.6,784.3,7 565 | 2,316.1,210.7,0,185.7,0,977,689.3,7 566 | 1,186.2,124.1,0,185.7,0,1083.4,764.3,28 567 | 1,170.3,155.5,0,185.7,0,1026.6,724.3,7 568 | 2,272.8,181.9,0,185.7,0,1012.4,714.3,28 569 | 1,339,0,0,197,0,968,781,3 570 | 2,339,0,0,197,0,968,781,7 571 | 2,339,0,0,197,0,968,781,14 572 | 2,339,0,0,197,0,968,781,28 573 | 3,339,0,0,197,0,968,781,90 574 | 3,339,0,0,197,0,968,781,180 575 | 3,339,0,0,197,0,968,781,365 576 | 1,236,0,0,194,0,968,885,3 577 | 1,236,0,0,194,0,968,885,14 578 | 1,236,0,0,194,0,968,885,28 579 | 2,236,0,0,194,0,968,885,90 580 | 2,236,0,0,193,0,968,885,180 581 | 2,236,0,0,193,0,968,885,365 582 | 2,277,0,0,191,0,968,856,14 583 | 2,277,0,0,191,0,968,856,28 584 | 1,277,0,0,191,0,968,856,3 585 | 2,277,0,0,191,0,968,856,90 586 | 2,277,0,0,191,0,968,856,180 587 | 2,277,0,0,191,0,968,856,360 588 | 1,254,0,0,198,0,968,863,3 589 | 2,254,0,0,198,0,968,863,90 590 | 2,254,0,0,198,0,968,863,180 591 | 2,254,0,0,198,0,968,863,365 592 | 3,307,0,0,193,0,968,812,180 593 | 3,307,0,0,193,0,968,812,365 594 | 1,307,0,0,193,0,968,812,3 595 | 2,307,0,0,193,0,968,812,28 596 | 2,307,0,0,193,0,968,812,90 597 | 1,236,0,0,193,0,968,885,7 598 | 1,200,0,0,180,0,1125,845,7 599 | 1,200,0,0,180,0,1125,845,28 600 | 1,225,0,0,181,0,1113,833,7 601 | 1,225,0,0,181,0,1113,833,28 602 | 1,325,0,0,184,0,1063,783,7 603 | 2,325,0,0,184,0,1063,783,28 604 | 1,275,0,0,183,0,1088,808,7 605 | 2,275,0,0,183,0,1088,808,28 606 | 1,300,0,0,184,0,1075,795,7 607 | 2,300,0,0,184,0,1075,795,28 608 | 2,375,0,0,186,0,1038,758,7 609 | 3,375,0,0,186,0,1038,758,28 610 | 3,400,0,0,187,0,1025,745,28 611 | 2,400,0,0,187,0,1025,745,7 612 | 1,250,0,0,182,0,1100,820,7 613 | 2,250,0,0,182,0,1100,820,28 614 | 2,350,0,0,186,0,1050,770,7 615 | 3,350,0,0,186,0,1050,770,28 616 | 1,203.5,305.3,0,203.5,0,963.4,630,7 617 | 4,250.2,166.8,0,203.5,0,977.6,694.1,90 618 | 
3,157,236,0,192,0,935.4,781.2,90 619 | 2,141.3,212,0,203.5,0,971.8,748.5,28 620 | 1,166.8,250.2,0,203.5,0,975.6,692.6,3 621 | 2,122.6,183.9,0,203.5,0,958.2,800.1,90 622 | 1,183.9,122.6,0,203.5,0,959.2,800,3 623 | 1,102,153,0,192,0,887,942,3 624 | 2,102,153,0,192,0,887,942,90 625 | 2,122.6,183.9,0,203.5,0,958.2,800.1,28 626 | 2,166.8,250.2,0,203.5,0,975.6,692.6,28 627 | 1,200,133,0,192,0,965.4,806.2,3 628 | 2,108.3,162.4,0,203.5,0,938.2,849,28 629 | 2,305.3,203.5,0,203.5,0,965.4,631,7 630 | 2,108.3,162.4,0,203.5,0,938.2,849,90 631 | 2,116,173,0,192,0,909.8,891.9,90 632 | 1,141.3,212,0,203.5,0,971.8,748.5,7 633 | 2,157,236,0,192,0,935.4,781.2,28 634 | 2,133,200,0,192,0,927.4,839.2,28 635 | 1,250.2,166.8,0,203.5,0,977.6,694.1,7 636 | 1,173,116,0,192,0,946.8,856.8,7 637 | 1,192,288,0,192,0,929.8,716.1,3 638 | 3,192,288,0,192,0,929.8,716.1,28 639 | 1,153,102,0,192,0,888,943.1,3 640 | 1,288,192,0,192,0,932,717.8,3 641 | 3,305.3,203.5,0,203.5,0,965.4,631,28 642 | 2,236,157,0,192,0,972.6,749.1,7 643 | 1,173,116,0,192,0,946.8,856.8,3 644 | 1,212,141.3,0,203.5,0,973.4,750,7 645 | 1,236,157,0,192,0,972.6,749.1,3 646 | 2,183.9,122.6,0,203.5,0,959.2,800,90 647 | 1,166.8,250.2,0,203.5,0,975.6,692.6,7 648 | 1,102,153,0,192,0,887,942,7 649 | 3,288,192,0,192,0,932,717.8,28 650 | 2,212,141.3,0,203.5,0,973.4,750,28 651 | 1,102,153,0,192,0,887,942,28 652 | 2,173,116,0,192,0,946.8,856.8,28 653 | 2,183.9,122.6,0,203.5,0,959.2,800,28 654 | 3,133,200,0,192,0,927.4,839.2,90 655 | 4,192,288,0,192,0,929.8,716.1,90 656 | 1,133,200,0,192,0,927.4,839.2,7 657 | 1,305.3,203.5,0,203.5,0,965.4,631,3 658 | 4,236,157,0,192,0,972.6,749.1,90 659 | 1,108.3,162.4,0,203.5,0,938.2,849,3 660 | 1,157,236,0,192,0,935.4,781.2,7 661 | 2,288,192,0,192,0,932,717.8,7 662 | 1,212,141.3,0,203.5,0,973.4,750,3 663 | 3,212,141.3,0,203.5,0,973.4,750,90 664 | 1,153,102,0,192,0,888,943.1,28 665 | 2,236,157,0,192,0,972.6,749.1,28 666 | 2,116,173,0,192,0,909.8,891.9,28 667 | 1,183.9,122.6,0,203.5,0,959.2,800,7 668 | 
1,108.3,162.4,0,203.5,0,938.2,849,7 669 | 3,203.5,305.3,0,203.5,0,963.4,630,28 670 | 1,203.5,305.3,0,203.5,0,963.4,630,3 671 | 1,133,200,0,192,0,927.4,839.2,3 672 | 4,288,192,0,192,0,932,717.8,90 673 | 1,200,133,0,192,0,965.4,806.2,7 674 | 2,200,133,0,192,0,965.4,806.2,28 675 | 1,250.2,166.8,0,203.5,0,977.6,694.1,3 676 | 1,122.6,183.9,0,203.5,0,958.2,800.1,3 677 | 2,153,102,0,192,0,888,943.1,90 678 | 3,200,133,0,192,0,965.4,806.2,90 679 | 1,116,173,0,192,0,909.8,891.9,3 680 | 2,173,116,0,192,0,946.8,856.8,90 681 | 3,250.2,166.8,0,203.5,0,977.6,694.1,28 682 | 4,305.3,203.5,0,203.5,0,965.4,631,90 683 | 2,192,288,0,192,0,929.8,716.1,7 684 | 1,157,236,0,192,0,935.4,781.2,3 685 | 1,153,102,0,192,0,888,943.1,7 686 | 3,141.3,212,0,203.5,0,971.8,748.5,90 687 | 1,116,173,0,192,0,909.8,891.9,7 688 | 1,141.3,212,0,203.5,0,971.8,748.5,3 689 | 1,122.6,183.9,0,203.5,0,958.2,800.1,7 690 | 3,166.8,250.2,0,203.5,0,975.6,692.6,90 691 | 4,203.5,305.3,0,203.5,0,963.4,630,90 692 | 1,310,0,0,192,0,1012,830,3 693 | 1,310,0,0,192,0,1012,830,7 694 | 2,310,0,0,192,0,1012,830,28 695 | 3,310,0,0,192,0,1012,830,90 696 | 3,310,0,0,192,0,1012,830,120 697 | 1,331,0,0,192,0,1025,821,3 698 | 1,331,0,0,192,0,1025,821,7 699 | 2,331,0,0,192,0,1025,821,28 700 | 3,331,0,0,192,0,1025,821,90 701 | 3,331,0,0,192,0,1025,821,120 702 | 1,349,0,0,192,0,1056,809,3 703 | 1,349,0,0,192,0,1056,809,7 704 | 2,349,0,0,192,0,1056,809,28 705 | 3,349,0,0,192,0,1056,809,90 706 | 3,349,0,0,192,0,1056,809,120 707 | 1,238,0,0,186,0,1119,789,7 708 | 1,238,0,0,186,0,1119,789,28 709 | 1,296,0,0,186,0,1090,769,7 710 | 2,296,0,0,186,0,1090,769,28 711 | 2,297,0,0,186,0,1040,734,7 712 | 3,480,0,0,192,0,936,721,28 713 | 4,480,0,0,192,0,936,721,90 714 | 3,397,0,0,186,0,1040,734,28 715 | 1,281,0,0,186,0,1104,774,7 716 | 2,281,0,0,185,0,1104,774,28 717 | 1,500,0,0,200,0,1125,613,1 718 | 2,500,0,0,200,0,1125,613,3 719 | 2,500,0,0,200,0,1125,613,7 720 | 3,500,0,0,200,0,1125,613,14 721 | 3,500,0,0,200,0,1125,613,28 722 | 
4,540,0,0,173,0,1125,613,7 723 | 5,540,0,0,173,0,1125,613,14 724 | 5,540,0,0,173,0,1125,613,28 725 | 5,540,0,0,173,0,1125,613,90 726 | 5,540,0,0,173,0,1125,613,180 727 | 5,540,0,0,173,0,1125,613,270 728 | 1,350,0,0,203,0,974,775,7 729 | 2,350,0,0,203,0,974,775,14 730 | 2,350,0,0,203,0,974,775,28 731 | 2,350,0,0,203,0,974,775,56 732 | 2,350,0,0,203,0,974,775,90 733 | 2,350,0,0,203,0,974,775,180 734 | 1,385,0,0,186,0,966,763,1 735 | 1,385,0,0,186,0,966,763,3 736 | 2,385,0,0,186,0,966,763,7 737 | 2,385,0,0,186,0,966,763,14 738 | 2,385,0,0,186,0,966,763,28 739 | 3,331,0,0,192,0,978,825,180 740 | 3,331,0,0,192,0,978,825,360 741 | 1,349,0,0,192,0,1047,806,3 742 | 1,331,0,0,192,0,978,825,3 743 | 2,382,0,0,186,0,1047,739,7 744 | 3,382,0,0,186,0,1047,739,28 745 | 1,382,0,0,186,0,1111,784,7 746 | 2,281,0,0,186,0,1104,774,28 747 | 2,339,0,0,185,0,1069,754,7 748 | 2,339,0,0,185,0,1069,754,28 749 | 1,295,0,0,185,0,1069,769,7 750 | 2,295,0,0,185,0,1069,769,28 751 | 1,238,0,0,185,0,1118,789,28 752 | 1,296,0,0,192,0,1085,765,7 753 | 2,296,0,0,192,0,1085,765,28 754 | 2,296,0,0,192,0,1085,765,90 755 | 1,331,0,0,192,0,879,825,3 756 | 1,331,0,0,192,0,978,825,7 757 | 2,331,0,0,192,0,978,825,28 758 | 3,331,0,0,192,0,978,825,90 759 | 1,349,0,0,192,0,1047,806,7 760 | 2,349,0,0,192,0,1047,806,28 761 | 3,349,0,0,192,0,1047,806,90 762 | 3,349,0,0,192,0,1047,806,180 763 | 3,349,0,0,192,0,1047,806,360 764 | 1,302,0,0,203,0,974,817,14 765 | 2,302,0,0,203,0,974,817,180 766 | 5,525,0,0,189,0,1125,613,180 767 | 4,500,0,0,200,0,1125,613,90 768 | 4,500,0,0,200,0,1125,613,180 769 | 4,500,0,0,200,0,1125,613,270 770 | 3,540,0,0,173,0,1125,613,3 771 | 1,252,0,0,185,0,1111,784,7 772 | 1,252,0,0,185,0,1111,784,28 773 | 2,339,0,0,185,0,1060,754,28 774 | 1,393,0,0,192,0,940,758,3 775 | 3,393,0,0,192,0,940,758,28 776 | 4,393,0,0,192,0,940,758,90 777 | 2,382,0,0,185,0,1047,739,7 778 | 3,382,0,0,185,0,1047,739,28 779 | 1,252,0,0,186,0,1111,784,7 780 | 1,310,0,0,192,0,970,850,7 781 | 2,310,0,0,192,0,970,850,28 
782 | 3,310,0,0,192,0,970,850,90 783 | 3,310,0,0,192,0,970,850,180 784 | 3,310,0,0,192,0,970,850,360 785 | 2,525,0,0,189,0,1125,613,3 786 | 3,525,0,0,189,0,1125,613,7 787 | 4,525,0,0,189,0,1125,613,14 788 | 4,525,0,0,189,0,1125,613,28 789 | 5,525,0,0,189,0,1125,613,90 790 | 5,525,0,0,189,0,1125,613,270 791 | 2,322,0,0,203,0,974,800,14 792 | 2,322,0,0,203,0,974,800,28 793 | 2,322,0,0,203,0,974,800,180 794 | 2,302,0,0,203,0,974,817,28 795 | 3,397,0,0,185,0,1040,734,28 796 | 2,480,0,0,192,0,936,721,3 797 | 4,522,0,0,146,0,896,896,7 798 | 5,522,0,0,146,0,896,896,28 799 | 3,273,105,82,210,9,904,680,28 800 | 2,162,190,148,179,19,838,741,28 801 | 1,154,144,112,220,10,923,658,28 802 | 1,147,115,89,202,9,860,829,28 803 | 3,152,178,139,168,18,944,695,28 804 | 2,310,143,111,168,22,914,651,28 805 | 1,144,0,175,158,18,943,844,28 806 | 2,304,140,0,214,6,895,722,28 807 | 3,374,0,0,190,7,1013,730,28 808 | 2,159,149,116,175,15,953,720,28 809 | 2,153,239,0,200,6,1002,684,28 810 | 4,310,143,0,168,10,914,804,28 811 | 2,305,0,100,196,10,959,705,28 812 | 1,151,0,184,167,12,991,772,28 813 | 3,142,167,130,174,11,883,785,28 814 | 4,298,137,107,201,6,878,655,28 815 | 5,321,164,0,190,5,870,774,28 816 | 5,366,187,0,191,7,824,757,28 817 | 4,280,129,100,172,9,825,805,28 818 | 2,252,97,76,194,8,835,821,28 819 | 1,165,0,150,182,12,1023,729,28 820 | 3,156,243,0,180,11,1022,698,28 821 | 2,160,188,146,203,11,829,710,28 822 | 3,298,0,107,186,6,879,815,28 823 | 3,318,0,126,210,6,861,737,28 824 | 3,287,121,94,188,9,904,696,28 825 | 5,326,166,0,174,9,882,790,28 826 | 3,356,0,142,193,11,801,778,28 827 | 2,132,207,161,179,5,867,736,28 828 | 4,322,149,0,186,8,951,709,28 829 | 1,164,0,200,181,13,849,846,28 830 | 3,314,0,113,170,10,925,783,28 831 | 3,321,0,128,182,11,870,780,28 832 | 3,140,164,128,237,6,869,656,28 833 | 3,288,121,0,177,7,908,829,28 834 | 2,298,0,107,210,11,880,744,28 835 | 3,265,111,86,195,6,833,790,28 836 | 3,160,250,0,168,12,1049,688,28 837 | 3,166,260,0,183,13,859,827,28 838 | 
3,276,116,90,180,9,870,768,28 839 | 2,322,0,116,196,10,818,813,28 840 | 2,149,139,109,193,6,892,780,28 841 | 2,159,187,0,176,11,990,789,28 842 | 2,261,100,78,201,9,864,761,28 843 | 2,237,92,71,247,6,853,695,28 844 | 3,313,0,113,178,8,1002,689,28 845 | 1,155,183,0,193,9,1047,697,28 846 | 2,146,230,0,202,3,827,872,28 847 | 2,296,0,107,221,11,819,778,28 848 | 2,133,210,0,196,3,949,795,28 849 | 3,313,145,0,178,8,867,824,28 850 | 1,152,0,112,184,8,992,816,28 851 | 2,153,145,113,178,8,1002,689,28 852 | 3,140,133,103,200,7,916,753,28 853 | 2,149,236,0,176,13,847,893,28 854 | 2,300,0,120,212,10,878,728,28 855 | 2,153,145,113,178,8,867,824,28 856 | 1,148,0,137,158,16,1002,830,28 857 | 3,326,0,138,199,11,801,792,28 858 | 1,153,145,0,178,8,1000,822,28 859 | 2,262,111,86,195,5,895,733,28 860 | 1,158,0,195,220,11,898,713,28 861 | 1,151,0,185,167,16,1074,678,28 862 | 2,273,0,90,199,11,931,762,28 863 | 2,149,118,92,183,7,953,780,28 864 | 2,143,169,143,191,8,967,643,28 865 | 4,260,101,78,171,10,936,763,28 866 | 4,313,161,0,178,10,917,759,28 867 | 3,284,120,0,168,7,970,794,28 868 | 3,336,0,0,182,3,986,817,28 869 | 1,145,0,134,181,11,979,812,28 870 | 3,150,237,0,174,12,1069,675,28 871 | 2,144,170,133,192,8,814,805,28 872 | 4,331,170,0,195,8,811,802,28 873 | 1,155,0,143,193,9,1047,697,28 874 | 2,155,183,0,193,9,877,868,28 875 | 1,135,0,166,180,10,961,805,28 876 | 3,266,112,87,178,10,910,745,28 877 | 4,314,145,113,179,8,869,690,28 878 | 3,313,145,0,127,8,1000,822,28 879 | 2,146,173,0,182,3,986,817,28 880 | 2,144,136,106,178,7,941,774,28 881 | 1,148,0,182,181,15,839,884,28 882 | 3,277,117,91,191,7,946,666,28 883 | 3,298,0,107,164,13,953,784,28 884 | 3,313,145,0,178,8,1002,689,28 885 | 2,155,184,143,194,9,880,699,28 886 | 4,289,134,0,195,6,924,760,28 887 | 2,148,175,0,171,2,1000,828,28 888 | 1,145,0,179,202,8,824,869,28 889 | 2,313,0,0,178,8,1000,822,28 890 | 2,136,162,126,172,10,923,764,28 891 | 1,155,0,143,193,9,877,868,28 892 | 2,255,99,77,189,6,919,749,28 893 | 
3,162,207,172,216,10,822,638,28 894 | 2,136,196,98,199,6,847,783,28 895 | 2,164,163,128,197,8,961,641,28 896 | 2,162,214,164,202,10,820,680,28 897 | 2,157,214,152,200,9,819,704,28 898 | 2,149,153,194,192,8,935,623,28 899 | 2,135,105,193,196,6,965,643,28 900 | 2,159,209,161,201,7,848,669,28 901 | 1,144,15,195,176,6,1021,709,28 902 | 2,154,174,185,228,7,845,612,28 903 | 2,167,187,195,185,7,898,636,28 904 | 2,184,86,190,213,6,923,623,28 905 | 2,156,178,187,221,7,854,614,28 906 | 2,236.9,91.7,71.5,246.9,6,852.9,695.4,28 907 | 3,313.3,0,113,178.5,8,1001.9,688.7,28 908 | 1,154.8,183.4,0,193.3,9.1,1047.4,696.7,28 909 | 2,145.9,230.5,0,202.5,3.4,827,871.8,28 910 | 2,296,0,106.7,221.4,10.5,819.2,778.4,28 911 | 2,133.1,210.2,0,195.7,3.1,949.4,795.3,28 912 | 3,313.3,145,0,178.5,8,867.2,824,28 913 | 1,151.6,0,111.9,184.4,7.9,992,815.9,28 914 | 2,153.1,145,113,178.5,8,1001.9,688.7,28 915 | 3,139.9,132.6,103.3,200.3,7.4,916,753.4,28 916 | 2,149.5,236,0,175.8,12.6,846.8,892.7,28 917 | 2,299.8,0,119.8,211.5,9.9,878.2,727.6,28 918 | 2,153.1,145,113,178.5,8,867.2,824,28 919 | 1,148.1,0,136.6,158.1,16.1,1001.8,830.1,28 920 | 3,326.5,0,137.9,199,10.8,801.1,792.5,28 921 | 1,152.7,144.7,0,178.1,8,999.7,822.2,28 922 | 2,261.9,110.5,86.1,195.4,5,895.2,732.6,28 923 | 1,158.4,0,194.9,219.7,11,897.7,712.9,28 924 | 1,150.7,0,185.3,166.7,15.6,1074.5,678,28 925 | 2,272.6,0,89.6,198.7,10.6,931.3,762.2,28 926 | 2,149,117.6,91.7,182.9,7.1,953.4,780.3,28 927 | 2,143,169.4,142.7,190.7,8.4,967.4,643.5,28 928 | 4,259.9,100.6,78.4,170.6,10.4,935.7,762.9,28 929 | 4,312.9,160.5,0,177.6,9.6,916.6,759.5,28 930 | 3,284,119.7,0,168.3,7.2,970.4,794.2,28 931 | 3,336.5,0,0,181.9,3.4,985.8,816.8,28 932 | 1,144.8,0,133.6,180.8,11.1,979.5,811.5,28 933 | 3,150,236.8,0,173.8,11.9,1069.3,674.8,28 934 | 2,143.7,170.2,132.6,191.6,8.5,814.1,805.3,28 935 | 4,330.5,169.6,0,194.9,8.1,811,802.3,28 936 | 1,154.8,0,142.8,193.3,9.1,1047.4,696.7,28 937 | 2,154.8,183.4,0,193.3,9.1,877.2,867.7,28 938 | 
1,134.7,0,165.7,180.2,10,961,804.9,28 939 | 3,266.2,112.3,87.5,177.9,10.4,909.7,744.5,28 940 | 4,314,145.3,113.2,178.9,8,869.1,690.2,28 941 | 3,312.7,144.7,0,127.3,8,999.7,822.2,28 942 | 2,145.7,172.6,0,181.9,3.4,985.8,816.8,28 943 | 2,143.8,136.3,106.2,178.1,7.5,941.5,774.3,28 944 | 1,148.1,0,182.1,181.4,15,838.9,884.3,28 945 | 3,277,116.8,91,190.6,7,946.5,665.6,28 946 | 3,298.1,0,107.5,163.6,12.8,953.2,784,28 947 | 3,313.3,145,0,178.5,8,1001.9,688.7,28 948 | 2,155.2,183.9,143.2,193.8,9.2,879.6,698.5,28 949 | 4,289,133.7,0,194.9,5.5,924.1,760.1,28 950 | 2,147.8,175.1,0,171.2,2.2,1000,828.5,28 951 | 1,145.4,0,178.9,201.7,7.8,824,868.7,28 952 | 2,312.7,0,0,178.1,8,999.7,822.2,28 953 | 2,136.4,161.6,125.8,171.6,10.4,922.6,764.4,28 954 | 1,154.8,0,142.8,193.3,9.1,877.2,867.7,28 955 | 2,255.3,98.8,77,188.6,6.5,919,749.3,28 956 | 3,272.8,105.1,81.8,209.7,9,904,679.7,28 957 | 2,162,190.1,148.1,178.8,18.8,838.1,741.4,28 958 | 1,153.6,144.2,112.3,220.1,10.1,923.2,657.9,28 959 | 1,146.5,114.6,89.3,201.9,8.8,860,829.5,28 960 | 3,151.8,178.1,138.7,167.5,18.3,944,694.6,28 961 | 3,309.9,142.8,111.2,167.8,22.1,913.9,651.2,28 962 | 1,143.6,0,174.9,158.4,17.9,942.7,844.5,28 963 | 2,303.6,139.9,0,213.5,6.2,895.5,722.5,28 964 | 3,374.3,0,0,190.2,6.7,1013.2,730.4,28 965 | 2,158.6,148.9,116,175.1,15,953.3,719.7,28 966 | 2,152.6,238.7,0,200,6.3,1001.8,683.9,28 967 | 4,310,142.8,0,167.9,10,914.3,804,28 968 | 2,304.8,0,99.6,196,9.8,959.4,705.2,28 969 | 1,150.9,0,183.9,166.6,11.6,991.2,772.2,28 970 | 3,141.9,166.6,129.7,173.5,10.9,882.6,785.3,28 971 | 4,297.8,137.2,106.9,201.3,6,878.4,655.3,28 972 | 5,321.3,164.2,0,190.5,4.6,870,774,28 973 | 5,366,187,0,191.3,6.6,824.3,756.9,28 974 | 4,279.8,128.9,100.4,172.4,9.5,825.1,804.9,28 975 | 2,252.1,97.1,75.6,193.8,8.3,835.5,821.4,28 976 | 1,164.6,0,150.4,181.6,11.7,1023.3,728.9,28 977 | 3,155.6,243.5,0,180.3,10.7,1022,697.7,28 978 | 3,160.2,188,146.4,203.2,11.3,828.7,709.7,28 979 | 3,298.1,0,107,186.4,6.1,879,815.2,28 980 | 
3,317.9,0,126.5,209.7,5.7,860.5,736.6,28 981 | 3,287.3,120.5,93.9,187.6,9.2,904.4,695.9,28 982 | 5,325.6,166.4,0,174,8.9,881.6,790,28 983 | 3,355.9,0,141.6,193.3,11,801.4,778.4,28 984 | 2,132,206.5,160.9,178.9,5.5,866.9,735.6,28 985 | 4,322.5,148.6,0,185.8,8.5,951,709.5,28 986 | 1,164.2,0,200.1,181.2,12.6,849.3,846,28 987 | 3,313.8,0,112.6,169.9,10.1,925.3,782.9,28 988 | 3,321.4,0,127.9,182.5,11.5,870.1,779.7,28 989 | 3,139.7,163.9,127.7,236.7,5.8,868.6,655.6,28 990 | 3,288.4,121,0,177.4,7,907.9,829.5,28 991 | 2,298.2,0,107,209.7,11.1,879.6,744.2,28 992 | 3,264.5,111,86.5,195.5,5.9,832.6,790.4,28 993 | 3,159.8,250,0,168.4,12.2,1049.3,688.2,28 994 | 3,166,259.7,0,183.2,12.7,858.8,826.8,28 995 | 3,276.4,116,90.3,179.6,8.9,870.1,768.3,28 996 | 2,322.2,0,115.6,196,10.4,817.9,813.4,28 997 | 2,148.5,139.4,108.6,192.7,6.1,892.4,780,28 998 | 2,159.1,186.7,0,175.6,11.3,989.6,788.9,28 999 | 2,260.9,100.5,78.3,200.6,8.6,864.5,761.5,28 -------------------------------------------------------------------------------- /images/app-screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rasbt/scipy2022-talk/1511bed4fcccb4c5d5c7041b33671050ac173852/images/app-screenshot.png -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | coral_pytorch==1.3 2 | lightning==2022.6.15 3 | pytorch_lightning==1.6.4 4 | pre-commit==2.19.0 5 | matplotlib==3.5.2 6 | pandas==1.4.3 7 | scikit-learn==1.1 8 | torch==1.12 9 | watermark==2.3.1 -------------------------------------------------------------------------------- /src/cement_strength/lightning_logs/corn-mlp-cement/version_0/hparams.yaml: -------------------------------------------------------------------------------- 1 | learning_rate: 0.005 2 | loss_mode: corn 3 | -------------------------------------------------------------------------------- 
/src/cement_strength/lightning_logs/corn-mlp-cement/version_0/metrics.csv: -------------------------------------------------------------------------------- 1 | train_loss,epoch,step,valid_loss,valid_mae,train_mae,test_mae 2 | 5.686377048492432,0,9,,,, 3 | 4.851904392242432,0,19,,,, 4 | 4.560766696929932,0,29,,,, 5 | 4.513494968414307,0,39,,,, 6 | ,0,43,4.311710357666016,0.8374999761581421,, 7 | ,0,43,,,0.9801136255264282, 8 | 4.205481052398682,1,49,,,, 9 | 4.4384846687316895,1,59,,,, 10 | 3.868427276611328,1,69,,,, 11 | 3.606152296066284,1,79,,,, 12 | ,1,87,3.7330520153045654,0.699999988079071,, 13 | ,1,87,,,0.6761363744735718, 14 | 4.249462127685547,2,89,,,, 15 | 2.592904806137085,2,99,,,, 16 | 3.217111349105835,2,109,,,, 17 | 2.864898204803467,2,119,,,, 18 | 4.060671329498291,2,129,,,, 19 | ,2,131,3.36987566947937,0.574999988079071,, 20 | ,2,131,,,0.5454545617103577, 21 | 3.144517660140991,3,139,,,, 22 | 3.4679627418518066,3,149,,,, 23 | 3.5545456409454346,3,159,,,, 24 | 3.3830878734588623,3,169,,,, 25 | ,3,175,3.0788373947143555,0.44999998807907104,, 26 | ,3,175,,,0.46164771914482117, 27 | 3.2143707275390625,4,179,,,, 28 | 4.0908284187316895,4,189,,,, 29 | 2.718573808670044,4,199,,,, 30 | 2.3624396324157715,4,209,,,, 31 | 2.411545753479004,4,219,,,, 32 | ,4,219,2.8992321491241455,0.4000000059604645,, 33 | ,4,219,,,0.4119318127632141, 34 | 2.078325033187866,5,229,,,, 35 | 2.219708204269409,5,239,,,, 36 | 3.0616185665130615,5,249,,,, 37 | 2.581987142562866,5,259,,,, 38 | ,5,263,2.724926710128784,0.4000000059604645,, 39 | ,5,263,,,0.38352271914482117, 40 | 2.3958709239959717,6,269,,,, 41 | 2.9828999042510986,6,279,,,, 42 | 2.544111967086792,6,289,,,, 43 | 2.368887424468994,6,299,,,, 44 | ,6,307,2.618185043334961,0.3375000059604645,, 45 | ,6,307,,,0.36221590638160706, 46 | 2.3898067474365234,7,309,,,, 47 | 1.901157021522522,7,319,,,, 48 | 2.2202556133270264,7,329,,,, 49 | 2.399202346801758,7,339,,,, 50 | 2.846958875656128,7,349,,,, 51 | 
,7,351,2.4954733848571777,0.32499998807907104,, 52 | ,7,351,,,0.3494318127632141, 53 | 2.0118448734283447,8,359,,,, 54 | 2.3135499954223633,8,369,,,, 55 | 3.042299509048462,8,379,,,, 56 | 2.522310733795166,8,389,,,, 57 | ,8,395,2.4212594032287598,0.32499998807907104,, 58 | ,8,395,,,0.33096590638160706, 59 | 2.5697579383850098,9,399,,,, 60 | 3.1671957969665527,9,409,,,, 61 | 1.9316233396530151,9,419,,,, 62 | 2.041757583618164,9,429,,,, 63 | 2.067247152328491,9,439,,,, 64 | ,9,439,2.3606855869293213,0.3125,, 65 | ,9,439,,,0.32102271914482117, 66 | 1.4858604669570923,10,449,,,, 67 | 1.7263303995132446,10,459,,,, 68 | 2.772451877593994,10,469,,,, 69 | 2.0726425647735596,10,479,,,, 70 | ,10,483,2.320737838745117,0.30000001192092896,, 71 | ,10,483,,,0.3011363744735718, 72 | 1.9955329895019531,11,489,,,, 73 | 2.1727757453918457,11,499,,,, 74 | 1.9627269506454468,11,509,,,, 75 | 1.7848414182662964,11,519,,,, 76 | ,11,527,2.251067638397217,0.26249998807907104,, 77 | ,11,527,,,0.2911931872367859, 78 | 1.6755291223526,12,529,,,, 79 | 1.5342800617218018,12,539,,,, 80 | 1.9754955768585205,12,549,,,, 81 | 1.887938380241394,12,559,,,, 82 | 2.263291597366333,12,569,,,, 83 | ,12,571,2.221355676651001,0.2750000059604645,, 84 | ,12,571,,,0.2769886255264282, 85 | 1.4265928268432617,13,579,,,, 86 | 1.7064507007598877,13,589,,,, 87 | 2.9439456462860107,13,599,,,, 88 | 1.965499758720398,13,609,,,, 89 | ,13,615,2.196830987930298,0.2750000059604645,, 90 | ,13,615,,,0.265625, 91 | 2.4323983192443848,14,619,,,, 92 | 2.956277847290039,14,629,,,, 93 | 1.7728325128555298,14,639,,,, 94 | 1.7976938486099243,14,649,,,, 95 | 1.86713445186615,14,659,,,, 96 | ,14,659,2.1789536476135254,0.26249998807907104,, 97 | ,14,659,,,0.265625, 98 | 1.0443023443222046,15,669,,,, 99 | 1.461202621459961,15,679,,,, 100 | 2.426748037338257,15,689,,,, 101 | 1.7628099918365479,15,699,,,, 102 | ,15,703,2.150516986846924,0.26249998807907104,, 103 | ,15,703,,,0.25284090638160706, 104 | 1.7426503896713257,16,709,,,, 105 | 
1.9760528802871704,16,719,,,, 106 | 1.804573655128479,16,729,,,, 107 | 1.6291134357452393,16,739,,,, 108 | ,16,747,2.1168487071990967,0.25,, 109 | ,16,747,,,0.24573864042758942, 110 | 1.4254801273345947,17,749,,,, 111 | 1.3661457300186157,17,759,,,, 112 | 1.7574522495269775,17,769,,,, 113 | 1.7419302463531494,17,779,,,, 114 | 1.9507986307144165,17,789,,,, 115 | ,17,791,2.072906970977783,0.25,, 116 | ,17,791,,,0.23721590638160706, 117 | 1.184775948524475,18,799,,,, 118 | 1.4805525541305542,18,809,,,, 119 | 2.9691178798675537,18,819,,,, 120 | 1.6299564838409424,18,829,,,, 121 | ,18,835,2.0414767265319824,0.25,, 122 | ,18,835,,,0.23153409361839294, 123 | 2.486180305480957,19,839,,,, 124 | 2.804425001144409,19,849,,,, 125 | 1.699123740196228,19,859,,,, 126 | 1.6608200073242188,19,869,,,, 127 | 1.4941797256469727,19,879,,,, 128 | ,19,879,1.9925117492675781,0.23749999701976776,, 129 | ,19,879,,,0.2286931872367859, 130 | 0.8438809514045715,20,889,,,, 131 | 1.190993309020996,20,899,,,, 132 | 2.13226580619812,20,909,,,, 133 | 1.5627875328063965,20,919,,,, 134 | ,20,923,1.996665596961975,0.22499999403953552,, 135 | ,20,923,,,0.2286931872367859, 136 | 1.6873215436935425,21,929,,,, 137 | 1.8768389225006104,21,939,,,, 138 | 1.6694663763046265,21,949,,,, 139 | 1.565511703491211,21,959,,,, 140 | ,21,967,1.9552185535430908,0.22499999403953552,, 141 | ,21,967,,,0.23011364042758942, 142 | 1.3368548154830933,22,969,,,, 143 | 1.1551709175109863,22,979,,,, 144 | 1.5088489055633545,22,989,,,, 145 | 1.7811866998672485,22,999,,,, 146 | 1.6733871698379517,22,1009,,,, 147 | ,22,1011,1.95085871219635,0.23749999701976776,, 148 | ,22,1011,,,0.22301135957241058, 149 | 1.037822961807251,23,1019,,,, 150 | 1.4212918281555176,23,1029,,,, 151 | 2.9460041522979736,23,1039,,,, 152 | 1.4453213214874268,23,1049,,,, 153 | ,23,1055,1.9447543621063232,0.23749999701976776,, 154 | ,23,1055,,,0.22159090638160706, 155 | 2.5085697174072266,24,1059,,,, 156 | 2.5548596382141113,24,1069,,,, 157 | 
1.6740692853927612,24,1079,,,, 158 | 1.5801887512207031,24,1089,,,, 159 | 1.2565430402755737,24,1099,,,, 160 | ,24,1099,1.924760103225708,0.23749999701976776,, 161 | ,24,1099,,,0.2088068127632141, 162 | 0.7137712240219116,25,1109,,,, 163 | 0.9088554382324219,25,1119,,,, 164 | 1.9024189710617065,25,1129,,,, 165 | 1.4894918203353882,25,1139,,,, 166 | ,25,1143,1.9430675506591797,0.23749999701976776,, 167 | ,25,1143,,,0.20170454680919647, 168 | 1.5951610803604126,26,1149,,,, 169 | 1.7917922735214233,26,1159,,,, 170 | 1.5907299518585205,26,1169,,,, 171 | 1.591123104095459,26,1179,,,, 172 | ,26,1187,1.9509222507476807,0.22499999403953552,, 173 | ,26,1187,,,0.20170454680919647, 174 | 1.1831315755844116,27,1189,,,, 175 | 1.013100266456604,27,1199,,,, 176 | 1.3782066106796265,27,1209,,,, 177 | 1.7958288192749023,27,1219,,,, 178 | 1.41576087474823,27,1229,,,, 179 | ,27,1231,1.9406707286834717,0.23749999701976776,, 180 | ,27,1231,,,0.19176135957241058, 181 | 0.9655105471611023,28,1239,,,, 182 | 1.2635399103164673,28,1249,,,, 183 | 2.9019429683685303,28,1259,,,, 184 | 1.4024635553359985,28,1269,,,, 185 | ,28,1275,1.913808822631836,0.23749999701976776,, 186 | ,28,1275,,,0.19602273404598236, 187 | 2.406031370162964,29,1279,,,, 188 | 2.297196865081787,29,1289,,,, 189 | 1.5067070722579956,29,1299,,,, 190 | 1.498245120048523,29,1309,,,, 191 | 1.1411991119384766,29,1319,,,, 192 | ,29,1319,1.9336020946502686,0.22499999403953552,, 193 | ,29,1319,,,0.1931818127632141, 194 | 0.7420676350593567,30,1329,,,, 195 | 0.7858269214630127,30,1339,,,, 196 | 1.7302597761154175,30,1349,,,, 197 | 1.4366636276245117,30,1359,,,, 198 | ,30,1363,1.9210898876190186,0.23749999701976776,, 199 | ,30,1363,,,0.1931818127632141, 200 | 1.572170615196228,31,1369,,,, 201 | 1.6987704038619995,31,1379,,,, 202 | 1.455669641494751,31,1389,,,, 203 | 1.5405012369155884,31,1399,,,, 204 | ,31,1407,1.9089457988739014,0.21250000596046448,, 205 | ,31,1407,,,0.1931818127632141, 206 | 1.1633782386779785,32,1409,,,, 207 | 
0.9664570689201355,32,1419,,,, 208 | 1.1725276708602905,32,1429,,,, 209 | 1.8825591802597046,32,1439,,,, 210 | 1.2948020696640015,32,1449,,,, 211 | ,32,1451,1.9010493755340576,0.22499999403953552,, 212 | ,32,1451,,,0.19034090638160706, 213 | 0.9052057266235352,33,1459,,,, 214 | 1.271522879600525,33,1469,,,, 215 | 2.680840253829956,33,1479,,,, 216 | 1.3589295148849487,33,1489,,,, 217 | ,33,1495,1.8806202411651611,0.21250000596046448,, 218 | ,33,1495,,,0.1875, 219 | 2.4017491340637207,34,1499,,,, 220 | 2.187438726425171,34,1509,,,, 221 | 1.3211759328842163,34,1519,,,, 222 | 1.4469056129455566,34,1529,,,, 223 | 1.0341808795928955,34,1539,,,, 224 | ,34,1539,1.902356743812561,0.22499999403953552,, 225 | ,34,1539,,,0.18039773404598236, 226 | 0.6616907715797424,35,1549,,,, 227 | 0.696535587310791,35,1559,,,, 228 | 1.6039494276046753,35,1569,,,, 229 | 1.4090986251831055,35,1579,,,, 230 | ,35,1583,1.8757774829864502,0.23749999701976776,, 231 | ,35,1583,,,0.18323864042758942, 232 | 1.5110546350479126,36,1589,,,, 233 | 1.7032577991485596,36,1599,,,, 234 | 1.377750277519226,36,1609,,,, 235 | 1.5750406980514526,36,1619,,,, 236 | ,36,1627,1.8753913640975952,0.22499999403953552,, 237 | ,36,1627,,,0.18323864042758942, 238 | 1.1336421966552734,37,1629,,,, 239 | 0.8850898146629333,37,1639,,,, 240 | 1.0187864303588867,37,1649,,,, 241 | 1.7752968072891235,37,1659,,,, 242 | 1.2379224300384521,37,1669,,,, 243 | ,37,1671,1.8803951740264893,0.22499999403953552,, 244 | ,37,1671,,,0.18323864042758942, 245 | 0.8306497931480408,38,1679,,,, 246 | 1.1069612503051758,38,1689,,,, 247 | 2.4660065174102783,38,1699,,,, 248 | 1.3055065870285034,38,1709,,,, 249 | ,38,1715,1.8897325992584229,0.23749999701976776,, 250 | ,38,1715,,,0.17329545319080353, 251 | 2.3161213397979736,39,1719,,,, 252 | 1.8902870416641235,39,1729,,,, 253 | 1.3031080961227417,39,1739,,,, 254 | 1.4181183576583862,39,1749,,,, 255 | 0.9688653945922852,39,1759,,,, 256 | ,39,1759,1.8564445972442627,0.23749999701976776,, 257 | 
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from torch.utils.data import DataLoader, Dataset


class ArrayDataset(Dataset):
    """Minimal map-style Dataset wrapping NumPy feature/label arrays."""

    def __init__(self, feature_array, label_array, dtype=np.float32):
        # Cast once up front so __getitem__ stays cheap and the dtype
        # matches what the model expects (float32 by default).
        self.features = feature_array.astype(dtype)
        self.labels = label_array

    def __getitem__(self, index):
        inputs = self.features[index]
        label = self.labels[index]
        return inputs, label

    def __len__(self):
        return self.features.shape[0]


def get_cement_dataloaders(csv_path, batch_size, num_workers):
    """Load the cement-strength CSV and return train/val/test DataLoaders.

    Parameters
    ----------
    csv_path : str
        Path to cement_strength.csv with feature columns V1..V8 and an
        ordinal "response" column (1-based class labels).
    batch_size : int
        Mini-batch size used by all three loaders.
    num_workers : int
        Number of worker processes per DataLoader.

    Returns
    -------
    tuple
        (train_dataloader, val_dataloader, test_dataloader)
    """
    data_df = pd.read_csv(csv_path)
    data_df["response"] = data_df["response"] - 1  # labels should start at 0
    data_labels = data_df["response"]
    data_features = data_df.loc[:, ["V1", "V2", "V3", "V4", "V5", "V6", "V7", "V8"]]

    # Split into
    # 70% train, 10% validation, 20% testing

    X_temp, X_test, y_temp, y_test = train_test_split(
        data_features.values,
        data_labels.values,
        test_size=0.2,
        random_state=1,
        stratify=data_labels.values,
    )

    # FIX: 0.125 of the remaining 80% equals 10% of the full dataset;
    # the previous test_size=0.1 produced a 72/8/20 split instead of the
    # documented 70/10/20.
    X_train, X_valid, y_train, y_valid = train_test_split(
        X_temp, y_temp, test_size=0.125, random_state=1, stratify=y_temp
    )

    # Standardize features using statistics from the training split only,
    # so no information leaks from validation/test into the scaler.
    sc = StandardScaler()
    X_train_std = sc.fit_transform(X_train)
    X_valid_std = sc.transform(X_valid)
    X_test_std = sc.transform(X_test)

    train = ArrayDataset(X_train_std, y_train)
    valid = ArrayDataset(X_valid_std, y_valid)
    test = ArrayDataset(X_test_std, y_test)

    # FIX: shuffle the training set each epoch; without shuffle=True every
    # epoch iterates the rows in identical order, which hurts SGD training.
    train_dataloader = DataLoader(
        train,
        batch_size=batch_size,
        num_workers=num_workers,
        shuffle=True,
        drop_last=True,
    )

    val_dataloader = DataLoader(valid, batch_size=batch_size, num_workers=num_workers)

    test_dataloader = DataLoader(test, batch_size=batch_size, num_workers=num_workers)

    return train_dataloader, val_dataloader, test_dataloader
# LightningModule that receives a PyTorch model as input
class LightningMLP(pl.LightningModule):
    """Lightning wrapper around a plain PyTorch MLP.

    Trains with either the CORN ordinal-regression loss or standard
    cross-entropy (selected via ``loss_mode``) and tracks the mean
    absolute error separately for the train/validation/test splits.
    """

    def __init__(self, model, learning_rate, loss_mode):
        super().__init__()

        self.learning_rate = learning_rate
        self.loss_mode = loss_mode
        # The wrapped, plain PyTorch module
        self.model = model

        # Persist settings/hyperparameters to the log directory,
        # excluding the model weights themselves.
        self.save_hyperparameters(ignore=["model"])

        # One MAE metric object per split so epoch-level aggregation
        # stays independent across train/validation/test.
        self.train_mae = torchmetrics.MeanAbsoluteError()
        self.valid_mae = torchmetrics.MeanAbsoluteError()
        self.test_mae = torchmetrics.MeanAbsoluteError()

    def forward(self, x):
        # Only needed if a Trainer's .predict() method is used (optional).
        return self.model(x)

    def _shared_step(self, batch):
        # Common to training/validation/test: compute the loss and the
        # hard label predictions for the current batch.
        inputs, targets = batch
        logits = self(inputs)

        # -------------------------------------------------------------
        if self.loss_mode == "crossentropy":
            loss = torch.nn.functional.cross_entropy(logits, targets)
            preds = torch.argmax(logits, dim=1)
        elif self.loss_mode == "corn":
            loss = corn_loss(logits, targets, num_classes=self.model.num_classes)
            preds = corn_label_from_logits(logits)
        else:
            raise ValueError("loss_mode must be 'corn' or 'crossentropy'.")
        # -------------------------------------------------------------

        return loss, targets, preds

    def training_step(self, batch, batch_idx):
        loss, targets, preds = self._shared_step(batch)
        self.log("train_loss", loss)
        self.train_mae(preds, targets)
        self.log("train_mae", self.train_mae, on_epoch=True, on_step=False)
        # The returned loss is what Lightning backpropagates.
        return loss

    def validation_step(self, batch, batch_idx):
        loss, targets, preds = self._shared_step(batch)
        self.log("valid_loss", loss)
        self.valid_mae(preds, targets)
        self.log(
            "valid_mae", self.valid_mae, on_epoch=True, on_step=False, prog_bar=True
        )

    def test_step(self, batch, batch_idx):
        loss, targets, preds = self._shared_step(batch)
        self.test_mae(preds, targets)
        self.log("test_mae", self.test_mae, on_epoch=True, on_step=False)

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=self.learning_rate)
type=float, default=0.0005) 31 | 32 | parser.add_argument( 33 | "--mixed_precision", type=str, choices=("true", "false"), default="false" 34 | ) 35 | 36 | parser.add_argument("--num_epochs", type=int, default=10) 37 | 38 | parser.add_argument("--num_workers", type=int, default=3) 39 | 40 | parser.add_argument("--num_devices", nargs="+", default="auto") 41 | 42 | parser.add_argument( 43 | "--loss_mode", type=str, choices=("corn", "crossentropy"), default="corn" 44 | ) 45 | 46 | parser.add_argument("--output_path", type=str, default="") 47 | 48 | parser.add_argument("--device_numbers", type=str, default="") 49 | 50 | parser.add_argument("--random_seed", type=int, default=-1) 51 | 52 | parser.add_argument("--strategy", type=str, default="") 53 | 54 | parser.set_defaults(feature=True) 55 | args = parser.parse_args() 56 | 57 | if not args.strategy: 58 | args.strategy = None 59 | 60 | if args.num_devices != "auto": 61 | args.num_devices = int(args.num_devices[0]) 62 | if args.device_numbers: 63 | args.num_devices = [int(i) for i in args.device_numbers.split(",")] 64 | 65 | d = {"true": True, "false": False} 66 | 67 | args.mixed_precision = d[args.mixed_precision] 68 | if args.mixed_precision: 69 | args.mixed_precision = 16 70 | else: 71 | args.mixed_precision = 32 72 | 73 | return args 74 | 75 | 76 | if __name__ == "__main__": 77 | 78 | parser = argparse.ArgumentParser() 79 | args = parse_cmdline_args(parser) 80 | 81 | log_out = os.path.join(args.output_path, f"{args.loss_mode}_traininglog.txt") 82 | if not os.path.exists(args.output_path): 83 | os.mkdir(args.output_path) 84 | logging.basicConfig( 85 | level=logging.INFO, 86 | format="%(asctime)s [%(levelname)s] %(message)s", 87 | handlers=[logging.FileHandler(log_out), logging.StreamHandler()], 88 | ) 89 | 90 | logging.info(watermark()) 91 | logging.info(watermark(packages="torch,pytorch_lightning,coral_pytorch")) 92 | 93 | torch.manual_seed(args.random_seed) 94 | 95 | csv_path = os.path.join(args.data_path, 
"cement_strength.csv") 96 | 97 | # Compute performance baselines 98 | 99 | train_loader, valid_loader, test_loader = get_cement_dataloaders( 100 | csv_path=csv_path, 101 | batch_size=args.batch_size, 102 | num_workers=args.num_workers, 103 | ) 104 | 105 | all_test_labels = [] 106 | for features, labels in test_loader: 107 | all_test_labels.append(labels) 108 | all_test_labels = torch.cat(all_test_labels) 109 | all_test_labels = all_test_labels.float() 110 | avg_prediction = torch.median(all_test_labels) # median minimizes MAE 111 | baseline_mae = torch.mean(torch.abs(all_test_labels - avg_prediction)) 112 | logging.info(f"Baseline MAE: {baseline_mae:.2f}") 113 | 114 | # Initialize model 115 | pytorch_model = PyTorchMLP( 116 | input_size=features.shape[1], 117 | hidden_units=(25, 20), 118 | num_classes=np.bincount(all_test_labels).shape[0], 119 | loss_mode=args.loss_mode, 120 | ) 121 | 122 | lightning_model = LightningMLP( 123 | pytorch_model, learning_rate=args.learning_rate, loss_mode=args.loss_mode 124 | ) 125 | 126 | callbacks = [ 127 | ModelCheckpoint(save_top_k=1, mode="min", monitor="valid_mae") 128 | ] # save top 1 model 129 | logger = CSVLogger( 130 | save_dir=os.path.join(args.output_path, "lightning_logs/"), 131 | name=f"{args.loss_mode}-mlp-cement", 132 | ) 133 | 134 | trainer = pl.Trainer( 135 | max_epochs=args.num_epochs, 136 | callbacks=callbacks, 137 | accelerator=args.accelerator, 138 | devices=args.num_devices, 139 | default_root_dir=args.output_path, 140 | strategy=args.strategy, 141 | logger=logger, 142 | precision=args.mixed_precision, 143 | deterministic=False, 144 | log_every_n_steps=10, 145 | ) 146 | 147 | start_time = time.time() 148 | trainer.fit( 149 | model=lightning_model, 150 | train_dataloaders=train_loader, 151 | val_dataloaders=valid_loader, 152 | ) 153 | 154 | train_time = time.time() 155 | runtime = (train_time - start_time) / 60 156 | logging.info(f"Training took {runtime:.2f} min.") 157 | 158 | before = time.time() 159 | val_acc 
= trainer.test(dataloaders=valid_loader, ckpt_path="best") 160 | runtime = (time.time() - before) / 60 161 | logging.info(f"Inference on the validation set took {runtime:.2f} min.") 162 | 163 | before = time.time() 164 | test_acc = trainer.test(dataloaders=test_loader, ckpt_path="best") 165 | runtime = (time.time() - before) / 60 166 | logging.info(f"Inference on the test set took {runtime:.2f} min.") 167 | 168 | runtime = (time.time() - start_time) / 60 169 | logging.info(f"The total runtime was {runtime:.2f} min.") 170 | 171 | logging.info(f"Validation accuracy: {val_acc}", ) 172 | logging.info(f"Test accuracy: {test_acc}") 173 | 174 | # Make plots 175 | 176 | metrics = pd.read_csv(f"{trainer.logger.log_dir}/metrics.csv") 177 | 178 | aggreg_metrics = [] 179 | agg_col = "epoch" 180 | for i, dfg in metrics.groupby(agg_col): 181 | agg = dict(dfg.mean()) 182 | agg[agg_col] = i 183 | aggreg_metrics.append(agg) 184 | 185 | df_metrics = pd.DataFrame(aggreg_metrics) 186 | 187 | df_metrics[["train_loss", "valid_loss"]].plot( 188 | grid=True, legend=True, xlabel="Epoch", ylabel="Loss" 189 | ) 190 | plt.savefig(os.path.join(args.output_path, f"{args.loss_mode}_loss_plot.pdf")) 191 | 192 | df_metrics[["train_mae", "valid_mae"]].plot( 193 | grid=True, legend=True, xlabel="Epoch", ylabel="MAE" 194 | ) 195 | plt.savefig(os.path.join(args.output_path, f"{args.loss_mode}_mae_plot.pdf")) 196 | --------------------------------------------------------------------------------