├── .gitignore ├── CONTRIBUTING.md ├── LICENSE ├── LitDeepChem.ipynb ├── LitHF.ipynb ├── LitNFFs.ipynb ├── README.md ├── SPDX.spdx ├── TPE.ipynb ├── example_files ├── metrics.csv └── nvidia-smi.csv ├── lit_data ├── __init__.py ├── data.py ├── lm_data.py ├── molnet_data.py └── nff_data.py ├── lit_models ├── __init__.py ├── deepchem_models.py ├── lit_chemgpt.py ├── lit_hf.py ├── lit_nffs.py └── models.py ├── prototyping.ipynb ├── requirements.txt ├── run.sh ├── submit.sh ├── tokenizers ├── pubchem10M_selfiesv2_tokenizer │ ├── special_tokens_map.json │ ├── tokenizer.json │ └── tokenizer_config.json └── pubchem10M_tokenizer │ ├── special_tokens_map.json │ ├── tokenizer.json │ └── tokenizer_config.json ├── tpe.py └── train.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | Your contributions are welcome! 
If you adapt a model for materials science or chemistry to work with LitMatter, 3 | we're happy to add it to the repo to increase visibility and usability for your model. If you use LitMatter for scaling experiments, let us know 4 | (by opening a PR or issue) and we'll add links to your paper, code, data, etc. 5 | 6 | ## Guidelines 7 | - Open a pull request that adds your classes and a notebook showing the basic training procedure and any other functionality you'd like to highlight. 8 | - In the PR description, give a summary of what your model does and why it might be interesting to investigate the scaling behavior. 9 | - If there's a model you'd like to see added to LitMatter, but you don't feel comfortable adding it yourself, open an Issue to see if someone else would like to contribute. 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Nathan Frey 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /LitDeepChem.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# LitMatter DeepChem\n", 8 | "* This notebook shows how to speed up [DeepChem](https://github.com/deepchem/deepchem) model training on [MoleculeNet](https://arxiv.org/abs/1703.00564) datasets using the LitMatter template. \n", 9 | "* In this example, we train a simple DeepChem `TorchModel` on the Tox21 dataset.\n", 10 | "* The training workflow shown here can be scaled to hundreds of GPUs by changing a single keyword argument!" 
11 | ]
 12 | },
 13 | {
 14 | "cell_type": "code",
 15 | "execution_count": 1,
 16 | "metadata": {},
 17 | "outputs": [],
 18 | "source": [
 19 | "%load_ext autoreload\n",
 20 | "%autoreload 2"
 21 | ]
 22 | },
 23 | {
 24 | "cell_type": "code",
 25 | "execution_count": 2,
 26 | "metadata": {},
 27 | "outputs": [],
 28 | "source": [
 29 | "import torch"
 30 | ]
 31 | },
 32 | {
 33 | "cell_type": "code",
 34 | "execution_count": 3,
 35 | "metadata": {},
 36 | "outputs": [],
 37 | "source": [
 38 | "import deepchem as dc\n",
 39 | "\n",
 40 | "import pytorch_lightning as pl\n",
 41 | "from pytorch_lightning.callbacks import ModelCheckpoint\n",
 42 | "from pytorch_lightning import (LightningDataModule, LightningModule, Trainer,\n",
 43 | "                               seed_everything)"
 44 | ]
 45 | },
 46 | {
 47 | "cell_type": "markdown",
 48 | "metadata": {},
 49 | "source": [
 50 | "### Load a `LitMolNet` dataset\n",
 51 | "Any MolNet dataset from `deepchem.molnet` can be used with LitMatter. The specific MolNet dataset and any pre-processing steps can be defined in `lit_data.molnet_data.LitMolNet`."
 52 | ]
 53 | },
 54 | {
 55 | "cell_type": "code",
 56 | "execution_count": 4,
 57 | "metadata": {},
 58 | "outputs": [
 59 | {
 60 | "name": "stderr",
 61 | "output_type": "stream",
 62 | "text": [
 63 | "RDKit WARNING: [11:14:30] WARNING: not removing hydrogen atom without neighbors\n",
 64 | "[11:14:30] WARNING: not removing hydrogen atom without neighbors\n",
 65 | "RDKit WARNING: [11:14:41] WARNING: not removing hydrogen atom without neighbors\n",
 66 | "[11:14:41] WARNING: not removing hydrogen atom without neighbors\n"
 67 | ]
 68 | }
 69 | ],
 70 | "source": [
 71 | "from lit_data.molnet_data import LitMolNet\n",
 72 | "\n",
 73 | "dm = LitMolNet(loader=dc.molnet.load_tox21, batch_size=16)\n",
 74 | "dm.prepare_data()\n",
 75 | "dm.setup()"
 76 | ]
 77 | },
 78 | {
 79 | "cell_type": "markdown",
 80 | "metadata": {},
 81 | "source": [
 82 | "### Instantiate a `LitDeepChem` model\n",
 83 | "Any `deepchem.models.torch_models.TorchModel` can be used with LitMatter. Here, we'll write our own custom base model in PyTorch and make a `TorchModel`."
84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 9, 89 | "metadata": { 90 | "collapsed": true, 91 | "jupyter": { 92 | "outputs_hidden": true 93 | } 94 | }, 95 | "outputs": [ 96 | { 97 | "name": "stderr", 98 | "output_type": "stream", 99 | "text": [ 100 | "Exception ignored in: \n", 101 | "Traceback (most recent call last):\n", 102 | " File \"/home/gridsan/NA30490/.conda/envs/litmatter/lib/python3.8/site-packages/deepchem/models/models.py\", line 61, in __del__\n", 103 | " shutil.rmtree(self.model_dir)\n", 104 | " File \"/home/gridsan/NA30490/.conda/envs/litmatter/lib/python3.8/shutil.py\", line 709, in rmtree\n", 105 | " onerror(os.lstat, path, sys.exc_info())\n", 106 | " File \"/home/gridsan/NA30490/.conda/envs/litmatter/lib/python3.8/shutil.py\", line 707, in rmtree\n", 107 | " orig_st = os.lstat(path)\n", 108 | "FileNotFoundError: [Errno 2] No such file or directory: '/state/partition1/slurm_tmp/48690281.0.0/tmp60c4fwyb'\n" 109 | ] 110 | } 111 | ], 112 | "source": [ 113 | "from lit_models.deepchem_models import LitDeepChem\n", 114 | "\n", 115 | "base_model = torch.nn.Sequential(\n", 116 | "torch.nn.Linear(1024, 256),\n", 117 | " torch.nn.ReLU(),\n", 118 | " torch.nn.Linear(256, 12),\n", 119 | ")\n", 120 | "\n", 121 | "torch_model = dc.models.TorchModel(base_model, loss=torch.nn.MSELoss())\n", 122 | "\n", 123 | "model = LitDeepChem(torch_model, lr=1e-2)" 124 | ] 125 | }, 126 | { 127 | "cell_type": "markdown", 128 | "metadata": {}, 129 | "source": [ 130 | "### Train the model\n", 131 | "Simply change the `Trainer` flags as desired for multi-gpu and multi-node training." 132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": 11, 137 | "metadata": {}, 138 | "outputs": [ 139 | { 140 | "name": "stderr", 141 | "output_type": "stream", 142 | "text": [ 143 | "GPU available: True, used: True\n", 144 | "TPU available: False, using: 0 TPU cores\n", 145 | "IPU available: False, using: 0 IPUs\n" 146 | ] 147 | } 148 | ], 149 | "source": [ 150 | "trainer = Trainer(gpus=-1, # use all available GPUs on each node\n", 151 | "# num_nodes=1, # change to number of available nodes\n", 152 | "# accelerator='ddp',\n", 153 | " max_epochs=5,\n", 154 | " )" 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": 12, 160 | "metadata": {}, 161 | "outputs": [ 162 | { 163 | "name": "stderr", 164 | "output_type": "stream", 165 | "text": [ 166 | "/home/gridsan/NA30490/.conda/envs/litmatter/lib/python3.8/site-packages/pytorch_lightning/core/datamodule.py:469: LightningDeprecationWarning: DataModule.setup has already been called, so it will not be called again. 
In v1.6 this behavior will change to always call DataModule.setup.\n",
 167 | "  rank_zero_deprecation(\n",
 168 | "LOCAL_RANK: 0 - CUDA_VISIBLE_DEVICES: [GPU-13636255-d3c9-b0ac-83c7-b25c82e0dbc5]\n",
 169 | "Set SLURM handle signals.\n",
 170 | "\n",
 171 | "  | Name    | Type       | Params\n",
 172 | "---------------------------------------\n",
 173 | "0 | model   | Sequential | 265 K \n",
 174 | "1 | loss_fn | MSELoss    | 0     \n",
 175 | "---------------------------------------\n",
 176 | "265 K     Trainable params\n",
 177 | "0         Non-trainable params\n",
 178 | "265 K     Total params\n",
 179 | "1.062     Total estimated model params size (MB)\n",
 180 | "/home/gridsan/NA30490/.conda/envs/litmatter/lib/python3.8/site-packages/pytorch_lightning/callbacks/model_checkpoint.py:617: UserWarning: Checkpoint directory /home/gridsan/NA30490/litmatter_dev/litmatter/lightning_logs/version_48690281/checkpoints exists and is not empty.\n",
 181 | "  rank_zero_warn(f\"Checkpoint directory {dirpath} exists and is not empty.\")\n"
 182 | ]
 183 | },
 184 | {
 185 | "name": "stdout",
 186 | "output_type": "stream",
 187 | "text": [
 188 | "Validation sanity check: 0%| | 0/2 [00:00 6 | PackageLicenseDeclared: MIT
 7 | 
--------------------------------------------------------------------------------
/TPE.ipynb:
--------------------------------------------------------------------------------
 1 | {
 2 | "cells": [
 3 | {
 4 | "cell_type": "markdown",
 5 | "metadata": {},
 6 | "source": [
 7 | "# Deep neural network training performance estimation (TPE)\n",
 8 | "This notebook shows an example of Training Performance Estimation (TPE) based on logfiles from training a PyTorch model. TPE uses Training Speed Estimation (TSE), which is a simple, efficient, and computationally cheap method to estimate final test performance based on training speed. It does not require hyperparameter tuning, training data collection, or surrogate model training.\n",
 9 | "\n",
 10 | "TPE requires a logfile with 'epoch' and 'train_loss_step' columns. The logfile must include the *training* loss for every step (loss after every minibatch).\n",
 11 | "\n",
 12 | "A suitable logfile is easy to generate with PyTorch Lightning by adding the following line to the `training_step` method of your `LightningModule`:\n",
 13 | "```\n",
 14 | "self.log('train_loss',\n",
 15 | "         loss,\n",
 16 | "         on_step=True\n",
 17 | "         )\n",
 18 | "```\n",
 19 | "\n",
 20 | "TPE also uses GPU utilization and GPU power data collected using the `nvidia-smi` tool to relate DNN performance to energy consumption.\n",
 21 | "\n",
 22 | "[1] Ru, Robin, et al. \"Speedy Performance Estimation for Neural Architecture Search.\" \n",
 23 | "    Advances in Neural Information Processing Systems 34 (2021)."
 24 | ]
 25 | },
 26 | {
 27 | "cell_type": "code",
 28 | "execution_count": 58,
 29 | "metadata": {},
 30 | "outputs": [
 31 | {
 32 | "name": "stdout",
 33 | "output_type": "stream",
 34 | "text": [
 35 | "The autoreload extension is already loaded. 
To reload it, use:\n",
 36 | "  %reload_ext autoreload\n"
 37 | ]
 38 | }
 39 | ],
 40 | "source": [
 41 | "%load_ext autoreload\n",
 42 | "%autoreload 2"
 43 | ]
 44 | },
 45 | {
 46 | "cell_type": "code",
 47 | "execution_count": 59,
 48 | "metadata": {},
 49 | "outputs": [],
 50 | "source": [
 51 | "import pandas as pd\n",
 52 | "import numpy as np\n",
 53 | "\n",
 54 | "import matplotlib.pyplot as plt\n",
 55 | "import seaborn as sns"
 56 | ]
 57 | },
 58 | {
 59 | "cell_type": "code",
 60 | "execution_count": 60,
 61 | "metadata": {},
 62 | "outputs": [],
 63 | "source": [
 64 | "%matplotlib inline"
 65 | ]
 66 | },
 67 | {
 68 | "cell_type": "code",
 69 | "execution_count": 61,
 70 | "metadata": {},
 71 | "outputs": [],
 72 | "source": [
 73 | "df = pd.read_csv('example_files/metrics.csv')\n",
 74 | "df = df.dropna(subset=['train_loss_step'])[['train_loss_step', 'epoch', 'step']]"
 75 | ]
 76 | },
 77 | {
 78 | "cell_type": "code",
 79 | "execution_count": 62,
 80 | "metadata": {},
 81 | "outputs": [
 82 | {
 83 | "data": {
 84 | "text/html": [
 85 | "
\n", 86 | "\n", 99 | "\n", 100 | " \n", 101 | " \n", 102 | " \n", 103 | " \n", 104 | " \n", 105 | " \n", 106 | " \n", 107 | " \n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | "
train_loss_stepepochstep
073.30311600
170.11953701
269.11881302
366.43689003
460.83742104
\n", 141 | "
" 142 | ], 143 | "text/plain": [ 144 | " train_loss_step epoch step\n", 145 | "0 73.303116 0 0\n", 146 | "1 70.119537 0 1\n", 147 | "2 69.118813 0 2\n", 148 | "3 66.436890 0 3\n", 149 | "4 60.837421 0 4" 150 | ] 151 | }, 152 | "execution_count": 62, 153 | "metadata": {}, 154 | "output_type": "execute_result" 155 | } 156 | ], 157 | "source": [ 158 | "df.head()" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": 63, 164 | "metadata": {}, 165 | "outputs": [ 166 | { 167 | "data": { 168 | "text/html": [ 169 | "
\n", 170 | "\n", 183 | "\n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | " \n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | " \n", 198 | " \n", 199 | " \n", 200 | " \n", 201 | " \n", 202 | " \n", 203 | " \n", 204 | " \n", 205 | " \n", 206 | " \n", 207 | " \n", 208 | " \n", 209 | " \n", 210 | " \n", 211 | " \n", 212 | " \n", 213 | " \n", 214 | " \n", 215 | " \n", 216 | " \n", 217 | " \n", 218 | " \n", 219 | " \n", 220 | " \n", 221 | " \n", 222 | " \n", 223 | " \n", 224 | "
train_loss_stepepochstep
19433.147081291885
19444.167433291886
19453.883977291887
19463.260770291888
19473.825483291889
\n", 225 | "
" 226 | ], 227 | "text/plain": [ 228 | " train_loss_step epoch step\n", 229 | "1943 3.147081 29 1885\n", 230 | "1944 4.167433 29 1886\n", 231 | "1945 3.883977 29 1887\n", 232 | "1946 3.260770 29 1888\n", 233 | "1947 3.825483 29 1889" 234 | ] 235 | }, 236 | "execution_count": 63, 237 | "metadata": {}, 238 | "output_type": "execute_result" 239 | } 240 | ], 241 | "source": [ 242 | "df.tail()" 243 | ] 244 | }, 245 | { 246 | "cell_type": "code", 247 | "execution_count": 64, 248 | "metadata": {}, 249 | "outputs": [ 250 | { 251 | "data": { 252 | "text/html": [ 253 | "
\n", 254 | "\n", 267 | "\n", 268 | " \n", 269 | " \n", 270 | " \n", 271 | " \n", 272 | " \n", 273 | " \n", 274 | " \n", 275 | " \n", 276 | " \n", 277 | " \n", 278 | " \n", 279 | " \n", 280 | " \n", 281 | " \n", 282 | " \n", 283 | " \n", 284 | " \n", 285 | " \n", 286 | " \n", 287 | " \n", 288 | " \n", 289 | " \n", 290 | " \n", 291 | " \n", 292 | " \n", 293 | " \n", 294 | " \n", 295 | " \n", 296 | " \n", 297 | " \n", 298 | " \n", 299 | " \n", 300 | " \n", 301 | " \n", 302 | " \n", 303 | " \n", 304 | " \n", 305 | " \n", 306 | " \n", 307 | " \n", 308 | " \n", 309 | " \n", 310 | " \n", 311 | " \n", 312 | " \n", 313 | " \n", 314 | " \n", 315 | " \n", 316 | " \n", 317 | " \n", 318 | " \n", 319 | " \n", 320 | " \n", 321 | " \n", 322 | " \n", 323 | " \n", 324 | " \n", 325 | " \n", 326 | " \n", 327 | " \n", 328 | " \n", 329 | " \n", 330 | " \n", 331 | " \n", 332 | " \n", 333 | " \n", 334 | " \n", 335 | "
indexutilization.gpu [%]utilization.memory [%]power.draw [W]
count85670.00000085670.00000085670.00000085670.000000
mean0.50000024.7582933.54301443.927812
std0.50000323.3704983.81012415.505347
min0.0000000.0000000.00000024.500000
25%0.0000000.0000000.00000025.910000
50%0.50000031.0000003.00000049.380000
75%1.00000040.0000005.00000056.360000
max1.000000100.00000016.00000079.310000
\n", 336 | "
" 337 | ], 338 | "text/plain": [ 339 | " index utilization.gpu [%] utilization.memory [%] \\\n", 340 | "count 85670.000000 85670.000000 85670.000000 \n", 341 | "mean 0.500000 24.758293 3.543014 \n", 342 | "std 0.500003 23.370498 3.810124 \n", 343 | "min 0.000000 0.000000 0.000000 \n", 344 | "25% 0.000000 0.000000 0.000000 \n", 345 | "50% 0.500000 31.000000 3.000000 \n", 346 | "75% 1.000000 40.000000 5.000000 \n", 347 | "max 1.000000 100.000000 16.000000 \n", 348 | "\n", 349 | " power.draw [W] \n", 350 | "count 85670.000000 \n", 351 | "mean 43.927812 \n", 352 | "std 15.505347 \n", 353 | "min 24.500000 \n", 354 | "25% 25.910000 \n", 355 | "50% 49.380000 \n", 356 | "75% 56.360000 \n", 357 | "max 79.310000 " 358 | ] 359 | }, 360 | "execution_count": 64, 361 | "metadata": {}, 362 | "output_type": "execute_result" 363 | } 364 | ], 365 | "source": [ 366 | "df_energy = pd.read_csv('data/nvidia-smi.csv')\n", 367 | "df_energy = df_energy[['timestamp', ' index', ' utilization.gpu [%]',\n", 368 | " ' utilization.memory [%]', ' power.draw [W]']]\n", 369 | "df_energy.describe()" 370 | ] 371 | }, 372 | { 373 | "cell_type": "markdown", 374 | "metadata": {}, 375 | "source": [ 376 | "`E` controls the number of \"burn-in\" epochs that are discarde from the beginning of training in the TSE-E estimator, and `gamma` is a decay rate for the TSE-EMA estimator. Recommended values from the paper are used as defaults." 377 | ] 378 | }, 379 | { 380 | "cell_type": "code", 381 | "execution_count": 65, 382 | "metadata": {}, 383 | "outputs": [], 384 | "source": [ 385 | "from tpe import TrainingSpeedEstimator\n", 386 | "\n", 387 | "estimator = TrainingSpeedEstimator(E=1, gamma=0.999)" 388 | ] 389 | }, 390 | { 391 | "cell_type": "markdown", 392 | "metadata": {}, 393 | "source": [ 394 | "The TSE values can be used to predict the rank order of the performance of different architectures or hyperparameter configurations, or to directly predict the performance of a network trained to convergence." 395 | ] 396 | }, 397 | { 398 | "cell_type": "code", 399 | "execution_count": 66, 400 | "metadata": {}, 401 | "outputs": [ 402 | { 403 | "data": { 404 | "text/plain": [ 405 | "{'tse': 129.8276312691825,\n", 406 | " 'tsee': 5.670970326378232,\n", 407 | " 'tseema': 134.53728970599116,\n", 408 | " 'T_end': 30.0,\n", 409 | " 'energy_per_epoch (kJ)': 0.7100424525,\n", 410 | " 'energy_per_step (kJ)': 0.011270515119047619}" 411 | ] 412 | }, 413 | "execution_count": 66, 414 | "metadata": {}, 415 | "output_type": "execute_result" 416 | } 417 | ], 418 | "source": [ 419 | "tpe_dict = estimator.estimate(df, 10, df_energy=df_energy) # use up to first 10 epochs for estimation\n", 420 | "tpe_dict" 421 | ] 422 | }, 423 | { 424 | "cell_type": "markdown", 425 | "metadata": {}, 426 | "source": [ 427 | "Show training loss as a function of energy budget." 
428 | ]
 429 | },
 430 | {
 431 | "cell_type": "code",
 432 | "execution_count": 68,
 433 | "metadata": {},
 434 | "outputs": [],
 435 | "source": [
 436 | "eps = tpe_dict['energy_per_step (kJ)']\n",
 437 | "\n",
 438 | "df['energy_consumed'] = (df['step'] + 1) * eps\n"
 439 | ]
 440 | },
 441 | {
 442 | "cell_type": "code",
 443 | "execution_count": 69,
 444 | "metadata": {},
 445 | "outputs": [
 446 | {
 447 | "data": {
 448 | "text/plain": [
 449 | "Text(0.5, 0, 'Total energy consumed (kJ)')"
 450 | ]
 451 | },
 452 | "execution_count": 69,
 453 | "metadata": {},
 454 | "output_type": "execute_result"
 455 | },
 456 | {
 457 | "data": {
 458 | "image/png": "[base64-encoded PNG elided: scatter plot of train_loss_step against total energy consumed (kJ)]",
 459 | "text/plain": [
 460 | "
" 461 | ] 462 | }, 463 | "metadata": { 464 | "needs_background": "light" 465 | }, 466 | "output_type": "display_data" 467 | } 468 | ], 469 | "source": [ 470 | "\n", 471 | "fig = sns.scatterplot(data=df, x='energy_consumed', y='train_loss_step', edgecolor='none',\n", 472 | " )\n", 473 | "plt.xlabel('Total energy consumed (kJ)')" 474 | ] 475 | } 476 | ], 477 | "metadata": { 478 | "interpreter": { 479 | "hash": "0d1a1d6214cf1d05c57a5553ca9b132dc10783f06e2a533ccedcb1463f3cc5db" 480 | }, 481 | "kernelspec": { 482 | "display_name": "Python 3.9.5 64-bit ('base': conda)", 483 | "language": "python", 484 | "name": "python3" 485 | }, 486 | "language_info": { 487 | "codemirror_mode": { 488 | "name": "ipython", 489 | "version": 3 490 | }, 491 | "file_extension": ".py", 492 | "mimetype": "text/x-python", 493 | "name": "python", 494 | "nbconvert_exporter": "python", 495 | "pygments_lexer": "ipython3", 496 | "version": "3.8.10" 497 | }, 498 | "orig_nbformat": 4 499 | }, 500 | "nbformat": 4, 501 | "nbformat_minor": 2 502 | } 503 | -------------------------------------------------------------------------------- /example_files/metrics.csv: -------------------------------------------------------------------------------- 1 | train_loss_step,epoch,step,val_loss,train_loss_epoch 2 | 73.30311584472656,0,0,, 3 | 70.11953735351562,0,1,, 4 | 69.11881256103516,0,2,, 5 | 66.4368896484375,0,3,, 6 | 60.83742141723633,0,4,, 7 | 61.61339569091797,0,5,, 8 | 61.708072662353516,0,6,, 9 | 58.74061584472656,0,7,, 10 | 56.37213897705078,0,8,, 11 | 59.03435134887695,0,9,, 12 | 57.55763244628906,0,10,, 13 | 55.84440612792969,0,11,, 14 | 55.710201263427734,0,12,, 15 | 54.55917739868164,0,13,, 16 | 55.520103454589844,0,14,, 17 | 55.48212432861328,0,15,, 18 | 54.958961486816406,0,16,, 19 | 53.738067626953125,0,17,, 20 | 55.145751953125,0,18,, 21 | 55.61221694946289,0,19,, 22 | 54.30539321899414,0,20,, 23 | 53.43278884887695,0,21,, 24 | 53.00509262084961,0,22,, 25 | 54.993080139160156,0,23,, 26 | 52.37932586669922,0,24,, 27 | 52.567684173583984,0,25,, 28 | 52.31357192993164,0,26,, 29 | 51.92424774169922,0,27,, 30 | 52.129676818847656,0,28,, 31 | 51.65960693359375,0,29,, 32 | 50.49150085449219,0,30,, 33 | 49.813053131103516,0,31,, 34 | 50.591819763183594,0,32,, 35 | 50.71535873413086,0,33,, 36 | 50.09684753417969,0,34,, 37 | 47.66724395751953,0,35,, 38 | 46.91264343261719,0,36,, 39 | 46.65463638305664,0,37,, 40 | 44.29966735839844,0,38,, 41 | 42.81734085083008,0,39,, 42 | 36.91740417480469,0,40,, 43 | 37.85597229003906,0,41,, 44 | 30.63938331604004,0,42,, 45 | 35.8826904296875,0,43,, 46 | 35.20132064819336,0,44,, 47 | 32.61607360839844,0,45,, 48 | 28.49247932434082,0,46,, 49 | 34.60320281982422,0,47,, 50 | 27.364471435546875,0,48,, 51 | 32.20196533203125,0,49,, 52 | 33.11958312988281,0,50,, 53 | 32.058345794677734,0,51,, 54 | 25.449506759643555,0,52,, 55 | 29.886842727661133,0,53,, 56 | 33.5547981262207,0,54,, 57 | 25.596492767333984,0,55,, 58 | 27.84454917907715,0,56,, 59 | 28.782344818115234,0,57,, 60 | 27.680511474609375,0,58,, 61 | 24.38654327392578,0,59,, 62 | 26.00394630432129,0,60,, 63 | 25.58213233947754,0,61,, 64 | 24.09906578063965,0,62,, 65 | ,0,62,23.368389129638672, 66 | ,0,62,,46.14325714111328 67 | 24.57670021057129,1,63,, 68 | 25.04378890991211,1,64,, 69 | 25.006532669067383,1,65,, 70 | 23.303611755371094,1,66,, 71 | 23.24982452392578,1,67,, 72 | 22.792526245117188,1,68,, 73 | 23.92302703857422,1,69,, 74 | 25.01243019104004,1,70,, 75 | 25.52919578552246,1,71,, 76 | 20.140092849731445,1,72,, 77 | 
19.555652618408203,1,73,, 78 | 21.236825942993164,1,74,, 79 | 18.481521606445312,1,75,, 80 | 20.351015090942383,1,76,, 81 | 19.7858943939209,1,77,, 82 | 20.86186981201172,1,78,, 83 | 18.901142120361328,1,79,, 84 | 17.709062576293945,1,80,, 85 | 19.0351505279541,1,81,, 86 | 19.961896896362305,1,82,, 87 | 19.457904815673828,1,83,, 88 | 19.202754974365234,1,84,, 89 | 19.12071418762207,1,85,, 90 | 18.71422576904297,1,86,, 91 | 17.567466735839844,1,87,, 92 | 18.228832244873047,1,88,, 93 | 18.006072998046875,1,89,, 94 | 21.355422973632812,1,90,, 95 | 18.426721572875977,1,91,, 96 | 17.728986740112305,1,92,, 97 | 19.016054153442383,1,93,, 98 | 18.029624938964844,1,94,, 99 | 16.390830993652344,1,95,, 100 | 18.492433547973633,1,96,, 101 | 18.50178337097168,1,97,, 102 | 17.597667694091797,1,98,, 103 | 18.652605056762695,1,99,, 104 | 17.364097595214844,1,100,, 105 | 19.260753631591797,1,101,, 106 | 20.04104995727539,1,102,, 107 | 19.196582794189453,1,103,, 108 | 17.384061813354492,1,104,, 109 | 20.23893165588379,1,105,, 110 | 17.78493881225586,1,106,, 111 | 17.42573356628418,1,107,, 112 | 19.959041595458984,1,108,, 113 | 19.20347023010254,1,109,, 114 | 15.832507133483887,1,110,, 115 | 17.5062313079834,1,111,, 116 | 18.22281265258789,1,112,, 117 | 17.82927894592285,1,113,, 118 | 18.12294578552246,1,114,, 119 | 18.16066551208496,1,115,, 120 | 17.308149337768555,1,116,, 121 | 18.19068145751953,1,117,, 122 | 19.03152847290039,1,118,, 123 | 16.964750289916992,1,119,, 124 | 19.18012809753418,1,120,, 125 | 18.77076530456543,1,121,, 126 | 17.923524856567383,1,122,, 127 | 17.717771530151367,1,123,, 128 | 15.928282737731934,1,124,, 129 | 16.234628677368164,1,125,, 130 | ,1,125,16.172760009765625, 131 | ,1,125,,19.385820388793945 132 | 16.26230239868164,2,126,, 133 | 17.03636932373047,2,127,, 134 | 16.091169357299805,2,128,, 135 | 16.030948638916016,2,129,, 136 | 15.28546142578125,2,130,, 137 | 17.07131004333496,2,131,, 138 | 15.478259086608887,2,132,, 139 | 15.612208366394043,2,133,, 140 | 17.26461410522461,2,134,, 141 | 14.22684383392334,2,135,, 142 | 15.254233360290527,2,136,, 143 | 14.377325057983398,2,137,, 144 | 17.455982208251953,2,138,, 145 | 13.912960052490234,2,139,, 146 | 16.194643020629883,2,140,, 147 | 13.621794700622559,2,141,, 148 | 15.657341957092285,2,142,, 149 | 15.607707023620605,2,143,, 150 | 13.476383209228516,2,144,, 151 | 14.822732925415039,2,145,, 152 | 14.612367630004883,2,146,, 153 | 13.631420135498047,2,147,, 154 | 14.728585243225098,2,148,, 155 | 15.053729057312012,2,149,, 156 | 12.607903480529785,2,150,, 157 | 15.126401901245117,2,151,, 158 | 12.320189476013184,2,152,, 159 | 13.913947105407715,2,153,, 160 | 13.924324035644531,2,154,, 161 | 13.48940372467041,2,155,, 162 | 12.45267105102539,2,156,, 163 | 13.81796646118164,2,157,, 164 | 12.531159400939941,2,158,, 165 | 14.20812702178955,2,159,, 166 | 13.515217781066895,2,160,, 167 | 14.438188552856445,2,161,, 168 | 13.5935697555542,2,162,, 169 | 15.414556503295898,2,163,, 170 | 13.153566360473633,2,164,, 171 | 13.449665069580078,2,165,, 172 | 14.001715660095215,2,166,, 173 | 13.196492195129395,2,167,, 174 | 11.88573169708252,2,168,, 175 | 13.711161613464355,2,169,, 176 | 10.766975402832031,2,170,, 177 | 11.509916305541992,2,171,, 178 | 11.846670150756836,2,172,, 179 | 11.218088150024414,2,173,, 180 | 11.010222434997559,2,174,, 181 | 12.748520851135254,2,175,, 182 | 11.164408683776855,2,176,, 183 | 13.557666778564453,2,177,, 184 | 12.551591873168945,2,178,, 185 | 11.7612886428833,2,179,, 186 | 12.223938941955566,2,180,, 187 | 
12.432621002197266,2,181,, 188 | 10.601661682128906,2,182,, 189 | 12.626341819763184,2,183,, 190 | 12.788729667663574,2,184,, 191 | 11.307029724121094,2,185,, 192 | 11.06952953338623,2,186,, 193 | 11.332565307617188,2,187,, 194 | 11.129986763000488,2,188,, 195 | ,2,188,10.942109107971191, 196 | ,2,188,,13.721624374389648 197 | 10.454428672790527,3,189,, 198 | 11.101632118225098,3,190,, 199 | 10.045605659484863,3,191,, 200 | 12.324226379394531,3,192,, 201 | 12.629939079284668,3,193,, 202 | 10.829217910766602,3,194,, 203 | 12.20980453491211,3,195,, 204 | 9.382366180419922,3,196,, 205 | 9.951586723327637,3,197,, 206 | 10.547001838684082,3,198,, 207 | 10.15742015838623,3,199,, 208 | 11.345754623413086,3,200,, 209 | 10.184985160827637,3,201,, 210 | 10.795218467712402,3,202,, 211 | 9.447660446166992,3,203,, 212 | 9.721808433532715,3,204,, 213 | 9.270075798034668,3,205,, 214 | 10.185582160949707,3,206,, 215 | 9.540520668029785,3,207,, 216 | 8.804728507995605,3,208,, 217 | 9.590593338012695,3,209,, 218 | 9.588921546936035,3,210,, 219 | 10.500115394592285,3,211,, 220 | 9.713438034057617,3,212,, 221 | 9.860062599182129,3,213,, 222 | 9.816818237304688,3,214,, 223 | 10.362757682800293,3,215,, 224 | 8.36940860748291,3,216,, 225 | 9.090271949768066,3,217,, 226 | 8.984882354736328,3,218,, 227 | 8.592009544372559,3,219,, 228 | 8.011869430541992,3,220,, 229 | 10.040078163146973,3,221,, 230 | 8.33886432647705,3,222,, 231 | 9.056356430053711,3,223,, 232 | 8.219870567321777,3,224,, 233 | 9.925003051757812,3,225,, 234 | 8.527854919433594,3,226,, 235 | 8.389899253845215,3,227,, 236 | 8.866915702819824,3,228,, 237 | 9.946029663085938,3,229,, 238 | 8.781946182250977,3,230,, 239 | 8.43613052368164,3,231,, 240 | 9.175660133361816,3,232,, 241 | 8.951778411865234,3,233,, 242 | 9.957847595214844,3,234,, 243 | 8.133800506591797,3,235,, 244 | 8.990163803100586,3,236,, 245 | 7.789275169372559,3,237,, 246 | 8.380254745483398,3,238,, 247 | 7.965381622314453,3,239,, 248 | 8.017948150634766,3,240,, 249 | 9.427712440490723,3,241,, 250 | 9.787081718444824,3,242,, 251 | 8.078832626342773,3,243,, 252 | 10.798677444458008,3,244,, 253 | 11.50265121459961,3,245,, 254 | 10.26675033569336,3,246,, 255 | 7.760987758636475,3,247,, 256 | 10.382448196411133,3,248,, 257 | 10.863277435302734,3,249,, 258 | 9.64267635345459,3,250,, 259 | 8.716047286987305,3,251,, 260 | ,3,251,9.840303421020508, 261 | ,3,251,,9.60273551940918 262 | 9.106163024902344,4,252,, 263 | 8.86844539642334,4,253,, 264 | 8.820881843566895,4,254,, 265 | 8.93307876586914,4,255,, 266 | 9.34444808959961,4,256,, 267 | 9.826735496520996,4,257,, 268 | 9.464713096618652,4,258,, 269 | 9.208133697509766,4,259,, 270 | 8.54936695098877,4,260,, 271 | 10.253825187683105,4,261,, 272 | 10.415204048156738,4,262,, 273 | 8.969281196594238,4,263,, 274 | 9.565828323364258,4,264,, 275 | 8.964186668395996,4,265,, 276 | 7.6101484298706055,4,266,, 277 | 7.976022243499756,4,267,, 278 | 9.234889030456543,4,268,, 279 | 8.804306030273438,4,269,, 280 | 8.618087768554688,4,270,, 281 | 8.41963005065918,4,271,, 282 | 7.824021339416504,4,272,, 283 | 8.900106430053711,4,273,, 284 | 8.824014663696289,4,274,, 285 | 8.736920356750488,4,275,, 286 | 7.512604713439941,4,276,, 287 | 9.075895309448242,4,277,, 288 | 8.192611694335938,4,278,, 289 | 7.001519203186035,4,279,, 290 | 8.792953491210938,4,280,, 291 | 8.309822082519531,4,281,, 292 | 8.335564613342285,4,282,, 293 | 8.195629119873047,4,283,, 294 | 7.923169136047363,4,284,, 295 | 8.780472755432129,4,285,, 296 | 7.951040744781494,4,286,, 297 | 
8.269807815551758,4,287,, 298 | 9.106958389282227,4,288,, 299 | 7.391048908233643,4,289,, 300 | 9.04957103729248,4,290,, 301 | 9.09718132019043,4,291,, 302 | 7.408243179321289,4,292,, 303 | 7.07985258102417,4,293,, 304 | 7.6718268394470215,4,294,, 305 | 7.99207067489624,4,295,, 306 | 9.361908912658691,4,296,, 307 | 8.277976989746094,4,297,, 308 | 7.805195331573486,4,298,, 309 | 9.399969100952148,4,299,, 310 | 8.85080623626709,4,300,, 311 | 7.778735160827637,4,301,, 312 | 8.918455123901367,4,302,, 313 | 8.778336524963379,4,303,, 314 | 8.077250480651855,4,304,, 315 | 6.41297721862793,4,305,, 316 | 9.151148796081543,4,306,, 317 | 7.313802242279053,4,307,, 318 | 9.535452842712402,4,308,, 319 | 8.399871826171875,4,309,, 320 | 9.836891174316406,4,310,, 321 | 9.302785873413086,4,311,, 322 | 8.71826457977295,4,312,, 323 | 9.070879936218262,4,313,, 324 | 8.698028564453125,4,314,, 325 | ,4,314,9.566192626953125, 326 | ,4,314,,8.603453636169434 327 | 9.704119682312012,5,315,, 328 | 8.24032974243164,5,316,, 329 | 9.871636390686035,5,317,, 330 | 9.621983528137207,5,318,, 331 | 7.303684234619141,5,319,, 332 | 8.900466918945312,5,320,, 333 | 7.858149528503418,5,321,, 334 | 8.210700988769531,5,322,, 335 | 8.391328811645508,5,323,, 336 | 7.466488838195801,5,324,, 337 | 9.250576972961426,5,325,, 338 | 6.96451473236084,5,326,, 339 | 8.212549209594727,5,327,, 340 | 6.271629810333252,5,328,, 341 | 7.337388515472412,5,329,, 342 | 6.421463489532471,5,330,, 343 | 6.787315368652344,5,331,, 344 | 6.719034671783447,5,332,, 345 | 5.951465129852295,5,333,, 346 | 7.794149875640869,5,334,, 347 | 6.87146520614624,5,335,, 348 | 7.210449695587158,5,336,, 349 | 7.008394718170166,5,337,, 350 | 7.500245094299316,5,338,, 351 | 6.599060535430908,5,339,, 352 | 6.702608585357666,5,340,, 353 | 8.500309944152832,5,341,, 354 | 6.634476184844971,5,342,, 355 | 8.722763061523438,5,343,, 356 | 7.1678466796875,5,344,, 357 | 9.048087120056152,5,345,, 358 | 8.254663467407227,5,346,, 359 | 6.582541465759277,5,347,, 360 | 8.860356330871582,5,348,, 361 | 6.623701095581055,5,349,, 362 | 7.240386962890625,5,350,, 363 | 7.519444465637207,5,351,, 364 | 7.89873743057251,5,352,, 365 | 7.579752445220947,5,353,, 366 | 7.470797061920166,5,354,, 367 | 8.063901901245117,5,355,, 368 | 7.028637886047363,5,356,, 369 | 6.622432231903076,5,357,, 370 | 7.971213340759277,5,358,, 371 | 7.835540771484375,5,359,, 372 | 7.89925479888916,5,360,, 373 | 8.570209503173828,5,361,, 374 | 7.898471832275391,5,362,, 375 | 7.928337097167969,5,363,, 376 | 8.819403648376465,5,364,, 377 | 8.273792266845703,5,365,, 378 | 7.607531547546387,5,366,, 379 | 7.7733473777771,5,367,, 380 | 8.3447265625,5,368,, 381 | 7.052158355712891,5,369,, 382 | 9.23034954071045,5,370,, 383 | 7.127655506134033,5,371,, 384 | 9.404086112976074,5,372,, 385 | 8.21739387512207,5,373,, 386 | 6.437567710876465,5,374,, 387 | 7.4553937911987305,5,375,, 388 | 6.335513591766357,5,376,, 389 | 9.261387825012207,5,377,, 390 | ,5,377,7.345172882080078, 391 | ,5,377,,7.740843772888184 392 | 7.7666449546813965,6,378,, 393 | 7.852565288543701,6,379,, 394 | 7.264967918395996,6,380,, 395 | 7.521174430847168,6,381,, 396 | 8.583341598510742,6,382,, 397 | 7.748820781707764,6,383,, 398 | 6.6745829582214355,6,384,, 399 | 8.630276679992676,6,385,, 400 | 7.240371227264404,6,386,, 401 | 6.202889919281006,6,387,, 402 | 7.136613845825195,6,388,, 403 | 6.897251605987549,6,389,, 404 | 6.809789180755615,6,390,, 405 | 7.829868316650391,6,391,, 406 | 7.364218235015869,6,392,, 407 | 6.39121150970459,6,393,, 408 | 
[... example_files/metrics.csv continues through epoch 29 (global steps 394-1889): one per-step training-loss row per optimizer step, plus two aggregate rows at the end of each epoch in which the first column is empty and a later column is populated instead; the raw values are elided here ...]
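A minimal sketch of loading this log with pandas, for inspecting the metrics rather than scrolling them. The CSV header row is not visible in this dump, so the column names below (train_loss_step, epoch, step, val_loss, train_loss_epoch) are assumptions inferred from the row layout: per-step rows populate the first column, and the two end-of-epoch rows appear to carry the validation loss and the epoch-averaged training loss, respectively. Check the header of your own metrics.csv before relying on these names.

import pandas as pd

# Assumed column names; verify against the real header row first.
cols = ["train_loss_step", "epoch", "step", "val_loss", "train_loss_epoch"]
df = pd.read_csv("example_files/metrics.csv", header=0, names=cols)

# Per-step rows: first column populated, aggregate columns empty.
train = df.dropna(subset=["train_loss_step"])

# End-of-epoch rows: carry the validation loss instead.
val = df.dropna(subset=["val_loss"])

print(train.groupby("epoch")["train_loss_step"].mean())  # mean train loss per epoch
print(val[["epoch", "val_loss"]])  # validation loss trajectory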
--------------------------------------------------------------------------------
/lit_data/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ncfrey/litmatter/d355ccac45e3b6299d1773c536ad7617e49a6b8a/lit_data/__init__.py
--------------------------------------------------------------------------------
/lit_data/data.py:
-------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from pytorch_lightning import LightningDataModule 4 | 5 | from torch_geometric.data import DataLoader as PyGDataLoader 6 | 7 | from torch.utils.data import DataLoader 8 | 9 | import deepchem as dc 10 | 11 | 12 | class LitQM9(LightningDataModule): 13 | def __init__(self, datasets, batch_size=16, num_workers=4): 14 | super().__init__() 15 | self.datasets = datasets 16 | self.batch_size = batch_size 17 | self.num_workers = num_workers 18 | 19 | def prepare_data(self): 20 | """Download data if needed.""" 21 | pass 22 | 23 | def setup(self, stage: Optional[str] = None): 24 | """Apply transformations and split datasets.""" 25 | self.train_dataset, self.val_dataset, self.test_dataset = self.datasets 26 | 27 | def train_dataloader(self): 28 | return PyGDataLoader( 29 | self.train_dataset, 30 | batch_size=self.batch_size, 31 | num_workers=self.num_workers, 32 | pin_memory=True, 33 | ) 34 | 35 | def val_dataloader(self): 36 | return PyGDataLoader( 37 | self.val_dataset, 38 | batch_size=self.batch_size, 39 | num_workers=self.num_workers, 40 | pin_memory=True, 41 | ) 42 | 43 | def test_dataloader(self): 44 | return PyGDataLoader( 45 | self.test_dataset, 46 | batch_size=self.batch_size, 47 | num_workers=self.num_workers, 48 | pin_memory=True, 49 | ) 50 | -------------------------------------------------------------------------------- /lit_data/lm_data.py: -------------------------------------------------------------------------------- 1 | from pytorch_lightning import LightningDataModule 2 | 3 | from torch.utils.data import DataLoader 4 | 5 | import datasets 6 | import transformers 7 | from datasets import load_dataset, Dataset, DatasetDict, load_metric, load_from_disk 8 | from tokenizers import ( 9 | decoders, 10 | models, 11 | normalizers, 12 | pre_tokenizers, 13 | processors, 14 | trainers, 15 | Tokenizer, 16 | Regex, 17 | ) 18 | from transformers import BertTokenizerFast, PreTrainedTokenizerFast 19 | from transformers import ( 20 | AdamW, 21 | AutoModelForSequenceClassification, 22 | AutoTokenizer, 23 | get_linear_schedule_with_warmup, 24 | get_constant_schedule_with_warmup, 25 | set_seed, 26 | DataCollatorForLanguageModeling, 27 | ) 28 | 29 | SEED = 42 30 | 31 | class ChemDataModule(LightningDataModule): 32 | def __init__( 33 | self, data_dir, tokenizer_dir, batch_size=16, num_workers=4, debug=False 34 | ): 35 | super().__init__() 36 | self.data_dir = data_dir 37 | self.tokenizer_dir = tokenizer_dir 38 | self.batch_size = batch_size 39 | self.num_workers = num_workers 40 | 41 | self.new_tokenizer = PreTrainedTokenizerFast.from_pretrained(self.tokenizer_dir) 42 | self.new_tokenizer.add_special_tokens({"pad_token": "[PAD]"}) 43 | 44 | self.collate_fn = DataCollatorForLanguageModeling( 45 | tokenizer=self.new_tokenizer, mlm=False 46 | ) 47 | 48 | def prepare_data(self): 49 | self.lm_datasets = load_from_disk(self.data_dir) 50 | 51 | def setup(self, stage=None): 52 | self.train_set = self.lm_datasets["train"] 53 | self.val_set = self.lm_datasets["validation"] 54 | 55 | def train_dataloader(self): 56 | return DataLoader( 57 | self.train_set, 58 | shuffle=True, 59 | pin_memory=True, 60 | collate_fn=self.collate_fn, 61 | batch_size=self.batch_size, 62 | num_workers=self.num_workers, 63 | ) 64 | 65 | def val_dataloader(self): 66 | return DataLoader( 67 | self.val_set, 68 | pin_memory=True, 69 | collate_fn=self.collate_fn, 70 | batch_size=self.batch_size, 71 | 
num_workers=self.num_workers, 72 | ) 73 | 74 | def test_dataloader(self): 75 | return DataLoader( 76 | self.test_set, batch_size=self.batch_size, num_workers=self.num_workers 77 | ) 78 | 79 | class PubChemDataModule(LightningDataModule): 80 | def __init__(self, data_dir, dataset_size=7, batch_size=16, 81 | num_workers=4, debug=False, tokenizer_dir='.'): 82 | super().__init__() 83 | self.data_dir = data_dir 84 | self.batch_size = batch_size 85 | self.num_workers = num_workers 86 | self.dataset_size = dataset_size 87 | self.debug = debug 88 | self.tokenizer_dir = tokenizer_dir 89 | 90 | self.new_tokenizer = PreTrainedTokenizerFast.from_pretrained(self.tokenizer_dir+'pubchem10M_tokenizer/') 91 | self.new_tokenizer.add_special_tokens({'pad_token': '[PAD]'}) 92 | 93 | self.collate_fn = DataCollatorForLanguageModeling(tokenizer=self.new_tokenizer, mlm=False) 94 | 95 | def prepare_data(self): 96 | self.lm_datasets = load_from_disk(self.data_dir + 'pubchem10M_lmdataset') 97 | 98 | def setup(self, stage=None): 99 | # set training set size 100 | if self.dataset_size < 7: # less than 10M 101 | train_size = int(10**self.dataset_size) 102 | num_rows = int(train_size // self.batch_size) 103 | reduced_train = self.lm_datasets['train'].shuffle(seed=SEED).select(range(num_rows)) 104 | self.lm_datasets['train'] = reduced_train 105 | 106 | self.train_set = self.lm_datasets["train"] 107 | self.val_set = self.lm_datasets["validation"] 108 | 109 | def train_dataloader(self): 110 | return DataLoader(self.train_set, shuffle=True, pin_memory=True, collate_fn=self.collate_fn, batch_size=self.batch_size, num_workers=self.num_workers) 111 | 112 | def val_dataloader(self): 113 | return DataLoader(self.val_set, pin_memory=True, collate_fn=self.collate_fn, batch_size=self.batch_size, num_workers=self.num_workers) 114 | 115 | def test_dataloader(self): 116 | return DataLoader(self.test_set, batch_size=self.batch_size, num_workers=self.num_workers) -------------------------------------------------------------------------------- /lit_data/molnet_data.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from pytorch_lightning import LightningDataModule 4 | 5 | from torch_geometric.data import DataLoader as PyGDataLoader 6 | 7 | from torch.utils.data import DataLoader 8 | 9 | import deepchem as dc 10 | 11 | 12 | class LitMolNet(LightningDataModule): 13 | def __init__(self, loader=dc.molnet.load_tox21, batch_size=16, num_workers=4): 14 | super().__init__() 15 | self.loader = loader 16 | self.batch_size = batch_size 17 | self.num_workers = num_workers 18 | 19 | def prepare_data(self): 20 | """Download data if needed.""" 21 | pass 22 | 23 | def setup(self, stage: Optional[str] = None): 24 | """Apply transformations and split datasets.""" 25 | task, df, trans = self.loader() 26 | train, valid, test = df 27 | train, valid, test = ( 28 | train.make_pytorch_dataset(), 29 | valid.make_pytorch_dataset(), 30 | test.make_pytorch_dataset(), 31 | ) 32 | 33 | self.train_dataset, self.val_dataset, self.test_dataset = train, valid, test 34 | 35 | def train_dataloader(self): 36 | return DataLoader( 37 | self.train_dataset, 38 | batch_size=self.batch_size, 39 | num_workers=self.num_workers, 40 | pin_memory=True, 41 | ) 42 | 43 | def val_dataloader(self): 44 | return DataLoader( 45 | self.val_dataset, 46 | batch_size=self.batch_size, 47 | num_workers=self.num_workers, 48 | pin_memory=True, 49 | ) 50 | 51 | def test_dataloader(self): 52 | return DataLoader( 53 | 
self.test_dataset, 54 | batch_size=self.batch_size, 55 | num_workers=self.num_workers, 56 | pin_memory=True, 57 | ) 58 | -------------------------------------------------------------------------------- /lit_data/nff_data.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from pytorch_lightning import LightningDataModule 4 | 5 | 6 | from torch.utils.data import DataLoader 7 | 8 | from nff.data import Dataset, split_train_validation_test, collate_dicts, to_tensor 9 | from nff.train import get_trainer, get_model, load_model, hooks, metrics, evaluate 10 | 11 | 12 | class NFFDataModule(LightningDataModule): 13 | def __init__(self, path, batch_size=16, num_workers=4): 14 | super().__init__() 15 | self.path = path 16 | self.batch_size = batch_size 17 | self.num_workers = num_workers 18 | 19 | def prepare_data(self): 20 | """Download data if needed.""" 21 | pass 22 | 23 | def setup(self, stage: Optional[str] = None): 24 | """Apply transformations and split datasets.""" 25 | dataset = Dataset.from_file(self.path) 26 | ( 27 | self.train_dataset, 28 | self.val_dataset, 29 | self.test_dataset, 30 | ) = split_train_validation_test(dataset, val_size=0.2, test_size=0.2) 31 | 32 | def train_dataloader(self): 33 | return DataLoader( 34 | self.train_dataset, 35 | batch_size=self.batch_size, 36 | num_workers=self.num_workers, 37 | pin_memory=True, 38 | shuffle=True, 39 | collate_fn=collate_dicts, 40 | ) 41 | 42 | def val_dataloader(self): 43 | return DataLoader( 44 | self.val_dataset, 45 | batch_size=self.batch_size, 46 | num_workers=self.num_workers, 47 | pin_memory=True, 48 | collate_fn=collate_dicts, 49 | ) 50 | 51 | def test_dataloader(self): 52 | return DataLoader( 53 | self.test_dataset, 54 | batch_size=self.batch_size, 55 | num_workers=self.num_workers, 56 | pin_memory=True, 57 | collate_fn=collate_dicts, 58 | ) 59 | -------------------------------------------------------------------------------- /lit_models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ncfrey/litmatter/d355ccac45e3b6299d1773c536ad7617e49a6b8a/lit_models/__init__.py -------------------------------------------------------------------------------- /lit_models/deepchem_models.py: -------------------------------------------------------------------------------- 1 | import os.path as osp 2 | 3 | from typing import Optional, List, NamedTuple 4 | 5 | import torch 6 | from torch import Tensor 7 | import torch.nn.functional as F 8 | from torch.nn import Sequential, Linear, ReLU, GRU, ModuleList, BatchNorm1d, MSELoss 9 | 10 | import deepchem as dc 11 | 12 | from pytorch_lightning.callbacks import ModelCheckpoint 13 | from pytorch_lightning import ( 14 | LightningDataModule, 15 | LightningModule, 16 | Trainer, 17 | seed_everything, 18 | ) 19 | 20 | 21 | class LitDeepChem(LightningModule): 22 | def __init__(self, torch_model, lr=1e-2): 23 | """Define DeepChem TorchModel.""" 24 | super().__init__() 25 | 26 | self.model = torch_model.model # torch.nn.Module 27 | self.save_hyperparameters() 28 | self.lr = lr 29 | self.loss_fn = torch_model.loss 30 | 31 | def training_step(self, batch, batch_idx: int): 32 | # Modify for MolNet dataset as needed 33 | inputs = batch[0].float() 34 | y = batch[2].float() 35 | outputs = self.model(inputs) 36 | loss = self.loss_fn(outputs, y) 37 | self.log("train_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 38 | return loss 39 | 40 | def 
validation_step(self, batch, batch_idx: int): 41 | inputs = batch[0].float() 42 | y = batch[2].float() 43 | outputs = self.model(inputs) 44 | loss = self.loss_fn(outputs, y) 45 | self.log("val_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 46 | 47 | def test_step(self, batch, batch_idx: int): 48 | inputs = batch[0].float() 49 | y = batch[2].float() 50 | outputs = self.model(inputs) 51 | loss = self.loss_fn(outputs, y) 52 | self.log("test_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 53 | 54 | def configure_optimizers(self): 55 | optimizer = torch.optim.Adam(self.model.parameters(), lr=self.lr) 56 | return optimizer 57 | -------------------------------------------------------------------------------- /lit_models/lit_chemgpt.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn.functional as F 3 | from datasets import load_dataset 4 | import logging 5 | import math 6 | 7 | import sys 8 | import argparse 9 | 10 | import numpy as np 11 | import torch.nn as nn 12 | import torch.distributed as dist 13 | from torch.utils.data import DataLoader, TensorDataset 14 | from torch.autograd import Variable 15 | from sklearn.model_selection import train_test_split 16 | 17 | import pytorch_lightning as pl 18 | from pytorch_lightning import LightningModule, LightningDataModule, seed_everything 19 | from pytorch_lightning.callbacks import ModelCheckpoint 20 | from pytorch_lightning.callbacks.early_stopping import EarlyStopping 21 | from pytorch_lightning.loggers import CSVLogger 22 | from pytorch_lightning.plugins import DeepSpeedPlugin 23 | 24 | import numpy as np 25 | import pandas as pd 26 | 27 | import copy 28 | import os 29 | import socket 30 | import random 31 | from time import time 32 | import re 33 | 34 | import selfies as sf 35 | 36 | from scipy.optimize import curve_fit 37 | from sklearn.metrics import r2_score 38 | 39 | import seaborn as sns 40 | import matplotlib.pyplot as plt 41 | 42 | import datasets 43 | import transformers 44 | from datasets import load_dataset, Dataset, DatasetDict, load_metric, load_from_disk 45 | from tokenizers import decoders, models, normalizers, pre_tokenizers, processors, trainers, Tokenizer, Regex 46 | from transformers import BertTokenizerFast, PreTrainedTokenizerFast 47 | from datasets import load_dataset 48 | from transformers import AutoConfig, AutoModelForCausalLM 49 | from transformers import TrainingArguments 50 | from transformers import ( 51 | AdamW, 52 | AutoModelForSequenceClassification, 53 | AutoTokenizer, 54 | get_linear_schedule_with_warmup, 55 | get_constant_schedule_with_warmup, 56 | set_seed, 57 | DataCollatorForLanguageModeling, 58 | ) 59 | 60 | from transformers import GPTNeoForCausalLM, GPTNeoConfig 61 | 62 | 63 | class LitChemGPT(LightningModule): 64 | def __init__(self, model_size=16, dataset_size=2, num_epochs=2, lr=2e-5, from_pretrained=None, warmup_steps=100, 65 | reload_weight_path=None, reload_config_path=None, tokenizer_dir='.', logs_dir='loss_logs', cache_path='.'): 66 | super().__init__() 67 | self.save_hyperparameters() 68 | 69 | self.lr = lr 70 | self.seed = 42 71 | self.batch_size = 16 72 | self.tokenizer_dir = tokenizer_dir 73 | self.logs_dir = logs_dir 74 | self.cache_path = cache_path 75 | 76 | self.warmup_steps = warmup_steps 77 | # self.reload_weight_path = reload_weight_path 78 | # self.reload_config_path = reload_config_path 79 | 80 | # tokenizer 81 | tokenizer_dir = self.tokenizer_dir 82 | logs_dir = self.logs_dir 83 | num_proc = 16 84 
class LitChemGPT(LightningModule):
    def __init__(self, model_size=16, dataset_size=2, num_epochs=2, lr=2e-5,
                 from_pretrained=None, warmup_steps=100, reload_weight_path=None,
                 reload_config_path=None, tokenizer_dir='.', logs_dir='loss_logs',
                 cache_path='.'):
        super().__init__()
        self.save_hyperparameters()

        self.lr = lr
        self.seed = 42
        self.batch_size = 16
        self.tokenizer_dir = tokenizer_dir
        self.logs_dir = logs_dir
        self.cache_path = cache_path
        self.warmup_steps = warmup_steps

        # tokenizer; join with os.path so tokenizer_dir does not need a trailing slash
        new_tokenizer = PreTrainedTokenizerFast.from_pretrained(
            os.path.join(self.tokenizer_dir, 'pubchem10M_tokenizer'))
        new_tokenizer.add_special_tokens({'pad_token': '[PAD]'})

        # Instantiate the model with random weight initialization
        num_layers = 24  # default 24
        hidden_size = model_size  # default 2048; must be divisible by num_heads (16)
        config = GPTNeoConfig(num_layers=num_layers,
                              # alternate global and local attention across the layers
                              attention_types=[[['global', 'local'], num_layers // 2]],
                              hidden_size=hidden_size,
                              cache_path=self.cache_path)
        config.vocab_size = new_tokenizer.vocab_size
        max_len_tokenized = 512
        config.max_length = max_len_tokenized

        # building from a config does NOT load weights; `from_pretrained` does
        model = GPTNeoForCausalLM(config)
        model.resize_token_embeddings(len(new_tokenizer))

        self.model = model

    def forward(self, batch):
        return self.model(**batch)

    def training_step(self, batch, batch_idx):
        outputs = self.model(**batch)
        loss = outputs.loss

        self.log('train_loss', loss, prog_bar=False, on_step=True, on_epoch=True)

        return {'loss': loss, 'progress_bar': {'loss': loss}, 'metrics': {}}

    def training_epoch_end(self, outputs):
        avg_loss = torch.stack([x['loss'] for x in outputs]).mean()
        # perplexity of a causal LM is exp(mean cross-entropy); e.g. a mean
        # loss of 2.0 corresponds to a perplexity of e^2 ≈ 7.39
        avg_perplexity = math.exp(avg_loss)
        self.log('train_perplexity', avg_perplexity)

    def validation_step(self, batch, batch_idx):
        outputs = self.model(**batch)
        loss = outputs.loss

        self.log('val_loss', loss, on_step=False, on_epoch=True)

        return {'val_loss': loss, 'metrics': {}}

    def validation_epoch_end(self, outputs):
        avg_loss = torch.stack([x['val_loss'] for x in outputs]).mean()
        self.val_loss = avg_loss.item()
        avg_perplexity = math.exp(avg_loss)
        if self.global_rank == 0:
            print('Val loss: {}'.format(self.val_loss))
            print('Val perplexity: {}'.format(avg_perplexity))
        self.log('val_perplexity', avg_perplexity)

    def sync_across_gpus(self, t):  # t is a tensor
        # work-around to gather a metric's values from every GPU
        gather_t_tensor = [torch.ones_like(t) for _ in range(self.trainer.world_size)]
        torch.distributed.all_gather(gather_t_tensor, t)
        return torch.cat(gather_t_tensor)

    def test_step(self, batch, batch_idx):
        outputs = self.model(**batch)
        if isinstance(outputs[0], tuple):
            loss, metrics = outputs[0]
        else:
            loss = outputs.loss
            metrics = {}
        # metrics may be empty, so fall back to the per-batch perplexity from the loss
        perplexity = metrics.get('perplexity', torch.exp(loss))
        return {'test_loss': loss, 'metrics': metrics, 'test_perplexity': perplexity}

    def test_epoch_end(self, outputs):
        avg_loss = torch.stack([x['test_loss'] for x in outputs]).mean()
        self.test_loss = avg_loss.item()
        perplexities = torch.stack([x['test_perplexity'] for x in outputs])
        avg_perplexity = perplexities.mean()
        metrics = {'test_loss': avg_loss, 'test_perplexity': avg_perplexity}
        print('avg perplexity:', avg_perplexity)

        if dist.is_available() and dist.is_initialized():
            avg_perplexity_all = self.sync_across_gpus(perplexities).mean()
            print('average perplexity (all):', avg_perplexity_all)

        return metrics

    def configure_optimizers(self):
        optimizer = AdamW(self.model.parameters(), lr=self.lr)
        # warm up for self.warmup_steps steps, then hold the learning rate constant
        scheduler = get_constant_schedule_with_warmup(optimizer, num_warmup_steps=self.warmup_steps)

        return {'optimizer': optimizer,
                'lr_scheduler': {'scheduler': scheduler}}
--------------------------------------------------------------------------------
/lit_models/lit_hf.py:
--------------------------------------------------------------------------------
import os.path as osp

from typing import Optional, List, NamedTuple

import math

import torch
import torch.distributed  # needed for sync_across_gpus
from torch import Tensor
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torch.nn import Sequential, Linear, ReLU, GRU, ModuleList, BatchNorm1d, MSELoss

from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning import (
    LightningDataModule,
    LightningModule,
    Trainer,
    seed_everything,
)

import datasets
import transformers
from datasets import load_dataset, Dataset, DatasetDict, load_metric, load_from_disk
from tokenizers import (
    decoders,
    models,
    normalizers,
    pre_tokenizers,
    processors,
    trainers,
    Tokenizer,
    Regex,
)
from transformers import (
    AdamW,
    AutoConfig,
    AutoModelForCausalLM,
    AutoModelForSequenceClassification,
    AutoTokenizer,
    BertTokenizerFast,
    DataCollatorForLanguageModeling,
    GPTNeoConfig,
    GPTNeoForCausalLM,
    PreTrainedTokenizerFast,
    TrainingArguments,
    get_constant_schedule_with_warmup,
    get_linear_schedule_with_warmup,
    set_seed,
)


class LitHF(LightningModule):
    def __init__(
        self,
        tokenizer_dir,
        model_dir,
        from_pretrained=None,
        warmup_steps=100,
        reload_weight_path=None,
        reload_config_path=None,
    ):
        super().__init__()
        self.save_hyperparameters()

        self.lr = 2e-5
        self.seed = 42
        self.batch_size = 16
        self.model_dir = model_dir
        self.warmup_steps = warmup_steps

        # tokenizer
        self.tokenizer_dir = tokenizer_dir
        new_tokenizer = PreTrainedTokenizerFast.from_pretrained(self.tokenizer_dir)
        new_tokenizer.add_special_tokens({"pad_token": "[PAD]"})

        # Instantiate the model with random weight initialization
        num_layers = 8  # default 24
        hidden_size = 256  # default 2048; must be divisible by num_heads (16)
        config = GPTNeoConfig(
            num_layers=num_layers,
            attention_types=[[["global", "local"], num_layers // 2]],
            hidden_size=hidden_size,
            cache_path=self.model_dir,
        )
        config.vocab_size = new_tokenizer.vocab_size
        max_len_tokenized = 512
        config.max_length = max_len_tokenized

        # building from a config does NOT load weights; `from_pretrained` does
        model = GPTNeoForCausalLM(config)
        model.resize_token_embeddings(len(new_tokenizer))

        self.model = model

    def forward(self, batch):
        return self.model(**batch)

    def training_step(self, batch, batch_idx):
        outputs = self.model(**batch)
        loss = outputs.loss

        # log the step loss so the Lightning logger records a training curve
        self.log("train_loss", loss, on_step=True, on_epoch=True)

        return {"loss": loss, "progress_bar": {"loss": loss}, "metrics": {}}

    def training_epoch_end(self, outputs):
        avg_loss = torch.stack([x["loss"] for x in outputs]).mean()
        avg_perplexity = math.exp(avg_loss)  # perplexity = exp(mean cross-entropy)
        self.log("train_perplexity", avg_perplexity)

    def validation_step(self, batch, batch_idx):
        outputs = self.model(**batch)
        loss = outputs.loss

        self.log("val_loss", loss, on_step=False, on_epoch=True)

        return {"val_loss": loss, "metrics": {}}

    def validation_epoch_end(self, outputs):
        avg_loss = torch.stack([x["val_loss"] for x in outputs]).mean()
        self.val_loss = avg_loss.item()
        avg_perplexity = math.exp(avg_loss)
        if self.global_rank == 0:
            print("Val loss: {}".format(self.val_loss))
            print("Val perplexity: {}".format(avg_perplexity))
        self.log("val_perplexity", avg_perplexity)

    def sync_across_gpus(self, t):  # t is a tensor
        # work-around to gather a metric's values from every GPU
        gather_t_tensor = [torch.ones_like(t) for _ in range(self.trainer.world_size)]
        torch.distributed.all_gather(gather_t_tensor, t)
        return torch.cat(gather_t_tensor)

    def test_step(self, batch, batch_idx):
        outputs = self.model(**batch)
        if isinstance(outputs[0], tuple):
            loss, metrics = outputs[0]
        else:
            loss = outputs.loss
            metrics = {}
        # metrics may be empty, so fall back to the per-batch perplexity from the loss
        perplexity = metrics.get("perplexity", torch.exp(loss))
        return {
            "test_loss": loss,
            "metrics": metrics,
            "test_perplexity": perplexity,
        }

    def test_epoch_end(self, outputs):
        avg_loss = torch.stack([x["test_loss"] for x in outputs]).mean()
        self.test_loss = avg_loss.item()
        perplexities = torch.stack([x["test_perplexity"] for x in outputs])
        avg_perplexity = perplexities.mean()
        metrics = {"test_loss": avg_loss, "test_perplexity": avg_perplexity}
        print("avg perplexity:", avg_perplexity)

        if torch.distributed.is_available() and torch.distributed.is_initialized():
            avg_perplexity_all = self.sync_across_gpus(perplexities).mean()
            print("average perplexity (all):", avg_perplexity_all)

        return metrics

    def configure_optimizers(self):
        optimizer = AdamW(self.model.parameters(), lr=self.lr)
        # warm up for self.warmup_steps steps, then hold the learning rate constant
        scheduler = get_constant_schedule_with_warmup(optimizer, num_warmup_steps=self.warmup_steps)

        return {"optimizer": optimizer, "lr_scheduler": {"scheduler": scheduler}}
--------------------------------------------------------------------------------
/lit_models/lit_nffs.py:
--------------------------------------------------------------------------------
import os.path as osp

from typing import Optional, List, NamedTuple

import torch
from torch import Tensor
import torch.nn.functional as F
from torch.nn import Sequential, Linear, ReLU, GRU, ModuleList, BatchNorm1d, MSELoss

from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning import (
    LightningDataModule,
LightningModule, 15 | Trainer, 16 | seed_everything, 17 | ) 18 | 19 | from nff.data import Dataset, split_train_validation_test, collate_dicts, to_tensor 20 | from nff.train import get_trainer, get_model, load_model, hooks, metrics, evaluate 21 | from nff.train.loss import build_mse_loss 22 | 23 | 24 | class LitNFF(LightningModule): 25 | def __init__(self, model_params, loss_params, lr=3e-4): 26 | super().__init__() 27 | 28 | model = get_model(model_params, model_type=model_params["model_type"]) 29 | self.model = model 30 | self.save_hyperparameters() 31 | self.lr = lr 32 | 33 | self.loss_fn = build_mse_loss(loss_coef=loss_params) 34 | 35 | def training_step(self, batch, batch_idx: int): 36 | outputs = self.model(batch) 37 | loss = self.loss_fn(batch, outputs) 38 | self.log("train_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 39 | return loss 40 | 41 | def validation_step(self, batch, batch_idx: int): 42 | torch.set_grad_enabled(True) # needed for nffs 43 | outputs = self.model(batch) 44 | loss = self.loss_fn(batch, outputs) 45 | self.log("val_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 46 | 47 | def test_step(self, batch, batch_idx: int): 48 | torch.set_grad_enabled(True) # needed for nffs 49 | outputs = self.model(batch) 50 | loss = self.loss_fn(batch, outputs) 51 | self.log("test_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 52 | 53 | def configure_optimizers(self): 54 | trainable_params = filter(lambda p: p.requires_grad, self.model.parameters()) 55 | optimizer = torch.optim.Adam(trainable_params, lr=self.lr) 56 | scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau( 57 | optimizer, mode="min", factor=0.5, patience=30, min_lr=1e-7 58 | ) 59 | 60 | return { 61 | "optimizer": optimizer, 62 | "lr_scheduler": { 63 | "scheduler": scheduler, 64 | "monitor": "val_loss", 65 | }, 66 | } 67 | 68 | def forward(self, x): 69 | torch.set_grad_enabled(True) # needed for nffs 70 | return self.model(x) 71 | -------------------------------------------------------------------------------- /lit_models/models.py: -------------------------------------------------------------------------------- 1 | import os.path as osp 2 | 3 | from typing import Optional, List, NamedTuple 4 | 5 | import torch 6 | from torch import Tensor 7 | import torch.nn.functional as F 8 | from torch.nn import Sequential, Linear, ReLU, GRU, ModuleList, BatchNorm1d, MSELoss 9 | 10 | from pytorch_lightning.callbacks import ModelCheckpoint 11 | from pytorch_lightning import ( 12 | LightningDataModule, 13 | LightningModule, 14 | Trainer, 15 | seed_everything, 16 | ) 17 | 18 | import torch_geometric.transforms as T 19 | from torch_geometric.datasets import QM9 20 | from torch_geometric.nn import NNConv, Set2Set 21 | from torch_geometric.data import DataLoader 22 | from torch_geometric.utils import remove_self_loops 23 | 24 | from torch_geometric.nn import DimeNet, SchNet 25 | 26 | from utils import MyTransform, Batch, Complete 27 | 28 | target = 0 29 | dim = 64 30 | 31 | 32 | class LitDimeNet(LightningModule): 33 | def __init__(self, target=0): 34 | """Define PyTorch model.""" 35 | super().__init__() 36 | model = DimeNet( 37 | hidden_channels=128, 38 | out_channels=1, 39 | num_blocks=6, 40 | num_bilinear=8, 41 | num_spherical=7, 42 | num_radial=6, 43 | cutoff=5.0, 44 | ) 45 | self.model = model 46 | self.save_hyperparameters() 47 | self.target = target 48 | self.loss_fn = MSELoss() 49 | 50 | def training_step(self, batch, batch_idx: int): 51 | pred = self.model(batch.z, batch.pos, batch.batch) 52 | 
loss = self.loss_fn(pred.view(-1), batch.y[:, self.target]) 53 | self.log("train_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 54 | return loss 55 | 56 | def validation_step(self, batch, batch_idx: int): 57 | pred = self.model(batch.z, batch.pos, batch.batch) 58 | loss = self.loss_fn(pred.view(-1), batch.y[:, self.target]) 59 | self.log("val_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 60 | 61 | def test_step(self, batch, batch_idx: int): 62 | pred = self.model(batch.z, batch.pos, batch.batch) 63 | loss = self.loss_fn(pred.view(-1), batch.y[:, self.target]) 64 | self.log("test_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 65 | 66 | def configure_optimizers(self): 67 | optimizer = torch.optim.Adam(self.model.parameters(), lr=0.001) 68 | scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau( 69 | optimizer, mode="min", factor=0.7, patience=5, min_lr=0.00001 70 | ) 71 | return { 72 | "optimizer": optimizer, 73 | "lr_scheduler": { 74 | "scheduler": scheduler, 75 | "monitor": "val_loss", 76 | }, 77 | } 78 | 79 | 80 | class LitSchNet(LightningModule): 81 | def __init__(self, target): 82 | super().__init__() 83 | self.save_hyperparameters() 84 | 85 | model = SchNet( 86 | hidden_channels=128, 87 | num_filters=128, 88 | num_interactions=6, 89 | num_gaussians=50, 90 | cutoff=10.0, 91 | ) 92 | self.model = model 93 | self.target = target 94 | 95 | self.loss_fn = MSELoss() 96 | 97 | def training_step(self, batch: Batch, batch_idx: int): 98 | pred = self.model(batch.z, batch.pos, batch.batch) 99 | loss = self.loss_fn(pred.view(-1), batch.y[:, self.target]) 100 | self.log("train_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 101 | return loss 102 | 103 | def validation_step(self, batch: Batch, batch_idx: int): 104 | pred = self.model(batch.z, batch.pos, batch.batch) 105 | loss = self.loss_fn(pred.view(-1), batch.y[:, self.target]) 106 | self.log("val_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 107 | 108 | def test_step(self, batch: Batch, batch_idx: int): 109 | pred = self.model(batch.z, batch.pos, batch.batch) 110 | loss = self.loss_fn(pred.view(-1), batch.y[:, self.target]) 111 | self.log("test_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 112 | 113 | def configure_optimizers(self): 114 | optimizer = torch.optim.Adam(self.model.parameters(), lr=0.001) 115 | scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau( 116 | optimizer, mode="min", factor=0.7, patience=5, min_lr=0.00001 117 | ) 118 | return { 119 | "optimizer": optimizer, 120 | "lr_scheduler": { 121 | "scheduler": scheduler, 122 | "monitor": "val_loss", 123 | }, 124 | } 125 | 126 | 127 | class LitNNConv(LightningModule): 128 | def __init__(self, num_features, dim=64): 129 | super().__init__() 130 | self.save_hyperparameters() 131 | self.lin0 = torch.nn.Linear(num_features, dim) 132 | 133 | nn = Sequential(Linear(5, 128), ReLU(), Linear(128, dim * dim)) 134 | self.conv = NNConv(dim, dim, nn, aggr="mean") 135 | self.gru = GRU(dim, dim) 136 | 137 | self.set2set = Set2Set(dim, processing_steps=3) 138 | self.lin1 = torch.nn.Linear(2 * dim, dim) 139 | self.lin2 = torch.nn.Linear(dim, 1) 140 | 141 | def forward(self, data): 142 | out = F.relu(self.lin0(data.x)) 143 | h = out.unsqueeze(0) 144 | 145 | for i in range(3): 146 | m = F.relu(self.conv(out, data.edge_index, data.edge_attr)) 147 | out, h = self.gru(m.unsqueeze(0), h) 148 | out = out.squeeze(0) 149 | 150 | out = self.set2set(out, data.batch) 151 | out = F.relu(self.lin1(out)) 152 | out = self.lin2(out) 153 | return 
out.view(-1) 154 | 155 | def training_step(self, batch: Batch, batch_idx: int): 156 | y_hat = self(batch) 157 | loss = F.mse_loss(y_hat, batch.y) 158 | self.log("train_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 159 | return loss 160 | 161 | def validation_step(self, batch: Batch, batch_idx: int): 162 | y_hat = self(batch) 163 | loss = F.mse_loss(y_hat, batch.y) 164 | self.log("val_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 165 | 166 | def test_step(self, batch: Batch, batch_idx: int): 167 | y_hat = self(batch) 168 | loss = F.mse_loss(y_hat, batch.y) 169 | self.log("test_loss", loss, prog_bar=True, on_step=False, on_epoch=True) 170 | 171 | def configure_optimizers(self): 172 | optimizer = torch.optim.Adam(self.parameters(), lr=0.001) 173 | scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau( 174 | optimizer, mode="min", factor=0.7, patience=5, min_lr=0.00001 175 | ) 176 | return { 177 | "optimizer": optimizer, 178 | "lr_scheduler": { 179 | "scheduler": scheduler, 180 | "monitor": "val_loss", 181 | }, 182 | } 183 | -------------------------------------------------------------------------------- /prototyping.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# LitMatter Rapid Prototyping\n", 8 | "* This notebook shows how to experiment with Graph Neural Networks using the LitMatter template. \n", 9 | "* In this example, we train a [DimeNet](https://arxiv.org/abs/2003.03123) model on the [QM9](https://www.nature.com/articles/sdata201422) dataset. \n", 10 | "* The training workflow shown here can be scaled to hundreds of GPUs by changing a single keyword argument!" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": null, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "import numpy as np\n", 20 | "import os\n", 21 | "\n", 22 | "from torch_geometric.datasets import QM9\n", 23 | "from torch_geometric.nn import DimeNet\n", 24 | "\n", 25 | "from pytorch_lightning.metrics import Accuracy\n", 26 | "from pytorch_lightning.callbacks import ModelCheckpoint\n", 27 | "from pytorch_lightning import (LightningDataModule, LightningModule, Trainer,\n", 28 | " seed_everything)\n", 29 | "\n", 30 | "from lit_models.models import LitDimeNet\n", 31 | "from lit_data.data import LitQM9" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "seed_everything(11)" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "metadata": {}, 46 | "source": [ 47 | "### Load data\n", 48 | "Datasets are organized as a `pytorch_lightning.LightningDataModule`. All data downloading, processing, and transformations are defined in the `DataModule`. For simplicity, here we use the QM9 dataset available through PyTorch Geometric." 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": null, 54 | "metadata": {}, 55 | "outputs": [], 56 | "source": [ 57 | "dataset = QM9('data/QM9')\n", 58 | "target = 0\n", 59 | "_, datasets = DimeNet.from_qm9_pretrained('data/QM9', dataset, target)\n", 60 | "datamodule = LitQM9(datasets)\n", 61 | "datamodule.setup()" 62 | ] 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "metadata": {}, 67 | "source": [ 68 | "### Set up model\n", 69 | "The model is defined in a `pytorch_lightning.LightningModule`. Any PyTorch or PyTorch Geometric code can be used to build the model." 
70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": null, 75 | "metadata": {}, 76 | "outputs": [], 77 | "source": [ 78 | "model = LitDimeNet(target)" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "metadata": {}, 85 | "outputs": [], 86 | "source": [ 87 | "model.summarize()" 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "execution_count": null, 93 | "metadata": {}, 94 | "outputs": [], 95 | "source": [ 96 | "# add any desired callbacks\n", 97 | "checkpoint_callback = ModelCheckpoint(monitor='val_loss', save_top_k=1)" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "trainer = Trainer(gpus=-1, # use all available GPUs on each node\n", 107 | " num_nodes=1, # change to number of available nodes\n", 108 | " accelerator='ddp',\n", 109 | " max_epochs=5,\n", 110 | " callbacks=[checkpoint_callback],\n", 111 | " )" 112 | ] 113 | }, 114 | { 115 | "cell_type": "markdown", 116 | "metadata": {}, 117 | "source": [ 118 | "### Model training and evaluation" 119 | ] 120 | }, 121 | { 122 | "cell_type": "code", 123 | "execution_count": null, 124 | "metadata": {}, 125 | "outputs": [], 126 | "source": [ 127 | "trainer.fit(model, datamodule=datamodule)" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": null, 133 | "metadata": {}, 134 | "outputs": [], 135 | "source": [ 136 | "trainer.test()" 137 | ] 138 | }, 139 | { 140 | "cell_type": "markdown", 141 | "metadata": {}, 142 | "source": [ 143 | "That's it! By changing the `num_nodes` argument, training can be distributed across all available GPUs. For longer training jobs on an HPC cluster, see the provided example batch scripts." 144 | ] 145 | } 146 | ], 147 | "metadata": { 148 | "kernelspec": { 149 | "display_name": "Python [conda env:.conda-litmatter]", 150 | "language": "python", 151 | "name": "conda-env-.conda-litmatter-py" 152 | }, 153 | "language_info": { 154 | "codemirror_mode": { 155 | "name": "ipython", 156 | "version": 3 157 | }, 158 | "file_extension": ".py", 159 | "mimetype": "text/x-python", 160 | "name": "python", 161 | "nbconvert_exporter": "python", 162 | "pygments_lexer": "ipython3", 163 | "version": "3.8.12" 164 | } 165 | }, 166 | "nbformat": 4, 167 | "nbformat_minor": 4 168 | } 169 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | torch 2 | torch-cluster 3 | torch-geometric 4 | torch-scatter 5 | torch-sparse 6 | torch-spline-conv 7 | torchmetrics 8 | torchvision 9 | pytorch-lightning 10 | scipy 11 | numpy 12 | pandas 13 | -------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | #SBATCH --tasks-per-node=2 # set to number of GPUs per node 3 | #SBATCH --cpus-per-task=20 # set to number of cpus per node / number of tasks per node 4 | 5 | # load modules if needed 6 | 7 | export TOTAL_GPUS=${SLURM_NTASKS} # num nodes * num gpus per node 8 | export GPUS_PER_NODE=2 9 | export LOG_DIR="/path/to/logs" 10 | 11 | LOG_FILE=${LOG_DIR}/${TOTAL_GPUS}.log 12 | ERR_LOG=${LOG_DIR}/${TOTAL_GPUS}.err 13 | CONFIG=${LOG_DIR}/config.json 14 | 15 | # srun or mpirun depending on your system 16 | srun python train.py \ 17 | --task=${TASK} \ 18 | --batch_size=${BATCH_SIZE} \ 19 | --num_epochs=${NUM_EPOCHS} \ 20 | 
--num_nodes=${SLURM_NNODES} \ 21 | --log_dir=${LOG_DIR} 2>${ERR_LOG} 1>${LOG_FILE} -------------------------------------------------------------------------------- /submit.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Call with "./run.sh " 4 | export BATCH_SIZE=$1 5 | export NUM_EPOCHS=$2 6 | export NUM_NODES=$3 7 | 8 | export LOG_DIR="/path/to/logs" 9 | mkdir -p ${LOG_DIR} 10 | 11 | sbatch --output=${LOG_DIR}"/%j.log" \ 12 | --gres=gpu:volta:2 \ 13 | --nodes ${NUM_NODES} \ 14 | run.sh ${BATCH_SIZE} \ 15 | ${NUM_EPOCHS} -------------------------------------------------------------------------------- /tokenizers/pubchem10M_selfiesv2_tokenizer/special_tokens_map.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /tokenizers/pubchem10M_selfiesv2_tokenizer/tokenizer.json: -------------------------------------------------------------------------------- 1 | {"version":"1.0","truncation":null,"padding":null,"added_tokens":[{"id":0,"special":true,"content":"[UNK]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":1,"special":true,"content":"[PAD]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":2,"special":true,"content":"[CLS]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":3,"special":true,"content":"[SEP]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":4,"special":true,"content":"[MASK]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false}],"normalizer":null,"pre_tokenizer":{"type":"Split","pattern":{"Regex":"\\[(.*?)\\]"},"behavior":"Isolated","invert":false},"post_processor":{"type":"TemplateProcessing","single":[{"SpecialToken":{"id":"[CLS]","type_id":0}},{"Sequence":{"id":"A","type_id":0}},{"SpecialToken":{"id":"[SEP]","type_id":0}}],"pair":[{"SpecialToken":{"id":"[CLS]","type_id":0}},{"Sequence":{"id":"A","type_id":0}},{"SpecialToken":{"id":"[SEP]","type_id":0}},{"Sequence":{"id":"B","type_id":1}},{"SpecialToken":{"id":"[SEP]","type_id":1}}],"special_tokens":{"[CLS]":{"id":"[CLS]","ids":[2],"tokens":["[CLS]"]},"[SEP]":{"id":"[SEP]","ids":[3],"tokens":["[SEP]"]}}},"decoder":{"type":"WordPiece","prefix":"##","cleanup":true},"model":{"type":"WordPiece","unk_token":"[UNK]","continuing_subword_prefix":"##","max_input_chars_per_word":100,"vocab":{"[UNK]":0,"[PAD]":1,"[CLS]":2,"[SEP]":3,"[MASK]":4,"#":5,"+":6,"-":7,"1":8,"2":9,"3":10,"4":11,"=":12,"A":13,"B":14,"C":15,"D":16,"E":17,"F":18,"G":19,"H":20,"I":21,"K":22,"L":23,"M":24,"N":25,"O":26,"P":27,"R":28,"S":29,"T":30,"U":31,"V":32,"W":33,"X":34,"Y":35,"Z":36,"[":37,"]":38,"a":39,"b":40,"c":41,"d":42,"e":43,"f":44,"g":45,"h":46,"i":47,"l":48,"m":49,"n":50,"o":51,"r":52,"s":53,"t":54,"u":55,"y":56,"##N":57,"##b":58,"##]":59,"##T":60,"##l":61,"##H":62,"##2":63,"###":64,"##=":65,"##f":66,"##+":67,"##W":68,"##-":69,"##1":70,"##A":71,"##R":72,"##u":73,"##s":74,"##O":75,"##B":76,"##i":77,"##m":78,"##P":79,"##C":80,"##o":81,"##S":82,"##M":83,"##I":84,"##r":85,"##3":86,"##n":87,"##g":88,"##V":89,"##a":90,"##c":91,"##h":92,"##U":93,"##e":94,"##d":95,"##F":96,"##Y":97,"##G":98,"##K":99,"##4":100,"##t":101,"##Z":102,"##E":103,"##D":104,"##y":105,"##X":106,"##L":107,"##C]":108,"[C]":109,"##1]":110,"[=":111,"##Br":112,"##nc":113,"##anc":114,"##Branc":115,"##Branch":116,"##Branch1]":117,"[=C]":118,"##Ri":119,"##ng":120,"##Ring":121,"[Ring":122,"[
Ring1]":123,"[Branch1]":124,"##O]":125,"##N]":126,"##2]":127,"[=Branch1]":128,"[N]":129,"##Branch2]":130,"[O]":131,"[=O]":132,"[#":133,"[Ring2]":134,"[Branch2]":135,"[=N]":136,"##S]":137,"[S]":138,"[#Branch1]":139,"[N":140,"##+1]":141,"[NH":142,"[F":143,"[F]":144,"[=Branch2]":145,"[#Branch2]":146,"[C":147,"##l]":148,"[Cl]":149,"[#C]":150,"##1+1]":151,"##-1]":152,"[P":153,"[P]":154,"[O":155,"[O-1]":156,"[NH1+1]":157,"[=Ring":158,"##2+1]":159,"[NH2+1]":160,"[=Ring1]":161,"[Br":162,"[Br]":163,"[NH1]":164,"##3+1]":165,"[NH3+1]":166,"[N+1]":167,"[=N":168,"[#N]":169,"[=NH":170,"[=S]":171,"[=NH1+1]":172,"[=Ring2]":173,"[S":174,"[Si":175,"[Si]":176,"[I":177,"[I]":178,"[=N+1]":179,"[B":180,"[=NH2+1]":181,"[B]":182,"[N-1]":183,"[=N-1]":184,"[S-1]":185,"[P+1]":186,"##H1]":187,"##e]":188,"[#N":189,"[#N+1]":190,"[C-1]":191,"[=P":192,"[=P]":193,"##3]":194,"[Se]":195,"[CH1]":196,"##Branch3]":197,"[Branch3]":198,"##H2]":199,"##n]":200,"[Sn]":201,"[SiH1]":202,"[SiH2]":203,"[A":204,"[B-1]":205,"[S+1]":206,"##s]":207,"[As]":208,"[G":209,"[T":210,"[=S":211,"[Ge]":212,"[Te]":213,"##H3]":214,"[SiH3]":215,"[C+1]":216,"[IH2]":217,"##O+1]":218,"[Al]":219,"[=O+1]":220,"##C-1]":221,"[=P+1]":222,"[Ring3]":223,"[#C-1]":224,"[H":225,"[Hg":226,"[O+1]":227,"[Hg]":228,"##b]":229,"[=Se]":230,"[CH2]":231,"##i]":232,"[PH1]":233,"[CH":234,"[#P":235,"[#P]":236,"##W]":237,"##2-1]":238,"[I+1]":239,"[Se":240,"[I-1]":241,"[=S+1]":242,"[Sb]":243,"[Pb]":244,"[Cl":245,"##+3]":246,"[=W]":247,"[=C":248,"[CH2-1]":249,"##H1+1]":250,"[Cl+3]":251,"##1-1]":252,"[=Si]":253,"##a]":254,"[=B":255,"[PH1+1]":256,"[Bi]":257,"[BH":258,"[=B]":259,"[Sn":260,"[Ga]":261,"[In]":262,"[=C+1]":263,"[=A":264,"[=I":265,"[SH1]":266,"[#S]":267,"[=As]":268,"##+2]":269,"[=SH1]":270,"##u]":271,"[SeH1]":272,"##r]":273,"[=IH2]":274,"[=R":275,"[Al":276,"[Se-1]":277,"[As":278,"[Ge":279,"[=Ru]":280,"[BH1]":281,"##V]":282,"[CH1-1]":283,"[=T":284,"[Tl]":285,"##o]":286,"[=Cr]":287,"[BH1-1]":288,"[=V]":289,"[Se+1]":290,"[=C-1]":291,"[NH1-1]":292,"[BH2-1]":293,"[W]":294,"[=Te]":295,"##e+1]":296,"##3-1]":297,"[BH3-1]":298,"[=Branch3]":299,"[As+1]":300,"##Mo]":301,"[#S":302,"[Sn+1]":303,"[GeH1]":304,"[Al-1]":305,"[=Si":306,"[R":307,"[=Z":308,"##t]":309,"[SnH1]":310,"[=SiH1]":311,"[AsH1]":312,"[Sn+2]":313,"[GeH2]":314,"[CH1+1]":315,"[Al+1]":316,"##eH1]":317,"[Te+1]":318,"[Hg+1]":319,"[=F":320,"[PH":321,"[SnH2]":322,"[=G":323,"[=PH1]":324,"[=Pt]":325,"[=Sn]":326,"[#Si]":327,"##r+2]":328,"##h]":329,"[=Mo]":330,"[=I]":331,"[=Fe]":332,"[Mo]":333,"[OH1+1]":334,"[=Ring3]":335,"[Si+1]":336,"[=Sb]":337,"[Cl+1]":338,"[P-1]":339,"[Sb":340,"[Z":341,"[=Zr+2]":342,"[TeH1]":343,"[V]":344,"[#O+1]":345,"[=Zr]":346,"[=Ge]":347,"[M":348,"[X":349,"##d]":350,"[=O":351,"[Po]":352,"[SnH3]":353,"[Al+2]":354,"[PH2+1]":355,"[Sn+3]":356,"[=Rh]":357,"[#SH1]":358,"[Ru]":359,"[Bi":360,"[=Se+1]":361,"[Fe]":362,"[Cr]":363,"[Br+2]":364,"[I+2]":365,"[#P+1]":366,"[GeH3]":367,"##H1-1]":368,"[=H":369,"[#W]":370,"[Pb":371,"[Si-1]":372,"[Cl+2]":373,"[=B-1]":374,"[=Au]":375,"[=Pd]":376,"[=Al]":377,"[Xe]":378,"##U]":379,"[I+3]":380,"[PH2-1]":381,"[K":382,"##f+2]":383,"##-2]":384,"##c]":385,"[#Mo]":386,"[=Ni]":387,"[=SH1+1]":388,"[=In]":389,"[=Ti":390,"[=Ti]":391,"[Re]":392,"[=Zn]":393,"[Sb+1]":394,"[Zr]":395,"[Mn]":396,"[K]":397,"##g]":398,"[=M":399,"[Pt]":400,"[SiH1-1]":401,"[IH3]":402,"[At]":403,"[Ti]":404,"[Ta]":405,"[=Co]":406,"[AsH2]":407,"[=Fe+1]":408,"[=GeH1]":409,"[=Os]":410,"[=OH1+1]":411,"[BiH2]":412,"[=Ti+2]":413,"[L":414,"##u+1]":415,"##Y]":416,"[Na]":417,"[Os]":418,"[=Nb]":419,"[IH1]":420,"[Au]":421,"[=
Cu]":422,"[=Ir]":423,"[=Ru+1]":424,"[Li]":425,"##H2+1]":426,"##m]":427,"##n-1]":428,"##Yb]":429,"##4]":430,"##Eu]":431,"[Cs]":432,"[Ca]":433,"[Co]":434,"[OH2+1]":435,"[In-1]":436,"[=Bi]":437,"[AlH1]":438,"[=Ta]":439,"[=SiH2]":440,"[Ra]":441,"[Rh]":442,"[SbH1]":443,"[Zr+2]":444,"[XeH1]":445,"[PbH1]":446,"##sH1]":447,"##B]":448,"##o+2]":449,"##a-1]":450,"[=W":451,"[=U]":452,"[#C":453,"[#G":454,"[#B]":455,"[Ni]":456,"[Cu]":457,"[Pr]":458,"[Os":459,"[IH":460,"[=Pb]":461,"[Tl":462,"[=S-1]":463,"[Hf+2]":464,"[=Tc]":465,"[Ru":466,"[=Ga]":467,"[Sb-1]":468,"[Zn]":469,"[=Hf+2]":470,"[=Hg]":471,"[Pb+2]":472,"[=Mo+2]":473,"[U]":474,"[Eu]":475,"##f]":476,"##W+1]":477,"##-3]":478,"##Re]":479,"##i+1]":480,"##o+1]":481,"[=U":482,"[=Yb]":483,"[#T":484,"[#Branch3]":485,"[#Y]":486,"[#W+1]":487,"[#Re]":488,"[Ir":489,"[Ga-1]":490,"[Th]":491,"[Tc]":492,"[CH2+1]":493,"[=AsH1]":494,"[Rh":495,"[Rb]":496,"[PH1-1]":497,"[Mn":498,"[Bi+2]":499,"[=Hf]":500,"[=Mn]":501,"[=WH1]":502,"[#Ge]":503,"[W":504,"[Y]":505,"[Yb]":506,"##H4]":507,"##2+3]":508,"##Os]":509,"##i-1]":510,"##iH1]":511,"##Mo+1]":512,"##I]":513,"##r+1]":514,"##g+1]":515,"##VH1]":516,"##a+1]":517,"##eH2]":518,"[#U]":519,"[#Os]":520,"[#Mo+1]":521,"[#I]":522,"[Nb]":523,"[Ni-1]":524,"[Fe":525,"[Fe+1]":526,"[Cr+1]":527,"[Pt":528,"[Br+1]":529,"[SH2]":530,"[SH1+1]":531,"[Ir]":532,"[Ba]":533,"[=P-1]":534,"[Ag]":535,"[TeH2]":536,"[=SeH1]":537,"[=CH1-1]":538,"[=Bi+1]":539,"[Sn-1]":540,"[=Re]":541,"[AlH2]":542,"[=Tl]":543,"[#S-1]":544,"[#Sb]":545,"[#SiH1]":546,"[SbH2]":547,"[Mg+1]":548,"[Bi+1]":549,"[#Cr]":550,"[IH2+3]":551,"[TlH1]":552,"[TlH2]":553,"[=UH1]":554,"[Rh+1]":555,"[Mn-2]":556,"[U":557,"[E":558,"[VH1]":559,"##b+1]":560,"##Ho]":561,"##f+":562,"##1+3]":563,"##AsH1]":564,"##uH3]":565,"##s+2]":566,"##mH3]":567,"##o-3]":568,"##r-2]":569,"##nH2]":570,"##hH2]":571,"##d-1]":572,"##YH1]":573,"##Dy":574,"##La]":575,"[=Y]":576,"[=Eu]":577,"[=VH1]":578,"[=YH1]":579,"[#Eu]":580,"[#Ho]":581,"[#AsH1]":582,"[#Dy":583,"[#La]":584,"[No]":585,"[Ni+1]":586,"[Ce]":587,"[Cd]":588,"[Co-3]":589,"[PH2]":590,"[Pa]":591,"[Pm]":592,"[Pd-1]":593,"[Sr]":594,"[Sm]":595,"[SmH3]":596,"[Ba+1]":597,"[#Nd]":598,"[=PH2]":599,"[=PH1+1]":600,"[Ac]":601,"[Am]":602,"[Gd]":603,"[Tm]":604,"[Ta-1]":605,"[ThH2]":606,"[=Sb+1]":607,"[=SnH2]":608,"[Ho]":609,"[Hf+":610,"[Hg-1]":611,"[HgH1]":612,"[#Pr]":613,"[=Ce]":614,"[=Cd]":615,"[=Ba]":616,"[=Ag]":617,"[AlH1-1]":618,"[Al-2]":619,"[AsH4]":620,"[=Th]":621,"[=Tm]":622,"[#Se]":623,"[#SH1-1]":624,"[=Si+1]":625,"[=FeH1]":626,"[PH4]":627,"[Zr-2]":628,"[Mt]":629,"[Mo+1]":630,"[=Os+2]":631,"[BiH1]":632,"[PbH2]":633,"[PbH4]":634,"[=Ti+1]":635,"[=W-1]":636,"[#Ce]":637,"[#Co]":638,"[#Gd]":639,"[Os+1]":640,"[OsH2]":641,"[Os+2]":642,"[Os-3]":643,"[IH4]":644,"[IH1+3]":645,"[RuH2]":646,"[Ru+3]":647,"[Ru+2]":648,"[Ru-2]":649,"[=U+2]":650,"[#Te]":651,"[#Tb]":652,"[#Ta+1]":653,"[Ir+3]":654,"[Ir-2]":655,"[Ir-3]":656,"[RhH1]":657,"[MnH1]":658,"[WH2]":659,"[W+2]":660,"[Fe-1]":661,"[Fe-2]":662,"[Pt-1]":663,"[Pt-2]":664,"[U+3]":665,"[EuH3]":666,"[#Dy]":667,"[Hf+4]":668}}} -------------------------------------------------------------------------------- /tokenizers/pubchem10M_selfiesv2_tokenizer/tokenizer_config.json: -------------------------------------------------------------------------------- 1 | {"tokenizer_class": "PreTrainedTokenizerFast"} -------------------------------------------------------------------------------- /tokenizers/pubchem10M_tokenizer/special_tokens_map.json: -------------------------------------------------------------------------------- 1 | {} 
-------------------------------------------------------------------------------- /tokenizers/pubchem10M_tokenizer/tokenizer.json: -------------------------------------------------------------------------------- 1 | {"version":"1.0","truncation":null,"padding":null,"added_tokens":[{"id":0,"special":true,"content":"[UNK]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":1,"special":true,"content":"[PAD]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":2,"special":true,"content":"[CLS]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":3,"special":true,"content":"[SEP]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":4,"special":true,"content":"[MASK]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false}],"normalizer":null,"pre_tokenizer":{"type":"Split","pattern":{"Regex":"\\[(.*?)\\]"},"behavior":"Isolated","invert":false},"post_processor":{"type":"TemplateProcessing","single":[{"SpecialToken":{"id":"[CLS]","type_id":0}},{"Sequence":{"id":"A","type_id":0}},{"SpecialToken":{"id":"[SEP]","type_id":0}}],"pair":[{"SpecialToken":{"id":"[CLS]","type_id":0}},{"Sequence":{"id":"A","type_id":0}},{"SpecialToken":{"id":"[SEP]","type_id":0}},{"Sequence":{"id":"B","type_id":1}},{"SpecialToken":{"id":"[SEP]","type_id":1}}],"special_tokens":{"[CLS]":{"id":"[CLS]","ids":[2],"tokens":["[CLS]"]},"[SEP]":{"id":"[SEP]","ids":[3],"tokens":["[SEP]"]}}},"decoder":{"type":"WordPiece","prefix":"##","cleanup":true},"model":{"type":"WordPiece","unk_token":"[UNK]","continuing_subword_prefix":"##","max_input_chars_per_word":100,"vocab":{"[UNK]":0,"[PAD]":1,"[CLS]":2,"[SEP]":3,"[MASK]":4,"#":5,"+":6,"-":7,"1":8,"2":9,"3":10,"4":11,"=":12,"A":13,"B":14,"C":15,"D":16,"E":17,"F":18,"G":19,"H":20,"I":21,"K":22,"L":23,"M":24,"N":25,"O":26,"P":27,"R":28,"S":29,"T":30,"U":31,"V":32,"W":33,"X":34,"Y":35,"Z":36,"[":37,"]":38,"_":39,"a":40,"b":41,"c":42,"d":43,"e":44,"f":45,"g":46,"h":47,"i":48,"l":49,"m":50,"n":51,"o":52,"p":53,"r":54,"s":55,"t":56,"u":57,"x":58,"y":59,"##T":60,"##c":61,"##e":62,"##x":63,"##p":64,"##l":65,"##]":66,"##m":67,"###":68,"##S":69,"##-":70,"##Z":71,"##r":72,"##=":73,"##P":74,"##A":75,"##s":76,"##H":77,"##I":78,"##3":79,"##N":80,"##b":81,"##G":82,"##2":83,"##R":84,"##a":85,"##+":86,"##Y":87,"##M":88,"##n":89,"##i":90,"##u":91,"##f":92,"##U":93,"##B":94,"##F":95,"##C":96,"##d":97,"##X":98,"##O":99,"##t":100,"##h":101,"##o":102,"##1":103,"##_":104,"##E":105,"##g":106,"##D":107,"##y":108,"##W":109,"##V":110,"##L":111,"##4":112,"##K":113,"##C]":114,"[C]":115,"##1]":116,"[B":117,"[Br":118,"##ch":119,"##an":120,"[Bran":121,"[Branch":122,"[=":123,"##1_":124,"[Branch1_":125,"[=C]":126,"##Ri":127,"##ng":128,"##Ring":129,"[Ring":130,"##2]":131,"[Ring1]":132,"[Branch1_1]":133,"##O]":134,"##N]":135,"[Branch1_2]":136,"[N]":137,"##2_":138,"[Branch2_":139,"[O]":140,"[=O]":141,"[Ring2]":142,"##l]":143,"[Branch2_1]":144,"##xp":145,"##3]":146,"##exp":147,"##expl]":148,"[=N]":149,"##S]":150,"[S]":151,"[Branch1_3]":152,"[N":153,"##+expl]":154,"[NH":155,"[F":156,"[F]":157,"[Branch2_2]":158,"[Branch2_3]":159,"[#":160,"[C":161,"[Cl]":162,"[#C]":163,"##-expl]":164,"[P":165,"[P]":166,"[O":167,"[O-expl]":168,"[NH+expl]":169,"[E":170,"##l=":171,"##xpl=":172,"[Expl=":173,"[Expl=Ring":174,"##2+expl]":175,"[NH2+expl]":176,"[Expl=Ring1]":177,"[Br]":178,"[NHexpl]":179,"##3+expl]":180,"[NH3+expl]":181,"[N+expl]":182,"[=N":183,"[#N]":184,"[Expl=Ring2]":185,"[=NH":186,"[=S]":187,"[=NH+expl]":188,"[S":189,"[Si":190,
"[Siexpl]":191,"[I":192,"[I]":193,"[=N+expl]":194,"[=NH2+expl]":195,"[B]":196,"[N-expl]":197,"[=N-expl]":198,"[S-expl]":199,"[P+expl]":200,"##Hexpl]":201,"##eexpl]":202,"[#N":203,"[#N+expl]":204,"[C-expl]":205,"[=P":206,"[=P]":207,"[Seexpl]":208,"[CHexpl]":209,"##3_":210,"[Branch3_":211,"[Branch3_1]":212,"##H2":213,"##H2expl]":214,"##nexpl]":215,"[Snexpl]":216,"[SiHexpl]":217,"[SiH2expl]":218,"[A":219,"[B-expl]":220,"[S+expl]":221,"##sexpl]":222,"[Asexpl]":223,"[G":224,"##3expl]":225,"[T":226,"[=S":227,"[Geexpl]":228,"[Teexpl]":229,"##H3expl]":230,"[SiH3expl]":231,"[Al":232,"##O+expl]":233,"[C+expl]":234,"[IH2expl]":235,"[=O+expl]":236,"[Alexpl]":237,"##C-expl]":238,"[=P+expl]":239,"[Ring3]":240,"[#C-expl]":241,"[H":242,"[Hg":243,"[Hgexpl]":244,"[O+expl]":245,"##bexpl]":246,"[=Seexpl]":247,"[CH2expl]":248,"##iexpl]":249,"[PHexpl]":250,"[#P":251,"[#P]":252,"##Wexpl]":253,"##H2-expl]":254,"[I+expl]":255,"[Se":256,"[I-expl]":257,"[=S+expl]":258,"[Sbexpl]":259,"[Pbexpl]":260,"##H+expl]":261,"[Cl":262,"##+3expl]":263,"[=C":264,"[=Wexpl]":265,"[CH2-expl]":266,"[Cl+3expl]":267,"[=Siexpl]":268,"##aexpl]":269,"[=B":270,"[PH+expl]":271,"[Biexpl]":272,"[=B]":273,"[Sn":274,"[Gaexpl]":275,"##H-expl]":276,"[=C+expl]":277,"[Inexpl]":278,"[=A":279,"[=I":280,"[SHexpl]":281,"[#S]":282,"##2expl]":283,"[=Asexpl]":284,"##+2expl]":285,"[=SHexpl]":286,"##uexpl]":287,"[SeHexpl]":288,"##rexpl]":289,"[=IH2expl]":290,"[=R":291,"[Se-expl]":292,"##lexpl]":293,"[As":294,"[Ge":295,"[=Ruexpl]":296,"[BHexpl]":297,"##Vexpl]":298,"[CH-expl]":299,"[=T":300,"[Tlexpl]":301,"##oexpl]":302,"[=Crexpl]":303,"[BH-expl]":304,"[=Vexpl]":305,"[Se+expl]":306,"[=C-expl]":307,"[NH-expl]":308,"[BH2-expl]":309,"[Wexpl]":310,"[=Teexpl]":311,"##e+expl]":312,"##H3":313,"[BH3":314,"[BH3-expl]":315,"[As+expl]":316,"##Moexpl]":317,"[Branch3_2]":318,"[#S":319,"[Sn+expl]":320,"[GeHexpl]":321,"[Al-expl]":322,"[=Si":323,"[R":324,"[=Z":325,"##texpl]":326,"[SnHexpl]":327,"[=SiHexpl]":328,"[AsHexpl]":329,"[Sn+2expl]":330,"[GeH2expl]":331,"[CH+expl]":332,"[Al+expl]":333,"##eHexpl]":334,"[Te+expl]":335,"[Hg+expl]":336,"[=F":337,"[SnH2expl]":338,"[=G":339,"[=PHexpl]":340,"[=Ptexpl]":341,"[=Snexpl]":342,"[#Siexpl]":343,"##r+2expl]":344,"##H2+expl]":345,"##hexpl]":346,"[=Moexpl]":347,"[=I]":348,"[=Feexpl]":349,"[Moexpl]":350,"[OH+expl]":351,"[Si+expl]":352,"[=Sbexpl]":353,"[Cl+expl]":354,"[P-expl]":355,"[Expl=Ring3]":356,"[Sb":357,"[Z":358,"[=Zr+2expl]":359,"[TeHexpl]":360,"[Vexpl]":361,"[#O+expl]":362,"[=Zrexpl]":363,"[=Geexpl]":364,"[M":365,"[X":366,"##dexpl]":367,"[=O":368,"[Poexpl]":369,"[PH2+expl]":370,"[Al+2expl]":371,"[SnH3expl]":372,"[Sn+3expl]":373,"[=Rhexpl]":374,"[#SHexpl]":375,"[Ruexpl]":376,"[Bi":377,"[=Se+expl]":378,"[Br+2expl]":379,"[Feexpl]":380,"[Crexpl]":381,"[I+2expl]":382,"[#P+expl]":383,"[GeH3expl]":384,"[=H":385,"[#Wexpl]":386,"[Pb":387,"[Si-expl]":388,"[Cl+2expl]":389,"[=B-expl]":390,"[=Auexpl]":391,"[=Pdexpl]":392,"[=Alexpl]":393,"[Xeexpl]":394,"##Uexpl]":395,"[PH2-expl]":396,"[I+3expl]":397,"[K":398,"##cexpl]":399,"##-2expl]":400,"##f+2expl]":401,"[#Moexpl]":402,"[=Niexpl]":403,"[=SH+expl]":404,"[=Inexpl]":405,"[=Ti":406,"[=Tiexpl]":407,"[Reexpl]":408,"[=Znexpl]":409,"[Sb+expl]":410,"[Zrexpl]":411,"[Mnexpl]":412,"[Kexpl]":413,"##gexpl]":414,"[=M":415,"[Ptexpl]":416,"[SiH-expl]":417,"[IH3expl]":418,"[Atexpl]":419,"[Tiexpl]":420,"[Taexpl]":421,"[=Coexpl]":422,"[AsH2expl]":423,"[=Fe+expl]":424,"[=GeHexpl]":425,"[=Osexpl]":426,"[=OH+expl]":427,"[BiH2expl]":428,"[=Ti+2expl]":429,"[L":430,"##Yexpl]":431,"##u+expl]":432,"[Naexpl]":433,"[Ose
xpl]":434,"[=Nbexpl]":435,"[IHexpl]":436,"[Auexpl]":437,"[=Cuexpl]":438,"[=Irexpl]":439,"[=Ru+expl]":440,"[Liexpl]":441,"[t":442,"##mexpl]":443,"##Ybexpl]":444,"##n-expl]":445,"##4expl]":446,"[Csexpl]":447,"[Caexpl]":448,"[Coexpl]":449,"[OH2+expl]":450,"[In-expl]":451,"[AlHexpl]":452,"[=Biexpl]":453,"[=Taexpl]":454,"[=SiH2expl]":455,"[Raexpl]":456,"[Rhexpl]":457,"[SbHexpl]":458,"[Zr+2expl]":459,"[XeHexpl]":460,"[PbHexpl]":461,"[teexpl]":462,"##sHexpl]":463,"##H4expl]":464,"##a-expl]":465,"##B]":466,"##o+2expl]":467,"[=W":468,"[=Uexpl]":469,"[Niexpl]":470,"[#G":471,"[#C":472,"[#B]":473,"[Cuexpl]":474,"[Prexpl]":475,"[Os":476,"[=Pbexpl]":477,"[Tl":478,"[=S-expl]":479,"[Hf+2expl]":480,"[=Tcexpl]":481,"[Ru":482,"[=Gaexpl]":483,"[Sb-expl]":484,"[Znexpl]":485,"[=Hf+2expl]":486,"[=Hgexpl]":487,"[Pb+2expl]":488,"[=Mo+2expl]":489,"[Uexpl]":490,"##-3expl]":491,"##Reexpl]":492,"##i+expl]":493,"##fexpl]":494,"##o+expl]":495,"##W+expl]":496,"[=U":497,"[=Ybexpl]":498,"[#T":499,"[#Yexpl]":500,"[#Reexpl]":501,"[#W+expl]":502,"[CH2+expl]":503,"[PH-expl]":504,"[Euexpl]":505,"[Ir":506,"[Branch3_3]":507,"[Ga-expl]":508,"[Thexpl]":509,"[Tcexpl]":510,"[=AsHexpl]":511,"[Rh":512,"[Rbexpl]":513,"[Mn":514,"[Bi+2expl]":515,"[=Hfexpl]":516,"[=Mnexpl]":517,"[=WHexpl]":518,"[#Geexpl]":519,"[W":520,"[Yexpl]":521,"[Ybexpl]":522,"##eH2expl]":523,"##r+expl]":524,"##I]":525,"##a+expl]":526,"##Mo+expl]":527,"##i-expl]":528,"##iHexpl]":529,"##Osexpl]":530,"##Euexpl]":531,"##g+expl]":532,"##VHexpl]":533,"[Baexpl]":534,"[Br+expl]":535,"[Nbexpl]":536,"[Ni-expl]":537,"[Fe":538,"[Fe+expl]":539,"[#Uexpl]":540,"[#I]":541,"[#Mo+expl]":542,"[#Osexpl]":543,"[Cr+expl]":544,"[Pt":545,"[SH2expl]":546,"[SH+expl]":547,"[IH2":548,"[Irexpl]":549,"[=P-expl]":550,"[Agexpl]":551,"[TeH2expl]":552,"[=SeHexpl]":553,"[AlH2expl]":554,"[=CH-expl]":555,"[=Bi+expl]":556,"[Sn-expl]":557,"[=Reexpl]":558,"[=Tlexpl]":559,"[#S-expl]":560,"[#Sbexpl]":561,"[#SiHexpl]":562,"[SbH2expl]":563,"[Mg+expl]":564,"[Bi+expl]":565,"[#Crexpl]":566,"[TlHexpl]":567,"[TlH2expl]":568,"[=UHexpl]":569,"[Rh+expl]":570,"[Mn-2expl]":571,"[IH2+3expl]":572,"[U":573,"[VHexpl]":574,"##mH3expl]":575,"##r-2expl]":576,"##AsHexpl]":577,"##s+2expl]":578,"##H+3expl]":579,"##Hoexpl]":580,"##b+expl]":581,"##+4expl]":582,"##YHexpl]":583,"##nH2expl]":584,"##uH3expl]":585,"##f+4expl]":586,"##d-expl]":587,"##hH2expl]":588,"##o-3expl]":589,"##Dy":590,"##Laexpl]":591,"[Ba+expl]":592,"[=Yexpl]":593,"[=Euexpl]":594,"[=VHexpl]":595,"[=YHexpl]":596,"[Noexpl]":597,"[Ni+expl]":598,"[#Euexpl]":599,"[#AsHexpl]":600,"[#Hoexpl]":601,"[#Dy":602,"[#Laexpl]":603,"[Ceexpl]":604,"[Cdexpl]":605,"[Co-3expl]":606,"[PH2expl]":607,"[Paexpl]":608,"[Pmexpl]":609,"[PH4expl]":610,"[Pd-expl]":611,"[EuH3expl]":612,"[Srexpl]":613,"[Smexpl]":614,"[SmH3expl]":615,"[IH4expl]":616,"[IH+3expl]":617,"[#Ndexpl]":618,"[=PH2expl]":619,"[=PH+expl]":620,"[Acexpl]":621,"[Amexpl]":622,"[Gdexpl]":623,"[Tmexpl]":624,"[Ta-expl]":625,"[ThH2expl]":626,"[=Sb+expl]":627,"[=SnH2expl]":628,"[AlH-expl]":629,"[Al-2expl]":630,"[Hoexpl]":631,"[Hf+4expl]":632,"[Hg-expl]":633,"[HgHexpl]":634,"[#Prexpl]":635,"[=Ceexpl]":636,"[=Cdexpl]":637,"[=Baexpl]":638,"[=Agexpl]":639,"[AsH4expl]":640,"[=Thexpl]":641,"[=Tmexpl]":642,"[#Seexpl]":643,"[#SH-expl]":644,"[=Si+expl]":645,"[=FeHexpl]":646,"[Zr-2expl]":647,"[Mtexpl]":648,"[Mo+expl]":649,"[=Os+2expl]":650,"[BiHexpl]":651,"[PbH2expl]":652,"[PbH4expl]":653,"[=Ti+expl]":654,"[=W-expl]":655,"[#Gdexpl]":656,"[#Ceexpl]":657,"[#Coexpl]":658,"[Os+expl]":659,"[OsH2expl]":660,"[Os+2expl]":661,"[Os-3expl]":662,"[RuH2expl
]":663,"[Ru+3expl]":664,"[Ru+2expl]":665,"[Ru-2expl]":666,"[=U+2expl]":667,"[#Teexpl]":668,"[#Tbexpl]":669,"[#Ta+expl]":670,"[Ir+3expl]":671,"[Ir-2expl]":672,"[Ir-3expl]":673,"[RhHexpl]":674,"[MnHexpl]":675,"[WH2expl]":676,"[W+2expl]":677,"[Fe-expl]":678,"[Fe-2expl]":679,"[Pt-expl]":680,"[Pt-2expl]":681,"[U+3expl]":682,"[#Dyexpl]":683}}} -------------------------------------------------------------------------------- /tokenizers/pubchem10M_tokenizer/tokenizer_config.json: -------------------------------------------------------------------------------- 1 | {"tokenizer_class": "PreTrainedTokenizerFast"} -------------------------------------------------------------------------------- /tpe.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | from scipy.optimize import curve_fit 4 | 5 | class TrainingSpeedEstimator(): 6 | """Implements training speed estimators from 7 | Ru, Robin, et al. "Speedy Performance Estimation for Neural Architecture Search." 8 | Advances in Neural Information Processing Systems 34 (2021). 9 | """ 10 | 11 | def __init__(self, E=1, gamma=0.999, norm=True): 12 | """ 13 | Parameters 14 | ---------- 15 | E: int 16 | number of "burn-in" epochs to throw away at beginning of training 17 | for TSE-E estimator 18 | 19 | gamma: float 20 | hyperparam for exponential moving average 21 | 22 | 23 | normalize: bool 24 | boolean for whether or not to normalize loss data before fitting curve; normalization 25 | is simply dividing by the maximum loss value and generally gives better results (default True) 26 | 27 | """ 28 | 29 | self.E = E 30 | self.gamma = gamma 31 | self.normalize = norm 32 | 33 | def estimate(self, df_train, T, df_energy=None): 34 | """ 35 | Parameters 36 | --------- 37 | df_train: Pandas dataframe 38 | dataframe with 'epoch' and 'train_loss_step' columns 39 | T: int 40 | number of epochs to consider in estimation 41 | df_energy: Pandas dataframe (Optional) 42 | dataframe with GPU index, timestamps, and power draw from nvidia-smi 43 | Returns 44 | ------- 45 | tse_dict: dict 46 | Results from three TSE estimation methods for training loss curve 47 | """ 48 | 49 | B = len(df_train[df_train['epoch']==T]) # number of steps (minibatches) in an epoch 50 | T_end = df_train.iloc[-1].epoch + 1 # number of total epochs 51 | 52 | tse = df_train[df_train['epoch'] < T].train_loss_stp.sum() / B 53 | 54 | tsee = df_train[(df_train['epoch'] >= T - self.E + 1) & (df_train['epoch'] <= T)].train_loss_stp.sum() / B 55 | 56 | tseema = 0 57 | for t in range(0, T+1): 58 | sum_losses = df_train[df_train['epoch']==t].train_loss_stp.sum() / B * self.gamma ** (T-t) 59 | tseema += sum_losses 60 | 61 | if df_energy is not None: 62 | energies = [] 63 | for idx in df_energy[' index'].unique(): 64 | df0 = df_energy[df_energy[' index']==idx].reset_index(drop=True) # power by GPU device index 65 | E = self._compute_energy(df0) 66 | energies.append(E) 67 | total_energy = np.sum(energies) / 1e3 68 | else: 69 | total_energy = 0 70 | 71 | energy_per_epoch = total_energy / T_end 72 | energy_per_step = total_energy / len(df_train) 73 | tpe_dict = {'tse': tse, 'tsee': tsee, 'tseema': tseema, 74 | 'T_end': T_end, 'energy_per_epoch (kJ)': energy_per_epoch, 75 | 'energy_per_step (kJ)': energy_per_step} 76 | 77 | return tpe_dict 78 | 79 | def _compute_energy(self, df): 80 | ts = pd.to_datetime(df['timestamp']) 81 | ts = ts - ts[0] 82 | ts = ts.dt.total_seconds().to_numpy() 83 | # Quadrature by trapezoidal rule 84 | deltas = 
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
import argparse
import json
import os

from torch_geometric.datasets import QM9
from torch_geometric.nn import DimeNet

from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning import (LightningDataModule, LightningModule, Trainer,
                               seed_everything)

from lit_models.models import LitDimeNet
from lit_data.data import LitQM9


def format_args(config):
    if "TASK" in config:
        config["TASK"] = str(config["TASK"])
    if "BATCH_SIZE" in config:
        config["BATCH_SIZE"] = int(config["BATCH_SIZE"])
    if "NUM_EPOCHS" in config:
        config["NUM_EPOCHS"] = int(config["NUM_EPOCHS"])
    if "NUM_GPUS" in config:
        config["NUM_GPUS"] = int(config["NUM_GPUS"])


def train_from_config(config):
    task = config.task  # currently unused by the QM9 example
    batch_size = config.batch_size  # currently unused; LitQM9 uses its own default
    num_train_epochs = config.num_epochs
    num_nodes = config.num_nodes

    seed_everything(42)

    dataset = QM9('data/QM9')
    target = 0
    _, datasets = DimeNet.from_qm9_pretrained('data/QM9', dataset, target)
    datamodule = LitQM9(datasets)
    datamodule.setup()
    model = LitDimeNet(target)

    # set up checkpointing
    checkpoint_callback = ModelCheckpoint(monitor='val_loss', save_top_k=1)

    trainer = Trainer(
        gpus=-1,  # use all available GPUs on each node
        num_nodes=num_nodes,
        accelerator='ddp',
        max_epochs=num_train_epochs,
        callbacks=[checkpoint_callback])

    trainer.fit(model, datamodule=datamodule)
    trainer.test()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--task", type=str, default=None)  # passed through by run.sh
    parser.add_argument("--batch_size", type=int)
    parser.add_argument("--num_epochs", type=int)
    parser.add_argument("--num_nodes", type=int)
    parser.add_argument("--log_dir", type=str)
    parser.add_argument("--config",
                        type=str,
                        help="JSON config filename for training parameters")
    args, unknown = parser.parse_known_args()
    train_from_config(args)
--------------------------------------------------------------------------------
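A hedged end-to-end sketch of launching a scaling run with the batch scripts above (values are illustrative): submit.sh takes batch size, number of epochs, and number of nodes as positional arguments, and run.sh reads the corresponding exported environment variables inside the Slurm job (sbatch propagates the submitting shell's environment by default, so exporting TASK beforehand also reaches run.sh):

    export TASK=qm9          # consumed by run.sh's --task flag
    ./submit.sh 128 10 4     # batch size 128, 10 epochs, 4 nodes

If the currently unused --config path in train.py is ever wired up, format_args suggests a JSON file of the form (hypothetical):

    {"TASK": "qm9", "BATCH_SIZE": 128, "NUM_EPOCHS": 10, "NUM_GPUS": 8}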