├── .gitignore ├── LICENSE ├── README.md ├── environment.yml ├── w1-neurons ├── current.csv ├── spikes.csv ├── traces.csv └── w1-neurons.ipynb ├── w2-synapses-networks ├── order-sensitive-network.png └── w2-synapses-networks.ipynb ├── w3-brain-structure └── w3-exercise.ipynb ├── w4-learning └── w4-learning-exercise.ipynb ├── w5-snn └── w5-snn-exercise.ipynb ├── w6-understanding └── w6-understanding-exercise.ipynb └── w8-neuromorphic ├── chip.py ├── dataset ├── dataset_labels ├── models.py ├── training.py ├── w8-neuromorphic-exercise-v1.ipynb └── w8-neuromorphic-exercise.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/ 161 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 neuro4ml 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # exercises 2 | Exercises for "Neuroscience for machine learners" course 3 | 4 | ## Week by week 5 | 6 | * W1. Neurons. [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/neuro4ml/exercises/blob/main/w1-neurons/w1-neurons.ipynb) 7 | * W2. Synapses and networks. 
[![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/neuro4ml/exercises/blob/main/w2-synapses-networks/w2-synapses-networks.ipynb) 8 | * W3. Brain structure. [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/neuro4ml/exercises/blob/main/w3-brain-structure/w3-exercise.ipynb) 9 | * W4. Learning rules. [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/neuro4ml/exercises/blob/main/w4-learning/w4-learning-exercise.ipynb) 10 | * W5. Training SNNs. [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/neuro4ml/exercises/blob/main/w5-snn/w5-snn-exercise.ipynb) 11 | * W6. Understanding neural networks. [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/neuro4ml/exercises/blob/main/w6-understanding/w6-understanding-exercise.ipynb) 12 | * W8.
Neuromorphic computing [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/neuro4ml/exercises/blob/main/w8-neuromorphic/w8-neuromorphic-exercise.ipynb) 13 | 14 | ## Local install 15 | 16 | ``` 17 | conda env create -f environment.yml 18 | conda activate neuro4ml 19 | jupyter notebook 20 | ``` -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: neuro4ml 2 | channels: 3 | - pytorch 4 | - conda-forge 5 | dependencies: 6 | - python=3.9 7 | - matplotlib 8 | - seaborn 9 | - scipy 10 | - jupyter 11 | - notebook 12 | - ipykernel 13 | - ipywidgets 14 | - ipympl 15 | - pytorch 16 | - cpuonly 17 | - pip 18 | - pip: 19 | - brian2 20 | - snntorch 21 | - omegaconf 22 | - tqdm -------------------------------------------------------------------------------- /w1-neurons/spikes.csv: -------------------------------------------------------------------------------- 1 | 5.500000000000000000e+00 9.699999999999999289e+00 1.800000000000000000e+01 1.830000000000000071e+01 2.320000000000000284e+01 2.900000000000000000e+01 2.930000000000000426e+01 3.370000000000000284e+01 3.870000000000000284e+01 4.479999999999999716e+01 4.790000000000000568e+01 4.939999999999999858e+01 5.020000000000000284e+01 5.390000000000000568e+01 6.020000000000000284e+01 6.940000000000000568e+01 7.720000000000000284e+01 8.520000000000000284e+01 8.590000000000000568e+01 8.959999999999999432e+01 9.209999999999999432e+01 9.550000000000000000e+01 9.860000000000000853e+01 9.990000000000000568e+01 1.035000000000000000e+02 1.067999999999999972e+02 1.084000000000000057e+02 1.128000000000000114e+02 1.192999999999999972e+02 1.248000000000000114e+02 1.249000000000000057e+02 1.277000000000000028e+02 1.360000000000000000e+02 1.505000000000000000e+02 1.635000000000000000e+02 1.662000000000000171e+02 1.748000000000000114e+02 
1.749000000000000057e+02 1.804000000000000057e+02 1.825000000000000000e+02 1.861999999999999886e+02 1.916000000000000227e+02 1.925000000000000000e+02 1.997000000000000171e+02 2.050000000000000000e+02 2.083000000000000114e+02 2.151000000000000227e+02 2.159000000000000057e+02 2.178000000000000114e+02 2.222000000000000171e+02 2.267000000000000171e+02 2.282000000000000171e+02 2.328000000000000114e+02 2.360000000000000000e+02 2.360999999999999943e+02 2.375999999999999943e+02 2.419000000000000057e+02 2.512000000000000171e+02 2.523000000000000114e+02 2.569000000000000341e+02 2.570999999999999659e+02 2.580999999999999659e+02 2.619000000000000341e+02 2.690000000000000000e+02 2.696999999999999886e+02 2.791999999999999886e+02 2.801999999999999886e+02 2.886000000000000227e+02 2.910000000000000568e+02 2.945000000000000568e+02 2.968000000000000114e+02 2.975000000000000000e+02 3.048000000000000114e+02 3.057000000000000455e+02 3.092000000000000455e+02 3.115999999999999659e+02 3.159000000000000341e+02 3.179000000000000341e+02 3.214000000000000341e+02 3.274000000000000341e+02 3.291000000000000227e+02 3.301000000000000227e+02 3.326999999999999886e+02 3.409000000000000341e+02 3.446000000000000227e+02 3.463000000000000114e+02 3.496999999999999886e+02 3.511000000000000227e+02 3.556000000000000227e+02 3.596999999999999886e+02 3.657000000000000455e+02 3.660000000000000000e+02 3.668999999999999773e+02 3.693999999999999773e+02 3.710000000000000000e+02 3.727000000000000455e+02 3.763999999999999773e+02 3.815999999999999659e+02 3.817000000000000455e+02 3.819000000000000341e+02 3.833000000000000114e+02 3.839000000000000341e+02 3.845999999999999659e+02 3.900999999999999659e+02 3.909000000000000341e+02 3.916999999999999886e+02 3.955000000000000000e+02 3.964000000000000341e+02 3.991999999999999886e+02 4.001999999999999886e+02 4.003000000000000114e+02 4.061000000000000227e+02 4.065000000000000000e+02 4.068000000000000114e+02 4.140000000000000000e+02 4.171999999999999886e+02 4.191000000000000227e+02 
4.231000000000000227e+02 4.308999999999999773e+02 4.341000000000000227e+02 4.386000000000000227e+02 4.483000000000000114e+02 4.571999999999999886e+02 4.591000000000000227e+02 5.045000000000000568e+02 5.161000000000000227e+02 5.180000000000000000e+02 5.251000000000000227e+02 5.326000000000000227e+02 5.327000000000000455e+02 5.327000000000000455e+02 5.378000000000000682e+02 5.435000000000000000e+02 5.449000000000000909e+02 5.525000000000000000e+02 5.528000000000000682e+02 5.642000000000000455e+02 5.661000000000000227e+02 5.666999999999999318e+02 5.746999999999999318e+02 5.765000000000000000e+02 5.789000000000000909e+02 5.809000000000000909e+02 5.846000000000000227e+02 5.866999999999999318e+02 5.882000000000000455e+02 5.936000000000000227e+02 5.957000000000000455e+02 5.961000000000000227e+02 5.963000000000000682e+02 6.006000000000000227e+02 6.008999999999999773e+02 6.023999999999999773e+02 6.045000000000000000e+02 6.078999999999999773e+02 6.100000000000000000e+02 6.120999999999999091e+02 6.147000000000000455e+02 6.157999999999999545e+02 6.161000000000000227e+02 6.196000000000000227e+02 6.235000000000000000e+02 6.257000000000000455e+02 6.268999999999999773e+02 6.304000000000000909e+02 6.306000000000000227e+02 6.320000000000000000e+02 6.323000000000000682e+02 6.326000000000000227e+02 6.334000000000000909e+02 6.381000000000000227e+02 6.386000000000000227e+02 6.401000000000000227e+02 6.415000000000000000e+02 6.444000000000000909e+02 6.455000000000001137e+02 6.497000000000000455e+02 6.501000000000000227e+02 6.548999999999999773e+02 6.558000000000000682e+02 6.573999999999999773e+02 6.595000000000001137e+02 6.600000000000000000e+02 6.602000000000000455e+02 6.647000000000000455e+02 6.656000000000000227e+02 6.715000000000000000e+02 6.731000000000000227e+02 6.745999999999999091e+02 6.755999999999999091e+02 6.778000000000000682e+02 6.779000000000000909e+02 6.829000000000000909e+02 6.848000000000000682e+02 6.879000000000000909e+02 6.906999999999999318e+02 6.939000000000000909e+02 
6.949000000000000909e+02 7.009000000000000909e+02 7.023000000000000682e+02 7.053999999999999773e+02 7.129000000000000909e+02 7.182000000000000455e+02 7.206000000000000227e+02 7.236000000000000227e+02 7.310000000000000000e+02 7.324000000000000909e+02 7.326000000000000227e+02 7.418999999999999773e+02 7.422000000000000455e+02 7.446000000000000227e+02 7.507999999999999545e+02 7.541000000000000227e+02 7.604000000000000909e+02 7.634000000000000909e+02 7.648000000000000682e+02 7.695000000000000000e+02 7.735000000000001137e+02 7.758000000000000682e+02 7.762999999999999545e+02 7.825000000000001137e+02 7.868999999999999773e+02 7.873999999999999773e+02 7.896000000000000227e+02 7.951000000000000227e+02 7.976000000000000227e+02 8.002000000000000455e+02 8.022000000000000455e+02 8.035000000000000000e+02 8.045000000000000000e+02 8.059000000000000909e+02 8.085000000000000000e+02 8.126000000000000227e+02 8.132999999999999545e+02 8.142000000000000455e+02 8.156000000000000227e+02 8.172000000000000455e+02 8.189000000000000909e+02 8.252000000000000455e+02 8.306999999999999318e+02 8.340000000000001137e+02 8.413999999999999773e+02 8.436000000000000227e+02 8.508999999999999773e+02 8.523000000000000682e+02 8.686000000000000227e+02 8.715000000000000000e+02 8.790000000000000000e+02 8.794000000000000909e+02 8.878999999999999773e+02 8.880000000000000000e+02 8.886000000000000227e+02 8.978000000000000682e+02 8.984000000000000909e+02 9.060000000000000000e+02 9.072999999999999545e+02 9.076000000000000227e+02 9.088999999999999773e+02 9.110000000000000000e+02 9.155000000000001137e+02 9.172999999999999545e+02 9.217000000000000455e+02 9.242000000000000455e+02 9.255000000000000000e+02 9.277000000000000455e+02 9.286000000000000227e+02 9.298000000000000682e+02 9.325000000000000000e+02 9.338000000000000682e+02 9.396000000000000227e+02 9.421000000000000227e+02 9.450000000000000000e+02 9.466000000000000227e+02 9.472999999999999545e+02 9.478000000000000682e+02 9.491000000000000227e+02 9.536999999999999318e+02 
9.559000000000000909e+02 9.562000000000000455e+02 9.596000000000000227e+02 9.666999999999999318e+02 9.691000000000000227e+02 9.777000000000000455e+02 9.778999999999999773e+02 9.826000000000000227e+02 9.863000000000000682e+02 9.894000000000000909e+02 9.957000000000000455e+02 9.960000000000000000e+02 9.982000000000000455e+02 2 | 1.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 2.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+00 4.000000000000000000e+00 3.000000000000000000e+00 8.000000000000000000e+00 3.000000000000000000e+00 3.000000000000000000e+00 3.000000000000000000e+00 1.000000000000000000e+00 3.000000000000000000e+00 8.000000000000000000e+00 3.000000000000000000e+00 1.000000000000000000e+00 3.000000000000000000e+00 9.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 3.000000000000000000e+00 1.000000000000000000e+00 3.000000000000000000e+00 8.000000000000000000e+00 9.000000000000000000e+00 3.000000000000000000e+00 3.000000000000000000e+00 3.000000000000000000e+00 3.000000000000000000e+00 9.000000000000000000e+00 3.000000000000000000e+00 9.000000000000000000e+00 3.000000000000000000e+00 9.000000000000000000e+00 3.000000000000000000e+00 9.000000000000000000e+00 3.000000000000000000e+00 3.000000000000000000e+00 0.000000000000000000e+00 3.000000000000000000e+00 2.000000000000000000e+00 3.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 2.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 3.000000000000000000e+00 8.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 3.000000000000000000e+00 2.000000000000000000e+00 9.000000000000000000e+00 4.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 9.000000000000000000e+00 4.000000000000000000e+00 3.000000000000000000e+00 
1.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 9.000000000000000000e+00 1.000000000000000000e+00 3.000000000000000000e+00 1.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 1.000000000000000000e+00 7.000000000000000000e+00 9.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 3.000000000000000000e+00 1.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 7.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 4.000000000000000000e+00 3.000000000000000000e+00 6.000000000000000000e+00 7.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 6.000000000000000000e+00 9.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 7.000000000000000000e+00 6.000000000000000000e+00 9.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+00 7.000000000000000000e+00 4.000000000000000000e+00 6.000000000000000000e+00 7.000000000000000000e+00 9.000000000000000000e+00 3.000000000000000000e+00 2.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 4.000000000000000000e+00 3.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+00 
0.000000000000000000e+00 9.000000000000000000e+00 7.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 4.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 7.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 4.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+00 6.000000000000000000e+00 9.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 9.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 8.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 7.000000000000000000e+00 9.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 9.000000000000000000e+00 8.000000000000000000e+00 9.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 5.000000000000000000e+00 2.000000000000000000e+00 5.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 5.000000000000000000e+00 5.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 5.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 
2.000000000000000000e+00 4.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 3.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 4.000000000000000000e+00 2.000000000000000000e+00 8.000000000000000000e+00 2.000000000000000000e+00 5.000000000000000000e+00 2.000000000000000000e+00 3.000000000000000000e+00 2.000000000000000000e+00 3.000000000000000000e+00 1.000000000000000000e+00 5.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 6.000000000000000000e+00 9.000000000000000000e+00 8.000000000000000000e+00 6.000000000000000000e+00 6.000000000000000000e+00 9.000000000000000000e+00 8.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 9.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 9.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+00 2.000000000000000000e+00 9.000000000000000000e+00 5.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 4.000000000000000000e+00 9.000000000000000000e+00 7.000000000000000000e+00 2.000000000000000000e+00 5.000000000000000000e+00 4.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 4.000000000000000000e+00 9.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 4.000000000000000000e+00 6.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+00 3 | -------------------------------------------------------------------------------- /w2-synapses-networks/order-sensitive-network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neuro4ml/exercises/4c92694c811ad21db589a8cc7d6a42888b63e13d/w2-synapses-networks/order-sensitive-network.png 
-------------------------------------------------------------------------------- /w2-synapses-networks/w2-synapses-networks.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Neuro4ML - Week 2 - Synapses and networks\n", 8 | "\n", 9 | "In this week's exercise you'll use different synapse models, excitation and inhibition, and network structure to design a neural network that can determine the temporal order of its inputs." 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 1, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import numpy as np\n", 19 | "from scipy.integrate import solve_ivp\n", 20 | "import matplotlib.pyplot as plt" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "## Part 1 - Delays using biexponential synapses" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "metadata": {}, 33 | "source": [ 34 | "In the cell below we check properties of the biexponential synapse that has equations:\n", 35 | "\n", 36 | "$$\\begin{aligned}\n", 37 | "\\tau \\frac{dv}{dt} &= ax-v \\\\\n", 38 | "\\tau_x \\frac{dx}{dt} &= -x\n", 39 | "\\end{aligned}$$\n", 40 | "\n", 41 | "If we want to know the effect of an incoming spike that increases $x$ by 1, we can solve with initial conditions\n", 42 | "\n", 43 | "$$\\begin{aligned}\n", 44 | "v(0) &= 0 \\\\\n", 45 | "x(0) &= 1\n", 46 | "\\end{aligned}$$\n", 47 | "\n", 48 | "We can see that we can solve for $x$ on its own to just get exponential decay so $x(t)=\\exp(-t/\\tau_x)$. 
Then we can just guess (by putting the equations into matrix vector form and noting the matrix is upper triangular and so the eigenvalues are just the diagonal elements) that $v(t)=A\\cdot\\exp(-t/\\tau_x)+B\\cdot\\exp(-t/\\tau)$ and substitute to get the solution:\n", 49 | "\n", 50 | "$$\\begin{aligned}\n", 51 | "v(t) &= \\frac{a\\tau_x}{\\tau-\\tau_x}\\left(e^{-t/\\tau}-e^{-t/\\tau_x}\\right) \\\\\n", 52 | "x(t) &= e^{-t/\\tau_x}\n", 53 | "\\end{aligned}$$\n", 54 | "\n", 55 | "Setting $v^\\prime(t_0)=0$ and solving for $t_0$ to find the maximum we get\n", 56 | "\n", 57 | "$$t_0 = \\frac{\\tau\\tau_x}{\\tau-\\tau_x}\\log\\frac{\\tau}{\\tau_x}$$\n", 58 | "\n", 59 | "and the maximum value is\n", 60 | "\n", 61 | "$$v(t_0) = a\\left(\\frac{\\tau}{\\tau_x}\\right)^{-\\frac{\\tau}{\\tau-\\tau_x}}$$\n", 62 | "\n", 63 | "Note that we can choose to set $a=(\\tau/\\tau_x)^{\\tau/(\\tau-\\tau_x)}$ if we want to normalise so that the maximum value $v(t_0)=1$.\n", 64 | "\n", 65 | "### Task 1A\n", 66 | "\n", 67 | "The code below solves this differential equation using Scipy's numerical ODE solver. Write your own code using Euler integration with a time step of ``dt`` and compare to the scipy solution. 
Compare to the analytic solution derived above.\n", 68 | "\n", 69 | "Reminder: the Euler method for a differential equation $z^\\prime=f(z)$ is:\n", 70 | "\n", 71 | "$$z(t+\\delta t)\\approx z(t)+\\delta t \\cdot f(z)$$" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": 2, 77 | "metadata": {}, 78 | "outputs": [ 79 | { 80 | "data": { 81 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAnYAAAHWCAYAAAD6oMSKAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAB+2klEQVR4nO3deVhU1f8H8PedGYZhHfZNEFAUFFdwA0PNBVzLNi1NM23RSlPLymzTSvNbllppllv6M7XFytJyX5NcEFzBXUEFWWTfYe7vj5FJYslhu7O8X88zT3DnzJ3PeAXfnXPPOYIoiiKIiIiIyOjJpC6AiIiIiBoGgx0RERGRiWCwIyIiIjIRDHZEREREJoLBjoiIiMhEMNgRERERmQgGOyIiIiITwWBHREREZCIUUhfQUDQaDW7evAk7OzsIgiB1OUREREQNQhRF5ObmwsvLCzJZ7X1yJhPsbt68CR8fH6nLICIiImoUSUlJ8Pb2rrWNyQQ7Ozs7ANoPbW9vL3E15kdTUIALEb0AAK0O7IfM2lriioiIiExDTk4OfHx8dFmnNiYT7CqGX+3t7RnsJKBRKGArlwPQXgMGOyIiooZ1L7eacfIEERERkYlgsCMiIiIyEQx2RERERCbCZO6xI2kJFhZwf/st3ddERETU9BjsqEEIFhZwGj1a6jKIiIjMGodiiYiIiEwEe+yoQYjl5Sg4FgMAsO4SCuHO0idERETUdBjsqEGIxcVIfOopAEDg8RgIXMeOiIioyXEoloiIiMhEMNgRERERmQgGOyIiIiITwWBHREREZCLqFOyWLFkCf39/qFQqhIaG4sCBAzW23bRpEwYMGABXV1fY29sjLCwM27Ztq9Rm9erVEAShyqOoqKgu5RERERGZJb2D3caNGzF16lTMmjULsbGxiIiIwKBBg5CYmFht+/3792PAgAHYunUrYmJicP/992PYsGGIjY2t1M7e3h7JycmVHiqVqm6fioiIiMgMCaIoivq8oHv37ggJCcHSpUt1x9q0aYPhw4dj3rx593SO4OBgjBw5Eu+88w4AbY/d1KlTkZWVpU8pleTk5ECtViM99Tbs7e2rPC/IAIXFP2urlRaX13guQQAUyjq2LSkHavoTFQCLOrYtKylHbVfKwrKObUvLIWrq31YsKUHOxu8gCIDTmDHQCApoNDUXoVDKIAgCAKC8VFN7WwsZBNmdtmUaaMobpq3cQgZZXdqWa6Apq6WtQoBMLtO7raZcg/Ja2soUAuR1aasRUV5a80WWyQXIFfq3FTUiyhqqrUyA3OJOW1FEWUnDtNXr556/I6pv20C/IwA9f+75O6JKW/6OMN/fETk5OXBxc0J2dna1Gedueq1jV1JSgpiYGLzxxhuVjkdGRuLQoUP3dA6NRoPc3Fw4OTlVOp6XlwdfX1+Ul5ejU6dOeP/999G5c+caz1NcXIzi4mLd9zk5OQCAVa8fhJXSpkp733bOGPpSR933K2ccqPFie7VywEOvhOi+XzPrEIrySqtt6+Zrh8dmdtV9v/69w8i9Xf0QsqOnDUa92133/Q/zjiEzOb/atnZOKoydG677/ucFx5F6LbfatipbC0z4JEL3/W+fn8DNC1nVtlUoZXh+cR/d938uO41r
pzOqbQsAL37VV/f1zlVncel4Wo1tE+8PRZEownbLObgn5EN+raDGtuM/vg9WdkoAwMEfL+D0vhs1th3zQRjsXawAAH//ehlxO6rvHQaAx9/pBmcvWwBAzB9XcXTL1RrbPvpGF7j7aX9ATuxOQvSmSzW2HT6tM5oFOgIAzh64if0bztfYdsiLHeDX3gUAcP7wLexeE19j26hn2yEg1A0AcDkuHdu+OV1j275j26BNuCcAIPHsbWz58mSNbXs93hrt+3gDAJIvZOGXz2JrbBv2cEuERPoCANISc/HjR8dqbNt1iB+6DWsBALidko8Nc47U2LbTgObo+UgAACD3dhHWvhVdY9t2vZuh9xOBAICivFKsnHGwxrZBPTzQb1xbAEBZiQZfv7yvxrYtQ1wx8Ln2uu9ra8vfEVqN+TviuUW9dUFw77oEJPydUmNb/o7Q4u8ILXP/HVFYUv3vgeroNRSbnp6O8vJyuLu7Vzru7u6OlJSaf0DvtmDBAuTn52PEiBG6Y0FBQVi9ejU2b96M9evXQ6VSoWfPnrhw4UKN55k3bx7UarXu4ePjo89HoUaw6fgN/Bp3E+sOJyL+Zo7U5RAREZkdvYZib968iWbNmuHQoUMICwvTHf/www+xdu1aJCQk1Pr69evX45lnnsGvv/6K/v3719hOo9EgJCQEvXr1wuLFi6ttU12PnY+PD4di9W2rxzBLcXEZfjiahIU7LyC7UPt/HjKZgGAve7T3sIV/zk2oLGRIdvfFjaxixN/IxoXUPF0t1ko5Hgv1xjMRLeDuZMVhFnCYRdfWzIdZ/mls3L8jOBTL3xH8HXGnrbEMxbq4uEAul1fpnUtNTa3Si/dvGzduxIQJE/DDDz/UGuoAQCaToWvXrrX22FlaWsLS0rLKcQtLeaVfNDW5lzZ1aqtsnLaKxmprcW9tk24X4KX1sTiRlAUA8He1wVNhvngoxBtqKwtoCgpwLuQJAMCI4zGQ3dlSLDWnCH+cTsF3hxNx7lYulv99DRtjb2BKv1YY19MPFnIZ5BYy3GvFcoUM8nv8W9tobeUy3OtWuPq0lcllkDVGW5kA2T3+HdanrSAT7vlnQ6+2QuO0BRrx556/I/Ruq9fPPX9HAODvCF1bM/wdoc/r9RqKVSqVCA0NxY4dOyod37FjB8LDw2t4lbanbty4cfjuu+8wZMiQ/3wfURQRFxcHT09PfcqjRrL/fBqGfn4QJ5KyYGepwLvD2mLHtF4Y19MfaiuLWl/rZq/CU+F++HNqBFY/3RXtmtkjt7gMH26Nx0NL/sKFW9XfE0RERET606vHDgCmT5+OMWPGoEuXLggLC8PXX3+NxMRETJw4EQAwc+ZM3LhxA2vWrAGgDXVjx47FokWL0KNHD11vn5WVFdRqNQBg9uzZ6NGjB1q1aoWcnBwsXrwYcXFx+PLLLxvqc1IdbTp+Ha/+cAIaEejo44Clo0Pg5WCl93kEQUCfQDdEtHLFjzFJmLs1Aadv5GDI5wfxztC2GN29uW7YhYiIiOpG72A3cuRIZGRkYM6cOUhOTka7du2wdetW+PpqZ8wkJydXWtNu2bJlKCsrw4svvogXX3xRd/ypp57C6tWrAQBZWVl47rnnkJKSArVajc6dO2P//v3o1q1bPT8e1cdPMdfx6o8nIIrAIyHemPtwO1gq7r07uDpymYCRXZujT6AbXvvxJPadT8Nbv5zGiaQsvD+8HVR6DOUQERFRZXqvY2eoKtaxu5cbC+m//Xk6BZPWxUAUgSd7NMecB9rpbgyujvYeu1AAQOBd99jVRhRFfL3/Mub/mQCNCHTxdcSKp7pCbV378C4REZE50SfjcK9YquLMzWxM2xgHUQSe6Pbfoa6uBEHA871bYs347rBXKXDsWiZGLItGSja3kiMiIqoLBjuqJD2vGM+tiUFhaTkiWrng/QeDGyXU3e2+Vi74fmIY3O0tce5WLh5ZegiX0vIa9T2JiIhMEYMd6YiiiNd/PIkbWYVo4WKDL54IgUJ+
b39FBIUCLi++CJcXX4Sg0PvWTQR52OPHieFo4WKDG1mFeOLrv3E1/d5X2iYiIiIGO7rL+iNJ2JWQCqVChqVPhup1r5ugVMJ18ktwnfwSBKWyTu/v42SNHyaGIcjDDqm5xRi9/DCuZ9a8LRkRERFVxmBHAICr6fl4//ezAIDXogIR6GEnSR3OtpZYO6E7Wrhqe+5GLz+MWzm8546IiOheMNgRRFHE27+eRmFpOcJaOGN8T3/9z6HRoPjCBRRfuABRU8ueQvfA1c4S3z3TAz5OVriWUYCxK44gp6j6zZOJiIjoHwx2hD9Pp+DAhXQoFTJ89Ej7Ok2WEIuKcHnYA7g87AGIRfXvYfNQq/DdMz10Eype+L/jKC2vX2AkIiIydQx2Zq6gpEw3BDuxd0v4OttIXNE/fJysseKprrBWynHwYjre+vk0TGTZRSIiokbBYGfmvtp3GTezi+DtaIUX+rSUupwq2jVT4/MnOkMmABuPJWHJ3ktSl0RERGSwGOzMWEZeMVYcuAwAeHNwG4PdzqtfG3e8OywYAPDJ9nPYnXBL4oqIiIgME4OdGVu69xLyS8rRvpkag9p5SF1OrZ4K98OTPZpDFIGpG+K4xh0REVE1GOzMVEp2Edb8fQ0A8EpkawhC4+4u0RDeGRqMzs0dkFNUhon/F4OCkjKpSyIiIjIoDHZmasneiygp06CrnyN6t3aVupx7olTI8NWToXCxtURCSi5e/+kUJ1MQERHdhcHODN3OL8H3x5IAAFP7N0xvnaBQwGn8eDiNH1+nLcXulbu9CktGh0AhE/DbiZvYcDSp0d6LiIjI2DDYmaE10VdRVKpBu2b2CG/p3CDnFJRKuL82A+6vzajzlmL3qpu/E2ZEBQIAZv92Bhdu5Tbq+xERERkLBjszU1hSjjXR2nvrnuvV0ijuravOsxEtENHKBUWlGkxeH4ui0nKpSyIiIpIcg52Z+fH4ddzOL4G3oxUGN+BMWFGjQcn1Gyi5fqPeW4rdC5lMwIIRHeFiq0RCSi7mbY1v9PckIiIydAx2ZkQURXx76CoAYMJ9/lDIG+7yi0VFuNS/Py71798gW4rdCzc7FT55rCMA4Nvoa9h+JqVJ3peIiMhQMdiZkSNXbuNiah6slXI8GuotdTkNok+gG56N8AcAvPbTSdzKaZpQSUREZIgY7MzIusOJAIAHO3nBTmUhcTUNZ0ZUENo1s0dWQSne+Okkl0AhIiKzxWBnJjLyivHnae1Q5ahuvhJX07CUChk+HdEJSrkMe86l4YeY61KXREREJAkGOzPxY8x1lJRr0MFbjfbeaqnLaXCt3e0wPbI1AOD9387iZlahxBURERE1PQY7MyCKIjbeWZB4VLfmElfTeJ6NaIHOzR2QW1yG1zkkS0REZojBzgycvJ6Ny2n5UFnIMLSjl9TlNBq5TMAnj3WEpUKGAxfS8d2RRKlLIiIialIMdmbg59gbAIDIth6wtWyk7b4UCjiOegKOo54AGnFLsf/S0tVWtyvFh1vikXS7QLJaiIiImhqDnYkrLdfgtxM3AQAPdW7WaO8jUyrh8c478HjnHcgaeUux/zK+pz+6+TmhoKQcs345zSFZIiIyGwx2Ju7AhTRk5JfA2UaJiFYuUpfTJGQyAR890h5KhQz7z6fh17ibUpdERETUJBjsTNzPsdpQM6yjV4PuNPFvoiii7PZtlN2+bRA9ZC1cbTH5/gAAwJzfzyIzv0TiioiIiBofg50JKywpx86ztwAAwxtxGBYAxMJCXAjviQvhPSEWGsZSI8/3bolAdzvczi/BB1u4lywREZk+BjsTtu98GgpLy9HMwQodTXDtuv+iVMgw9+H2EATgp+PX8dfFdKlLIiIialQMdibsz9PJAIBB7TwgCILE1Ugj1NcRY3pod9p48+dTKCotl7giIiKixsNgZ6KKy8qxKz4VADCovYfE1UhrRlQgPOxVuJZRgM93X5C6HCIiokbDYGeiDl3MQG5xGdztLdHZx1HqciRlp7LA
7AeDAQBf77+My2l5EldERETUOBjsTNTWU9ph2IHBHpDJzHMY9m6Rbd1xf6ArSstFvLv5jEHM3CUiImpoDHYmqFwjYme8djbswHaeEldjGARBwHsPBEN5Z7uxP06nSF0SERFRg2OwM0FxSZnILCiF2soCXf2aaBhWoYB6+HCohw+XdEux2vg622Bi75YAgPd/P4v84jKJKyIiImpYDHYmaHeCdtJEr9aujboo8d1kSiW8PpoHr4/mSb6lWG1e6NMSPk5WSM4uwue7L0pdDhERUYNisDNBuxPSAAB9g1wlrsTwqCzkeG+YdiLF8gOXcTE1V+KKiIiIGg6DnYm5mVWI+OQcyASgd2u3JntfURShKSiApqDA4Ccm9Gvjjv5t3FGmEfHOr5xIQUREpoPBzsTsOacdhu3c3BFONk03JCoWFuJcSCjOhYQazJZitXl3WFtYKmQ4dCkDv51MlrocIiKiBsFgZ2L23Lm/rm9Q0/XWGSMfJ2u8eH8AAGDe1ngUlHAiBRERGT8GOxNSXFaOvy5mAADuD2Sw+y/P9WoBb0ftRIpl+y5LXQ4REVG9MdiZkOPXslBYWg5XO0u08bSTuhyDp7KQ483BbQAAy/Zfws0swx9CJiIiqg2DnQk5eFE7G/a+ABcIAnebuBeD2nmgm78Tiko1+OiPBKnLISIiqhcGOxNy8M4wbM8AF4krMR6CIOCdoW0hCMDmEzdx7OptqUsiIiKqMwY7E5FdUIpT17MAAD0DnKUtxsi0a6bG4119AACzfzsLjYbLnxARkXFisDMR0ZfToRGBlq428FRbNX0BcjnsoqJgFxUFyOVN//719EpkIOwsFTh1Ixs/Hb8udTlERER1wmBnIg5eTAcARLSSZrcJmaUlvBcthPeihZBZWkpSQ3242Fpicj/t8if/23YOedxHloiIjBCDnYn4i/fX1du4cH/4OVsjLbcYS/ZwH1kiIjI+DHYmIDm7EFfS8yETgO4tnKQux2gpFTLMGtIWALD84BUkZhRIXBEREZF+GOxMwJEr2pmc7ZqpYa+ykKQGTUEB4oPaID6oDTQFxhuI+rdxw30BLigp02Du1nipyyEiItILg50J+PuyNth182NvXX0JgoC3h7aFTAD+PJOCo1z+hIiIjAiDnQk4ckV7f133FlzmpCEEethhZNfmAIAPt8RDFLn8CRERGQcGOyOXnleMS2n5AICufo4SV2M6pg1oBWulHHFJWdhyKlnqcoiIiO4Jg52RO3rn/rogDzs4WCslrsZ0uNmpMLF3SwDA/D8TUFxWLnFFRERE/43BzsgdvhPsuvvz/rqG9kyEP9zsLJF0uxBro69JXQ4REdF/YrAzchXBrps/769raNZKBV6NDAQAfL77IrIKSiSuiIiIqHYMdkYsu7AUCSk5AICu/hLfXyeXw6Z3L9j07mWUW4rV5JFQbwR52CG7sBRf7OaixUREZNgY7IxYbGImRBHwc7aGm51K0lpklpZovmwZmi9bZpRbitVELhPw5uA2AIBvo69y0WIiIjJoDHZG7HhiFgAgpDlnwzamXq1dEdHKBaXlIuZvS5C6HCIiohox2Bmx2MRMAEBnXwa7xvbm4DYQBGDLyWQcv/PnTkREZGgY7IyURiMiTtdj5yBpLYB2S7GEziFI6Bxi1FuK1aSNpz0eC/UGAMzlosVERGSgGOyM1IXUPOQWl8FaKUegu53U5QAAxMJCiIWFUpfRaKYPCITKQoZj1zKx7UyK1OUQERFVwWBnpCqGAzt6O0Ah52VsCh5qFZ6LaAEA+OiPBJSUaSSuiIiIqDImAiNVcX9diK+DtIWYmed6t4SLrSWuZhRg49FEqcshIiKqhMHOSHFGrDRsLRV4uX8rAMCiXReQX1wmcUVERET/qFOwW7JkCfz9/aFSqRAaGooDBw7U2HbTpk0YMGAAXF1dYW9vj7CwMGzbtq1Ku59++glt27aFpaUl2rZti59//rkupZmF7IJSXEzNAwB0ZrBrco939YGfszXS
80qw/MAVqcshIiLS0TvYbdy4EVOnTsWsWbMQGxuLiIgIDBo0CImJ1Q9L7d+/HwMGDMDWrVsRExOD+++/H8OGDUNsbKyuTXR0NEaOHIkxY8bgxIkTGDNmDEaMGIHDhw/X/ZOZsJM3sgBoFyZ2slFKW4wZspDLMCMqCADw9f5LSM8rlrgiIiIiLUHUc92G7t27IyQkBEuXLtUda9OmDYYPH4558+bd0zmCg4MxcuRIvPPOOwCAkSNHIicnB3/88YeuzcCBA+Ho6Ij169ff0zlzcnKgVquRnZ0Ne3t7PT6R8flyz0V8vO0cHujohcVPdJa6HACApqgISc8+BwDw+eZryFTS7oTR2ERRxPAv/8KJ69kYF+6H9x4IlrokIiIyUfpkHL167EpKShATE4PIyMhKxyMjI3Ho0KF7OodGo0Fubi6cnJx0x6Kjo6ucMyoq6p7PaW5OXs8CAHTwVktbyF1kKhV8166B79o1Jh/qAEAQBLw+SNtrt+7wNVzLyJe4IiIiIj2DXXp6OsrLy+Hu7l7puLu7O1JS7m1drwULFiA/Px8jRozQHUtJSdH7nMXFxcjJyan0MBenrmcDANo3M5xgZ47CW7qgd2tXlJaLWLD9vNTlEBER1W3yhCAIlb4XRbHKseqsX78e7733HjZu3Ag3N7d6nXPevHlQq9W6h4+Pjx6fwHil5RbjZnYRBAEIZrCT3OsDgyAIwOYTN3H6RrbU5RARkZnTK9i5uLhALpdX6UlLTU2t0uP2bxs3bsSECRPw/fffo3///pWe8/Dw0PucM2fORHZ2tu6RlJSkz0cxWqfuTJwIcLWFraVC2mLuoikowPmwcJwPCzfJLcVq0tbLHsM7NQOgXbSYiIhISnoFO6VSidDQUOzYsaPS8R07diA8PLzG161fvx7jxo3Dd999hyFDhlR5PiwsrMo5t2/fXus5LS0tYW9vX+lhDk5WDMMa0P11FcozM1GemSl1GU1u+oDWUMplOHgxHQcupEldDhERmTG9h2KnT5+O5cuXY+XKlYiPj8e0adOQmJiIiRMnAtD2pI0dO1bXfv369Rg7diwWLFiAHj16ICUlBSkpKcjO/mfY6uWXX8b27dsxf/58JCQkYP78+di5cyemTp1a/09oYirur+vAYViD4eNkjSd7+ALQ9tppNHpNNCciImowege7kSNHYuHChZgzZw46deqE/fv3Y+vWrfD11f7DlpycXGlNu2XLlqGsrAwvvvgiPD09dY+XX35Z1yY8PBwbNmzAqlWr0KFDB6xevRobN25E9+7dG+Ajmg5RFHHyRkWPnYO0xVAlL/UNgK2lAmdu5uC3kzelLoeIiMyU3uvYGSpzWMcuObsQYfN2Qy4TcGZ2FFQWcqlL0tEUFOBcSCgAIPB4DGTW1hJX1PS+2H0Bn2w/Dx8nK+ya3gdKBXfsIyKi+mu0dexIWqdvaJd0aeVma1ChjrTG3+cPVztLJN0uxHeHr0ldDhERmSEGOyNy5qZ2GDbYi/fXGSJrpQJT+7cCACzefRG5RaUSV0REROaGwc6InL2p7bFr62WAQ80yGVTt2kHVrh0gM9+/ViO6+KCFiw1u55fgmwNXpC6HiIjMjPn+C2yEzibfCXaehhfsZCoV/H/8Af4//mAWW4rVxEIuw4yoQADA8gOXkZpbJHFFRERkThjsjER2YSmuZxYCMMxgR/8Y2M4DHX0cUFBSjs93XZS6HCIiMiMMdkYi/k5vnbejFdTWFhJXQ7URBAEzBwUBANYfScSV9HyJKyIiInPBYGckdPfXGWhvnaawEBf79sPFvv2gKSyUuhzJ9WjhjPsDXVGmEfHJ9nNSl0NERGaCwc5I6O6vM8SJEwAgiii9eROlN28CprE0Yr29NjAIggBsOZmMk9ezpC6HiIjMAIOdkThj4D12VFUbT3s81KkZAO1WYyayFjgRERkwBjsjUFKmwcXUXAAG3GNH1Zo2oDWUchkOXcrAgQvpUpdDREQmjsHOCFxIzUVpuQh7lQLN
HKykLof04ONkjTFh2n2U5/+ZAI2GvXZERNR4GOyMQHyytreujac9BEGQuBrS14v3B8DOUoEzN3Pw28mbUpdDREQmjMHOCJy/pQ12QR52EldCdeFko8TzvVsAABZsP4+SMo3EFRERkalisDMC51K0wS7Qw4DvrxMEKANaQhnQEmCvYhXj7/OHq50lEm8XYP2RRKnLISIiE8VgZwT+CXa2EldSM5mVFVr+/jta/v47ZFa8D/DfrJUKvNyvFQBg8a4LyCsuk7giIiIyRQx2Bi67oBQpOdr9Rlu5cyjWmI3s6gN/Fxtk5Jfgm/2XpS6HiIhMEIOdgTt/Z5mTZg5WsFdxKzFjZiGX4dXIQADA8gOXkZZbLHFFRERkahjsDFzCnWHY1u6GOwwLaLcUuzR0KC4NHcotxWoxuL0HOnqrkV9Sji92X5C6HCIiMjEMdgbufEWwM/QZsaKIkouXUHLxErcUq4UgCHh9UBAAYN3hRFzLyJe4IiIiMiUMdgbuHJc6MTnhLV3Qq7UryjQiFmw/L3U5RERkQhjsDJgoiroZsa05ccKkvD5Qe6/d5hM3cfpGtsTVEBGRqWCwM2CpucXILiyFXCagpath32NH+gn2UmN4Jy8A2q3GiIiIGgKDnQGr6K3zc7aGykIucTXU0F6JDISFXMCBC+k4eCFd6nKIiMgEMNgZsIqtxDgMa5p8nKwxursvAG2vnUbDSSdERFQ/DHYG7GJqHgCglZsRDMMKAiy8vGDh5cUtxfQwuW8AbC0VOHUjG1tPJ0tdDhERGTkGOwNWEexaGkGwk1lZIWD3LgTs3sUtxfTgbGuJZyNaAAA+3nYOpeUaiSsiIiJjxmBnoERRxMU0bbALMIJgR3X3TIQ/XGyVuJZRgA1HEqUuh4iIjBiDnYHKyC9BVkEpBAGcEWvibCwVeLlfKwDAol0XkV9cJnFFRERkrBjsDFTFMKy3o5VRzIjVFBXhyqOP4cqjj0FTVCR1OUbn8W7N4etsjfS8Yqw4eEXqcoiIyEgx2BmoimAXYCy9dRoNik6fRtHp04CG94npy0Iuw6uR2kWLl+27hIy8YokrIiIiY8RgZ6B0wY7315mNIe090b6ZGvkl5fhiz0WpyyEiIiPEYGegLnHihNmRyQS8PjAIAPB/f19D0u0CiSsiIiJjw2BnoC7cYrAzR/e1ckFEKxeUlotYsP2c1OUQEZGRYbAzQLlFpUjJ0U5ACHDlrhPmpqLX7tcTN3HmZrbE1RARkTFhsDNAl9LyAQCudpZQW1tIXA01tXbN1BjW0QuiCPzvT/baERHRvWOwM0BGNyP2DrmjI+SOjlKXYRJejWwNhUzAvvNpOHQpXepyiIjISDDYGaDLaRVbidlIXMm9k1lbo3X0IbSOPgSZtbXU5Rg9X2cbjO7eHAAw/48EiKIocUVERGQMGOwM0JV07VBsCxfj6rGjhvVS31awVspx4no2/jidInU5RERkBBjsDFBFsPN3NZ4eO2p4rnaWeDaiBQDg423nUFrOhZ+JiKh2DHYGRqMR7+qxM55gpykqwrUxY3FtzFhuKdaAnu3VAs42SlxJz8f3x5KkLoeIiAwcg52BSc4pQnGZBhZyAc0crKQu595pNCg4ehQFR49yS7EGZGupwOS+AQCAhTsvoKCkTOKKiIjIkDHYGZgrd5Y6ae5kDYWcl4eAUd194eNkhbTcYqw8eEXqcoiIyIAxORiYK+naGbH+nDhBdygVMrwaGQgAWLbvMm7nl0hcERERGSoGOwNzueL+Ok6coLsM6+CFYC975BaX4cs9F6Uuh4iIDBSDnYHRzYg1ookT1PhkMkG31dja6GtIul0gcUVERGSIGOwMDIMd1SSilQt6BjijpFyDj7dxqzEiIqqKwc6AlJRpdD0xxrTUSQXBygqClRHN5DUygiDgzcFtIAjA5hM3cSIpS+qSiIjIwDDYGZDE2wXQiICNUg5XO0upy9GLzNoaQbHHERR7nFuKNaJgLzUe7uwNAPhwazy3GiMi
okoY7AzI3TtOCIIgcTVkqF6Nag1LhQxHrtzGjrO3pC6HiIgMCIOdAeFSJ3QvPNVWuq3GPvojgVuNERGRDoOdAbmSrr2/zt/Z+IYyNcXFSHz+eSQ+/zw0xcVSl2PyJvZpCRdbJS6n52P9kUSpyyEiIgPBYGdAKiZONHc2vokTKC9H/r79yN+3Hygvl7oak2drqcDU/q0BaLcayykqlbgiIiIyBAx2BuTa7X+2EyP6L4939UFLVxvczi/B0r2XpC6HiIgMAIOdgSgt1+BmVhEAwNcIh2Kp6SnkMswc1AYAsOLgFdzIKpS4IiIikhqDnYG4kVmIco0IS4UMbka21AlJp18bN/Ro4YSSMg0WcNFiIiKzx2BnIBIr7q9zsuZSJ3TPBEHArMFtAQCbYm/g9I1siSsiIiIpMdgZiGt3gh2HYUlf7b3VeKhzMwDAB1vOctFiIiIzxmBnIBIzKiZOGOGMWJLcq1GBUCpk+PvybexOSJW6HCIikgiDnYFINPIeO5m1NdokxKNNQjy3FJNAMwcrTLjPHwAwd2s8yrhoMRGRWWKwMxDXMv65x46oLib1aQknGyUupeVjw9EkqcshIiIJMNgZAFEU/5k8YaQ9diQ9e5UFpvZvBQBYuPM8crloMRGR2WGwMwDpeSUoKCmHIADejlZSl1MnmuJiXH95Kq6/PJVbiknoiW7N0cLFBul5XLSYiMgcMdgZgIreOk97FSwVcomrqaPycuRu24bcbdu4pZiELOQyzBysXbR4+cErum3qiIjIPDDYGYDEiq3EOAxLDaB/Gzf0DHBGSZkGH/2RIHU5RETUhBjsDEDFxAlfLnVCDUAQBLw9tC1kArDlVDKOXLktdUlERNREGOwMACdOUEML8rDHE92aAwDm/H4GGg0XLSYiMgcMdgYgkUudUCOYPqA17CwVOH0jBz8dvy51OURE1ATqFOyWLFkCf39/qFQqhIaG4sCBAzW2TU5OxqhRoxAYGAiZTIapU6dWabN69WoIglDlUVRUVJfyjA63E6PG4GxriSn9tMuf/G/bOeQVl0lcERERNTa9g93GjRsxdepUzJo1C7GxsYiIiMCgQYOQmJhYbfvi4mK4urpi1qxZ6NixY43ntbe3R3JycqWHSqXStzyjU1BShrRc7fIgvMeOGtpT4X7wc7ZGWm4xlu69KHU5RETUyPQOdp9++ikmTJiAZ555Bm3atMHChQvh4+ODpUuXVtvez88PixYtwtixY6FWq2s8ryAI8PDwqPQwB0m3CwEA9ioF1NYWEldTd4KVFQKPxyDweAwEK+Nci88UKRUyvHln+ZNvDnD5EyIiU6dXsCspKUFMTAwiIyMrHY+MjMShQ4fqVUheXh58fX3h7e2NoUOHIjY2ttb2xcXFyMnJqfQwRtcytEud+Dobd2+dIAiQWVtDZm0NQRCkLofuMqCtO5c/ISIyE3oFu/T0dJSXl8Pd3b3ScXd3d6SkpNS5iKCgIKxevRqbN2/G+vXroVKp0LNnT1y4cKHG18ybNw9qtVr38PHxqfP7S4kzYqmxCYKAt4Zw+RMiInNQp8kT/+6REUWxXr00PXr0wJNPPomOHTsiIiIC33//PVq3bo3PP/+8xtfMnDkT2dnZukdSknFueq4LdkY+I1ZTUoKbb8zEzTdmQlNSInU59C9tPO3xOJc/ISIyeXoFOxcXF8jl8iq9c6mpqVV68epVlEyGrl271tpjZ2lpCXt7+0oPY5RkIsEOZWXI/uUXZP/yC1DG2ZeG6O7lT37k8idERCZJr2CnVCoRGhqKHTt2VDq+Y8cOhIeHN1hRoigiLi4Onp6eDXZOQ3UjSzt5opkDJxxQ43K5a/mTj7n8CRGRSdJ7KHb69OlYvnw5Vq5cifj4eEybNg2JiYmYOHEiAO0Q6dixYyu9Ji4uDnFxccjLy0NaWhri4uJw9uxZ3fOzZ8/Gtm3bcPnyZcTFxWHChAmIi4vTndNUiaKIG5l3gp0jgx01vruXP1myh8ufEBGZGoW+
Lxg5ciQyMjIwZ84cJCcno127dti6dSt8fX0BaBck/veadp07d9Z9HRMTg++++w6+vr64evUqACArKwvPPfccUlJSoFar0blzZ+zfvx/dunWrx0czfNmFpcgvKQfAHjtqGhXLnzy3NgbLD1zBiC4+8HMx7hnZZB5EUURZWRnKy8ulLoWowcnlcigUigZZVUIQRdEk7qLOycmBWq1Gdna20dxvd/pGNoZ+fhAutkoce2uA1OXUi6agAOdCQgEAgcdjILM28nsGTZgoihi78ggOXEhHvyA3rBjXVeqSiGpVUlKC5ORkFBRwHUYyXdbW1vD09IRSqazynD4ZR+8eO2o413XDsAxB1HQEQcC7w4IxcOF+7EpIxe6EW+gb1HCTn4gakkajwZUrVyCXy+Hl5QWlUsm1MsmkiKKIkpISpKWl4cqVK2jVqhVksjotWgKAwU5SFRMnvDkMS00swM0W4+/zx9f7L2POb2fRM8AFlgq51GURVVFSUgKNRgMfHx9YcySATJSVlRUsLCxw7do1lJSU1GtL1bpHQqo3U5o4IVhZodWhv9Dq0F/cUsxITO4bADc7S1zNKMDyA1ekLoeoVvXpwSAyBg31d5w/KRK6kaW9X8QUJk4IggCFkxMUTk4cJjESdioLzBwcBAD4YvdFJGcXSlwRERHVF4OdhLiGHUlteKdm6OLriMLScszdyn1kicxNnz59MHXqVKnLaBSrV6+Gg4OD1GU0OQY7CZnSUKympAQpc+YgZc4cbilmRARBwHsPBEMQgN9O3ET0pQypSyIyGePGjYMgCNoRDYUCzZs3x6RJk5CZmVmpnZ+fHxYuXKj7XhRFvPLKK7Czs8Pu3burnPf555+HIAiVXgNoQ1rF+1U8Hn/88Vpr3LRpE95///17/kxXr16FIAiIi4u759c0hX//GQLa5dnOnz8vTUESYrCTSH5xGTILSgGYRrBDWRkyv1uPzO/Wc0sxI9OumRqju2v3kZ392xmUlWskrojIdAwcOBDJycm4evUqli9fjt9++w0vvPBCje3Ly8sxYcIErFmzBrt370bfvn0rPf/LL7/g8OHD8PLyqvb1zz77LJKTk3WPZcuW1Vqfk5MT7Ozs9P9gTaBi7cK6srKygpubWwNWZBwY7CRSMQxrr1LAXmUhcTVk7l4ZEAgHawskpOTi//6+JnU5RCbD0tISHh4e8Pb2RmRkJEaOHInt27dX27a4uBiPPfYYduzYgf3796Nr18prTN64cQMvvfQS1q1bBwuL6v/dsLa2hoeHh+6hVqtrre/fQ7F+fn6YO3cuxo8fDzs7OzRv3hxff/217nl/f38A2o0HBEFAnz59dM+tWrUKbdq0gUqlQlBQEJYsWVLpvQ4dOoROnTpBpVKhS5cu+OWXXyr1/u3duxeCIGDbtm3o0qULLC0tceDAAVy6dAkPPvgg3N3dYWtri65du2Lnzp2VPsO1a9cwbdo0XU8lUP1Q7NKlS9GyZUsolUoEBgZi7dq1lZ4XBAHLly/HQw89BGtra7Rq1QqbN2+u9c/Q0DDYSeQG17AjA+Joo8SrkYEAgE93nEd6XrHEFRHVTBRFFJSUSfKoz5r+ly9fxp9//lltKMvLy8OQIUNw5swZ/PXXX2jTpk2l5zUaDcaMGYMZM2YgODi4xvdYt24dXFxcEBwcjFdffRW5ubl617lgwQJ06dIFsbGxeOGFFzBp0iQkJGjvwT1y5AgAYOfOnUhOTsamTZsAAN988w1mzZqFDz/8EPHx8Zg7dy7efvttfPvttwCA3NxcDBs2DO3bt8fx48fx/vvv4/XXX6/2/V977TXMmzcP8fHx6NChA/Ly8jB48GDs3LkTsbGxiIqKwrBhw3S7XG3atAne3t66HbGSk5OrPe/PP/+Ml19+Ga+88gpOnz6N559/Hk8//TT27NlTqd3s2bMxYsQInDx5EoMHD8bo0aNx+/Ztvf8cpcJ17CRynRMnyMA80a051h9JxJmbOfj4z3OY/2gHqUsiqlZh
aTnavrNNkvc+OycK1sp7/6fz999/h62tLcrLy1FUVAQA+PTTT6u0e//992FnZ4ezZ89WO3w4f/58KBQKTJkypcb3Gj16NPz9/eHh4YHTp09j5syZOHHiBHbs2HHP9QLA4MGDdcPFr7/+Oj777DPs3bsXQUFBcHV1BQA4OzvDw8OjUv0LFizAww8/DEDbs3f27FksW7YMTz31FNatWwdBEPDNN99ApVKhbdu2uHHjBp599tkq7z9nzhwMGPDPbkzOzs7o2LGj7vsPPvgAP//8MzZv3oyXXnoJTk5OkMvlsLOzq1TTv33yyScYN26c7rNNnz4df//9Nz755BPcf//9unbjxo3DE088AQCYO3cuPv/8cxw5cgQDBw7U689RKuyxk0hFj523KdxfRyZBLhMw+wFtT8DGY0k4npj5H68gov9y//33Iy4uDocPH8bkyZMRFRWFyZMnV2kXGRmJ/Px8zJ07t8pzMTExWLRoEVavXl3rclLPPvss+vfvj3bt2uHxxx/Hjz/+iJ07d+L48eN61dyhwz//UycIAjw8PJCamlpj+7S0NCQlJWHChAmwtbXVPT744ANcunQJAHDu3Dl06NCh0sK7Ne0H36VLl0rf5+fn47XXXkPbtm3h4OAAW1tbJCQkVNmX/r/Ex8ejZ8+elY717NkT8fHxlY7d/fltbGxgZ2dX6+c3NOyxkwiXOiFD1MXPCY+GeuPHmOuY9fNp/PZSTyjk/P8/MixWFnKcnRMl2Xvrw8bGBgEBAQCAxYsX4/7778fs2bOrzETt168fpkyZggcffBDl5eX4/PPPdc8dOHAAqampaN68ue5YeXk5XnnlFSxcuBBXr16t9r1DQkJgYWGBCxcuICQk5J5r/vdQsSAI0GhqnlRV8dw333yD7t27V3pOLtf+eYmiWCWU1jSsbWNjU+n7GTNmYNu2bfjkk08QEBAAKysrPProoyipwwoM1dXw72P6fn5Dw2AnkRuZdxYnZo8dGZiZg4Kw4+wtxCfnYE30NYy/z1/qkogqEQRBr+FQQ/Luu+9i0KBBmDRpUpWZrQMGDMDvv/+OYcOGQaPR4IsvvoAgCBgzZgz69+9fqW1UVBTGjBmDp59+usb3OnPmDEpLS+Hp6dlg9VdsUF9eXq475u7ujmbNmuHy5csYPXp0ta8LCgrCunXrUFxcDEtLSwDAsWPH7uk9Dxw4gHHjxuGhhx4CoL0f8d9hVqlUVqqpOm3atMHBgwcxduxY3bFDhw5VuZ/R2BnnT4YJuJ5pWj12gkqFlndmKQn12OOOpOdsa4nXBwbhzZ9P4dMd5zGkgyfc7XlNiRpCnz59EBwcjLlz5+KLL76o8nzfvn2xZcsWDB06FKIo4ssvv4SzszOcnZ0rtbOwsICHhwcCA7WTni5duoR169Zh8ODBcHFxwdmzZ/HKK6+gc+fOVYYf68PNzQ1WVlb4888/4e3tDZVKBbVajffeew9TpkyBvb09Bg0ahOLiYhw7dgyZmZmYPn06Ro0ahVmzZuG5557DG2+8gcTERHzyyScAqvai/VtAQAA2bdqEYcOGQRAEvP3221V60Pz8/LB//348/vjjsLS0hIuLS5XzzJgxAyNGjEBISAj69euH3377DZs2bao0w9YUcIxFAsVl5UjN1c46NJV77ASZDErvZlB6N4PAPR2N3uNdfdDJxwF5xWV4//ezUpdDZFKmT5+Ob775BklJSdU+36dPH2zduhVr167FpEmT7mkmrlKpxK5duxAVFYXAwEBMmTIFkZGR2Llzp244tCEoFAosXrwYy5Ytg5eXFx588EEAwDPPPIPly5dj9erVaN++PXr37o3Vq1frlkext7fHb7/9hri4OHTq1AmzZs3CO++8AwD/ueH9Z599BkdHR4SHh2PYsGGIioqqMrQ8Z84cXL16FS1bttRN8Pi34cOHY9GiRfj4448RHByMZcuWYdWqVZWWbDEFglifudsGJCcnB2q1GtnZ2bC3t5e6nFpdTc9Hn0/2QmUh
Q/ycgdxblQzS6RvZeOCLg9CIwNoJ3RDRqvpflkSNqaioCFeuXIG/v/9/BgAyLuvWrcPTTz+N7OxsWFmZRidHfdT2d12fjMOuFQncPXHCVEKdWFKCW//7GLf+9zFEbilmEto1U+OpcD8AwNu/nEZRae33rxAR1WbNmjU4ePAgrly5gl9++QWvv/46RowYwVDXwBjsJGCKixOLZWW4vXIlbq9cCZFbipmM6QNaw83OElczCrBs32WpyyEiI5aSkoInn3wSbdq0wbRp0/DYY49V2tWCGgaDnQS4ODEZCzuVBd4e2hYA8OXei7iani9xRURkrF577TVcvXpVN+T42WefwdradDo4DAWDnQS4ODEZk6EdPHFfgAtKyjR4Z/OZem2pREREjYvBTgI3su6sYcceOzICgiBgzoPBUMpl2H8+DX+cTpG6JCIiqgGDnQRSsrX7BXqqOcOLjEMLV1tM7NMSADDnt7PIK+Z9lEREhojBromJoohkXbBjjx0Zjxf6tERzJ2uk5BThk23npC6HiIiqwWDXxLIKSlFcpl0x211tKXE1RPdOZSHHhw+1AwB8G30VsYmZEldERET/xmDXxCp661xslbBUNNxq4FITVCq0+G0zWvy2mVuKmbCIVq54uHMziCIwc9MplJYbz8bYRETmgMGuiaXkaGfEepjY/XWCTAbLVq1g2aoVtxQzcW8NbQtHawskpOTi6/1c247IEPj5+WHhwoX1OsfevXshCAKysrIapKbGsnr1ajg4OBjMeQwN/wVuYjeztD12Hva8v46Mk5ONUre23aJdF3CFa9sR1erQoUOQy+UYOHCg1KXo9OnTB1OnTq10LDw8HMnJyVCr1dIU1YiqC74jR47E+fPnpSmoETHYNTFTnRErlpQg7fMvkPb5F9xSzAw81LkZIlpp17Z7c9Mprm1HVIuVK1di8uTJOHjwIBITE6Uup0ZKpRIeHh4ms9Xlf7GysoKbm5vUZTQ4BrsmVnGPnakNxYplZUj/8kukf/kltxQzA4Ig4MPh7aGykCH6cgZ+OHZd6pKIDFJ+fj6+//57TJo0CUOHDsXq1asrPV8x/Llr1y506dIF1tbWCA8Px7lz/8w8v3TpEh588EG4u7vD1tYWXbt2xc6dO2t8z/Hjx2Po0KGVjpWVlcHDwwMrV67EuHHjsG/fPixatAiCIEAQBFy9erXaodi//voLvXv3hrW1NRwdHREVFYXMzOonTlUMbf7yyy9o3bo1VCoVBgwYgKSkpErtfvvtN4SGhkKlUqFFixaYPXs2yu76d+PTTz9F+/btYWNjAx8fH7zwwgvIy8ur8fNmZGSgW7dueOCBB1BUVFTl+T59+uDatWuYNm2a7vPeXW+F9957D506dcLKlSvRvHlz2NraYtKkSSgvL8f//vc/eHh4wM3NDR9++GGl82dnZ+O5556Dm5sb7O3t0bdvX5w4caLGehsbg10Tq7jHztR67Mj8NHe2xrT+rQEAH26NR1puscQVkbnRFBTU/Cguvve2/woDNbWri40bNyIwMBCBgYF48sknsWrVqmp7uGfNmoUFCxbg2LFjUCgUGD9+vO65vLw8DB48GDt37kRsbCyioqIwbNiwGnv/nnnmGfz5559ITk7WHdu6dSvy8vIwYsQILFq0CGFhYXj22WeRnJyM5ORk+Pj4VDlPXFwc+vXrh+DgYERHR+PgwYMYNmwYysvLa/y8BQUF+PDDD/Htt9/ir7/+Qk5ODh5//HHd89u2bcOTTz6JKVOm4OzZs1i2bBlWr15dKSzJZDIsXrwYp0+fxrfffovdu3fjtddeq/b9rl+/joiICAQFBWHTpk1QVTN5b9OmTfD29sacOXN0n7cmly5dwh9//IE///wT69evx8qVKzFkyBBcv34d+/btw/z58/HWW2/h77//BqBdwmzIkCFISUnB1q1bERMTg5CQEPTr1w+3b9+u8X0alWgisrOzRQBidna21KXU6v5P9oi+r/8u/nUxTepSGlR5fr54
NjBIPBsYJJbn50tdDjWR0rJycfCi/aLv67+LL313XOpyyAQVFhaKZ8+eFQsLC6s8V/E7p7rHteeeq9Q2vlPnGttefXJMpbbneoRV264uwsPDxYULF4qiKIqlpaWii4uLuGPHDt3ze/bsEQGIO3fu1B3bsmWLCKDaz1yhbdu24ueff6773tfXV/zss88qPT9//nzd98OHDxfHjRun+753797iyy+/XOmcFbVkZmaKoiiKTzzxhNizZ897/qyrVq0SAYh///237lh8fLwIQDx8+LAoiqIYEREhzp07t9Lr1q5dK3p6etZ43u+//150dnau9D5qtVo8d+6c2Lx5c3Hy5MmiRqOptbZ///ncfZ4K7777rmhtbS3m5OTojkVFRYl+fn5ieXm57lhgYKA4b948URRFcdeuXaK9vb1YVFRU6dwtW7YUly1bVmtN/1bb33V9Mg577JqQKIp33WPHyRNk/BRyGT56uANkAvDbiZvYk5AqdUlEBuPcuXM4cuSIrsdKoVBg5MiRWLlyZZW2HTp00H3t6ekJAEhN1f485efn47XXXkPbtm3h4OAAW1tbJCQk1Hq/3jPPPINVq1bpzrNly5ZKvYD3oqLHTh8KhQJdunTRfR8UFAQHBwfEx8cDAGJiYjBnzhzY2trqHhU9hwV3ekX37NmDAQMGoFmzZrCzs8PYsWORkZGB/Px/JmoVFhbivvvuw/Dhw7F48eIGuy/Qz88PdnZ2uu/d3d3Rtm1byO5a7cHd3V13bWJiYpCXlwdnZ+dKn+nKlSu4dOlSg9SkL4Uk72qmcorKUFCi7cLmUCyZivbeaozv6Y/lB6/grV9OY/u0XrCx5K8WanyBx2NqflJeeZ3Q1n8drLntv5ZoCthV8/1r+lixYgXKysrQrFkz3TFRFGFhYYHMzEw4OjrqjltYWOi+rggpGo12ncgZM2Zg27Zt+OSTTxAQEAArKys8+uijKKllotrYsWPxxhtvIDo6GtHR0fDz80NERIRe9VtZ1a0DorqQdfdnmj17Nh5++OEqbVQqFa5du4bBgwdj4sSJeP/99+Hk5ISDBw9iwoQJKC0t1bW1tLRE//79sWXLFsyYMQPe3t51qvXf7r4OFXVXd6zi2mg0Gnh6emLv3r1VziXVUir87duEKnrrHK0toLIwncWJiaZHtsYfp1NwI6sQH287h/ceCJa6JDIDMmtrydvWpKysDGvWrMGCBQsQGRlZ6blHHnkE69atw0svvXRP5zpw4ADGjRuHhx56CID2nrurV6/W+hpnZ2cMHz4cq1atQnR0NJ5++ulKzyuVylrvlQO0vYi7du3C7Nmz76lOQPu5jx07hm7dugHQ9lpmZWUhKCgIABASEoJz584hICCg2tcfO3YMZWVlWLBgga6X7Pvvv6/STiaTYe3atRg1ahT69u2LvXv3wsvLq8a67uXz1kVISAhSUlKgUCjg5+fX4OevCw7FNqHk7IrFiTkMS6bFWqnAvIfbAwBWH7qKI1ckummYyED8/vvvyMzMxIQJE9CuXbtKj0cffRQrVqy453MFBARg06ZNiIuLw4kTJzBq1Chdj1FtnnnmGXz77beIj4/HU089Vek5Pz8/HD58GFevXkV6enq155s5cyaOHj2KF154ASdPnkRCQgKWLl2K9PT0Gt/TwsICkydPxuHDh3H8+HE8/fTT6NGjhy7ovfPOO1izZg3ee+89nDlzBvHx8di4cSPeeustAEDLli1RVlaGzz//HJcvX8batWvx1VdfVftecrkc69atQ8eOHdG3b1+kpKTUWJefnx/279+PGzdu1Fq/vvr374+wsDAMHz4c27Ztw9WrV3Ho0CG89dZbOHbsWIO9jz4Y7JqQqa5hBwCCpSX8fvgefj98D8GSe+Cao16tXTGyi3Zm3Ws/nkBhScP/3zGRsVixYgX69+9f7WK/jzzyCOLi4nD8+PF7Otdnn30GR0dHhIeHY9iwYYiKikJISMh/vq5///7w9PREVFRUld6sV199FXK5
HG3btoWrq2u19+u1bt0a27dvx4kTJ9CtWzeEhYXh119/hUJR82CftbU1Xn/9dYwaNQphYWGwsrLChg0bdM9HRUXh999/x44dO9C1a1f06NEDn376KXx9fQEAnTp1wqeffor58+ejXbt2WLduHebNm1fj+ykUCqxfvx7BwcHo27ev7t63f5szZw6uXr2Kli1bwtXVtdY/N30IgoCtW7eiV69eGD9+PFq3bo3HH38cV69ehbu7e4O9j141iaJprCyak5MDtVqN7Oxs2NvbS11OtT7bcR6Ldl3AqO7NMfeh9lKXQ9TgcopKEfnpfqTkFGHCff66HSqI6qqoqAhXrlyBv79/tUtZUM0KCgrg5eWFlStXVntPW0NbvXo1pk6davBbkhmq2v6u65Nx2GPXhHQ9dvb85USmyV5loRuSXfnXFcRc45AsUVPTaDS4efMm3n77bajVajzwwANSl0RNiMGuCd3U3WNnesFOLClBxooVyFixgluKmbn7g9zwaKg3RBGY8cNJFJVySJaoKSUmJqJZs2b4/vvvsXLlylqHTsn0MNg1IVNew04sK0Pqx58g9eNPuKUY4e0hbeFmZ4nL6fn4bIfpbbJNZMj8/PwgiiKSkpL0XoeuPsaNG8dhWAPAYNeEUkx0n1iif1NbW+juI/3mwGUcT6x+b0kiImpYDHZNJLeoFLnF2p4sU5wVS/Rv/du64+HOzaARgRk/nOCQLBFRE2CwayK3crS9dfYqBVflJ7PxzrC2cLWzxKW0fCzadUHqcoiITB6DXRNJNuH764hq4mCtxIfD2wEAlu27hBNJWdIWRERk4hjsmkgy768jMxUZ7IEHOnpBIwKvcEiWiKhRMdg1EVPedYLov8x+IBiudpa4mJqHj7edk7ocIiKTxWDXREy9x06wtETzb79F82+/5ZZiVIWjjRLzH9HOkl1x8AoOXWq4vRqJSLvEycKFC+t1jr1790IQBINfsmT16tVwcHBo8vcVBAG//PJLk7+vvhjsmkjKncWJTbXHTpDLYdO9G2y6d4Mgl0tdDhmgvkHueKJbcwDahYtzikolroioaRw6dAhyuRwDBw6UuhSdPn36YOrUqZWOhYeHIzk5udr9bcl4MNg1kYoeO3duJ0Zm7K0hbdDcyRo3sgoxe/NZqcshahIrV67E5MmTcfDgQSQmJkpdTo2USiU8PDwgCILUpVA9MNg1kYrlTkx1VqxYWorb69bh9rp1EEvZE0PVs7FU4NMRHSETgJ+OX8efp1OkLomoUeXn5+P777/HpEmTMHToUKxevbrS8xXDn7t27UKXLl1gbW2N8PBwnDv3z72oly5dwoMPPgh3d3fY2tqia9eu2LlzZ43vOX78eAwdOrTSsbKyMnh4eGDlypUYN24c9u3bh0WLFkEQBAiCgKtXr1Y7FPvXX3+hd+/esLa2hqOjI6KiopCZWf2C4xVDpL/88gtat24NlUqFAQMGICkpqVK73377DaGhoVCpVGjRogVmz56Nsrt2LPr000/Rvn172NjYwMfHBy+88ALy8vJq/LwZGRno1q0bHnjgARQVFVV5fubMmejRo0eV4x06dMC7774LADh69CgGDBgAFxcXqNVq9O7dG8ePH6/xPav7s4qLi9P9WVY4dOgQevXqBSsrK/j4+GDKlCnIz8+v8bwNgcGuCZSUaZBZoA07bnamef+ZWFqKW+9/gFvvf8BgR7Xq4ueE53u3BAC8+fMppOUWS1wRGavS4vIaH2X/mn1da9uSe2tbFxs3bkRgYCACAwPx5JNPYtWqVRBFsUq7WbNmYcGCBTh27BgUCgXGjx+vey4vLw+DBw/Gzp07ERsbi6ioKAwbNqzG3r9nnnkGf/75J5KTk3XHtm7diry8PIwYMQKLFi1CWFgYnn32WSQnJyM5ORk+Pj5VzhMXF4d+/fohODgY0dHROHjwIIYNG4by8pr/LAoKCvDhhx/i22+/xV9//YWcnBw8/vjjuue3bduGJ598ElOmTMHZs2exbNky
rF69Gh9++KGujUwmw+LFi3H69Gl8++232L17N1577bVq3+/69euIiIhAUFAQNm3aBJWq6qjY6NGjcfjwYVy6dEl37MyZMzh16hRGjx4NAMjNzcVTTz2FAwcO4O+//0arVq0wePBg5Obm1vhZ/8upU6cQFRWFhx9+GCdPnsTGjRtx8OBBvPTSS3U+573gSrlNIC1P+w+XUi6Dg7WFxNUQSW9q/1bYk5CKhJRczNx0Et+M7cLhH9Lb1y/vq/E533bOGPpSR933K2ccQFmJptq2Xq0c8NArIbrv18w6hKK8qv+D+uJXffWuccWKFXjyyScBAAMHDkReXh527dqF/v37V2r34Ycfonfv3gCAN954A0OGDEFRURFUKhU6duyIjh3/+SwffPABfv75Z2zevLnakBAeHo7AwECsXbtWF4hWrVqFxx57DLa2tgC0w67W1tbw8PCosfb//e9/6NKlC5YsWaI7FhwcXOvnLS0txRdffIHu3bsDAL799lu0adMGR44cQbdu3fDhhx/ijTfewFNPPQUAaNGiBd5//3289tprut6zu+/98/f3x/vvv49JkyZVqgMAzp8/jwEDBuDBBx/U9T5Wp127dujQoQO+++47vP322wCAdevWoWvXrmjdujUAoG/fytd22bJlcHR0xL59+6r0ft6rjz/+GKNGjdJ9nlatWmHx4sXo3bs3li5dWm0IbQjssWsCFcOwrnaW/MeLCIClQo6Fj3eCUi7DzvhU/HDsutQlETW4c+fO4ciRI7oeK4VCgZEjR2LlypVV2nbo0EH3taenJwAgNTUVgHY497XXXkPbtm3h4OAAW1tbJCQk1Hq/3jPPPINVq1bpzrNly5ZKvYD3oqLHTh8KhQJdunTRfR8UFAQHBwfEx8cDAGJiYjBnzhzY2trqHhU9hwUFBQCAPXv2YMCAAWjWrBns7OwwduxYZGRkVBrCLCwsxH333Yfhw4dj8eLF//lv6+jRo7Fu3ToAgCiKWL9+va63DtD+GU2cOBGtW7eGWq2GWq1GXl5eve6JjImJwerVqyt91qioKGg0Gly5cqXO5/0v7LFrAqk52h47N3vTHIYlqosgD3u8Etka8/5IwOzfzqCbvxP8XGykLouMyHOLetf4nPCvbovxH0fU3PZfmWDsh+H1KUtnxYoVKCsrQ7NmzXTHRFGEhYUFMjMz4ejoqDtuYfHPaE5FSNFotD2MM2bMwLZt2/DJJ58gICAAVlZWePTRR1FSUlLje48dOxZvvPEGoqOjER0dDT8/P0RE1PxnUB0rq7rdE15dyLr7M82ePRsPP/xwlTYqlQrXrl3D4MGDMXHiRLz//vtwcnLCwYMHMWHCBJTedZuPpaUl+vfvjy1btmDGjBnw9vautaZRo0bhjTfewPHjx1FYWIikpKRKQ8Tjxo1DWloaFi5cCF9fX1haWiIsLKzGP2OZTPsX7O5h9dJ/3Yak0Wjw/PPPY8qUKVVe37x581rrrQ8GuyaQmqvtsTPV++uI6uqZiBbYnZCKw1du4+UNsfhxUjgs5BxIoHtjYXnvSys1VtualJWVYc2aNViwYAEiIyMrPffII49g3bp193yv1YEDBzBu3Dg89NBDALT33N19g351nJ2dMXz4cKxatQrR0dF4+umnKz2vVCprvVcO0PYi7tq1C7Nnz76nOgHt5z527Bi6desGQNtrmZWVhaCgIABASEgIzp07h4CAgGpff+zYMZSVlWHBggW68PT9999XaSeTybB27VqMGjUKffv2xd69e+Hl5VVjXd7e3ujVqxfWrVuHwsJC9O/fH+7u7rrnDxw4gCVLlmDw4MEAgKSkJKSn17zepqurKwAgOTlZF9Dj4uIqtQkJCcGZM2dq/KyNhb9Bm0BFjx2XOiGqTC4T8NnITrBXKXDiejY+23Fe6pKIGsTvv/+OzMxMTJgwAe3atav0ePTRR7FixYp7PldAQAA2bdqEuLg4nDhxAqNGjdL15tXmmWeewbfffov4+HjdPW0V/Pz8
cPjwYVy9ehXp6enVnm/mzJk4evQoXnjhBZw8eRIJCQlYunRprYHHwsICkydPxuHDh3H8+HE8/fTT6NGjhy7ovfPOO1izZg3ee+89nDlzBvHx8di4cSPeeustAEDLli1RVlaGzz//HJcvX8batWvx1VdfVftecrkc69atQ8eOHdG3b1+kpNQ+y3706NHYsGEDfvjhB919jxUCAgKwdu1axMfH4/Dhwxg9enStPZYBAQHw8fHBe++9h/Pnz2PLli1YsGBBpTavv/46oqOj8eKLLyIuLg4XLlzA5s2bMXny5FrrrC8GuybAHjuimnk5WOGjR7T3Fy3dd4m7UpBJWLFiBfr371/tYr+PPPII4uLial1O426fffYZHB0dER4ejmHDhiEqKgohISH/+br+/fvD09MTUVFRVXqzXn31VcjlcrRt2xaurq7V3kvWunVrbN++HSdOnEC3bt0QFhaGX3/9FQpFzYN91tbWeP311zFq1CiEhYXBysoKGzZs0D0fFRWF33//HTt27EDXrl3Ro0cPfPrpp/D19QUAdOrUCZ9++inmz5+Pdu3aYd26dZg3b16N76dQKLB+/XoEBwejb9++uvsSq/PYY48hIyMDBQUFGD58eKXnVq5ciczMTHTu3BljxozBlClT4ObmVuO5LCwssH79eiQkJKBjx46YP38+Pvjgg0ptOnTogH379uHChQuIiIhA586d8fbbb+vuoWwsgljdvGsjlJOTA7VajezsbNjb20tdTiVPrTyCfefT8L9HO2BEl6pTyk2BWFaGvIMHAQC2990HoZYffKLqvPHTSWw4mgQPexX+nBoBB2ul1CWRASgqKsKVK1fg7+/faLMITVVBQQG8vLywcuXKau9pa2irV6/G1KlTDX5LMkNV2991fTIOe+yaQOqddbpMucdOUChg16cP7Pr0YaijOnlnWFu0cLFBSk4R3vjpVLVrfRHRf9NoNLh58ybefvttqNVqPPDAA1KXRE2Iwa4JpOZUDMXy/zaJamKtVGDR451hIRfw55kUbDia9N8vIqIqEhMT0axZM3z//fdYuXJlrUOnZHoY7BpZabkGGfna6dLuJrzciVhaiqxNPyNr08/ceYLqrL23GjOiAgEAs387g4upNW8jRETV8/PzgyiKSEpK0nsduvoYN24ch2ENAINdI0u/s+uEQibA0YTvGRJLS5H85ptIfvNNBjuql2fua4H7AlxQVKrBlPWxKC6r21ZORETmiMGukd26s9SJq50lZDLuOkH0X2QyAZ+O6AgnGyXOJudg7pZ4qUsiIjIaDHaNTHd/HdewI7pnbvYqLHhMuzfmt9HX8Mep5P94BZk6TqYhU9dQf8cZ7BqZOcyIJWoM9we54fneLQAAr/10EokZBRJXRFKo2GqrYh9RIlNV8Xf87u3l6oJTZRpZRY+dKU+cIGosr0YG4tjVTMRcy8RL64/jh4lhsFTUf7snMh5yuRwODg66hWetra3/c8N3ImMiiiIKCgqQmpoKBwcHyOX1+x1Xp2C3ZMkSfPzxx0hOTkZwcDAWLlxY4+bCycnJeOWVVxATE4MLFy5gypQpWLhwYZV2P/30E95++21cunQJLVu2xIcffqjbF8+Y/dNjx6FYIn1ZyGVY/ERnDFl8ACevZ+OjPxLw7rBgqcuiJubh4QEAte4qQGTsHBwcdH/X60PvYLdx40ZMnToVS5YsQc+ePbFs2TIMGjQIZ8+eRfPmzau0Ly4uhqurK2bNmoXPPvus2nNGR0dj5MiReP/99/HQQw/h559/xogRI3Dw4EF0795d/09lQG7lcDsxovpo5mCFBY91xIRvj2HVX1fRo4UzooLr/8uPjIcgCPD09ISbmxtKOeueTJCFhUW9e+oq6L2lWPfu3RESEoKlS5fqjrVp0wbDhw+vdT83AOjTpw86depUpcdu5MiRyMnJwR9//KE7NnDgQDg6OmL9+vX3VJehbik2ZPEBnLmZg1XjuuL+oJr3nTN2YlkZcnfuBADY9e/P
3Seowc3dGo+v91+GvUqBLVMi4ONkLXVJRERNotG2FCspKUFMTAwiIyMrHY+MjMShQ4f0r/SO6OjoKueMioqq1zkNRcVQrKuJ99gJCgXsBw6E/cCBDHXUKGZEBaJzcwfkFJXhpfWxKCnTSF0SEZHB0SvYpaeno7y8HO7u7pWOu7u7IyUlpc5FpKSk6H3O4uJi5OTkVHoYmrJyjW6BYjdOniCqFwu5DJ8/0RlqKwucSMrC3K1c346I6N/qtNzJv2ckiaJY71lK+p5z3rx5UKvVuoePj0+93r8xZOSXQBQBuUyAs41pBzuxrAw5f/6JnD//hFhWJnU5ZKK8Ha3x6Qjt+narD13Fr3E3JK6IiMiw6BXsXFxcIJfLq/SkpaamVulx04eHh4fe55w5cyays7N1j6Qkw9swvGLihIutEnIT33VCLCnBjanTcGPqNIglJVKXQyasXxt3TO4bAAB446dTSEgxvN56IiKp6BXslEolQkNDsWPHjkrHd+zYgfDw8DoXERYWVuWc27dvr/WclpaWsLe3r/QwNKl3thNz564TRA1qav/WiGjlgsLSckxcG4OcIs6UJCIC6jAUO336dCxfvhwrV65EfHw8pk2bhsTEREycOBGAtidt7NixlV4TFxeHuLg45OXlIS0tDXFxcTh79qzu+Zdffhnbt2/H/PnzkZCQgPnz52Pnzp2YOnVq/T6dxLjrBFHjkMsELH68M5o5WOFqRgFe+f4ENBpuOUVEpPf0xZEjRyIjIwNz5sxBcnIy2rVrh61bt8LX1xeAdkHixMTESq/p3Lmz7uuYmBh899138PX1xdWrVwEA4eHh2LBhA9566y28/fbbaNmyJTZu3Ggya9i5cnFiogbnaKPE0idD8OjSaOw4ewtL913Ci/cHSF0WEZGk9F7HzlAZ4jp2MzedwvojiZjavxWm9m8tdTmNSlNQgHMhoQCAwOMxkFlzjTFqGhuOJOKNTacgE4A147vjvlYuUpdERNSgGm0dO9JPqm7XCfbYETWWx7s1x8guPtCIwJQNsbiRVSh1SUREkmGwa0S8x46oacx+MBjtmtnjdn4Jnl97DIUl5VKXREQkCQa7RpSaq+2xM4dZsYKFBTznzoXn3LkQLCykLofMjMpCjqWjQ+Fko8TpGzl4/aeTMJG7TIiI9MJg10g0GhHpedr13Ex9OzFAG+wcHn4IDg8/xGBHkvBxssaS0SFQyARsPnETX+27LHVJRERNjsGukWQWlKD8zvILzrZKiashMg89Wjjj3QeCAQD/25aA3Qm3JK6IiKhpMdg1koreOgdrC1jITf+PWSwrQ+7evcjdu5dbipGknuzeHE90aw5RBF5eH4eLqXlSl0RE1GRMP3FIJD1PO3HC1db0h2EB7ZZi1ydOwvWJk7ilGElKEATMfiAY3fyckFtchufWHEN2IXemICLzwGDXSCqCnYuZBDsiQ6JUyLDkyRB4qVW4nJ6PKetjdbdGEBGZMga7RpJ2Z6kTFzOYOEFkiFxsLfH12C5QWciw73waPvojXuqSiIgaHYNdI6m4x86FEyeIJNOumRofP9oRAPDNgStYfyTxP15BRGTcGOwaCYdiiQzDsI5emNq/FQDg7V9O4+CFdIkrIiJqPAx2jcTcJk8QGbKX+7XCQ52boUwjYtK6GFy4lSt1SUREjYLBrpHoeuzsOBRLJDVBEPDRI+3R1c8RuUVlGP/tUd3PKBGRKWGwayTpuRX32JlHj51gYQH3t9+C+9tvcecJMkiWCjmWjekCX2drJN0uxHNrjqGolHvKEpFpYbBrBKIoIiPfvO6xEyws4DR6NJxGj2awI4PlZKPEynFdYa9S4HhiFmb8yD1lici0MNg1guzCUpSWczsxIkPU0tUWX40JhUIm4LcTN/HZjvNSl0RE1GAY7BpBxb079ioFLBVyiatpGmJ5OfIPH0H+4SMQyzm8RYYtvKUL5j7cHgCwePdFbOAyKERkIhjsGkFaxf11ZrQ4sVhcjMSnnkLiU09B
LOZN6WT4RnTxweS+AQCAWb+cxq74WxJXRERUfwx2jYBr2BEZh+kDWuOxUG+Ua0S8+N1xHE/MlLokIqJ6YbBrBFzDjsg4CIKAuQ+3R59AVxSVajBh9VFcTsuTuiwiojpjsGsE//TYceIEkaGzkMvw5agQdPBWI7OgFGNXHkFqbpHUZRER1QmDXSMwtzXsiIydjaUCK8d1ha+zNa5nFuLpVUeRW1QqdVlERHpjsGsE/+w6wWBHZCxcbC2xZnw3ONsoceZmDl5YdxwlZRqpyyIi0guDXSPg5Aki4+TrbINVT3eFtVKOAxfSMW1jHMo1XMCYiIwHg10jSM+rGIo1n3vsBIUCbjNehduMVyEoFFKXQ1RnHbwd8NWTobCQC9hyKhlvbjrF3SmIyGgw2DUwURSRZoY9doJSCecJE+A8YQIEpfkEWjJNvVq7YtHjnSETgI3HkjB3azzDHREZBQa7BpZbXKa7L8eV99gRGa3B7T3x0cMdAADfHLiCL/dclLgiIqL/xmDXwNJztb11tpYKqCzMYzsxQLulWOGpUyg8dYpbipHJGNHVB28NaQMA+GT7eayJviptQURE/4E3QzUwc7y/DtBuKXb1sREAgMDjMRCsrSWuiKhhPBPRAjlFZVi86wLe+fUM7FQKPNTZW+qyiIiqxR67BsYZsUSmZ1r/VhgX7gcAePWHk/jzdIq0BRER1YDBroEx2BGZHkEQ8M7QtngkRLuv7OT1x7Hz7C2pyyIiqoLBroGl5VYsTmxeQ7FEpk4mE/C/RztgWEcvlJaLeGHdcew5lyp1WURElTDYNTD22BGZLrlMwGcjOmJwew+UlGvw/NoY7D+fJnVZREQ6DHYNLI37xBKZNIVchkWPd0ZUsDtKyjR4ds0x/HUxXeqyiIgAMNg1OPbYEZk+C7kMnz8Rgv5t3FBcpsGEb4/i78sZUpdFRMRg19Aqgp2rmd1jJygUcHnxRbi8+CK3FCOzoFTI8OXoENwf6IqiUg3Grz6KI1duS10WEZk5BrsGJIriP8HOViVxNU1LUCrhOvkluE5+iVuKkdmwVMix9MlQRLRyQUFJOZ5aeQSHOCxLRBJisGtABSXlKCrVbifmbGYLFBOZK5WFHN+M7YJerV1RWFqOp1cfxT5OqCAiiTDYNaCMO7tOqCxksLE0r+FIUaNB8YULKL5wAaJGI3U5RE1KZSHH12NCdffcPfvtMa5zR0SSYLBrQBn52mFYZxvzmzghFhXh8rAHcHnYAxCLiqQuh6jJqSzkWDI6FIPaaZdCmfh/MfjjVLLUZRGRmWGwa0C387U9dk42HIYlMkdKhQyfP9EZD3byQplGxEvrY/Fr3A2pyyIiM8Jg14AyGOyIzJ5CLsOnIzrh0VDt9mNTN8bh+6NJUpdFRGaCwa4BVfTYOTPYEZk1uUzA/x7pgCe6NYcoAq/9dBLLD1yWuiwiMgMMdg2IQ7FEVEEmEzD3oXZ45j5/AMAHW+Lxvz8TIIqixJURkSljsGtAFbNinbjUCREBEAQBs4a0wWsDAwEAS/Zewps/n0a5huGOiBoHg10DyizgUCwRVSYIAl7oE4C5D7WHIADrjyRi8vrjKC4rl7o0IjJB5rXYWiP7Z/KE+S13IigUcBo/Xvc1EVU2qntzOFhb4OUNsdh6KgW5Rcfw1ZOhZrfmJRE1Lv5GaUC376xj52RjIXElTU9QKuH+2gypyyAyaIPbe8JOpcDza2Nw4EI6Rn3zN1aM6woXW/P7n0Eiahwcim1At/PMt8eOiO5NRCtXfPdsDzhYW+DE9Ww8vOQQrqTnS10WEZkIBrsGUlRajvwS7T0z5jgrVtRoUHL9Bkqu3+CWYkT/oZOPA36aFA4fJysk3i7Aw0v+Qsy1TKnLIiITwGDXQCqWOrGQC7BXmd8It1hUhEv9++NS//7cUozoHrR0tcWmST3RwVuNzIJSjPrmb/x5OkXqsojIyDHYNZCKYOdorYQgCBJXQ0TGwNXOEhue64F+QW4oLtNg0roYrPrr
itRlEZERY7BrINxOjIjqwlqpwLIxoRjdXbtLxezfzuKD389Cw7XuiKgOGOwaSMWMWGcuTkxEelLIZfhgeDvdQsbLD17B8/8Xg/ziMokrIyJjw2DXQDI4I5aI6qFiIeNFj3eCUiHDjrO38MjSQ7ieWSB1aURkRBjsGkjFPXbcdYKI6uPBTs2w8bkecLG1REJKLoZ/yRmzRHTvGOwayG3eY0dEDaRzc0dsfqkn2nraIz2vBE98/Tc2Hb8udVlEZAQY7BqI2U+eUCjgOOoJOI56AuCWYkT15uVghR8nhSEq2B0l5RpM//4E5v+ZwEkVRFQrBrsGYu5DsTKlEh7vvAOPd96BTGmefwZEDc1aqcDS0aF48f6WAICley/hmTXHkF1YKnFlRGSoGOwaCIdiiagxyGQCZkQFYeHITrBUyLA7IRUPfnEQ51JypS6NiAwQg10Dycgz7+VORFFE2e3bKLt9G6LIoSKihja8czP8NCkczRyscDWjAA8t+Qu/n7wpdVlEZGAY7BpAabkGOUXa9abMdbkTsbAQF8J74kJ4T4iFhVKXQ2SS2jVT47fJ9+G+ABcUlJTjpe9iMW9rPMrKuT8zEWkx2DWAzDvDsIIAqK0sJK6GiEyZk40Sq5/uiud7twAALNt/GU+tOqK7HYSIzBuDXQPIuGufWLmM+8QSUeNSyGWYOagNvhwVAmulHH9dzMCQxQdw7OptqUsjIokx2DUATpwgIikM6eCJn1/oiRYuNkjOLsLIr//GV/sucUkUIjPGYNcAGOyISCqBHnbYPPk+PNDRC+UaER/9kYBn1hzT3SJCROaFwa4BmPsadkQkLVtLBRY93glzH2oP5Z0lUYYsPsCtyIjMUJ2C3ZIlS+Dv7w+VSoXQ0FAcOHCg1vb79u1DaGgoVCoVWrRoga+++qrS86tXr4YgCFUeRUVFdSmvyZn9rhNEJDlBEDCqe3P88kJP+LvY4GZ2EUYui8YyDs0SmRW9g93GjRsxdepUzJo1C7GxsYiIiMCgQYOQmJhYbfsrV65g8ODBiIiIQGxsLN58801MmTIFP/30U6V29vb2SE5OrvRQqVR1+1RN7Hb+nTXszDnYKRRQDx8O9fDh3FKMSEJtveyx+aWeGNrBE2UaEfP+SMCYlYeRkm0c/6NMRPUjiHquJtu9e3eEhIRg6dKlumNt2rTB8OHDMW/evCrtX3/9dWzevBnx8fG6YxMnTsSJEycQHR0NQNtjN3XqVGRlZdXxYwA5OTlQq9XIzs6Gvb19nc9TFy+si8HWUyl4b1hbjOvp36TvTURUHVEUseFoEub8dhaFpeVwsLbARw+3x8B2nlKXRkR60ifj6NVjV1JSgpiYGERGRlY6HhkZiUOHDlX7mujo6Crto6KicOzYMZSW/rPfYV5eHnx9feHt7Y2hQ4ciNja21lqKi4uRk5NT6SGVjLw7Q7G25rk4MREZHkEQ8ES35vh9yn1o30yNrIJSTPy/43j9x5PILy6TujwiaiR6Bbv09HSUl5fD3d290nF3d3ekpKRU+5qUlJRq25eVlSE9PR0AEBQUhNWrV2Pz5s1Yv349VCoVevbsiQsXLtRYy7x586BWq3UPHx8ffT5Kg+LkCW3vgKagAJqCAm4pRmRAWrra4qdJ4ZjUpyUEAdh4LAlDFh/AiaQsqUsjokZQp8kTglB5EV5RFKsc+6/2dx/v0aMHnnzySXTs2BERERH4/vvv0bp1a3z++ec1nnPmzJnIzs7WPZKSkuryURoElzvRbil2LiQU50JCuaUYkYFRKmR4fWAQvnumBzzVKlzNKMAjSw/hi90XuB0ZkYnRK9i5uLhALpdX6Z1LTU2t0itXwcPDo9r2CoUCzs7O1Rclk6Fr16619thZWlrC3t6+0kMKGo2IzAL22BGR4Qtr6Yw/X+6FIXcmVnyy/TweWXoIF27lSl0aETUQvYKdUqlEaGgoduzYUen4jh07EB4eXu1rwsLCqrTfvn07unTpAguL6vdVFUURcXFx8PQ0
/Jt8swpLUbGSgCODHREZOLW1Bb54ojMWPNYRdioFTlzPxpDFB7F07yX23hGZAL2HYqdPn47ly5dj5cqViI+Px7Rp05CYmIiJEycC0A6Rjh07Vtd+4sSJuHbtGqZPn474+HisXLkSK1aswKuvvqprM3v2bGzbtg2XL19GXFwcJkyYgLi4ON05DVnFUid2KgUs5FzvmYgMnyAIeCTUGzum9cb9ga4oKddg/p8JeOSraFxMZe8dkTHTe8GxkSNHIiMjA3PmzEFycjLatWuHrVu3wtfXFwCQnJxcaU07f39/bN26FdOmTcOXX34JLy8vLF68GI888oiuTVZWFp577jmkpKRArVajc+fO2L9/P7p169YAH7FxZRZoZ/aa8/11RGScPNQqrBzXFT/GXMec38/iRFIWBi8+iOkDWuPZiBaQy2q+d5qIDJPe69gZKqnWsdt+JgXPrY1BRx8H/PpizyZ7X0OjKSjAuZBQAEDg8RjIrK0lroiI9JGSXYQ3Np3E3nNpAIBOPg6Y/0gHBHrYSVwZETXaOnZUVdadHjtH6+rvFyQiMgYeahVWjeuK/z3aAXaWCsQlZWHI4gP4eFsCikrLpS6PiO4Rg109VcyIdbQ286FYuRx2UVGwi4oC5HKpqyGiOhAEASO6+GD79F6IbOuOMo2IL/dcQtTC/ThwIU3q8ojoHnBTz3qquMfOwcx77GSWlvBetFDqMoioAXiqrfD12C7YdiYF7/56BtcyCjBmxREM7+SFt4a2hQt32SEyWOyxq6fMisWJzb3HjohMTlSwB3ZM74Vx4X4QBOCXuJvot2AfNh5NhEZjErdnE5kcBrt6qhiKdeCsWCIyQXYqC7z3QDB+eaEn2nraI7uwFK//dAqPLYvG6RvZUpdHRP/CYFdPnDyhpSkoQHxQG8QHtYGmoEDqcoiogXX0ccDml3pi1uA2sLKQI+ZaJoZ9cRBv/nxKt60iEUmPwa6eOHmCiMyFQi7Ds71aYPervfFARy+IIvDd4UTc/8lerIm+yp0riAwAg109cfIEEZkbT7UVFj/RGRuf64EgDztkF5binV/PYNgXf+Hw5QypyyMyawx29SCKIrLYY0dEZqp7C2f8Pvk+vP9gMNRWFohPzsHIr//GS98dR9Jt3pJBJAUGu3rIKy5D2Z2ZYQx2RGSOFHIZxoT5Yc+rfTCqe3MIAvD7yWT0W7APH245i+w7oxpE1DQY7OqhYuKEpUIGKyUX5SUi8+Vko8Tch9pjy+QI3BfggpJyDb45cAW9Pt6D5Qcuo7iMu1cQNQUGu3rgxAkiosraetlj7YRuWP10VwS6a++/+2BLPAZ8uh+/n7wJE9menMhgceeJeuDEibvI5bDp3Uv3NRGZL0EQ0CfQDRGtXPFjTBIWbD+PxNsFeOm7WCz3uYIZUYHoGeAidZlEJonBrh44ceIfMktLNF+2TOoyiMiAyGUCRnZtjmEdvfDN/itYtv8S4pKyMHr5YYS1cMarUYEI9XWUukwik8Kh2Hqo2E7M0YY9dkRENbFWKvBy/1bYO6MPxoX7QSmXIfpyBh5ZeghPrzrCHSyIGhCDXT38MxTLHjsiov/iZqfCew8EY8+MPni8qw/kMgF7zqVh6OcHMen/YnDhVq7UJRIZPQa7evhnKJY9dpqCAiR0DkFC5xBuKUZEtWrmYIWPHumAndN748FOXhAE4I/TKYhcuB8vfXccCSk5UpdIZLQY7OohU7dPLHvsAEAsLIRYWCh1GURkJPxdbLDo8c748+VeiAp2hyhq18AbuPAAnltzDCevZ0ldIpHRYbCrBy53QkRUf4Eedlg2pgu2TonAkPaeEARg+9lbeOCLv/DUyiOIuXZb6hKJjAZnxdaDLthx8gQRUb219bLHl6NDcDE1F0v2XMKvJ25i3/k07DufhrAWzpjUpyUiWrlAEASpSyUyWOyxq4fMfE6eICJqaAFudvh0ZCfsfqU3Hu/qAwu5gOjLGRi78ggGLz6In2Ovo7RcI3WZRAaJwa4e
uI4dEVHj8XW2wUePdMDeGfdjXLgfrJVyxCfnYNrGE+j1vz34Zv9l5BZxL1qiuzHY1VFJmQb5Jdq9Dzkrloio8TRzsMJ7DwTj0Bt9MSMqEC62lkjOLsKHW+MRPm835m2NR3I2J24RAbzHrs4qeutkAmCvYrCDTAbrrl11XxMRNTQHayVevD8AE+7zx69xN/D1/su4lJaPZfsvY/nBKxgY7IGxYb7o5u/E+/DIbDHY1VHFUidqKwvIZPwFIlOp4Lt2jdRlEJEZUFnIMbJrczwW6oM951Lx9f7LOHzlNracSsaWU8kI8rDDU+F+GN6pGayU3LuazAuDXR1xqRMiImnJZAL6tXFHvzbuiE/OwZroa/g59joSUnIxc9MpfPRHAkZ29cGYHr7wcbKWulyiJsExszqqGIp14P11RESSa+Npj3kPt8fhmf0xa3Ab+DhZIbuwFF/vv4xeH+/BhNVHsfPsLZRxNi2ZOPbY1RF3nahMU1CAi/36AwACdu2EzJr/d0xETU9tbYFne7XA+Pv8sfdcKlYfuooDF9KxKyEVuxJS4W5viRFdfDCiiw978cgkMdjVUaaux47BrkJ5ZqbUJRARAQDkdw3TXkrLw4Yjifjp+A3cyinG57sv4os9F3FfgAse79ocA9q6Q6ngABaZBga7OsrS9dhxKJaIyJC1dLXFrCFt8WpUIHacvYUNR5Jw8GI6DlzQPpxtlHgk1BuPhnqjtbud1OUS1QuDXR1l5ldsJ8YeOyIiY2CpkGNoBy8M7eCFxIwCbDyWiB+OXUdqbjG+3n8ZX++/jGAvezwc4o0HOnrB1c5S6pKJ9MZgV0cV99hx8gQRkfFp7myNGVFBmNa/NXYnpOKHmOvYey4VZ27m4MzNs5i7NR69WrngoRBvRLZ1h8qCy6aQcWCwq6OKWbFOvMeOiMhoKeQyRAZ7IDLYA7fzS/D7yZvYdPwG4pKysOdcGvacS4OdpQKD2nvgwU7N0KOFM+Rcu5QMGINdHXHyBBGRaXGyUWJsmB/GhvnhUloefom9gZ9jb+B6ZiG+P3Yd3x+7DhdbJQa188TQDp7o4ufEkEcGh8GujnSTJ2w4FAsAkMmgatdO9zURkTFr6WqLVyIDMa1/axy9ehu/xN3AH6dTkJ5XgrV/X8Pav6/Bzc4Sg9t7YlhHT3T2ceQuRGQQBFEURamLaAg5OTlQq9XIzs6Gvb19o76XRiMiYNZWaETg8Jv94G6vatT3IyIi6ZWWa/DXxXRsOZmMbWdSkFNUpnvOS63CwHaeiAp2R6ivIxRy/g8uNRx9Mg6DXR1kF5Si45ztAIBzHwyEpYI31RIRmZOSMg0OXEjDlpPJ2H72FvKK/wl5TjZK9AtyQ2SwByJauXDiBdWbPhmHQ7F1UHF/nbVSzlBHRGSGlAqZbgHkotJy7Dufhu1nbmFXwi3czi/BDzHX8UPMdVhZyNGrtQsi23qgb5Abl8iiRsdgVwcVwY7bif1DU1iIy0OGAgBabPkdMisriSsiImoaKgs5ooI9EBXsgbJyDY5cvY3tZ25hx9lbuJFViG1nbmHbmVuQywR09XPE/YFu6BPohtbuthAE3pdHDYvBrg6yCrUTJ9RWnDihI4oovXlT9zURkTlSyGUIb+mC8JYueHdYW5y5mYMdZ29h+9lbiE/Owd+Xb+Pvy7cx748EeKlV6B3ohj6BrugZ4AJbS/6TTPXHv0V1kM0ZsURE9B8EQUC7Zmq0a6bGtAGtkZhRgN0Jt7D3fBqiL2XgZnYR1h9JxPojibCQC+jq54Q+ga7oE+iGVm7szaO6YbCrg2z22BERkZ6aO1tjXE9/jOvpj6LSckRfzsDehFTsPZ+GaxkFOHQpA4cuZWDu1gS421uiZ0sX9AzQPjzUXH2B7g2DXR1UrGGntuI9dkREpD+VhRz3B7rh/kA3AMCV9HzsuRPy/r6cgVs5xdgUewObYm8AAFq42uC+AO0Qb1gLZ6i5nSXVgMGuDrIKK3ad4A8WERHVn7+LDfzv88f4
+7S9eTHXMnHwYjoOXUzHqRvZuJyWj8tp+VgTfQ0yAWjfTI0eLZ3Rzc8JXXydGPRIh8GuDjgUS0REjUVlIdcNwQLa+7r/vpKBvy6m46+L6biUlo8T17Nx4no2lu27DEEAAt3t0M3fSfvwc4IbF843Wwx2dVAxecKBwe4fggBlQEvd10RE1DDU1ha65VQAICW7CIcupePw5ds4evU2LqfnIyElFwkpuVgTfQ0A4OtsjW5+Tuh6J+j5OltzMoaZYLCrg4rlTjgU+w+ZlRVa/v671GUQEZk8D7UKD4d44+EQbwBAam4Rjl3NxJErt3Hkym3Ep+TgWkYBrmUU4IeY6wC0u2F08nFAZx8HdGrugI4+DrBX8d8wU8RgVwcVQ7H27LEjIiKJudmpMLi9Jwa39wQA5BSVIuaaNugdvXIbJ69n43Z+CXYnpGJ3QioA7cBKgKutNuw1d0Tn5g5o7W4HuYy9esaOwa4OsnRDsZwVS0REhsVeZVFpxm1xWTnik3MRm5iJ2MQsxCZlIul2IS6k5uFCap6uV89aKUewlz2CvdRof2f9vZauNlDIZVJ+HNITg52eRFFETsXkCQ7F6mgKC3HlsccAAP4//MAtxYiIDISlQo5OPg7o5OOAp3tqj6XnFSPuTsiLTczCyevZyCsuw9GrmTh6NVP3WpWFDG087dHOS412zezRrpkardzsoFQw7BkqBjs9FZaWo6RcA4CTJyoRRZRcvKT7moiIDJeLrSX6t3VH/7buAIByjYhLaXk4dT0bp29m48yNHJy5mY38knJtL19ilu61SrkMgR52CPayR6CHHYI87BHkYQdHG45iGQIGOz1VDMNayAVYK+USV0NERFR/cpmA1u52aO1uh0dCtZMyNBoRVzLycfpG9p1HDk7fzEZuURlO3cjGqRvZlc7hZmeJIE9tyAt0t0OQpx0C3GxhqeC/lU2JwU5Pd69hx6njRERkqmQyAS1dbdHS1RYPdmoGQHs7UuLtApy6kY2EZO0SK+du5SDpdiFSc4uRmpuG/efTdOeQywT4u9gg0MMOAa62CHDTnq+Fqw1UFgx8jYHBTk//bCfGYVgiIjIvgiDA19kGvs42GNrhn+N5xWU4l5J755GD+DtfZxeW4mJqHi6m5v3rPIC3oxVautoiwNUWLe8EvgA3WzhxSLdeGOz0lK3bTox/8YiIiADA1lKBUF9HhPo66o6JoohbOcVISMnB+Vu5uJSaj4tp2pCXXViKpNuFSLpdiL3n0iqdy9HaQterpw2R1vC78187rr33nxjs9MTtxIiIiP6bIAjwUKvgoVahz52lVwBt4LudX4KLqXm4lJaPS3fC3qW0PFzPLERmQSmOXcvEsWuZVc7pbKO8K+jZwM/FWvtfZ2t2uNzBYKenLG4nVj1BgIWXl+5rIiKi6giCAGdbSzjbWqJ7C+dKzxWWlONyujboXcsowNWMfN0uGul5xcjIL0FGfgmO3zVLt4K9SgFvR2t4O1rB29EazRyt7nyt/d5cOmQY7PSUzTXsqiWzskLA7l1Sl0FEREbMSilHsJcawV7qKs/lFZfh2p2gdzUjH9fS/wl+KTlFyCkqw9nkHJxNzqn23HYqBZo5WN0V/rQPT7UVPNUqONtamsTOGwx2esriUCwREVGTs7VU1Bj6CkvKkZRZgBuZhbieWYDrmYXaR1YhbmQWID2vBLlFZUhI0c7krY5CJsDdXjt07HHnv57qu/9rBTc7S1gY+E4cDHZ6yuZQLBERkUGxUsp16/BVp6CkDDezCpFUEfh0IbAQKdlFSM0tQplGxI2sQtzIKqzxfQRBu7izp1oFd3sV3O0t4W6nwmNdfOChVjXWx9MLg52eOBRbPU1REa49OQYA4Pt/ayFTGcZfcCIiImulAgFudghwqz74lZVrkJZXjOTsIqRkFyE5uwi3corufF+o+760XERabjHScosB/LNAc2SwB4OdscqqWO7EirNvKtFoUHT6tO5rIiIiY6GQy+7ca1fzPucajYiM
/JJKgS81txi3corg6WAYoQ5gsNObboFi9tgRERGZDZlMgKudJVztLNGuWdX7/AyFYd8BaIC4jh0REREZKgY7PZRrROQWlQHg5AkiIiIyPAx2esi501sHsMeOiIiIDE+dgt2SJUvg7+8PlUqF0NBQHDhwoNb2+/btQ2hoKFQqFVq0aIGvvvqqSpuffvoJbdu2haWlJdq2bYuff/65LqU1qoo17GwtFVAY+Do2REREZH70TicbN27E1KlTMWvWLMTGxiIiIgKDBg1CYmJite2vXLmCwYMHIyIiArGxsXjzzTcxZcoU/PTTT7o20dHRGDlyJMaMGYMTJ05gzJgxGDFiBA4fPlz3T9YIsgq0M2LZW1c9uaMj5I6O/92QiIiIGoUgiqKozwu6d++OkJAQLF26VHesTZs2GD58OObNm1el/euvv47NmzcjPj5ed2zixIk4ceIEoqOjAQAjR45ETk4O/vjjD12bgQMHwtHREevXr7+nunJycqBWq5GdnQ17e3t9PtI923suFeNWHUVbT3tsfTmiUd6DiIiI6G76ZBy9euxKSkoQExODyMjISscjIyNx6NChal8THR1dpX1UVBSOHTuG0tLSWtvUdE6pVMyIdeBSJ0RERGSA9FrHLj09HeXl5XB3d6903N3dHSkpKdW+JiUlpdr2ZWVlSE9Ph6enZ41tajonABQXF6O4uFj3fU5O9Zv+NqSKNewY7IiIiMgQ1WkGgCAIlb4XRbHKsf9q/+/j+p5z3rx5UKvVuoePj889119XGlGEnUoBB2vuOvFvmqIiXBszFtfGjIWmqEjqcoiIiMySXj12Li4ukMvlVXrSUlNTq/S4VfDw8Ki2vUKhgLOzc61tajonAMycORPTp0/XfZ+Tk9Po4e7pnv54uqc/9Lwt0TxoNCg4elT3NRERETU9vXrslEolQkNDsWPHjkrHd+zYgfDw8GpfExYWVqX99u3b0aVLF1hYWNTapqZzAoClpSXs7e0rPZpKbT2JRERERFLRe6/Y6dOnY8yYMejSpQvCwsLw9ddfIzExERMnTgSg7Um7ceMG1qxZA0A7A/aLL77A9OnT8eyzzyI6OhorVqyoNNv15ZdfRq9evTB//nw8+OCD+PXXX7Fz504cPHiwgT4mERERkenTO9iNHDkSGRkZmDNnDpKTk9GuXTts3boVvr6+AIDk5ORKa9r5+/tj69atmDZtGr788kt4eXlh8eLFeOSRR3RtwsPDsWHDBrz11lt4++230bJlS2zcuBHdu3dvgI9IREREZB70XsfOUDXFOnZUM01BAc6FhAIAAo/HQGZtLXFFREREpqHR1rEjIiIiIsOl91AsUU0EKyupSyAiIjJrDHbUIGTW1giKPS51GURERGaNQ7FEREREJoLBjoiIiMhEMNhRg9AUFyPx+eeR+Pzz0Ny1hy8RERE1Hd5jRw2jvBz5+/brviYiIqKmxx47IiIiIhPBYEdERERkIhjsiIiIiEwEgx0RERGRiWCwIyIiIjIRJjMrVhRFANqNcqnpaQoKkHdnNmxOTg5kZWUSV0RERGQaKrJNRdapjckEu9zcXACAj4+PxJUQPD2lroCIiMjk5ObmQq1W19pGEO8l/hkBjUaDmzdvws7ODoIgNNr75OTkwMfHB0lJSbC3t2+09yH98doYNl4fw8VrY9h4fQxXU10bURSRm5sLLy8vyGS130VnMj12MpkM3t7eTfZ+9vb2/AEzULw2ho3Xx3Dx2hg2Xh/D1RTX5r966ipw8gQRERGRiWCwIyIiIjIRDHZ6srS0xLvvvgtLS0upS6F/4bUxbLw+hovXxrDx+hguQ7w2JjN5goiIiMjcsceOiIiIyEQw2BERERGZCAY7IiIiIhPBYEdERERkIhjs9LBkyRL4+/tDpVIhNDQUBw4ckLoks7R//34MGzYMXl5eEAQBv/zyS6XnRVHEe++9By8vL1hZWaFPnz44c+aMNMWamXnz5qFr166ws7ODm5sbhg8fjnPnzlVqw+sj
jaVLl6JDhw66hVTDwsLwxx9/6J7ndTEc8+bNgyAImDp1qu4Yr4903nvvPQiCUOnh4eGhe97Qrg2D3T3auHEjpk6dilmzZiE2NhYREREYNGgQEhMTpS7N7OTn56Njx4744osvqn3+f//7Hz799FN88cUXOHr0KDw8PDBgwADdfsLUePbt24cXX3wRf//9N3bs2IGysjJERkYiPz9f14bXRxre3t746KOPcOzYMRw7dgx9+/bFgw8+qPsHiNfFMBw9ehRff/01OnToUOk4r4+0goODkZycrHucOnVK95zBXRuR7km3bt3EiRMnVjoWFBQkvvHGGxJVRKIoigDEn3/+Wfe9RqMRPTw8xI8++kh3rKioSFSr1eJXX30lQYXmLTU1VQQg7tu3TxRFXh9D4+joKC5fvpzXxUDk5uaKrVq1Enfs2CH27t1bfPnll0VR5M+N1N59912xY8eO1T5niNeGPXb3oKSkBDExMYiMjKx0PDIyEocOHZKoKqrOlStXkJKSUulaWVpaonfv3rxWEsjOzgYAODk5AeD1MRTl5eXYsGED8vPzERYWxutiIF588UUMGTIE/fv3r3Sc10d6Fy5cgJeXF/z9/fH444/j8uXLAAzz2igkeVcjk56ejvLycri7u1c67u7ujpSUFImqoupUXI/qrtW1a9ekKMlsiaKI6dOn47777kO7du0A8PpI7dSpUwgLC0NRURFsbW3x888/o23btrp/gHhdpLNhwwYcP34cR48erfIcf26k1b17d6xZswatW7fGrVu38MEHHyA8PBxnzpwxyGvDYKcHQRAqfS+KYpVjZBh4raT30ksv4eTJkzh48GCV53h9pBEYGIi4uDhkZWXhp59+wlNPPYV9+/bpnud1kUZSUhJefvllbN++HSqVqsZ2vD7SGDRokO7r9u3bIywsDC1btsS3336LHj16ADCsa8Oh2Hvg4uICuVxepXcuNTW1SkonaVXMVOK1ktbkyZOxefNm7NmzB97e3rrjvD7SUiqVCAgIQJcuXTBv3jx07NgRixYt4nWRWExMDFJTUxEaGgqFQgGFQoF9+/Zh8eLFUCgUumvA62MYbGxs0L59e1y4cMEgf3YY7O6BUqlEaGgoduzYUen4jh07EB4eLlFVVB1/f394eHhUulYlJSXYt28fr1UTEEURL730EjZt2oTdu3fD39+/0vO8PoZFFEUUFxfzukisX79+OHXqFOLi4nSPLl26YPTo0YiLi0OLFi14fQxIcXEx4uPj4enpaZg/O5JM2TBCGzZsEC0sLMQVK1aIZ8+eFadOnSra2NiIV69elbo0s5ObmyvGxsaKsbGxIgDx008/FWNjY8Vr166JoiiKH330kahWq8VNmzaJp06dEp944gnR09NTzMnJkbhy0zdp0iRRrVaLe/fuFZOTk3WPgoICXRteH2nMnDlT3L9/v3jlyhXx5MmT4ptvvinKZDJx+/btoijyuhiau2fFiiKvj5ReeeUVce/eveLly5fFv//+Wxw6dKhoZ2en+/ff0K4Ng50evvzyS9HX11dUKpViSEiIbgkHalp79uwRAVR5PPXUU6Ioaqefv/vuu6KHh4doaWkp9urVSzx16pS0RZuJ6q4LAHHVqlW6Nrw+0hg/frzu95erq6vYr18/XagTRV4XQ/PvYMfrI52RI0eKnp6eooWFhejl5SU+/PDD4pkzZ3TPG9q1EURRFKXpKyQiIiKihsR77IiIiIhMBIMdERERkYlgsCMiIiIyEQx2RERERCaCwY6IiIjIRDDYEREREZkIBjsiIiIiE8FgR0RERGQiGOyIiIiITASDHREREZGJYLAjIiIiMhEMdkREREQm4v8BSDjLhSwYPuYAAAAASUVORK5CYII=", 82 | "text/plain": [ 83 | "
" 84 | ] 85 | }, 86 | "metadata": {}, 87 | "output_type": "display_data" 88 | } 89 | ], 90 | "source": [ 91 | "def plot_biexponential_psp(a, tau, taux):\n", 92 | " # differential equation in right form for scipy\n", 93 | " def f(t, z, a, tau, taux):\n", 94 | " v, x = z\n", 95 | " return [(a*x-v)/tau, -x/taux]\n", 96 | " # solution using scipy diffeq solver\n", 97 | " sol = solve_ivp(f, [0, 50], [0, 1], args=(a, tau, taux), max_step=0.1)\n", 98 | " plt.plot(sol.t, sol.y[0], label='RK45 integration')\n", 99 | " # analytic solution\n", 100 | " tmax = tau*taux/(tau-taux)*np.log(tau/taux)\n", 101 | " plt.axvline(tmax, ls='--', c='C3', label='Analytic peak time')\n", 102 | " plt.axhline(((taux/tau)**(tau/(tau-taux))), ls='--', c='C4', label='Analytic peak value')\n", 103 | " plt.legend(loc='best')\n", 104 | " plt.tight_layout()\n", 105 | "\n", 106 | "plot_biexponential_psp(1, 10, 5)" 107 | ] 108 | }, 109 | { 110 | "cell_type": "markdown", 111 | "metadata": {}, 112 | "source": [ 113 | "### Task 1B\n", 114 | "\n", 115 | "Show the different range of delays you can get by varying $\\tau$ in the range 1-9 ms and $\\tau_x$ from 10-100 ms." 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": [ 122 | "### Task 1C\n", 123 | "\n", 124 | "Write a simulator of $N$ neurons using this model, each of which receives a single input spike train at time 0, but with potentially different values of $\\tau$ and $\\tau_x$ for each neuron. Tune the parameters so that each neuron fires a single output spike with a different delay.\n", 125 | "\n", 126 | "Use the template and test it using the code below to verify that you get similar results." 
127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": null, 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | "def simulate_biexponential_network(num_neurons, tau, taux, duration=10, dt=0.1):\n", 136 | " # state variables\n", 137 | " v = np.zeros(num_neurons)\n", 138 | " x = np.ones(num_neurons)\n", 139 | " # record activity\n", 140 | " num_time_steps = int(duration/dt)\n", 141 | " v_rec = np.zeros((num_neurons, num_time_steps))\n", 142 | " spike_time = np.zeros(num_neurons) # they will only fire one spike each\n", 143 | " # run simulation: FILL IN THE GAP HERE\n", 144 | " return v_rec, spike_time\n", 145 | "\n", 146 | "taux = np.linspace(.5, 9, 10)\n", 147 | "tau = 10\n", 148 | "v, spike_times = simulate_biexponential_network(len(taux), tau, taux)\n", 149 | "T = np.arange(v.shape[1])*0.1\n", 150 | "for i in range(len(taux)):\n", 151 | " plt.plot(T, i+v[i, :], '-C0')\n", 152 | " plt.plot(spike_times[i], i+v[i, int(spike_times[i]/0.1)], 'or')\n", 153 | "plt.xlabel('Time (ms)')\n", 154 | "plt.ylabel('Membrane potential for neurons')\n", 155 | "plt.yticks([])\n", 156 | "plt.tight_layout();" 157 | ] 158 | }, 159 | { 160 | "cell_type": "markdown", 161 | "metadata": {}, 162 | "source": [ 163 | "## Part 2 - Order sensitive network with lateral inhibition\n", 164 | "\n", 165 | "In this part, we're going to create a network that takes two spike trains as input, one is delayed relative to the other with some delay, and can determine which spike train is delayed relative to which.\n", 166 | "\n", 167 | "To start with, we'll use this function to generate these spike trains. It takes as an argument the number of time steps to generate, how many spikes there should be overall in each spike train, and a delay (in time steps). It returns an array ``spikes`` of shape ``(2, num_time_steps)`` of 0s and 1s (where 1 indicates a spike at that time). The delay can be positive or negative." 
168 | ] 169 | }, 170 | { 171 | "cell_type": "code", 172 | "execution_count": 7, 173 | "metadata": {}, 174 | "outputs": [ 175 | { 176 | "data": { 177 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAnYAAAHWCAYAAAD6oMSKAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA7YUlEQVR4nO3deXxU9b3/8fcAWdgSlpCtQEiRPRggCEm8LAqERRavsrkEKBhKUYFGXFKlLLeV2qsQFgHxohGxIVpAtCIaLGEpixISFqUU22giTkSUJICShOT8/uDH6JBtQmYS5vB6Ph7n8eB8z/d853POnIlvvzNnxmIYhiEAAAC4vXp1XQAAAACcg2AHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEm4V7Hbv3q1Ro0YpODhYFotFb7/9dqX909LSZLFYyiz//Oc/7fpt2rRJXbt2lZeXl7p27aotW7a48CgAAABcw62C3cWLFxUeHq6VK1dWa7+TJ0/KarXalg4dOti27d+/XxMmTFBsbKyOHDmi2NhYjR8/XgcPHnR2+QAAAC5lMQzDqOsirofFYtGWLVt09913V9gnLS1Nd9xxh86dO6dmzZqV22fChAkqKCjQ+++/b2sbNmyYmjdvruTkZCdXDQAA4DoN6rqA2tCzZ09dunRJXbt21TPPPKM77rjDtm3//v367W9/a9d/6NChSkxMrHC8wsJCFRYW2tZLS0v1/fffq2XLlrJYLE6vHwAA3LwMw9D58+cVHBysevUqf7PV1MEuKChIa9euVUREhAoLC/X6669r0KBBSktLU//+/SVJubm5CggIsNsvICBAubm5FY67ePFiLVy40KW1AwAA/FxOTo5at25daR9TB7tOnTqpU6dOtvWoqCjl5OTo+eeftwU7SWVm2QzDqHTmLSEhQfHx8bb1/Px8tW3bVv+lEWogDyceQd3b8q9jtn//d8futb5/RWM5a8y6UtF5udpeXtu17TV9LHdUnfNW08eo6jmozfPqqseq6jVl1mvPFc9nbRyrs/4G3qjPizO56r89VY11PX+Lynterx3jsoq1V9vUtGnTKsczdbArT2RkpDZs2GBbDwwMLDM7d+bMmTKzeD/n5eUlLy+vMu0N5KEGFnMFO5+mP035Xs+x1XT/isZy1ph1paLzcrW9vLZr22v6WO6oOuetpo9R1XNQm+fVVY9V1WvKrNeeK57P2jhWZ/0NvFGfF2dy1X97qhrrev4Wlfe8lhnj/98N4cjHvdzqrlhnyMjIUFBQkG09KipKqampdn0+/PBDRUdH13ZpAAAANeJWM3YXLlzQ559/blvPyspSZmamWrRoobZt2yohIUGnT5/W+vXrJUmJiYlq166dunXrpqKiIm3YsEGbNm3Spk2bbGPMnj1b/fv313PPPacxY8Zo69at2rFjh/bu3VvrxwcAAFATbhXsDh06ZHdH69XPuU2ePFlJSUmyWq3Kzs62bS8qKtLcuXN1+vRpNWzYUN26ddN7772nESNG2PpER0dr48aNeuaZZzRv3jy1b99eKSkp6tu3b+0dGAAAgBO4VbAbOHCgKvvavaSkJLv1J554Qk888USV444dO1Zjx46taXkAAAB16qb7jB0AAIBZEewAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgA
AwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACbhVsFu9+7dGjVqlIKDg2WxWPT2229X2n/z5s0aMmSIWrVqJR8fH0VFRemDDz6w65OUlCSLxVJmuXTpkguPBAAAwPncKthdvHhR4eHhWrlypUP9d+/erSFDhmjbtm1KT0/XHXfcoVGjRikjI8Oun4+Pj6xWq93i7e3tikMAAABwmQZ1XUB1DB8+XMOHD3e4f2Jiot36s88+q61bt+rdd99Vz549be0Wi0WBgYHOKhMAAKBOuNWMXU2Vlpbq/PnzatGihV37hQsXFBISotatW2vkyJFlZvQAAADcwU0V7F544QVdvHhR48ePt7V17txZSUlJeuedd5ScnCxvb2/dfvvtOnXqVIXjFBYWqqCgwG4BAACoa271VmxNJCcna8GCBdq6dav8/f1t7ZGRkYqMjLSt33777erVq5dWrFih5cuXlzvW4sWLtXDhQpfXDAAAUB03xYxdSkqKpk2bpjfffFODBw+utG+9evV02223VTpjl5CQoPz8fNuSk5Pj7JIBAACqzfQzdsnJyZo6daqSk5N11113VdnfMAxlZmaqe/fuFfbx8vKSl5eXM8sEAACoMbcKdhcuXNDnn39uW8/KylJmZqZatGihtm3bKiEhQadPn9b69eslXQl1kyZN0rJlyxQZGanc3FxJUsOGDeXr6ytJWrhwoSIjI9WhQwcVFBRo+fLlyszM1Isvvlj7BwgAAFADbvVW7KFDh9SzZ0/bV5XEx8erZ8+e+v3vfy9Jslqtys7OtvV/6aWXdPnyZT388MMKCgqyLbNnz7b1ycvL0/Tp09WlSxfFxMTo9OnT2r17t/r06VO7BwcAAFBDbjVjN3DgQBmGUeH2pKQku/W0tLQqx1y6dKmWLl1aw8oAAADqnlvN2AEAAKBiBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEm4VbDbvXu3Ro0apeDgYFksFr399ttV7rNr1y5FRETI29tbv/zlL7VmzZoyfTZt2qSuXbvKy8tLXbt21ZYtW1xQPQAAgGu5VbC7ePGiwsPDtXLlSof6Z2VlacSIEerXr58yMjL0u9/9TrNmzdKmTZtsffbv368JEyYoNjZWR44cUWxsrMaPH6+DBw+66jAAAABcokFdF1Adw4cP1/Dhwx3uv2bNGrVt21aJiYmSpC5duujQoUN6/vnnde+990qSEhMTNWTIECUkJEiSEhIStGvXLiUmJio5OdnpxwAAAOAqbjVjV1379+9XTEyMXdvQoUN16NAhFRcXV9pn3759tVYnAACAM7jVjF115ebmKiAgwK4tICBAly9f1tmzZxUUFFRhn9zc3ArHLSwsVGFhoW29oKDAuYUDAABcB1PP2EmSxWKxWzcMo0x7eX2ubfu5xYsXy9fX17a0adPGiRUDAABcH1MHu8DAwDIzb2fOnFGDBg3UsmXLSvtcO4v3cwkJCcrPz7ctOTk5zi8eAACgmkwd7KKiopSammrX9uGHH6p3797y8PCotE90dHSF43p5ecnHx8duAQAAqGtu9Rm7Cxcu6PPPP7etZ2VlKTMzUy1atFDbtm2VkJCg06dPa/369ZKkGTN
maOXKlYqPj1dcXJz279+vdevW2d3tOnv2bPXv31/PPfecxowZo61bt2rHjh3au3dvrR8fAABATbjVjN2hQ4fUs2dP9ezZU5IUHx+vnj176ve//70kyWq1Kjs729Y/NDRU27ZtU1pamnr06KH/+Z//0fLly21fdSJJ0dHR2rhxo1599VXdeuutSkpKUkpKivr27Vu7BwcAAFBDbjVjN3DgQNvND+VJSkoq0zZgwAAdPny40nHHjh2rsWPH1rQ8AACAOuVWM3YAAACoGMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASbhfsVq1apdDQUHl7eysiIkJ79uypsO+UKVNksVjKLN26dbP1SUpKKrfPpUuXauNwAAAAnMatgl1KSormzJmjp59+WhkZGerXr5+GDx+u7OzscvsvW7ZMVqvVtuTk5KhFixYaN26cXT8fHx+7flarVd7e3rVxSAAAAE7jVsFuyZIlmjZtmh566CF16dJFiYmJatOmjVavXl1uf19fXwUGBtqWQ4cO6dy5c/rVr35l189isdj1CwwMrI3DAQAAcCq3CXZFRUVKT09XTEyMXXtMTIz27dvn0Bjr1q3T4MGDFRISYtd+4cIFhYSEqHXr1ho5cqQyMjKcVjcAAEBtaVDXBTjq7NmzKikpUUBAgF17QECAcnNzq9zfarXq/fff11/+8he79s6dOyspKUndu3dXQUGBli1bpttvv11HjhxRhw4dyh2rsLBQhYWFtvWCgoLrOCIAAADncpsZu6ssFovdumEYZdrKk5SUpGbNmunuu++2a4+MjNSDDz6o8PBw9evXT2+++aY6duyoFStWVDjW4sWL5evra1vatGlzXccCAADgTG4T7Pz8/FS/fv0ys3NnzpwpM4t3LcMw9Morryg2Nlaenp6V9q1Xr55uu+02nTp1qsI+CQkJys/Pty05OTmOHwgAAICLuE2w8/T0VEREhFJTU+3aU1NTFR0dXem+u3bt0ueff65p06ZV+TiGYSgzM1NBQUEV9vHy8pKPj4/dAgAAUNfc5jN2khQfH6/Y2Fj17t1bUVFRWrt2rbKzszVjxgxJV2bSTp8+rfXr19vtt27dOvXt21dhYWFlxly4cKEiIyPVoUMHFRQUaPny5crMzNSLL75YK8cEAADgLG4V7CZMmKDvvvtOixYtktVqVVhYmLZt22a7y9VqtZb5Trv8/Hxt2rRJy5YtK3fMvLw8TZ8+Xbm5ufL19VXPnj21e/du9enTx+XHAwAA4ExuFewkaebMmZo5c2a525KSksq0+fr66ocffqhwvKVLl2rp0qXOKg8AAKDOuM1n7AAAAFA5gh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZxXV9QnJeXp48//lhnzpxRaWmp3bZJkyY5pTAAAABUT7WD3bvvvqsHHnhAFy9eVNOmTWWxWGzbLBYLwQ4AAKCOVPut2Mcee0xTp07V+fPnlZeXp3PnztmW77//3hU1AgAAwAHVDnanT5/WrFmz1KhRI1fUAwAAgOtU7WA3dOhQHTp0yBW1AAAAoAaq/Rm7u+66S48//rg+++wzde/eXR4eHnbbR48e7bTiAAAA4LhqB7u4uDhJ0qJFi8pss1gsKikpqXlVAAAAqLZqB7trv94EAAAANwa+oBgAAMAkHJqxW758uaZPny5vb28tX7680r6zZs1
ySmEAAACoHoeC3dKlS/XAAw/I29tbS5curbCfxWIh2AEAANQRh4JdVlZWuf8GAADAjYPP2AEAAJhEte+KlaSvvvpK77zzjrKzs1VUVGS3bcmSJU4pDAAAANVT7WD30UcfafTo0QoNDdXJkycVFhamL774QoZhqFevXq6oEQAAAA6o9luxCQkJeuyxx3T8+HF5e3tr06ZNysnJ0YABAzRu3DhX1AgAAAAHVDvYnThxQpMnT5YkNWjQQD/++KOaNGmiRYsW6bnnnnN6gQAAAHBMtYNd48aNVVhYKEkKDg7Wv//9b9u2s2fPOq8yAAAAVEu1P2MXGRmpf/zjH+ratavuuusuPfbYYzp27Jg2b96syMhIV9QIAAAAB1Q72C1ZskQXLlyQJC1YsEAXLlxQSkqKbrnllkq/vBgAAACuVa1gV1JSopycHN16662SpEaNGmnVqlUuKQwAAADVU63P2NWvX19Dhw5VXl6ei8oBAADA9ar2zRPdu3fXf/7zH1fUAgAAgBqodrD74x//qLlz5+pvf/ubrFarCgoK7BYAAADUjWrfPDFs2DBJ0ujRo2WxWGzthmHIYrGopKTEedUBAADAYdUOdjt37nRFHQAAAKihage70NBQtWnTxm62TroyY5eTk+O0wgAAAFA91f6MXWhoqL799tsy7d9//71CQ0OdUhQAAACqr9rB7upn6a514cIFeXt7O6UoAAAAVJ/Db8XGx8dLkiwWi+bNm6dGjRrZtpWUlOjgwYPq0aOH0wsEAACAYxwOdhkZGZKuzNgdO3ZMnp6etm2enp4KDw/X3LlznV8hAAAAHOJwsLt6N+yvfvUrLVu2TD4+Pi4rCgAAANVX7c/Yvfrqq3Ua6latWqXQ0FB5e3srIiJCe/bsqbBvWlqaLBZLmeWf//ynXb9Nmzapa9eu8vLyUteuXbVlyxZXHwYAAIDTVTvY1aWUlBTNmTNHTz/9tDIyMtSvXz8NHz5c2dnZle538uRJWa1W29KhQwfbtv3792vChAmKjY3VkSNHFBsbq/Hjx+vgwYOuPhwAAACncqtgt2TJEk2bNk0PPfSQunTposTERLVp00arV6+udD9/f38FBgbalvr169u2JSYmasiQIUpISFDnzp2VkJCgQYMGKTEx0cVHAwAA4FxuE+yKioqUnp6umJgYu/aYmBjt27ev0n179uypoKAgDRo0qMwvZ+zfv7/MmEOHDq10zMLCQn4jFwAA3HCq/csTdeXs2bMqKSlRQECAXXtAQIByc3PL3ScoKEhr165VRESECgsL9frrr2vQoEFKS0tT//79JUm5ubnVGlOSFi9erIULF9bwiK7fB19n2v49NLiHS/evavvPxypvn+up73prqamKzsvV9vLanF1XeWPVdPwb6bzV9DGqeg6c8by44jmojqpeUzfS678q1dnfFc9nRfs78zku7/m6XlW9Zpx1Hdbm9fxzzvy7WdVYNf1bdL3P6/U8hrOfj+uasXv99dd1++23Kzg4WF9++aWkK29pbt261anFlae8nzIr7wuTJalTp06Ki4tTr169FBUVpVWrVumuu+7S888/f91jSlJCQoLy8/NtCz+lBgAAbgTVDnarV69WfHy8RowYoby8PJWUlEiSmjVr5tLPpfn5+al+/fplZtLOnDlTZsatMpGRkTp16pRtPTAwsNpjenl5ycfHx24BAACoa9UOditWrNDLL7+sp59+2u4mhN69e+vYsWNOLe7nPD09FRERodTUVLv21NRURUdHOzxORkaGgoKCbOtRUVFlxvzwww+rNSYAAMCNoNqfscvKylLPnj3LtHt5eenixYtOKaoi8fHxio2NVe/evRUVFaW1a9cqOztbM2bMkHTlLdLTp09r/fr1kq68PdyuXTt169ZNRUVF2rBhgzZt2qRNmzbZxpw9e7b69++v5557TmPGjNHWrVu1Y8cO7d2716XHAgAA4GzVDnahoaHKzMxUSEiIXfv777+vrl27Oq2w8kyYMEHfffe
dFi1aJKvVqrCwMG3bts1Wi9VqtftOu6KiIs2dO1enT59Ww4YN1a1bN7333nsaMWKErU90dLQ2btyoZ555RvPmzVP79u2VkpKivn37uvRYAAAAnK3awe7xxx/Xww8/rEuXLskwDH388cdKTk7W4sWL9X//93+uqNHOzJkzNXPmzHK3JSUl2a0/8cQTeuKJJ6occ+zYsRo7dqwzygMAAKgz1Q52v/rVr3T58mU98cQT+uGHH3T//ffrF7/4hZYtW6aJEye6okYAAAA4oNrBLi8vT3FxcYqLi9PZs2dVWloqf39/SdLnn3+uW265xelFAgAAoGrVvit2xIgRunTpkqQrX0FyNdSdPHlSAwcOdGpxAAAAcFy1g13z5s1199136/Lly7a2EydOaODAgbr33nudWhwAAAAcV+1gt2nTJl28eFH333+/DMPQ8ePHNXDgQN13331atmyZK2oEAACAA6od7Ly9vfW3v/1Np06d0rhx4zRo0CBNmjRJS5YscUV9AAAAcJBDN08UFBTYrVssFqWkpGjw4MG69957NW/ePFsffl4LAACgbjgU7Jo1ayaLxVKm3TAMrVmzRi+99JIMw5DFYrH9diwAAABql0PBbufOna6uAwAAADXkULAbMGCAq+sAAABADTkU7I4ePaqwsDDVq1dPR48erbTvrbfe6pTCAAAAUD0OBbsePXooNzdX/v7+6tGjhywWiwzDKNOPz9gBAADUHYeCXVZWllq1amX7NwAAAG48DgW7kJCQcv8NAACAG4dDwe5aJ0+e1IoVK3TixAlZLBZ17txZjz76qDp16uTs+gAAAOCgav/yxF//+leFhYUpPT1d4eHhuvXWW3X48GGFhYXprbfeckWNAAAAcEC1Z+yeeOIJJSQkaNGiRXbt8+fP15NPPqlx48Y5rTgAAAA4rtozdrm5uZo0aVKZ9gcffFC5ublOKQoAAADVV+1gN3DgQO3Zs6dM+969e9WvXz+nFAUAAIDqq/ZbsaNHj9aTTz6p9PR0RUZGSpIOHDigt956SwsXLtQ777xj1xcAAAC1o9rBbubMmZKkVatWadWqVeVuk/iyYgAAgNpW7WBXWlrqijoAAABQQ9X+jB0AAABuTA4Hu4MHD+r999+3a1u/fr1CQ0Pl7++v6dOnq7Cw0OkFAgAAwDEOB7sFCxbo6NGjtvVjx45p2rRpGjx4sJ566im9++67Wrx4sUuKBAAAQNUcDnaZmZkaNGiQbX3jxo3q27evXn75ZcXHx2v58uV68803XVIkAAAAquZwsDt37pwCAgJs67t27dKwYcNs67fddptycnKcWx0AAAAc5nCwCwgIUFZWliSpqKhIhw8fVlRUlG37+fPn5eHh4fwKAQAA4BCHg92wYcP01FNPac+ePUpISFCjRo3sfmni6NGjat++vUuKBAAAQNUc/h67P/zhD7rnnns0YMAANWnSRK+99po8PT1t21955RXFxMS4pEgAAABUzeFg16pVK+3Zs0f5+flq0qSJ6tevb7f9rbfeUpMmTZxeIAAAABxT7V+e8PX1Lbe9RYsWNS4GAAAA149fngAAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCTcLtitWrVKoaGh8vb2VkREhPbs2VNh382bN2vIkCFq1aqVfHx8FBUVpQ8++MCuT1JSkiwWS5nl0qVLrj4UAAAAp3KrYJeSkqI5c+bo6aefVkZGhvr166fhw4crOzu73P67d+/WkCFDtG3bNqWnp+uOO+7QqFGjlJGRYdfPx8dHVqvVbvH29q6NQwIAAHCaav/yRF1asmSJpk2bpoceekiSlJiYqA8++ECrV6/W4sWLy/RPTEy0W3/22We1detWvfvuu+rZs6et3WKxKDAw0KW1AwAAuJrbzNgVFRUpPT1dMTExdu0xMTHat2+fQ2OUlpbq/PnzZX7+7MKFCwoJCVHr1q01cuTIMjN61yosLFRBQYHdAgAAUNfcJtidPXt
WJSUlCggIsGsPCAhQbm6uQ2O88MILunjxosaPH29r69y5s5KSkvTOO+8oOTlZ3t7euv3223Xq1KkKx1m8eLF8fX1tS5s2ba7voAAAAJzIbYLdVRaLxW7dMIwybeVJTk7WggULlJKSIn9/f1t7ZGSkHnzwQYWHh6tfv35688031bFjR61YsaLCsRISEpSfn29bcnJyrv+AAAAAnMRtPmPn5+en+vXrl5mdO3PmTJlZvGulpKRo2rRpeuuttzR48OBK+9arV0+33XZbpTN2Xl5e8vLycrx4AACAWuA2M3aenp6KiIhQamqqXXtqaqqio6Mr3C85OVlTpkzRX/7yF911111VPo5hGMrMzFRQUFCNawYAAKhNbjNjJ0nx8fGKjY1V7969FRUVpbVr1yo7O1szZsyQdOUt0tOnT2v9+vWSroS6SZMmadmyZYqMjLTN9jVs2FC+vr6SpIULFyoyMlIdOnRQQUGBli9frszMTL344ot1c5AAAADXya2C3YQJE/Tdd99p0aJFslqtCgsL07Zt2xQSEiJJslqtdt9p99JLL+ny5ct6+OGH9fDDD9vaJ0+erKSkJElSXl6epk+frtzcXPn6+qpnz57avXu3+vTpU6vHBgAAUFNuFewkaebMmZo5c2a5266GtavS0tKqHG/p0qVaunSpEyoDAACoW27zGTsAAABUjmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJtwt2q1atUmhoqLy9vRUREaE9e/ZU2n/Xrl2KiIiQt7e3fvnLX2rNmjVl+mzatEldu3aVl5eXunbtqi1btriqfAAAAJdxq2CXkpKiOXPm6Omnn1ZGRob69eun4cOHKzs7u9z+WVlZGjFihPr166eMjAz97ne/06xZs7Rp0yZbn/3792vChAmKjY3VkSNHFBsbq/Hjx+vgwYO1dVgAAABO4VbBbsmSJZo2bZoeeughdenSRYmJiWrTpo1Wr15dbv81a9aobdu2SkxMVJcuXfTQQw9p6tSpev755219EhMTNWTIECUkJKhz585KSEjQoEGDlJiYWEtHBQAA4BxuE+yKioqUnp6umJgYu/aYmBjt27ev3H32799fpv/QoUN16NAhFRcXV9qnojElqbCwUAUFBXYLAABAXXObYHf27FmVlJQoICDArj0gIEC5ubnl7pObm1tu/8uXL+vs2bOV9qloTElavHixfH19bUubNm2u55AAAACcym2C3VUWi8Vu3TCMMm1V9b+2vbpjJiQkKD8/37bk5OQ4XD8AAICrNKjrAhzl5+en+vXrl5lJO3PmTJkZt6sCAwPL7d+gQQO1bNmy0j4VjSlJXl5e8vLyup7DAAAAcBm3mbHz9PRURESEUlNT7dpTU1MVHR1d7j5RUVFl+n/44Yfq3bu3PDw8Ku1T0ZgAAAA3KreZsZOk+Ph4xcbGqnfv3oqKitLatWuVnZ2tGTNmSLryFunp06e1fv16SdKMGTO0cuVKxcfHKy4uTvv379e6deuUnJxsG3P27Nnq37+/nnvuOY0ZM0Zbt27Vjh07tHfv3jo5RgAAgOvlVsFuwoQJ+u6777Ro0SJZrVaFhYVp27ZtCgkJkSRZrVa777QLDQ3Vtm3b9Nvf/lYvvviigoODtXz5ct177722PtHR0dq4caOeeeYZzZs3T+3bt1dKSor69u1b68cHAABQE24V7CRp5syZmjlzZrnbkpKSyrQNGDB
Ahw8frnTMsWPHauzYsc4oDwAAoM64zWfsAAAAUDmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJNwm2J07d06xsbHy9fWVr6+vYmNjlZeXV2H/4uJiPfnkk+revbsaN26s4OBgTZo0SV9//bVdv4EDB8pisdgtEydOdPHRAAAAOJ/bBLv7779fmZmZ2r59u7Zv367MzEzFxsZW2P+HH37Q4cOHNW/ePB0+fFibN2/Wv/71L40ePbpM37i4OFmtVtvy0ksvufJQAAAAXKJBXRfgiBMnTmj79u06cOCA+vbtK0l6+eWXFRUVpZMnT6pTp05l9vH19VVqaqpd24oVK9SnTx9lZ2erbdu2tvZGjRopMDDQtQcBAADgYm4xY7d//375+vraQp0kRUZGytfXV/v27XN4nPz8fFksFjVr1syu/Y033pCfn5+6deumuXPn6vz5884qHQAAoNa4xYxdbm6u/P39y7T7+/srNzfXoTEuXbqkp556Svfff798fHxs7Q888IBCQ0MVGBio48ePKyEhQUeOHCkz2/dzhYWFKiwstK0XFBRU42gAAABco05n7BYsWFDmxoVrl0OHDkmSLBZLmf0Nwyi3/VrFxcWaOHGiSktLtWrVKrttcXFxGjx4sMLCwjRx4kT99a9/1Y4dO3T48OEKx1u8eLHtJg5fX1+1adOmmkcOAADgfHU6Y/fII49UeQdqu3btdPToUX3zzTdltn377bcKCAiodP/i4mKNHz9eWVlZ+vvf/243W1eeXr16ycPDQ6dOnVKvXr3K7ZOQkKD4+HjbekFBAeEOAADUuToNdn5+fvLz86uyX1RUlPLz8/Xxxx+rT58+kqSDBw8qPz9f0dHRFe53NdSdOnVKO3fuVMuWLat8rE8//VTFxcUKCgqqsI+Xl5e8vLyqHAsAAKA2ucXNE126dNGwYcMUFxenAwcO6MCBA4qLi9PIkSPt7ojt3LmztmzZIkm6fPmyxo4dq0OHDumNN95QSUmJcnNzlZubq6KiIknSv//9by1atEiHDh3SF198oW3btmncuHHq2bOnbr/99jo5VgAAgOvlFsFOunLnavfu3RUTE6OYmBjdeuutev311+36nDx5Uvn5+ZKkr776Su+8846++uor9ejRQ0FBQbbl6p20np6e+uijjzR06FB16tRJs2bNUkxMjHbs2KH69evX+jECAADUhFvcFStJLVq00IYNGyrtYxiG7d/t2rWzWy9PmzZttGvXLqfUBwAAUNfcZsYOAAAAlSPYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMgmAHAABgEgQ7AAAAkyDYAQAAmATBDgAAwCQIdgAAACZBsAMAADAJgh0AAIBJEOwAAABMwm2C3blz5xQbGytfX1/5+voqNjZWeXl5le4zZcoUWSwWuyUyMtKuT2FhoR599FH5+fmpcePGGj16tL766isXHgkAAIBruE2wu//++5WZmant27dr+/btyszMVGxsbJX
7DRs2TFar1bZs27bNbvucOXO0ZcsWbdy4UXv37tWFCxc0cuRIlZSUuOpQAAAAXKJBXRfgiBMnTmj79u06cOCA+vbtK0l6+eWXFRUVpZMnT6pTp04V7uvl5aXAwMByt+Xn52vdunV6/fXXNXjwYEnShg0b1KZNG+3YsUNDhw51/sEAAAC4iFvM2O3fv1++vr62UCdJkZGR8vX11b59+yrdNy0tTf7+/urYsaPi4uJ05swZ27b09HQVFxcrJibG1hYcHKywsLAqxwUAALjRuMWMXW5urvz9/cu0+/v7Kzc3t8L9hg8frnHjxikkJERZWVmaN2+e7rzzTqWnp8vLy0u5ubny9PRU8+bN7fYLCAiodNzCwkIVFhba1vPz8yVJl1UsGdU9uuorOF9q+/dlo7jW969oLGeNWVcqOi9X28tru7a9po/ljqpz3mr6GFU9B7V5Xl31WFW9psx67bni+ayNY3Xm30BnvmZuRK76b09VY13PeS3vea3uGI4+hiNjXtaVPobhQMgw6tD8+fMNXYlCFS6ffPKJ8cc//tHo2LFjmf1vueUWY/HixQ4/3tdff214eHgYmzZtMgzDMN544w3D09OzTL/Bgwcbv/71r2tUNwsLCwsLCwuLM5ecnJwqs06dztg98sgjmjhxYqV92rVrp6NHj+qbb74ps+3bb79VQECAw48XFBSkkJAQnTp1SpIUGBiooqIinTt3zm7W7syZM4qOjq5wnISEBMXHx9vWS0tL9eWXX6pHjx7KycmRj4+PwzWZVUFBgdq0acP5EOfi5zgXP+Fc2ON8/IRz8RPOxRWGYej8+fMKDg6usm+dBjs/Pz/5+flV2S8qKkr5+fn6+OOP1adPH0nSwYMHlZ+fX2kAu9Z3332nnJwcBQUFSZIiIiLk4eGh1NRUjR8/XpJktVp1/Phx/fnPf65wHC8vL3l5edm11at35eOKPj4+N/XFdy3Ox084Fz/hXPyEc2GP8/ETzsVPOBeSr6+vQ/3c4uaJLl26aNiwYYqLi9OBAwd04MABxcXFaeTIkXZ3xHbu3FlbtmyRJF24cEFz587V/v379cUXXygtLU2jRo2Sn5+f/vu//1vSlZM0bdo0PfbYY/roo4+UkZGhBx98UN27d7fdJQsAAOAu3OLmCUl64403NGvWLNsdrKNHj9bKlSvt+pw8edJ2I0P9+vV17NgxrV+/Xnl5eQoKCtIdd9yhlJQUNW3a1LbP0qVL1aBBA40fP14//vijBg0apKSkJNWvX7/2Dg4AAMAJ3CbYtWjRQhs2bKi0j/Gzu0UaNmyoDz74oMpxvb29tWLFCq1YsaJG9Xl5eWn+/Pll3qK9WXE+fsK5+Ann4iecC3ucj59wLn7Cuag+i2E4cu8sAAAAbnRu8Rk7AAAAVI1gBwAAYBIEOwAAAJMg2DnJqlWrFBoaKm9vb0VERGjPnj11XZLLLV68WLfddpuaNm0qf39/3X333Tp58qRdnylTpshisdgtkZGRdVSx6yxYsKDMcQYGBtq2G4ahBQsWKDg4WA0bNtTAgQP16aef1mHFrtOuXbsy58Jisejhhx+WZP5rYvfu3Ro1apSCg4NlsVj09ttv22135FooLCzUo48+Kj8/PzVu3FijR4/WV199VYtH4RyVnYvi4mI9+eST6t69uxo3bqzg4GBNmjRJX3/9td0YAwcOLHO9VPXF9jeiqq4LR14XN8N1Iancvx8Wi0X/+7//a+tjluvCFQh2TpCSkqI5c+bo6aefVkZGhvr166fhw4crOzu7rktzqV27dunhhx/WgQMHlJqaqsuXLysmJkYXL1606zds2DBZrVbbsm3btjqq2LW6detmd5zHjh2zbfvzn/+sJUuWaOXKlfrkk08UGBioIUOG6Pz583VYsWt88sknduchNTVVkjRu3DhbHzNfExcvXlR4eHiZr2O6ypFrYc6cOdqyZYs2btyovXv36sKFCxo5cqRKSkpq6zCcorJz8cMPP+jw4cOaN2+eDh8+rM2
bN+tf//qXRo8eXaZvXFyc3fXy0ksv1Ub5TlXVdSFV/bq4Ga4LSXbnwGq16pVXXpHFYtG9995r188M14VLOPxDq6hQnz59jBkzZti1de7c2XjqqafqqKK6cebMGUOSsWvXLlvb5MmTjTFjxtRdUbVk/vz5Rnh4eLnbSktLjcDAQONPf/qTre3SpUuGr6+vsWbNmlqqsO7Mnj3baN++vVFaWmoYxs1zTRiGYUgytmzZYlt35FrIy8szPDw8jI0bN9r6nD592qhXr56xffv2Wqvd2a49F+X5+OOPDUnGl19+aWsbMGCAMXv2bNcWV8vKOxdVvS5u5utizJgxxp133mnXZsbrwlmYsauhoqIipaen2744+aqYmBjt27evjqqqG1e/HLpFixZ27WlpafL391fHjh0VFxenM2fO1EV5Lnfq1CkFBwcrNDRUEydO1H/+8x9JUlZWlnJzc+2uES8vLw0YMMD010hRUZE2bNigqVOnymKx2NpvlmviWo5cC+np6SouLrbrExwcrLCwMNNfL/n5+bJYLGrWrJld+xtvvCE/Pz9169ZNc+fONeVMt1T56+JmvS6++eYbvffee5o2bVqZbTfLdVFdbvMFxTeqs2fPqqSkRAEBAXbtAQEBys3NraOqap9hGIqPj9d//dd/KSwszNY+fPhwjRs3TiEhIcrKytK8efN05513Kj093VRfONm3b1+tX79eHTt21DfffKM//OEPio6O1qeffmq7Dsq7Rr788su6KLfWvP3228rLy9OUKVNsbTfLNVEeR66F3NxceXp6qnnz5mX6mPlvyqVLl/TUU0/p/vvvt/tN0AceeEChoaEKDAzU8ePHlZCQoCNHjtje4jeLql4XN+t18dprr6lp06a655577NpvluviehDsnOTnsxHSlaBzbZuZPfLIIzp69Kj27t1r1z5hwgTbv8PCwtS7d2+FhITovffeK/NCdWfDhw+3/bt79+6KiopS+/bt9dprr9k+AH0zXiPr1q3T8OHDFRwcbGu7Wa6JylzPtWDm66W4uFgTJ05UaWmpVq1aZbctLi7O9u+wsDB16NBBvXv31uHDh9WrV6/aLtVlrvd1YebrQpJeeeUVPfDAA/L29rZrv1mui+vBW7E15Ofnp/r165f5P6YzZ86U+b9ys3r00Uf1zjvvaOfOnWrdunWlfYOCghQSEqJTp07VUnV1o3HjxurevbtOnTpluzv2ZrtGvvzyS+3YsUMPPfRQpf1ulmtCkkPXQmBgoIqKinTu3LkK+5hJcXGxxo8fr6ysLKWmptrN1pWnV69e8vDwMP31cu3r4ma7LiRpz549OnnyZJV/Q6Sb57pwBMGuhjw9PRUREVFm+jc1NVXR0dF1VFXtMAxDjzzyiDZv3qy///3vCg0NrXKf7777Tjk5OQoKCqqFCutOYWGhTpw4oaCgINvbBT+/RoqKirRr1y5TXyOvvvqq/P39ddddd1Xa72a5JiQ5dC1ERETIw8PDro/VatXx48dNd71cDXWnTp3Sjh071LJlyyr3+fTTT1VcXGz66+Xa18XNdF1ctW7dOkVERCg8PLzKvjfLdeGQOrxxwzQ2btxoeHh4GOvWrTM+++wzY86cOUbjxo2NL774oq5Lc6nf/OY3hq+vr5GWlmZYrVbb8sMPPxiGYRjnz583HnvsMWPfvn1GVlaWsXPnTiMqKsr4xS9+YRQUFNRx9c712GOPGWlpacZ//vMf48CBA8bIkSONpk2b2q6BP/3pT4avr6+xefNm49ixY8Z9991nBAUFme48XFVSUmK0bdvWePLJJ+3ab4Zr4vz580ZGRoaRkZFhSDKWLFliZGRk2O70dORamDFjhtG6dWtjx44dxuHDh40777zTCA8PNy5fvlxXh3VdKjsXxcXFxujRo43WrVsbmZmZdn9DCgsLDcMwjM8//9xYuHCh8cknnxhZWVnGe++9Z3Tu3Nno2bOnqc6Fo6+Lm+G6uCo/P99o1KiRsXr16jL7m+m6cAWCnZO
8+OKLRkhIiOHp6Wn06tXL7is/zEpSucurr75qGIZh/PDDD0ZMTIzRqlUrw8PDw2jbtq0xefJkIzs7u24Ld4EJEyYYQUFBhoeHhxEcHGzcc889xqeffmrbXlpaasyfP98IDAw0vLy8jP79+xvHjh2rw4pd64MPPjAkGSdPnrRrvxmuiZ07d5b7upg8ebJhGI5dCz/++KPxyCOPGC1atDAaNmxojBw50i3PUWXnIisrq8K/ITt37jQMwzCys7ON/v37Gy1atDA8PT2N9u3bG7NmzTK+++67uj2w61DZuXD0dXEzXBdXvfTSS0bDhg2NvLy8Mvub6bpwBYthGIZLpwQBAABQK/iMHQAAgEkQ7AAAAEyCYAcAAGASBDsAAACTINgBAACYBMEOAADAJAh2AAAAJkGwAwAAMAmCHQDTWLBggXr06FFnjz9v3jxNnz7dtj5w4EDNmTOnzupxlblz52rWrFl1XQaAcvDLEwDcgsViqXT75MmTtXLlShUWFjr0Y/LO9s0336hDhw46evSo2rVrJ0n6/vvv5eHhoaZNm7rkMadMmaK8vDy9/fbbLhm/ImfOnFH79u119OhRhYaG1upjA6hcg7ouAAAcYbVabf9OSUnR73//e508edLW1rBhQzVp0kRNmjSpi/K0bt06RUVF2UKdJLVo0aJOanE1f39/xcTEaM2aNXruuefquhwAP8NbsQDcQmBgoG3x9fWVxWIp03btW7FTpkzR3XffrWeffVYBAQFq1qyZFi5cqMuXL+vxxx9XixYt1Lp1a73yyit2j3X69GlNmDBBzZs3V8uWLTVmzBh98cUXlda3ceNGjR492q7t2rdi27Vrp2effVZTp05V06ZN1bZtW61du7bScf/617+qe/fuatiwoVq2bKnBgwfr4sWLWrBggV577TVt3bpVFotFFotFaWlpDtV/9bwsXLhQ/v7+8vHx0a9//WsVFRVV+bhXjR49WsnJyZXWDqD2EewAmNrf//53ff3119q9e7eWLFmiBQsWaOTIkWrevLkOHjyoGTNmaMaMGcrJyZEk/fDDD7rjjjvUpEkT7d69W3v37lWTJk00bNgwu+Dzc+fOndPx48fVu3fvKut54YUX1Lt3b2VkZGjmzJn6zW9+o3/+85/l9rVarbrvvvs0depUnThxQmlpabrnnntkGIbmzp2r8ePHa9iwYbJarbJarYqOjna4/o8++kgnTpzQzp07lZycrC1btmjhwoVVPu5Vffr0UU5Ojr788kuHnwsAtcAAADfz6quvGr6+vmXa58+fb4SHh9vWJ0+ebISEhBglJSW2tk6dOhn9+vWzrV++fNlo3LixkZycbBiGYaxbt87o1KmTUVpaautTWFhoNGzY0Pjggw/KrScjI8OQZGRnZ9u1DxgwwJg9e7ZtPSQkxHjwwQdt66WlpYa/v7+xevXqcsdNT083JBlffPFFudsnT55sjBkzxq7NkfonT55stGjRwrh48aKtz+rVq40mTZoYJSUlVT6uYRhGfn6+IclIS0ursA+A2sdn7ACYWrdu3VSv3k9vTgQEBCgsLMy2Xr9+fbVs2VJnzpyRJKWnp+vzzz8vc8PDpUuX9O9//7vcx/jxxx8lSd7e3lXWc+utt9r+ffXt5KuPfa3w8HANGjRI3bt319ChQxUTE6OxY8eqefPmFY7vaP3h4eFq1KiRbT0qKkoXLlxQTk6OQ4/bsGFDSVdmOAHcOAh2AEzNw8PDbt1isZTbVlpaKkkqLS1VRESE3njjjTJjtWrVqtzH8PPzk3TlLdmK+lRWz9XHvlb9+vWVmpqqffv26cMPP9SKFSv09NNP6+DBgxXejXo99V9bjyOP+/333zs8JoDaw2fsAOBnevXqpVOnTsnf31+33HKL3eLr61vuPu3bt5ePj48+++wzp9djsVh0++23a+HChcrIyJCnp6e2bNkiSfL09FRJScl11X/kyBHbTKMkHThwQE2aNFHr1q2rfFxJOn78uDw8PNStWzenHzO
A60ewA4CfeeCBB+Tn56cxY8Zoz549ysrK0q5duzR79mx99dVX5e5Tr149DR48WHv37nVqLQcPHtSzzz6rQ4cOKTs7W5s3b9a3336rLl26SLpyl+3Ro0d18uRJnT17VsXFxQ7XX1RUpGnTpumzzz7T+++/r/nz5+uRRx5RvXr1qnxcSdqzZ4/69etne0sWwI2BYAcAP9OoUSPt3r1bbdu21T333KMuXbpo6tSp+vHHH+Xj41PhftOnT9fGjRsrfFv1evj4+Gj37t0aMWKEOnbsqGeeeUYvvPCChg8fLkmKi4tTp06d1Lt3b7Vq1Ur/+Mc/HK5/0KBB6tChg/r376/x48dr1KhRWrBggUOPK0nJycmKi4tz2rECcA5+eQIAnMAwDEVGRmrOnDm677776rqcStX0Fyvee+89Pf744zp69KgaNOCj2sCNhBk7AHACi8WitWvX6vLly3VdistdvHhRr776KqEOuAHxqgQAJwkPD1d4eHhdl+Fy48ePr+sSAFSAt2IBAABMgrdiAQAATIJgBwAAYBIEOwAAAJMg2AEAAJgEwQ4AAMAkCHYAAAAmQbADAAAwCYIdAACASRDsAAAATOL/AVV3a8s13SWDAAAAAElFTkSuQmCC", 178 | "text/plain": [ 179 | "
" 180 | ] 181 | }, 182 | "metadata": {}, 183 | "output_type": "display_data" 184 | } 185 | ], 186 | "source": [ 187 | "def create_spike_trains(num_time_steps, num_spikes_per_train, delay):\n", 188 | " max_delay_steps = abs(delay)\n", 189 | " S = np.zeros((2, num_time_steps+2*abs(max_delay_steps)))\n", 190 | " S[0, :num_spikes_per_train] = 1\n", 191 | " np.random.shuffle(S[0, :])\n", 192 | " S[1, max_delay_steps+delay:num_time_steps+delay+max_delay_steps] = S[0, max_delay_steps:num_time_steps+max_delay_steps]\n", 193 | " return S[:, max_delay_steps:num_time_steps+max_delay_steps]\n", 194 | "\n", 195 | "plt.imshow(create_spike_trains(200, 30, 10), origin='lower', interpolation='nearest', aspect='auto')\n", 196 | "plt.xlabel('Time (in steps)')\n", 197 | "plt.ylabel('Spike train')\n", 198 | "plt.tight_layout();" 199 | ] 200 | }, 201 | { 202 | "cell_type": "markdown", 203 | "metadata": {}, 204 | "source": [ 205 | "The order sensitive network looks like this:\n", 206 | "\n", 207 | "![Order sensitive network](order-sensitive-network.png)\n", 208 | "\n", 209 | "Each output neuron receives an excitatory input from the input spike train with the same index, and an inhibitory input from the input spike train with the other index.\n", 210 | "\n", 211 | "This means that if spike train 1 is a delayed version of spike train 0, then neuron 0 will first receive excitatory spikes then inhibitory spikes, allowing it to fire. However, neuron 1 will first receive inhibitory spikes and then excitatory spikes, meaning it won't fire. If on the other hand, spike train 0 is a delayed version of spike train 1, the opposite happens. So which neuron (index 0 or 1) fires a spike tells you the relative order of the input spike trains.\n", 212 | "\n", 213 | "### Task 2\n", 214 | "\n", 215 | "1. Implement this network with instantaneous excitatory and inhibitory synapses (cause an instant increase in $v$). Allow different excitatory and inhibitory weights.\n", 216 | "2. 
# vt is the spike threshold (we set it to a large value in the plotting code below to see what's going on)
def simulate_order_network(spikes, tau, we, wi, vt=1, dt=0.1):
    """Exercise template: simulate the two-neuron order-sensitive network.

    Parameters
    ----------
    spikes : ndarray, shape (2, num_time_steps)
        Input spike trains (0/1) for the two input channels.
    tau : float
        Membrane time constant (ms).
    we, wi : float
        Excitatory and inhibitory synaptic weights.
    vt : float
        Spike threshold (set high in the plotting code below so subthreshold
        traces can be inspected without spiking).
    dt : float
        Simulation time step (ms).

    Returns
    -------
    (v_rec, spike_count)
        Recorded membrane traces, shape (2, num_time_steps), and the number
        of output spikes per neuron, shape (2,).

    NOTE: the simulation loop is intentionally left blank for the student to
    fill in; as provided, this returns all-zero traces and counts.
    """
    num_neurons = 2
    num_time_steps = spikes.shape[1]
    # state variables
    v = np.zeros(num_neurons)  # membrane potential per output neuron
    x = np.zeros(num_neurons)  # auxiliary synaptic variable (used once the gap below is filled)
    # record activity
    v_rec = np.zeros((num_neurons, num_time_steps))
    spike_count = np.zeros(num_neurons)
    # run simulation: FILL IN THE GAPS HERE
    return v_rec, spike_count

def plotit(order):
    """Plot membrane traces of both output neurons for a two-spike input.

    `order` selects which input channel (0 or 1) spikes first (at 1 ms); the
    other channel spikes 3 ms later. Draws into one column of an existing
    2x2 subplot grid, column selected by `order`.
    """
    dt = 0.1
    # Input data: one spike per channel, at time steps 10 and 40 (1 ms and 4 ms)
    spikes = np.zeros((2, 100))
    spikes[order, 10] = spikes[1-order, 40] = 1
    # Model with threshold set to 10 so we can see traces without spikes
    v, spike_count = simulate_order_network(spikes, tau=10, we=1.2, wi=1, vt=10, dt=dt)
    # Plot
    T = np.arange(v.shape[1])*dt  # time axis in ms
    for i in range(2):
        plt.subplot(2, 2, 2*i+1+order)  # row = neuron index, column = `order`
        plt.plot(T, v[i, :])
        plt.axhline(1, ls='--', c='g')  # nominal threshold for reference
        plt.ylim(-1.5, 1.5)
        if i:
            plt.xlabel('Time (ms)')
        else:
            plt.title(f'Input {order} before input {1-order}')
"cell_type": "code", 268 | "execution_count": null, 269 | "metadata": {}, 270 | "outputs": [], 271 | "source": [ 272 | "def order_spike_counts_simulation(d):\n", 273 | " # Input data\n", 274 | " num_time_steps, dt = 5000, 0.1 # 500 ms\n", 275 | " num_spikes = 50\n", 276 | " delay = int(d/dt)\n", 277 | " spikes = create_spike_trains(num_time_steps, num_spikes, delay)\n", 278 | " # Simulation\n", 279 | " v, spike_count = simulate_order_network(spikes, tau=10, we=1.2, wi=1, vt=1, dt=dt)\n", 280 | " return spike_count\n", 281 | "\n", 282 | "D = np.arange(-10, 10+1)\n", 283 | "C = [order_spike_counts_simulation(d) for d in D]\n", 284 | "plt.plot(D, C, label=('Output 0', 'Output 1'))\n", 285 | "plt.legend(loc='best')\n", 286 | "plt.xlabel('Relative delay between input spike trains (ms)')\n", 287 | "plt.ylabel('Spike count')\n", 288 | "plt.tight_layout();\n" 289 | ] 290 | }, 291 | { 292 | "cell_type": "markdown", 293 | "metadata": {}, 294 | "source": [ 295 | "## Part 3 - Tuning the order-sensitive network with slow inhibition\n", 296 | "\n", 297 | "Do the same as in the previous part, but this time make the inhibitory spikes act slowly by adding a biexponential synapse as in part 1. Keep the excitatory spikes instantaneous (they directly increase $v$).\n", 298 | "\n", 299 | "What happens for different inhibitory time constants?" 300 | ] 301 | }, 302 | { 303 | "cell_type": "markdown", 304 | "metadata": {}, 305 | "source": [ 306 | "## Part 4 - Cross-correlation network (optional extra)\n", 307 | "\n", 308 | "Can you use the ideas here to build a network with more than two output that can accurately and robustly estimate the time delay between two spike trains? 
Robust means that it should have properties like:\n", 309 | "\n", 310 | "* It should be invariant to the input spike count.\n", 311 | "* It should work if there are additional noise spikes injected.\n", 312 | "* It should work if some input spikes are randomly deleted.\n", 313 | "\n", 314 | "What parameters determine performance and robustness?" 315 | ] 316 | } 317 | ], 318 | "metadata": { 319 | "kernelspec": { 320 | "display_name": "neuro4ml", 321 | "language": "python", 322 | "name": "python3" 323 | }, 324 | "language_info": { 325 | "codemirror_mode": { 326 | "name": "ipython", 327 | "version": 3 328 | }, 329 | "file_extension": ".py", 330 | "mimetype": "text/x-python", 331 | "name": "python", 332 | "nbconvert_exporter": "python", 333 | "pygments_lexer": "ipython3", 334 | "version": "3.9.18" 335 | } 336 | }, 337 | "nbformat": 4, 338 | "nbformat_minor": 2 339 | } 340 | -------------------------------------------------------------------------------- /w3-brain-structure/w3-exercise.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Neuro4ML - Week 3 - Brain structure\n", 8 | "\n", 9 | "This week we're going to analyse some real data, which come from larval zebrafish. \n", 10 | "\n", 11 | "Larval zebrafish are an increasingly used model in neuroscience for three main reasons:\n", 12 | "1. They develop rapidly ex utero, from a single cell to a free-swimming larva in just four days, meaning that their development can be easily studied.\n", 13 | "2. They display a variety of behaviours including a distinct locomotor repertoire, a diurnal pattern of activity and by three weeks of age, social preference.\n", 14 | "3. They have a vertebrate brain plan and at larval stages are small (~4mm body length) and translucent, enabling cellular-resolution whole-brain imaging in both fixed tissue and live animals. 
\n", 15 | "\n", 16 | "In this notebook we're going to look at the 3d morphology of ~4,000 neurons in the zebrafish brain. In brief these morphologies were obtained by: \n", 17 | "* Fluorescently labeling 1-2 neurons per animal.\n", 18 | "* Imaging each animal and tracing each neuron.\n", 19 | "* Aligning these images in 3d so that all neurons are in a common reference frame. \n", 20 | "\n", 21 | "You can learn more about the data in [Kunst et al., 2019](https://doi.org/10.1016/j.neuron.2019.04.034) or on this [website](https://mapzebrain.org/home). \n", 22 | "\n", 23 | "There are four parts to the exercise: loading the data, looking at the data, estimating a connectivity matrix and some open exploration. " 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "import numpy as np\n", 33 | "import matplotlib.pyplot as plt\n", 34 | "import os\n", 35 | "\n", 36 | "!pip install neurom\n", 37 | "import neurom as nm\n", 38 | "from neurom.view.matplotlib_impl import plot_morph\n", 39 | "\n", 40 | "def figure(): \n", 41 | " fig, ax = plt.subplots(figsize=[6, 8], sharex=True, sharey=True)\n", 42 | " ax.set_aspect('equal')\n", 43 | " ax.axis(\"off\")\n", 44 | " plt.gca().invert_yaxis()\n", 45 | "\n", 46 | " return fig, ax" 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "### Part 0 - Loading the data \n", 54 | "\n", 55 | "Before getting started we need to download the data. \n", 56 | "\n", 57 | "How to do this depends on how you're working: \n", 58 | "* **Locally**: download the data [here](https://api.mapzebrain.org/media/downloads/Neurons/mapZebrain__neurons.zip), unzip the folder and provide a path to the Aligned/Original folder in the cell below - something like './mapZebrain__neurons/Aligned/Original'. \n", 59 | "* **Colab**: run the three commands in the cell below to fetch, unzip and provide a path to the data." 
60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": null, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "# Download the data \n", 69 | "\n", 70 | "# Locally: \n", 71 | "# folder_path = \"\" # provide a path to the Aligned/Original folder\n", 72 | "\n", 73 | "# Colab: \n", 74 | "# !wget https://api.mapzebrain.org/media/downloads/Neurons/mapZebrain__neurons.zip\n", 75 | "# !unzip /content/mapZebrain__neurons.zip \n", 76 | "# folder_path = \"/content/Aligned/Original\"" 77 | ] 78 | }, 79 | { 80 | "cell_type": "markdown", 81 | "metadata": {}, 82 | "source": [ 83 | "Next we need to understand the data and tools we'll be working with:\n", 84 | "* **Data**: Each neuron's morphology is stored as an .SWC file. Try opening one in a text editor, and then understanding the format using this [guide](http://www.neuronland.org/NLMorphologyConverter/MorphologyFormats/SWC/Spec.html).\n", 85 | "* **Tools**: We're going to use a library called [neurom](https://neurom.readthedocs.io/en/stable/quickstart.html) to load these .SWC files.\n", 86 | "\n", 87 | "Now we'll make a list of the neurons which we can successfully load with neurom. \n", 88 | "\n", 89 | "If this or later parts of the notebook run too slowly (more than a few minutes for a single cell) try working with a subset of the data by sampling from this list. 
" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": null, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "# Filtering\n", 99 | "neuron_paths = []\n", 100 | "for file in os.listdir(folder_path): \n", 101 | " neuron_path = folder_path + '/' + file\n", 102 | " \n", 103 | " try:\n", 104 | " m = nm.load_morphology(neuron_path) # try to load the morphology\n", 105 | " neuron_paths.append(neuron_path) # if this works appened the path to neuron_paths\n", 106 | " except: \n", 107 | " pass # otherwise just skip this neuron \n", 108 | "\n", 109 | "print(\"Loaded \" + str(len(neuron_paths)) + \" of \" + str(len(os.listdir(folder_path))) + \" neurons.\")" 110 | ] 111 | }, 112 | { 113 | "cell_type": "markdown", 114 | "metadata": {}, 115 | "source": [ 116 | "### Part 1 - Looking at the data \n", 117 | "\n", 118 | "Now we can load the morphologies, let's try plotting some neurons in 2d. For random neurons, the code below plots:\n", 119 | "* It's morphology as a black line.\n", 120 | "* It's points as grey circles.\n", 121 | "* The location of it's soma as a green circle.\n", 122 | "\n", 123 | "Note how we access these properties from the loaded morphology. \n", 124 | "\n", 125 | "When working with real data, especially large datasets, it's important to get a \"feel\" for the data, so try looking at as many neurons as possible - how similar are they?" 
# Plotting single neurons in 2d
n = np.random.randint(low=0, high=len(neuron_paths))  # sample a random neuron
m = nm.load_morphology(neuron_paths[n])  # load its morphology

fig, ax = figure()
# Morphology drawn as a black line in the xy plane
# (diameter_scale=None: uniform line width, ignoring neurite diameters).
plot_morph(m,ax=ax, soma_outline=0, plane='xy', diameter_scale=None, linewidth=3, color='k', alpha=1.0)
# All traced points as grey circles.
plt.scatter(m.points[:,0], m.points[:,1], color='xkcd:grey')
# Soma centre as a larger green circle.
plt.scatter(m.soma.center[0], m.soma.center[1], s=90, color='g')
170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": null, 175 | "metadata": {}, 176 | "outputs": [], 177 | "source": [ 178 | "# Plot all of the neurons together\n", 179 | "fig, ax = figure()" 180 | ] 181 | }, 182 | { 183 | "cell_type": "markdown", 184 | "metadata": {}, 185 | "source": [ 186 | "### Part 2 - Estimating a connectivity matrix \n", 187 | "\n", 188 | "In week 3 of the course we learnt about [connectomes](https://youtu.be/LANmSbhbdBA?si=HKDoBD7IbS3o4Uhv) - diagrams describing how every neuron connects to every other neuron in an animal. \n", 189 | "\n", 190 | "To create a connectome we need higher resolution data (from electron microscopy), but from the data we have we can estimate how strongly *areas* of the brain connect to each other.\n", 191 | "\n", 192 | "Ideally we would do this by determining where each neuron's dendrites and axon were, dividing the brain into anatomical regions (optic tectum, cerebellum etc), and then working out where the dendrites in each region receive their inputs from. \n", 193 | "\n", 194 | "Though, as a *very rough estimate* we can:\n", 195 | "1. Treat each neuron as having a single input and output point; respectively it's soma and the furthest point from it's soma.\n", 196 | "2. Divide the brain into a 3d grid. \n", 197 | "3. Assign each neuron's input and output point to a pair of grid cells (which we'll term voxels). \n", 198 | "\n", 199 | "Let's work through each of these three steps." 200 | ] 201 | }, 202 | { 203 | "cell_type": "markdown", 204 | "metadata": {}, 205 | "source": [ 206 | "#### Part 2.1\n", 207 | "Write code to define the output point for a single neuron. Check that it works by plotting some single neurons in either 2 or 3d and marking their output point with a coloured circle." 
208 | ] 209 | }, 210 | { 211 | "cell_type": "code", 212 | "execution_count": null, 213 | "metadata": {}, 214 | "outputs": [], 215 | "source": [ 216 | "# Define the output point for a single neuron\n", 217 | "n = np.random.randint(low=0, high=len(neuron_paths))\n", 218 | "m = nm.load_morphology(neuron_paths[n])\n", 219 | "\n", 220 | "# Check that this seems to work by plotting some data \n", 221 | "fig, ax = figure()\n" 222 | ] 223 | }, 224 | { 225 | "cell_type": "markdown", 226 | "metadata": {}, 227 | "source": [ 228 | "Now load each neuron in turn and extract it's input (soma) and output (furthest point from soma) locations. \n", 229 | "\n", 230 | "Real datasets can contain small errors, and here you may find that some neuron's lack any points. \n", 231 | "\n", 232 | "In that case skip over those neurons as in Part 0, though make sure that you end up with an equal number of inputs and outputs by using an [assert statement](https://www.w3schools.com/python/ref_keyword_assert.asp) at the end of your code." 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": null, 238 | "metadata": {}, 239 | "outputs": [], 240 | "source": [ 241 | "# Define each neurons input and output point\n", 242 | "inputs, outputs = [], []\n", 243 | "\n", 244 | "# Use an assert statement at the end of your code " 245 | ] 246 | }, 247 | { 248 | "cell_type": "markdown", 249 | "metadata": {}, 250 | "source": [ 251 | "Now try plotting all of the neurons (on one axis) as single lines connecting their input and output points. \n", 252 | "\n", 253 | "If you've done this correctly it should resemble the last figure in Part 1 surprisingly well. 
" 254 | ] 255 | }, 256 | { 257 | "cell_type": "code", 258 | "execution_count": null, 259 | "metadata": {}, 260 | "outputs": [], 261 | "source": [ 262 | "# Plot all of the neurons together in line form (input-output point)\n", 263 | "fig, ax = figure()" 264 | ] 265 | }, 266 | { 267 | "cell_type": "markdown", 268 | "metadata": {}, 269 | "source": [ 270 | "#### Part 2.2\n", 271 | "\n", 272 | "Now we need to divide the brain into a 3d grid.\n", 273 | "\n", 274 | "Too coarse a grid may be uninformative, but too fine a grid may be overly sparse.\n", 275 | "\n", 276 | "Try to check if your grid seems reasonable by overlaying it on some whole-brain plots. " 277 | ] 278 | }, 279 | { 280 | "cell_type": "code", 281 | "execution_count": null, 282 | "metadata": {}, 283 | "outputs": [], 284 | "source": [ 285 | "# Divide the brain into a 3d grid\n", 286 | "\n", 287 | "# Try to check if your grid seems reasonable (with some plots)" 288 | ] 289 | }, 290 | { 291 | "cell_type": "markdown", 292 | "metadata": {}, 293 | "source": [ 294 | "#### Part 2.3 \n", 295 | "\n", 296 | "Finally we need to assign each neuron's input and output point to a pair of voxels. \n", 297 | "\n", 298 | "Create a voxel (input) x voxel (output) connectivity matrix where each cell reports the number of neuron's which connect that pair. \n", 299 | "\n", 300 | "Try to visualise this matrix as a heatmap. 
" 301 | ] 302 | }, 303 | { 304 | "cell_type": "code", 305 | "execution_count": null, 306 | "metadata": {}, 307 | "outputs": [], 308 | "source": [ 309 | "# Create a connectivity matrix \n", 310 | "\n", 311 | "# Visualise this matrix as a heatmap " 312 | ] 313 | }, 314 | { 315 | "cell_type": "markdown", 316 | "metadata": {}, 317 | "source": [ 318 | "### Part 3 - Open exploration \n", 319 | "\n", 320 | "Now that you know how to work with this data, you should try to explore something that interests you!\n", 321 | "\n", 322 | "Here are a few ideas to get you started:\n", 323 | "* **Improving our approach** - as we highlighted above, our connectivity matrix is at best a very rough estimate as we reduce neurons to lines and brain regions to cells in an arbitrary grid. How could you improve on this? The original [paper](https://doi.org/10.1016/j.neuron.2019.04.034) may provide some ideas. \n", 324 | "* **Bilateral symmetry** - in biology many structures are symmetrical across the midline. How similar are the left and right sides of the larval zebrafish brain? Are there any areas which are asymmetrical in their connectivity? \n", 325 | "* **Information flow** - this [paper](https://www.science.org/doi/10.1126/science.add9330#sec-4) uses a signal cascade algorithm to estimate how information flows through the *Drosophila* larva connectome. Try implementing their approach and see if you can discover any patterns in how signals flow through the larval zebrafish brain. 
\n" 326 | ] 327 | } 328 | ], 329 | "metadata": { 330 | "kernelspec": { 331 | "display_name": "ZF_Anatomy", 332 | "language": "python", 333 | "name": "python3" 334 | }, 335 | "language_info": { 336 | "codemirror_mode": { 337 | "name": "ipython", 338 | "version": 3 339 | }, 340 | "file_extension": ".py", 341 | "mimetype": "text/x-python", 342 | "name": "python", 343 | "nbconvert_exporter": "python", 344 | "pygments_lexer": "ipython3", 345 | "version": "3.9.12" 346 | } 347 | }, 348 | "nbformat": 4, 349 | "nbformat_minor": 2 350 | } 351 | -------------------------------------------------------------------------------- /w4-learning/w4-learning-exercise.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Week 4 - Learning rules - Exercises\n", 8 | "\n", 9 | "This is an outline of the exercise only, leaving you a lot of details to fill in on your own. Later I will provide a more detailed version with some hints.\n", 10 | "\n", 11 | "## Task 1 - Implement STDP\n", 12 | "\n", 13 | "Implement a simulator including STDP using the efficient method from the STDP video.\n", 14 | "\n", 15 | "## Task 2 - Competition and latency\n", 16 | "\n", 17 | "Use your simulator to reproduce the competition and latency reduction results from this paper:\n", 18 | "\n", 19 | "[Song et al. 
(2000) \"Competitive Hebbian learning through spike-timing-dependent synaptic plasticity\"](https://doi.org/10.1038/78829)\n", 20 | "\n", 21 | "## Task 3 - Correlations\n", 22 | "\n", 23 | "Use your simulator to reproduce the correlated groups results from this paper:\n", 24 | "\n", 25 | "[Song and Abbott (2001) \"Cortical Development and Remapping through Spike Timing-Dependent Plasticity\"](https://doi.org/10.1016/S0896-6273(01)00451-2)\n", 26 | "\n", 27 | "## Task 4 - Sequences (optional)\n", 28 | "\n", 29 | "Use your simulator to reproduce the sequence learning results from this paper:\n", 30 | "\n", 31 | "[Masquelier et al. (2008) \"Spike Timing Dependent Plasticity Finds the Start of Repeating Patterns in Continuous Spike Trains\"](https://doi.org/10.1371/journal.pone.0001377)\n", 32 | "\n", 33 | "This might be quite challenging because this paper uses a different version of the STDP learning rule. For more details on how to reproduce this result, see:\n", 34 | "\n", 35 | "[Hathway and Goodman (2018) \"[Re] Spike Timing Dependent Plasticity Finds the Start of Repeating Patterns in Continuous Spike Trains\"](http://neural-reckoning.org/pub_re_stdp_repeating_patterns.html)\n", 36 | "\n", 37 | "Can you extend the results of these two papers to investigate how robust the results are to different versions of STDP, different parameters, etc.?" 38 | ] 39 | } 40 | ], 41 | "metadata": { 42 | "language_info": { 43 | "name": "python" 44 | } 45 | }, 46 | "nbformat": 4, 47 | "nbformat_minor": 2 48 | } 49 | -------------------------------------------------------------------------------- /w5-snn/w5-snn-exercise.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Week 5 - Training spiking neural networks - Exercise\n", 8 | "\n", 9 | "This exercise builds on the surrogate gradient descent method." 
10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "\n", 17 | "## Part 1 - SPyTorch tutorial\n", 18 | "\n", 19 | "For the first part, familiarise yourself with the surrogate gradient descent algorithm using the [SPyTorch tutorial](https://github.com/fzenke/spytorch).\n", 20 | "\n", 21 | "You can launch the first notebook in Colab here:\n", 22 | "\n", 23 | "[![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/fzenke/spytorch/blob/main/notebooks/SpyTorchTutorial1.ipynb)\n", 24 | "\n", 25 | "In addition to the main videos from this week, you might find the following introductory video helpful:\n", 26 | "\n", 27 | "" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "metadata": {}, 33 | "source": [ 34 | "## Part 2 - Sound localisation network\n", 35 | "\n", 36 | "For the second part, work through the sound localisation network tutorial (notebook 3) from my [Cosyne tutorial on spiking neural networks](https://neural-reckoning.github.io/cosyne-tutorial-2022/).\n", 37 | "\n", 38 | "[![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/neural-reckoning/cosyne-tutorial-2022/blob/main/3-surrogate-sound-localisation.ipynb)" 39 | ] 40 | } 41 | ], 42 | "metadata": { 43 | "language_info": { 44 | "name": "python" 45 | } 46 | }, 47 | "nbformat": 4, 48 | "nbformat_minor": 2 49 | } 50 | -------------------------------------------------------------------------------- /w6-understanding/w6-understanding-exercise.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Week 6 - Understanding neural networks - Exercise\n", 8 | "\n", 9 | "This weeks videos explained how we can try to understand neural networks by observing, analysing and manipulating their activity. 
\n", 10 | "\n", 11 | "In this exercise, we'll apply the same techniques to an artificial neural network to see what we can learn." 12 | ] 13 | }, 14 | { 15 | "cell_type": "markdown", 16 | "metadata": {}, 17 | "source": [ 18 | "## Training a model\n", 19 | "\n", 20 | "First, we need a model to interrogate. \n", 21 | "\n", 22 | "In [week 5's exercise](https://github.com/neuro4ml/exercises/blob/main/w5-snn/w5-snn-exercise.ipynb) we learned how to use surrogate gradient descent to train spiking neural networks (SNNs), and trained a SNN on a sound localisation task. \n", 23 | "\n", 24 | "We're going to use the same task this week, and the notebook below has all of the code you'll need. \n", 25 | "\n", 26 | "Note that last weeks notebook had gaps to fill, which are filled here, so turn back if you want to avoid spoilers! \n", 27 | "\n", 28 | "[![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comob-project/snn-sound-localization/blob/main/research/3-Starting-Notebook.ipynb)" 29 | ] 30 | }, 31 | { 32 | "cell_type": "markdown", 33 | "metadata": {}, 34 | "source": [ 35 | "## Aim \n", 36 | "\n", 37 | "Once you have a trained model, your aim is to learn something about how it works.\n", 38 | "\n", 39 | "Below we've outlined two approaches to this (observing and manipulating unit activity). \n", 40 | "\n", 41 | "You can try these in any order or combination you like, or take a totally different approach! 
For example, the provided notebook ends with some analysis of the networks weight matrix - which could provide you with some ideas.\n", 42 | "\n", 43 | "Remember that visualising your data and analysis is always helpful!\n" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "## Approach 1 - Observing unit activity \n", 51 | "\n", 52 | "Once you have a trained model, you could try to observe its unit activity.\n", 53 | "\n", 54 | "To do that you'll need to edit the snn function to return the hidden layers activity, then pass inputs to the network and record this. A matrix of: hidden units x (time and trials) or a tensor of hidden units x time x trials would be good!\n", 55 | "\n", 56 | "Once you have this data try to learn something about the hidden units. Based on the material in W6-V1 you could try: \n", 57 | "\n", 58 | "* Calculating some summary statistics - like how specifically each unit responds to each class. \n", 59 | "* Decoding the input class from the hidden layer - this [paper](https://doi.org/10.1523/ENEURO.0506-19.2020) and associated [code](https://github.com/kordinglab/neural_decoding) could help with that. \n", 60 | "* Grouping the hidden units into functional ensembles - with a clustering algorithm or the ensemble method from the lecture: [paper](https://doi.org/10.1016/j.neuron.2018.05.015) + [code](https://github.com/neurostatslab/tensortools).\n" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "## Approach 2 - Manipulating unit activity\n", 68 | "\n", 69 | "Once you have a trained model, you could try to manipulate its unit activity. \n", 70 | "\n", 71 | "To do that, you'll need to edit the snn function so that you can set some weights or units to zero during a forward pass, and then check how that alters the accuracy. As a sanity check, silencing all of the hidden units should reduce your networks performance to chance. 
\n", 72 | "\n", 73 | "Once you can silence the units or weights you could: \n", 74 | "* See if silencing each element reduces performance on one or many classes. \n", 75 | "* Try over-activating units instead to see if you can force the network to make incorrect decisions. \n", 76 | "* Try the multi-lesion approach from the lecture: [paper](https://doi.org/10.1371/journal.pcbi.1010250​) + [code](https://kuffmode.github.io/msa/). \n" 77 | ] 78 | } 79 | ], 80 | "metadata": { 81 | "language_info": { 82 | "name": "python" 83 | } 84 | }, 85 | "nbformat": 4, 86 | "nbformat_minor": 2 87 | } 88 | -------------------------------------------------------------------------------- /w8-neuromorphic/chip.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | from typing import Dict, Optional 4 | from models import SNNModel 5 | 6 | # DO NOT CHANGE THESE PARAMETERS 7 | ENERGY_NORMALIZATION_FACTOR = 10_000 8 | PARETO_ALPHA = 0.5 9 | 10 | 11 | class NeuromorphicChip: 12 | def __init__(self): 13 | """ 14 | Memory and energy parameters for the neuromorphic chip 15 | ⚡ DO NOT CHANGE THESE PARAMETERS. THESE ARE THE CONSTRAINTS YOU NEED TO WORK WITH ⚡ 16 | """ 17 | self.MAX_NEURONS = 1024 18 | self.MAX_SYNAPSES = self.MAX_NEURONS * 64 19 | self.MEMORY_PER_NEURON = 32 # bytes 20 | self.MEMORY_PER_SYNAPSE = 4 # bytes 21 | self.TOTAL_MEMORY = ( 22 | self.MAX_SYNAPSES * self.MEMORY_PER_SYNAPSE 23 | + self.MAX_NEURONS * self.MEMORY_PER_NEURON 24 | ) 25 | 26 | self.ENERGY_PER_NEURON_UPDATE = 1e-1 # nJ 27 | self.ENERGY_PER_SYNAPSE_EVENT = 5e-4 # nJ 28 | 29 | self.mapped_snn = None 30 | 31 | def calculate_memory_usage(self, snn: SNNModel) -> int: 32 | """ 33 | Calculate total memory usage for the given SNN 34 | TODO: Implement this method, using the total number of neurons and synapses of the SNN. 35 | /!\ : You need to implement the properties n_neurons and n_synapses in the SNN class first. 
36 | 37 | """ 38 | raise NotImplementedError("Memory usage not implemented") 39 | 40 | def map(self, snn: SNNModel) -> bool: 41 | """ 42 | Map the given SNN to the chip. This method should check if the SNN fits on the chip 43 | and map it to the chip if it does, by setting the self.mapped_snn attribute. If it doesn't fit, raise a MemoryError. 44 | TODO: Implement this method, using the total number of neurons and synapses 45 | """ 46 | self.mapped_snn = snn 47 | raise NotImplementedError("Mapping not implemented") 48 | 49 | def run( 50 | self, snn: Optional[SNNModel] = None, input_data: torch.Tensor = None 51 | ) -> Dict: 52 | """ 53 | Run the mapped SNN and return performance metrics. The steps are the following: 54 | 1/ Run the SNN simulation 55 | TODO: Implement the rest of the method. 56 | 2/ Compute the total number of spikes and the spike rate 57 | 3/ Compute the total energy consumed by the SNN 58 | 4/ Return the results in a dictionary 59 | """ 60 | 61 | if snn is not None: 62 | # Map the SNN to the chip and check if it fits 63 | self.map(snn) 64 | 65 | # Run the actual network simulation. We don't need to compute gradients for this. 66 | with torch.no_grad(): 67 | spk_rec, mem_rec = self.mapped_snn(input_data) 68 | 69 | # Get network recordings for all layers. 70 | recordings = self.mapped_snn.recordings 71 | 72 | # Calculate spike metrics 73 | total_spikes = None # TODO: Calculate total number of spikes 74 | spike_rate = None # TODO: Calculate spike rate 75 | 76 | # Calculate energy metrics 77 | # TODO: Get the total number of neuron updates. This should not depend on the recordings. 78 | total_neuron_updates = None 79 | 80 | # TODO: Get the total number of synapse events. This should depend on the recordings. 81 | # To get the total number of synapse events, we need to sum the number of 82 | # spikes x the number of synapses for each layer. For a dense cinnectivity this is straightforward. 
83 | # For a sparse connectivity, we need to sum the number of non-zero weights in the synapse matrix. 84 | total_synapse_events = None 85 | 86 | # TODO: Calculate energy metrics. To do so, use the chip energy parameters. 87 | energy_neurons = None 88 | energy_synapses = None 89 | total_energy = None 90 | 91 | # Return the results in a dictionary 92 | sim_results = { 93 | "total_energy_nJ": total_energy, 94 | "memory_usage_bytes": self.calculate_memory_usage(self.mapped_snn), 95 | "neuron_updates": total_neuron_updates, 96 | "synapse_events": total_synapse_events, 97 | "spike_rate": spike_rate, 98 | "total_spikes": total_spikes, 99 | } 100 | 101 | raise NotImplementedError("Simulation results not implemented") 102 | 103 | return (spk_rec, mem_rec), sim_results 104 | 105 | 106 | def calculate_pareto_score(accuracy: float, energy_nj: float) -> float: 107 | """ 108 | Calculate Pareto trade-off score between accuracy and energy. 109 | 110 | Args: 111 | accuracy: Classification accuracy (0 to 1) 112 | energy_nj: Energy consumption in nanojoules 113 | 114 | Returns: 115 | Combined score (higher is better) 116 | """ 117 | # Accuracy term (higher is better) 118 | accuracy_term = PARETO_ALPHA * accuracy 119 | 120 | # Energy efficiency term (lower energy is better, so we invert it) 121 | # Normalized to 0-1 range using ENERGY_NORMALIZATION_FACTOR 122 | energy_efficiency = ( 123 | ENERGY_NORMALIZATION_FACTOR - energy_nj 124 | ) / ENERGY_NORMALIZATION_FACTOR 125 | energy_term = (1 - PARETO_ALPHA) * energy_efficiency 126 | 127 | return accuracy_term + energy_term 128 | -------------------------------------------------------------------------------- /w8-neuromorphic/dataset: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neuro4ml/exercises/4c92694c811ad21db589a8cc7d6a42888b63e13d/w8-neuromorphic/dataset -------------------------------------------------------------------------------- 
/w8-neuromorphic/dataset_labels: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neuro4ml/exercises/4c92694c811ad21db589a8cc7d6a42888b63e13d/w8-neuromorphic/dataset_labels -------------------------------------------------------------------------------- /w8-neuromorphic/models.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import snntorch as snn 4 | from snntorch import utils 5 | 6 | 7 | class RecordingSequential(nn.Sequential): 8 | """ 9 | A Sequential container that records intermediate activations (spikes and membrane potentials) 10 | from all spiking layers during forward pass. 11 | """ 12 | 13 | def __init__(self, *args): 14 | super().__init__(*args) 15 | self.reset_recordings() 16 | 17 | def reset_recordings(self): 18 | """Reset all spike and membrane potential recordings""" 19 | self._recordings = { 20 | "spikes": {}, 21 | "membrane": {}, 22 | } 23 | 24 | def forward(self, input: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor, dict]: 25 | """ 26 | Forward pass with recording of intermediate activations. 
27 | 28 | Args: 29 | input: Input tensor 30 | 31 | Returns: 32 | tuple: (output_spikes, output_membrane, recordings) 33 | """ 34 | # Reset hidden states at the start of each forward pass 35 | self.reset_recordings() 36 | 37 | x = input 38 | current_idx = 0 39 | 40 | # Process each layer while recording spiking layers 41 | for layer in self: 42 | if isinstance( 43 | layer, (snn.Leaky, snn.Synaptic) 44 | ): # Record only spiking layers 45 | out = layer(x) 46 | if isinstance(out, tuple): 47 | spk, *mem = out 48 | mem = mem[-1] 49 | else: 50 | spk, mem = out, None 51 | 52 | self._recordings["spikes"][current_idx] = spk 53 | self._recordings["membrane"][current_idx] = mem 54 | x = spk 55 | current_idx += 1 56 | else: 57 | x = layer(x) 58 | 59 | # Return the final layer's outputs 60 | return ( 61 | self._recordings["spikes"][current_idx - 1], 62 | self._recordings["membrane"][current_idx - 1], 63 | self._recordings, 64 | ) 65 | 66 | 67 | class SNNModel(nn.Module): 68 | """ 69 | A simplified Spiking Neural Network using snnTorch. 70 | You need to implement: 71 | 1/ property n_neurons 72 | 2/ property n_synapses 73 | """ 74 | 75 | def __init__( 76 | self, 77 | n_in: int = 128, 78 | n_hidden: int = 128, 79 | n_out: int = 10, 80 | beta: float = 0.95, # decay rate 81 | seed: int = 42, 82 | ): 83 | super().__init__() 84 | torch.manual_seed(seed) 85 | self.n_in = n_in 86 | self.n_hidden = n_hidden 87 | self.n_out = n_out 88 | self.layers = RecordingSequential( 89 | nn.Linear(self.n_in, self.n_hidden), 90 | snn.Leaky(beta=beta, init_hidden=True, output=True), 91 | nn.Linear(self.n_hidden, self.n_out), 92 | snn.Leaky(beta=beta, init_hidden=True, output=True), 93 | ) 94 | 95 | self.n_timesteps = 100 96 | 97 | def forward(self, x: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor]: 98 | """ 99 | Run network simulation for input x. 
100 | Args: 101 | x: Input tensor of shape (time_steps, batch_size, input_size) 102 | Returns: 103 | tuple: (spike_recording, membrane_recording) 104 | """ 105 | 106 | assert x.shape[0] == self.n_timesteps, str( 107 | f"Input tensor must have the correct number of time steps, shape is {x.shape} but should be batch x time x input_size" 108 | ) 109 | # Initialize hidden states 110 | utils.reset(self.layers) 111 | 112 | # Record spikes for each time step 113 | spk_rec = [] 114 | mem_rec = [] 115 | self._recordings = { 116 | "spikes": {}, 117 | "membrane": {}, 118 | } 119 | 120 | for step, x_t in enumerate(x): 121 | spk, *mem, recordings = self.layers(x_t) 122 | mem = mem[-1] 123 | spk_rec.append(spk) 124 | mem_rec.append(mem) 125 | for rec in recordings: 126 | for k, v in recordings[rec].items(): 127 | self._recordings[rec].setdefault(k, []).append(v) 128 | 129 | return torch.stack(spk_rec), torch.stack(mem_rec) 130 | 131 | @property 132 | def recordings(self): 133 | return { 134 | rec_name: { 135 | idx: torch.stack(recs) if (recs[0] is not None) else recs 136 | for idx, recs in self._recordings[rec_name].items() 137 | } 138 | for rec_name in self._recordings 139 | } 140 | 141 | @property 142 | def n_neurons(self) -> int: 143 | """ 144 | TODO: Calculate total number of neurons in the network 145 | Hint: Use out_features of linear layers or use the dimensions that we used in the initialization 146 | """ 147 | raise NotImplementedError("Number of neurons not implemented") 148 | 149 | @property 150 | def n_synapses(self) -> int: 151 | """ 152 | TODO: Calculate total number of active synapses in the network 153 | Hint: Count non-zero weights in linear layers (access weights with layer.weight) 154 | Hint: you can use torch.count_nonzero(...), but remember to copy back to CPU with .cpu().data.item() 155 | Optional: Use weight masks to create sparse connectivity in the network, to reduce this number ! 
156 | """ 157 | raise NotImplementedError("Number of synapses not implemented") 158 | 159 | def __repr__(self): 160 | return f"SNNModel(n_neurons={self.n_neurons}, n_synapses={self.n_synapses})" 161 | 162 | def to(self, device: str): 163 | self.layers.to(device) 164 | return self 165 | -------------------------------------------------------------------------------- /w8-neuromorphic/training.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import pandas as pd 3 | from dataclasses import dataclass 4 | from tqdm.auto import tqdm 5 | from torch.utils.data import TensorDataset, random_split 6 | 7 | from models import SNNModel 8 | from snntorch.functional.loss import ce_rate_loss 9 | from chip import NeuromorphicChip, calculate_pareto_score 10 | 11 | 12 | def get_dataloaders( 13 | batch_size: int = 32, 14 | train_split: float = 0.8, 15 | ): 16 | data = torch.load("dataset", weights_only=True) 17 | labels = torch.load("dataset_labels", weights_only=True) 18 | spike_times = data[..., 0].int().long() 19 | spikes = torch.nn.functional.one_hot(spike_times, num_classes=100).transpose(1, 2) 20 | 21 | dataset = TensorDataset(spikes, labels) 22 | 23 | # Split into train/test 24 | train_size = int(train_split * len(dataset)) 25 | test_size = len(dataset) - train_size 26 | train_dataset, test_dataset = random_split( 27 | dataset, 28 | [train_size, test_size], 29 | ) 30 | 31 | def collate_fn(batch): 32 | # Unpack the batch into inputs and targets 33 | inputs, targets = zip(*batch) 34 | # Stack and transpose inputs from (batch, time, features) to (time, batch, features) 35 | inputs = torch.stack(inputs).transpose(0, 1) 36 | # Stack targets normally 37 | targets = torch.stack(targets) 38 | return inputs, targets 39 | 40 | # Create data loaders 41 | train_loader = torch.utils.data.DataLoader( 42 | train_dataset, batch_size=batch_size, shuffle=True, collate_fn=collate_fn 43 | ) 44 | test_loader = torch.utils.data.DataLoader( 45 | 
test_dataset, batch_size=batch_size, shuffle=False, collate_fn=collate_fn 46 | ) 47 | 48 | return train_loader, test_loader, dataset 49 | 50 | 51 | @dataclass 52 | class TrainingMetrics: 53 | accuracy: float 54 | energy_usage: float 55 | epoch: int 56 | loss: float 57 | firing_rate: float 58 | 59 | 60 | class SNNTrainer: 61 | def __init__( 62 | self, 63 | snn: SNNModel, 64 | learning_rate: float = 0.001, 65 | lr_gamma: float = 0.9, 66 | config: dict = {}, 67 | device: str = "cuda" if torch.cuda.is_available() else "cpu", 68 | ): 69 | self.config = config 70 | self.model = snn.to(device) 71 | self.chip = NeuromorphicChip() 72 | self.optimizer = torch.optim.Adam(self.model.parameters(), lr=learning_rate) 73 | self.scheduler = torch.optim.lr_scheduler.ExponentialLR( 74 | self.optimizer, gamma=lr_gamma 75 | ) 76 | 77 | # Initialize loss function 78 | self.loss_fn = ce_rate_loss() 79 | self.metrics_history: list[TrainingMetrics] = [] 80 | self.chip_results: list[pd.DataFrame] = [] 81 | 82 | def calculate_accuracy( 83 | self, spikes: torch.Tensor, target: torch.Tensor 84 | ) -> tuple[float, float]: 85 | """ 86 | Calculate accuracy and loss from a rate-based loss 87 | TODO: Complete this method to return accuracy and loss 88 | Optional: Implement a temporal time-to-first-spike based loss using snnTorch. 
89 | """ 90 | 91 | loss = self.loss_fn(spikes, target).mean() 92 | acc = None 93 | 94 | raise NotImplementedError("Accuracy not implemented") 95 | 96 | return acc, loss.item() 97 | 98 | def train_epoch( 99 | self, 100 | train_loader: torch.utils.data.DataLoader, 101 | epoch: int, 102 | n_epochs: int, 103 | pbar: tqdm = None, 104 | ) -> TrainingMetrics: 105 | self.model.train() 106 | total_correct = 0 107 | total_samples = 0 108 | epoch_energy = 0.0 109 | epoch_loss = 0.0 110 | epoch_firing_rate = 0.0 111 | 112 | if pbar is None: 113 | pbar = tqdm(train_loader, desc="Training: ", leave=False) 114 | pbar_to_set = pbar 115 | else: 116 | pbar_to_set = pbar 117 | pbar = train_loader 118 | 119 | for batch_idx, (data, target) in enumerate(pbar): 120 | self.optimizer.zero_grad() 121 | 122 | data = data.float().to(self.device) 123 | target = target.to(self.device) 124 | 125 | # Forward pass 126 | spikes, mem = self.model(data) 127 | 128 | # Calculate loss and backward 129 | loss = self.loss_fn(spikes, target) 130 | 131 | loss.backward() 132 | self.optimizer.step() 133 | 134 | # Calculate metrics 135 | acc, loss_val = self.calculate_accuracy(spikes, target) 136 | firing_rate = spikes.mean().item() 137 | 138 | desc = str( 139 | f"Epoch {epoch}/{n_epochs} - Batch {batch_idx}/{len(train_loader)}: loss: {loss_val:.3f}, " 140 | + f"Firing Rate: {firing_rate:.3f}, Acc: {acc:.3f}" 141 | ) 142 | pbar_to_set.set_postfix_str(desc) 143 | 144 | total_correct += acc * target.size(0) 145 | total_samples += target.size(0) 146 | epoch_loss += loss_val 147 | epoch_firing_rate += firing_rate 148 | 149 | # Calculate epoch metrics 150 | metrics = TrainingMetrics( 151 | accuracy=total_correct / total_samples, 152 | energy_usage=epoch_energy / len(train_loader), 153 | epoch=epoch, 154 | loss=epoch_loss / len(train_loader), 155 | firing_rate=epoch_firing_rate / len(train_loader), 156 | ) 157 | self.metrics_history.append(metrics) 158 | 159 | # Update learning rate 160 | self.scheduler.step( 161 | 
metrics.loss 162 | if isinstance(self.scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau) 163 | else None 164 | ) 165 | 166 | return metrics 167 | 168 | def evaluate( 169 | self, 170 | test_loader: torch.utils.data.DataLoader, 171 | pbar: tqdm = None, 172 | epoch=-1, 173 | ) -> TrainingMetrics: 174 | self.model.eval() 175 | total_correct = 0 176 | total_samples = 0 177 | total_energy = 0.0 178 | total_loss = 0.0 179 | total_firing_rate = 0.0 180 | 181 | all_results = [] 182 | 183 | with torch.no_grad(): 184 | for data, target in test_loader: 185 | data = data.float().to(self.device) 186 | target = target.to(self.device) 187 | 188 | (spikes, mem), results = self.chip.run(self.model, input_data=data) 189 | acc, loss = self.calculate_accuracy(spikes, target) 190 | 191 | results["accuracy"] = acc 192 | results["loss"] = loss 193 | results["epoch"] = epoch 194 | all_results.append(results) 195 | 196 | total_correct += acc * target.size(0) 197 | total_samples += target.size(0) 198 | total_energy += results["total_energy_nJ"] 199 | total_loss += loss 200 | total_firing_rate += spikes.mean().item() 201 | 202 | metrics = TrainingMetrics( 203 | accuracy=total_correct / total_samples, 204 | energy_usage=total_energy / len(test_loader), 205 | epoch=-1, # Indicates evaluation 206 | loss=total_loss / len(test_loader), 207 | firing_rate=total_firing_rate / len(test_loader), 208 | ) 209 | 210 | all_results = pd.DataFrame(all_results) 211 | 212 | if pbar is not None: 213 | desc = f"Test Acc: {metrics.accuracy:.3f}, Energy: {metrics.energy_usage / 1000:.2f} uJ" 214 | pbar.set_description(desc) 215 | 216 | return metrics, all_results 217 | 218 | def train( 219 | self, 220 | train_loader: torch.utils.data.DataLoader, 221 | test_loader: torch.utils.data.DataLoader, 222 | n_epochs: int, 223 | ): 224 | pbar = tqdm(range(n_epochs), desc="Training: ", leave=False) 225 | for epoch in pbar: 226 | self.train_epoch(train_loader, epoch=epoch, n_epochs=n_epochs, pbar=pbar) 227 | metrics, 
pd_results = self.evaluate(test_loader, epoch=epoch, pbar=pbar) 228 | self.chip_results.append(pd_results) 229 | 230 | @property 231 | def pd_results(self) -> pd.DataFrame: 232 | if len(self.chip_results) == 0: 233 | return pd.DataFrame() 234 | else: 235 | results = pd.concat(self.chip_results) 236 | for k, v in self.config.items(): 237 | results[k] = [v] * len(results) 238 | return results 239 | 240 | @property 241 | def device(self) -> str: 242 | return next(self.model.parameters()).device 243 | 244 | @property 245 | def pareto_tradeoff(self) -> pd.DataFrame: 246 | best_epoch_mean = ( 247 | self.pd_results.groupby("epoch") 248 | .mean() 249 | .sort_values(by="accuracy", ascending=False) 250 | .iloc[0] 251 | ) 252 | return calculate_pareto_score( 253 | best_epoch_mean["accuracy"], best_epoch_mean["total_energy_nJ"] 254 | ) 255 | -------------------------------------------------------------------------------- /w8-neuromorphic/w8-neuromorphic-exercise.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Week 8 - Neuromorphic computing - Exercise\n", 8 | "\n", 9 | "Note: this is a new version of the exercise, for the old version see [w8-neuromorphic-exercise-v1.ipynb](w8-neuromorphic-exercise-v1.ipynb).\n", 10 | "\n", 11 | "[![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/neuro4ml/exercises/blob/main/w8-neuromorphic/w8-neuromorphic-exercise.ipynb)\n", 12 | "\n", 13 | "## 🧠 Introduction \n", 14 | "\n", 15 | "Neuromorphic engineering is a field that aims to design and build artificial neural systems that mimic the architecture and principles of biological neural networks. Unlike traditional von Neumann computing architectures, neuromorphic chips:\n", 16 | "\n", 17 | "1. 🔄 Process information in a parallel, event-driven manner\n", 18 | "2. 
💾 Integrate memory and computation\n", 19 | "3. ⚡ Operate with extremely low power consumption\n", 20 | "\n", 21 | "### 🤔 Why trade off power and accuracy?\n", 22 | "\n", 23 | "Traditional deep learning models running on GPUs or CPUs consume significant power (often hundreds of watts). In contrast, the human brain processes complex information while consuming only ~20 watts. Neuromorphic chips aim to bridge this efficiency gap by:\n", 24 | "\n", 25 | "- 📊 Using spike-based computation\n", 26 | "- 🎯 Implementing local learning rules\n", 27 | "- ⚡ Exploiting sparse, event-driven processing\n", 28 | "\n", 29 | "However, these benefits often come with reduced accuracy compared to traditional deep learning approaches. Understanding and optimizing this trade-off is crucial for deploying neural networks in power-constrained environments like mobile devices or IoT sensors.\n", 30 | "\n", 31 | "## 📝 Exercise overview\n", 32 | "\n", 33 | "In this exercise, you will:\n", 34 | "1. 🔧 Implement a simple neuromorphic chip simulator\n", 35 | "2. 🏃‍♂️ Train SNNs with different architectures\n", 36 | "3. 📊 Analyze the power-accuracy trade-off\n", 37 | "4. 🔍 Explore how different parameters affect this trade-off\n", 38 | "\n", 39 | "**This will also serve as a solid introduction on how to effectively train SNNs using modern packages such as SNNTorch!**\n", 40 | "\n", 41 | "## 💻 Setup\n", 42 | "\n", 43 | "Some of the code for this exercise is already provided, but you will need to implement some parts: \n", 44 | "\n", 45 | "### SNNModel (models.py)\n", 46 | "The `SNNModel` class implements a 2-layer Leaky Integrate-and-Fire (LIF) network using SNNTorch. The network architecture consists of:\n", 47 | "- Input layer → Hidden layer (with LIF neurons) → Output layer (with LIF neurons). (You will be able to play with other network architectures)\n", 48 | "- Each LIF neuron has a decay rate (beta) that controls how quickly the membrane potential decays. 
(You will be able to play with other neuron models provided by SNNTorch)\n", 49 | "- The network processes input data over multiple timesteps, producing spikes at each layer\n", 50 | "\n", 51 | "### NeuromorphicChip (chip.py)\n", 52 | "The `NeuromorphicChip` class simulates a neuromorphic hardware platform with the following constraints:\n", 53 | "- Maximum number of neurons: 1024\n", 54 | "- Maximum number of synapses: 64 * 1024\n", 55 | "- Memory per neuron: 32 bytes\n", 56 | "- Memory per synapse: 4 bytes\n", 57 | "- Energy consumption:\n", 58 | " - 1e-1 nJ per neuron update\n", 59 | " - 5e-4 nJ per synapse event\n", 60 | " \n", 61 | "This backend hardware is very simple and does not include many features of neuromorphic hardware, and serves only as an introduction to thinking about efficient network design." 62 | ] 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "metadata": {}, 67 | "source": [ 68 | "## Imports and data loading" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": 10, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "try:\n", 78 | " import google.colab\n", 79 | "\n", 80 | " IN_COLAB = True\n", 81 | "except:\n", 82 | " IN_COLAB = False\n", 83 | "\n", 84 | "if IN_COLAB:\n", 85 | " !pip install snntorch\n", 86 | " !git clone https://github.com/neuro4ml/exercises.git\n", 87 | " !cp exercises/w8-neuromorphic/*.py .\n", 88 | " !cp exercises/w8-neuromorphic/dataset .\n", 89 | " !cp exercises/w8-neuromorphic/dataset_labels .\n", 90 | "\n", 91 | "# If you are using a local machine, please install the dependencies yourself." 
92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": 4, 97 | "metadata": {}, 98 | "outputs": [], 99 | "source": [ 100 | "# For automatic reloading of external modules\n", 101 | "%load_ext autoreload\n", 102 | "%autoreload 2" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": 5, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "import torch\n", 112 | "import seaborn as sns\n", 113 | "import matplotlib.pyplot as plt\n", 114 | "\n", 115 | "from chip import NeuromorphicChip\n", 116 | "from models import SNNModel" 117 | ] 118 | }, 119 | { 120 | "cell_type": "markdown", 121 | "metadata": {}, 122 | "source": [ 123 | "## 🛠️ Exercise 1.1: Mapping Implementation\n", 124 | "\n", 125 | "To complete this first question you need to implement the functions necessary to map your network on the chip.\n", 126 | "\n", 127 | "- 📍 Go to [models.py](models.py) and implement the `n_neurons` and `n_synapses` properties.\n", 128 | "- 📍 Go to [chip.py](chip.py) and implement the `calculate_memory_usage`, `map` and `run` methods.\n", 129 | "- ▶️ Run the following cell to check your implementation\n", 130 | "\n", 131 | "This is what you should see:\n", 132 | "\n", 133 | " Simulation Results:\n", 134 | " Energy consumption: 1.29 µJ\n", 135 | " Memory usage: 57.34 KB\n", 136 | " Total neuron updates: 11000\n", 137 | " Total synapse events: 389740\n", 138 | " Average spike rate: 0.205\n", 139 | " Total spikes: 3070.0" 140 | ] 141 | }, 142 | { 143 | "cell_type": "code", 144 | "execution_count": 4, 145 | "metadata": {}, 146 | "outputs": [], 147 | "source": [ 148 | "chip = NeuromorphicChip()\n", 149 | "\n", 150 | "dims = (128, 100, 10)\n", 151 | "n_timesteps = 100\n", 152 | "seed = 42\n", 153 | "snn = SNNModel(n_in=dims[0], n_hidden=dims[1], n_out=dims[-1], beta=0.95, seed=seed)" 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": 5, 159 | "metadata": {}, 160 | "outputs": [ 161 | { 162 | "name": "stdout", 163 | 
"output_type": "stream", 164 | "text": [ 165 | "\n", 166 | "Simulation Results:\n", 167 | "Energy consumption: 0.29 µJ\n", 168 | "Memory usage: 57.34 KB\n", 169 | "Total neuron updates: 110\n", 170 | "Total synapse events: 553716\n", 171 | "Average spike rate: 0.219\n", 172 | "Total spikes: 4327.0\n" 173 | ] 174 | } 175 | ], 176 | "source": [ 177 | "# Generate random input (seed is fixed to 42 for reproducibility)\n", 178 | "torch.manual_seed(seed)\n", 179 | "input_data = torch.randn(n_timesteps, dims[0]) * 10 # 100 timesteps\n", 180 | "\n", 181 | "# Map the network on the chip\n", 182 | "chip.map(snn)\n", 183 | "# Run the network\n", 184 | "output, results = chip.run(input_data=input_data)\n", 185 | "\n", 186 | "print(\"\\nSimulation Results:\")\n", 187 | "print(f\"Energy consumption: {results['total_energy_nJ']/1000:.2f} µJ\")\n", 188 | "print(f\"Memory usage: {results['memory_usage_bytes']/1024:.2f} KB\")\n", 189 | "print(f\"Total neuron updates: {results['neuron_updates']}\")\n", 190 | "print(f\"Total synapse events: {results['synapse_events']}\")\n", 191 | "print(f\"Average spike rate: {results['spike_rate']:.3f}\")\n", 192 | "print(f\"Total spikes: {results['total_spikes']}\")" 193 | ] 194 | }, 195 | { 196 | "cell_type": "markdown", 197 | "metadata": {}, 198 | "source": [ 199 | "## 🚫 Exercise 1.2: Failed Mappings\n", 200 | "\n", 201 | "Now let's explore what happens when we try to map networks that exceed the chip's constraints:\n", 202 | "\n", 203 | "### 🔬 Experiments:\n", 204 | "1. 🧠 First, we'll try mapping a network with too many neurons\n", 205 | "2. 🔗 Then, we'll attempt to map one with too many synapses \n", 206 | "3. 💡 Finally, we'll see how sparse connectivity can help fit larger networks\n", 207 | "\n", 208 | "Let's run these experiments and observe the error messages we get! Each case will demonstrate different limitations of neuromorphic hardware:\n", 209 | "The first two cases should return a `MemoryError` if your code is correct. 
The third case should run without errors.\n" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": 31, 215 | "metadata": {}, 216 | "outputs": [ 217 | { 218 | "name": "stdout", 219 | "output_type": "stream", 220 | "text": [ 221 | "Too many neurons: 1034 (max: 1024)\n" 222 | ] 223 | } 224 | ], 225 | "source": [ 226 | "chip = NeuromorphicChip()\n", 227 | "\n", 228 | "# Case 1 : Too many neurons\n", 229 | "dims = (128, 1024, 10)\n", 230 | "seed = 42\n", 231 | "snn = SNNModel(n_in=dims[0], n_hidden=dims[1], n_out=dims[-1], beta=0.95, seed=seed)\n", 232 | "# Map the network on the chip\n", 233 | "try:\n", 234 | " chip.map(snn)\n", 235 | "except MemoryError as e:\n", 236 | " print(e)" 237 | ] 238 | }, 239 | { 240 | "cell_type": "code", 241 | "execution_count": 32, 242 | "metadata": {}, 243 | "outputs": [ 244 | { 245 | "name": "stdout", 246 | "output_type": "stream", 247 | "text": [ 248 | "Too many synapses: 70656 (max: 65536)\n" 249 | ] 250 | } 251 | ], 252 | "source": [ 253 | "chip = NeuromorphicChip()\n", 254 | "\n", 255 | "# Case 2 : Too many synapses\n", 256 | "dims = (128, 512, 10)\n", 257 | "seed = 42\n", 258 | "snn = SNNModel(n_in=dims[0], n_hidden=dims[1], n_out=dims[-1], beta=0.95, seed=seed)\n", 259 | "# Map the network on the chip\n", 260 | "try:\n", 261 | " chip.map(snn)\n", 262 | "except MemoryError as e:\n", 263 | " print(e)" 264 | ] 265 | }, 266 | { 267 | "cell_type": "code", 268 | "execution_count": 33, 269 | "metadata": {}, 270 | "outputs": [ 271 | { 272 | "name": "stdout", 273 | "output_type": "stream", 274 | "text": [ 275 | "Mapped! 
Memory usage: 154.16 KB, Number of neurons: 522, Number of synapses: 35289\n" 276 | ] 277 | } 278 | ], 279 | "source": [ 280 | "# Case 3 : Sparse connectivity\n", 281 | "dims = (128, 512, 10)\n", 282 | "seed = 42\n", 283 | "snn = SNNModel(n_in=dims[0], n_hidden=dims[1], n_out=dims[-1], beta=0.95, seed=seed)\n", 284 | "for l in snn.layers:\n", 285 | " if hasattr(l, \"weight\"):\n", 286 | " l.weight.data = (\n", 287 | " torch.rand(l.weight.data.shape) < 0.5\n", 288 | " ) # 50% of the weights are non-zero\n", 289 | "\n", 290 | "# Map the network on the chip\n", 291 | "try:\n", 292 | " chip.map(snn)\n", 293 | " print(\n", 294 | " f\"Mapped! Memory usage: {chip.calculate_memory_usage(snn)/1024:.2f} KB, Number of neurons: {snn.n_neurons}, Number of synapses: {snn.n_synapses}\"\n", 295 | " )\n", 296 | "except MemoryError as e:\n", 297 | " print(e)" 298 | ] 299 | }, 300 | { 301 | "cell_type": "markdown", 302 | "metadata": {}, 303 | "source": [ 304 | "## 🎯 Exercise 2: Training\n", 305 | "\n", 306 | "In this exercise you will train a SNN on the [Randman dataset](https://github.com/fzenke/randman).\n", 307 | "\n", 308 | "### 📊 Background: The Randman Dataset\n", 309 | "\n", 310 | "The Randman dataset is a synthetic dataset specifically designed for training Spiking Neural Networks (SNNs). Here's what you need to know:\n", 311 | "\n", 312 | "1. **Dataset Structure**\n", 313 | " - Generates labeled spike trains for classification\n", 314 | " - Each sample consists of temporal spike patterns\n", 315 | " - Data is organized into multiple classes (10 classes)\n", 316 | " - Spike times are stored in `dataset` file\n", 317 | " - Class labels are stored in `dataset_labels` file\n", 318 | "\n", 319 | "2. 
**Data Format**\n", 320 | " - Input: Spike trains encoded as binary tensors (time x neurons)\n", 321 | " - Each neuron can spike at different time steps\n", 322 | " - Data is converted to one-hot encoding across time steps\n", 323 | " - Shape: (batch_size, timesteps, input_neurons)\n", 324 | "\n", 325 | "3. **Classification Task**\n", 326 | " - Goal: Classify input spike patterns into correct classes\n", 327 | " - Output layer produces spike trains\n", 328 | " - Classification is done using rate coding (for now !): the output neuron that spikes the most indicates the predicted class\n", 329 | "\n", 330 | "4. **Data Loading**\n", 331 | " All necessary code for loading and preprocessing the data is provided:\n", 332 | " - Data loading from files\n", 333 | " - Conversion to one-hot encoding\n", 334 | " - Train/test splitting\n", 335 | " - DataLoader creation with batching\n", 336 | "\n", 337 | "### 🎓 2.1 Training\n", 338 | "\n", 339 | "- 📝 Go to [training.py](training.py) and complete the `SNNTrainer` class, in particular the `calculate_accuracy` method\n", 340 | "- ▶️ Run the following cell to train your network\n", 341 | "- 📊 Take a look at the training and testing metrics, especially the accuracy and energy consumption\n", 342 | "- 🔄 Start experimenting with different architectures and parameters to see how they affect the accuracy and energy consumption" 343 | ] 344 | }, 345 | { 346 | "cell_type": "code", 347 | "execution_count": 2, 348 | "metadata": {}, 349 | "outputs": [], 350 | "source": [ 351 | "from training import get_dataloaders, SNNTrainer" 352 | ] 353 | }, 354 | { 355 | "cell_type": "code", 356 | "execution_count": 4, 357 | "metadata": {}, 358 | "outputs": [ 359 | { 360 | "name": "stdout", 361 | "output_type": "stream", 362 | "text": [ 363 | "torch.Size([64, 100, 128]) torch.Size([64])\n" 364 | ] 365 | } 366 | ], 367 | "source": [ 368 | "# Create dataloaders\n", 369 | "train_loader, test_loader, dataset = get_dataloaders(\n", 370 | " batch_size=64,\n", 
371 | ")" 372 | ] 373 | }, 374 | { 375 | "cell_type": "code", 376 | "execution_count": 5, 377 | "metadata": {}, 378 | "outputs": [ 379 | { 380 | "name": "stdout", 381 | "output_type": "stream", 382 | "text": [ 383 | "torch.Size([64, 100, 128]) torch.Size([64])\n" 384 | ] 385 | } 386 | ], 387 | "source": [ 388 | "# Take a look at the data\n", 389 | "data, labels = next(iter(train_loader))\n", 390 | "print(\n", 391 | " data.shape, labels.shape\n", 392 | ") # batch_size x timesteps x n_in. 1st and 2nd dims are swapped when passed to the model" 393 | ] 394 | }, 395 | { 396 | "cell_type": "code", 397 | "execution_count": 6, 398 | "metadata": {}, 399 | "outputs": [], 400 | "source": [ 401 | "snn_config = {\n", 402 | " \"n_hidden\": 128,\n", 403 | " \"beta\": 0.95,\n", 404 | " \"seed\": 42,\n", 405 | "}" 406 | ] 407 | }, 408 | { 409 | "cell_type": "code", 410 | "execution_count": null, 411 | "metadata": {}, 412 | "outputs": [], 413 | "source": [ 414 | "# Initialize model\n", 415 | "snn = SNNModel(\n", 416 | " n_hidden=snn_config[\"n_hidden\"],\n", 417 | " beta=snn_config[\"beta\"],\n", 418 | " seed=snn_config[\"seed\"],\n", 419 | ")" 420 | ] 421 | }, 422 | { 423 | "cell_type": "code", 424 | "execution_count": null, 425 | "metadata": {}, 426 | "outputs": [], 427 | "source": [ 428 | "# Initialize trainer\n", 429 | "trainer = SNNTrainer(snn, learning_rate=1e-3, lr_gamma=0.9, config=snn_config)\n", 430 | "# Train the model\n", 431 | "trainer.train(train_loader, test_loader, n_epochs=10)" 432 | ] 433 | }, 434 | { 435 | "cell_type": "markdown", 436 | "metadata": {}, 437 | "source": [ 438 | "### 📈 2.2 Plot the results\n", 439 | "- 📊 We can plot the accuracy and energy consumption as a function of the epoch\n", 440 | "- 📈 We see that the accuracy is improving but the energy consumption is also increasing\n", 441 | "- ⚖️ This is a trade-off that we need to be aware of when training SNNs" 442 | ] 443 | }, 444 | { 445 | "cell_type": "code", 446 | "execution_count": null, 447 | 
"metadata": {}, 448 | "outputs": [], 449 | "source": [ 450 | "results = trainer.pd_results.groupby(\"epoch\", as_index=False).mean()\n", 451 | "fig, ax = plt.subplots()\n", 452 | "sns.lineplot(\n", 453 | " data=results, x=\"epoch\", y=\"accuracy\", ax=ax, label=\"Accuracy\", legend=False\n", 454 | ")\n", 455 | "ax2 = ax.twinx()\n", 456 | "sns.lineplot(\n", 457 | " data=results,\n", 458 | " x=\"epoch\",\n", 459 | " y=\"total_energy_nJ\",\n", 460 | " ax=ax2,\n", 461 | " color=\"orange\",\n", 462 | " label=\"Energy\",\n", 463 | " legend=False,\n", 464 | ")\n", 465 | "ax.figure.legend()\n", 466 | "ax.set_title(\n", 467 | " f\"Accuracy and Energy, Final Trade-off Score: {trainer.pareto_tradeoff:.2f}\"\n", 468 | ")\n", 469 | "plt.show()" 470 | ] 471 | }, 472 | { 473 | "cell_type": "markdown", 474 | "metadata": {}, 475 | "source": [ 476 | "## 🚀 Exercise 3: Optimizing the trade-off\n", 477 | "\n", 478 | "Now, you will explore how different parameters affect the accuracy and energy consumption of the SNN. This part is open-ended, here are some ideas:\n", 479 | "\n", 480 | "- Experiment with network architectures (number of layers, number of neurons, etc.)\n", 481 | "- Regularize spiking activity \n", 482 | "- Implement a bi-exponential neuron model, using SnnTorch (snn.neurons.Synaptic)\n", 483 | "- Implement a temporal loss (time-to-first-spike), using SnnTorch. Be careful to change the `calculate_accuracy` method in `training.py`\n", 484 | "- Implement weight masks to reduce the number of synapses\n", 485 | "- Use SnnTorch to make the time-constants heterogeneous and/or learnable, and maybe use less neurons\n", 486 | "\n", 487 | "Ideally, after experimenting with these parameters, you should start to see a rough trade-off between accuracy and energy! Can we see some kind of Pareto front appearing? 
\n", 488 | "\n", 489 | "### 🏆 *The group with the best trade-off score will win the competition!*" 490 | ] 491 | } 492 | ], 493 | "metadata": { 494 | "kernelspec": { 495 | "display_name": "neuro4ml", 496 | "language": "python", 497 | "name": "python3" 498 | }, 499 | "language_info": { 500 | "codemirror_mode": { 501 | "name": "ipython", 502 | "version": 3 503 | }, 504 | "file_extension": ".py", 505 | "mimetype": "text/x-python", 506 | "name": "python", 507 | "nbconvert_exporter": "python", 508 | "pygments_lexer": "ipython3", 509 | "version": "3.9.20" 510 | } 511 | }, 512 | "nbformat": 4, 513 | "nbformat_minor": 2 514 | } 515 | --------------------------------------------------------------------------------