├── .coveragerc
├── .github
└── workflows
│ ├── build.yml
│ ├── keep-alive.yml
│ ├── mirror-ebrains.yml
│ └── test.yml
├── .gitignore
├── .readthedocs.yaml
├── .zenodo.json
├── AUTHORS.txt
├── COPYING
├── COPYING.lesser
├── LICENSE.txt
├── MANIFEST.in
├── Makefile
├── README.rst
├── bluepymm
├── __init__.py
├── legacy
│ ├── __init__.py
│ └── create_hoc_files.py
├── main.py
├── prepare_combos
│ ├── __init__.py
│ ├── create_mm_sqlite.py
│ ├── main.py
│ ├── parse_files.py
│ └── prepare_emodel_dirs.py
├── run_combos
│ ├── __init__.py
│ ├── calculate_scores.py
│ └── main.py
├── select_combos
│ ├── __init__.py
│ ├── main.py
│ ├── megate_output.py
│ ├── process_megate_config.py
│ ├── reporting.py
│ ├── sqlite_io.py
│ └── table_processing.py
├── templates
│ ├── cell_template_neurodamus.jinja2
│ ├── cell_template_neurodamus_sbo.jinja2
│ └── cell_template_neuron.jinja2
├── tools.py
└── validate_output
│ ├── __init__.py
│ └── main.py
├── codecov.yml
├── docs
├── .gitignore
├── Makefile
└── source
│ ├── .gitignore
│ ├── bluepymm.prepare_combos.rst
│ ├── bluepymm.rst
│ ├── bluepymm.run_combos.rst
│ ├── bluepymm.select_combos.rst
│ ├── conf.py
│ ├── index.rst
│ └── logo
│ └── BluePyMMBanner.png
├── notebook
└── BluePyMM.ipynb
├── package.json
├── pyproject.toml
├── pytest.ini
├── requirements.txt
├── requirements_docs.txt
├── tests
├── .gitignore
├── __init__.py
├── examples
│ ├── cmvd3a
│ │ ├── circuit_intlayers.mvd3
│ │ └── circuit_strlayers.mvd3
│ └── simple1
│ │ ├── .gitignore
│ │ ├── build_git.py
│ │ ├── cell_template.jinja2
│ │ ├── cell_template_neuron.jinja2
│ │ ├── data
│ │ ├── emodels_dir
│ │ │ └── subdir
│ │ │ │ ├── .gitignore
│ │ │ │ ├── __init__.py
│ │ │ │ ├── emodel_etype_map.json
│ │ │ │ ├── final.json
│ │ │ │ ├── mechanisms
│ │ │ │ └── Ih.mod
│ │ │ │ ├── morphologies
│ │ │ │ └── setup
│ │ │ │ ├── __init__.py
│ │ │ │ └── evaluator.py
│ │ ├── morphs
│ │ │ ├── apical_points_isec.json
│ │ │ ├── morph1.asc
│ │ │ ├── morph2.asc
│ │ │ └── neuronDB.xml
│ │ ├── rep_morphs
│ │ │ ├── apical_points_isec.json
│ │ │ ├── morph1.asc
│ │ │ ├── morph2.asc
│ │ │ └── neuronDB.xml
│ │ ├── simple1_recipe.xml
│ │ └── simple1_recipe.yaml
│ │ ├── output_expected
│ │ ├── emodel_dirs.json
│ │ ├── emodels_hoc
│ │ │ ├── emodel1.hoc
│ │ │ └── emodel2.hoc
│ │ ├── final.json
│ │ └── scores.sqlite
│ │ ├── output_megate_expected
│ │ ├── extneurondb.dat
│ │ └── mecombo_emodel.tsv
│ │ ├── simple1_conf_hoc.json
│ │ ├── simple1_conf_prepare.json
│ │ ├── simple1_conf_prepare_git.json
│ │ ├── simple1_conf_run.json
│ │ ├── simple1_conf_select.json
│ │ └── simple1_conf_select_2.json
├── test_bluepymm.py
├── test_calculate_scores.py
├── test_create_mm_sqlite.py
├── test_legacy.py
├── test_main.py
├── test_megate_output.py
├── test_parse_files.py
├── test_prepare_combos.py
├── test_prepare_emodel_dirs.py
├── test_process_megate_config.py
├── test_reporting.py
├── test_run_combos.py
├── test_select_combos.py
├── test_sqlite_io.py
├── test_table_processing.py
└── test_tools.py
└── tox.ini
/.coveragerc:
--------------------------------------------------------------------------------
1 | [report]
2 | omit=bluepymm/_version.py
3 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: Build
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | tags:
8 | - '[0-9]+.[0-9]+.[0-9]+'
9 |
10 | jobs:
11 | call-test-workflow:
12 | uses: BlueBrain/BluePyMM/.github/workflows/test.yml@master
13 |
14 | build-tag-n-publish:
15 | name: Build, tag and publish on PyPI
16 | runs-on: ubuntu-latest
17 | needs: call-test-workflow
18 | permissions:
19 | contents: write
20 | steps:
21 | - uses: actions/checkout@v3
22 | - name: Set up Python 3.10
23 | uses: actions/setup-python@v4
24 | with:
25 | python-version: "3.10"
26 |
27 | - name: Bump version and push tag
28 | uses: anothrNick/github-tag-action@1.64.0
29 | if: ${{ !startsWith(github.ref, 'refs/tags/') }}
30 | id: tag
31 | env:
32 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
33 | WITH_V: false
34 | DEFAULT_BUMP: patch
35 |
36 | - name: Build a source tarball and wheel
37 | run: |
38 | pip install build
39 | python -m build
40 |
41 | - name: Get and store tag from 'Bump version and push tag' step
42 | if: ${{ !startsWith(github.ref, 'refs/tags/') }}
43 | run: echo "TAG_NAME=${{ steps.tag.outputs.new_tag }}" >> $GITHUB_ENV
44 | - name: Get and store tag from triggered tag push
45 | if: ${{ startsWith(github.ref, 'refs/tags/') }}
46 | run: echo "TAG_NAME=${{ github.ref_name }}" >> $GITHUB_ENV
47 |
48 | - name: Release
49 | uses: softprops/action-gh-release@v1
50 | with:
51 | tag_name: ${{ env.TAG_NAME }}
52 | name: ${{ env.TAG_NAME }}
53 | generate_release_notes: true
54 |
55 | - name: Publish package to PyPI
56 | uses: pypa/gh-action-pypi-publish@release/v1
57 | with:
58 | user: __token__
59 | password: ${{ secrets.PYPI_PASSWORD }}
60 |
--------------------------------------------------------------------------------
/.github/workflows/keep-alive.yml:
--------------------------------------------------------------------------------
1 | name: Keep-alive
2 |
3 | on:
4 | schedule:
5 | # Runs every sunday at 3 a.m.
6 | - cron: '0 3 * * SUN'
7 |
8 | jobs:
9 | call-test-workflow:
10 | uses: BlueBrain/BluePyMM/.github/workflows/test.yml@master
11 |
12 | keep-workflow-alive:
13 | name: Keep workflow alive
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@v2
17 | with:
18 | ref: master
19 |
20 | - name: Get date from 50 days ago
21 | run: |
22 | datethen=`date -d "-50 days" --utc +%FT%TZ`
23 | echo "datelimit=$datethen" >> $GITHUB_ENV
24 |
25 | - name: setup git config
26 | if: github.event.repository.pushed_at <= env.datelimit
27 | run: |
28 | # setup the username and email.
29 | git config user.name "Github Actions Keepalive Bot"
30 | git config user.email "<>"
31 |
32 | - name: commit IF last commit is older than 50 days
33 | if: github.event.repository.pushed_at <= env.datelimit
34 | run: |
35 |         git commit -m "Empty commit to keep the github workflows alive" --allow-empty
36 | git push origin master
--------------------------------------------------------------------------------
/.github/workflows/mirror-ebrains.yml:
--------------------------------------------------------------------------------
1 | name: Mirror to Ebrains
2 |
3 | on:
4 | push:
5 | branches: [ master ]
6 |
7 | jobs:
8 | to_ebrains:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - name: syncmaster
12 | uses: wei/git-sync@v3
13 | with:
14 | source_repo: "BlueBrain/BluePyMM"
15 | source_branch: "master"
16 | destination_repo: "https://ghpusher:${{ secrets.EBRAINS_GITLAB_ACCESS_TOKEN }}@gitlab.ebrains.eu/BlueBrain/bluepymm.git"
17 | destination_branch: "master"
18 | - name: synctags
19 | uses: wei/git-sync@v3
20 | with:
21 | source_repo: "BlueBrain/BluePyMM"
22 | source_branch: "refs/tags/*"
23 | destination_repo: "https://ghpusher:${{ secrets.EBRAINS_GITLAB_ACCESS_TOKEN }}@gitlab.ebrains.eu/BlueBrain/bluepymm.git"
24 | destination_branch: "refs/tags/*"
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test
2 |
3 | on:
4 | pull_request:
5 | # allows this workflow to be reusable (e.g. by the build workflow)
6 | workflow_call:
7 |
8 | jobs:
9 | test:
10 | runs-on: ubuntu-latest
11 | strategy:
12 | matrix:
13 | python-version: ["3.9", "3.10", "3.11", "3.12"]
14 |
15 | steps:
16 | - uses: actions/checkout@v2
17 |
18 | - name: Set up Python ${{ matrix.python-version }}
19 | uses: actions/setup-python@v2
20 | with:
21 | python-version: ${{ matrix.python-version }}
22 |
23 | - name: Install dependencies
24 | run: |
25 | python -m pip install --upgrade pip setuptools
26 | pip install tox tox-gh-actions
27 |
28 | - name: Run tox
29 | run: tox
30 |
31 | - name: "Upload coverage to Codecov"
32 | uses: codecov/codecov-action@v2
33 | with:
34 | fail_ci_if_error: false
35 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /tmp
2 | *.pyc
3 | /.*.swp
4 | /bluepymm.egg-info
5 | /dist/
6 | /venv
7 | /.noseids
8 | /build
9 | /.tox
10 | /.coverage
11 | /cov_reports/
12 | .python-version
13 | .vscode
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | sphinx:
9 | configuration: docs/source/conf.py
10 | fail_on_warning: true
11 |
12 | build:
13 | os: "ubuntu-20.04"
14 | tools:
15 | python: "3.10"
16 |
17 | python:
18 | install:
19 | - method: pip
20 | path: .
21 | - requirements: requirements_docs.txt
22 |
--------------------------------------------------------------------------------
/.zenodo.json:
--------------------------------------------------------------------------------
1 | {
2 | "title" : "BluePyMM",
3 | "license": "LGPL-3.0",
4 | "upload_type": "software",
5 | "description": "BluePyMM is a software built to do Cell Model Management (MM). It takes as input a morphology release, a circuit recipe and a set of e-models with some extra information. Next, it finds all possible (morphology, e-model)-combinations (me-combos) based on e-type, m-type, and layer as described by the circuit recipe, and calculates the scores for every combination. Finally, it writes out the resulting accepted me-combos to a database, and produces a report with information on the number of matches.",
6 | "creators": [
7 | {
8 | "affiliation": "Blue Brain Project, EPFL",
9 | "name": "Van Geit, Werner",
10 | "orcid": "0000-0002-2915-720X"
11 | },
12 | {
13 | "affiliation": "Blue Brain Project, EPFL",
14 | "name": "Vanherpe, Liesbeth",
15 | "orcid": "0000-0002-1226-0526"
16 | },
17 | {
18 | "affiliation": "Blue Brain Project, EPFL",
19 | "name": "Rössert, Christian",
20 | "orcid": "0000-0002-4839-2424"
21 | },
22 | {
23 | "affiliation": "Blue Brain Project, EPFL",
24 | "name": "Gevaert, Michael",
25 | "orcid": "0000-0002-7547-3297"
26 | },
27 | {
28 | "affiliation": "Blue Brain Project, EPFL",
29 | "name": "Courcol, Jean-Denis",
30 | "orcid": "0000-0002-9351-1461"
31 | },
32 | {
33 | "affiliation": "Blue Brain Project, EPFL",
34 | "name": "King, James Gonzalo",
35 | "orcid": "0000-0003-0906-8389"
36 | },
37 | {
38 | "affiliation": "Blue Brain Project, EPFL",
39 | "name": "Jaquier, Aurélien",
40 | "orcid": "0000-0001-6202-6175"
41 | }
42 | ]
43 | }
44 |
--------------------------------------------------------------------------------
/AUTHORS.txt:
--------------------------------------------------------------------------------
1 | Werner Van Geit @ BBP
2 | Liesbeth Vanherpe @ BBP
3 | Christian Roessert @ BBP
4 | Mike Gevaert @ BBP
5 | Jean-Denis Courcol @ BBP
6 | James King @ BBP
7 | Aurélien Jaquier @ BBP
8 |
--------------------------------------------------------------------------------
/COPYING.lesser:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 |  Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | BluePyMM - Blue Brain Python Cell Model Management Library
2 |
3 | BluePyMM is licensed under the LGPL, unless noted otherwise, e.g., for external
4 | dependencies. See files COPYING and COPYING.lesser for the full license.
5 | Examples and tests are BSD-licensed.
6 | External dependencies are either LGPL or BSD-licensed.
7 | See file ACKNOWLEDGEMENTS.txt and AUTHORS.txt for further details.
8 |
9 | Copyright (C) 2005-2018, Blue Brain Project/EPFL.
10 |
11 | This program is free software: you can redistribute it and/or modify it under
12 | the terms of the GNU Lesser General Public License as published by the
13 | Free Software Foundation, either version 3 of the License, or (at your option)
14 | any later version.
15 |
16 | This program is distributed in the hope that it will be useful,
17 | but WITHOUT ANY WARRANTY;
18 | without even the implied warranty of
19 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
20 |
21 | See the GNU Lesser General Public License for more details.
22 |
23 | You should have received a copy of the GNU Lesser General Public License
24 | along with this program. If not, see <http://www.gnu.org/licenses/>.
25 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include bluepymm/_version.py
3 | include LICENSE.txt
4 | include COPYING
5 | include COPYING.lesser
6 | include AUTHORS.txt
7 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | all: install
2 | install: clean
3 | python -c "exec(\"import sys, pip\\nif map(int, pip.__version__.split('.')) <= [9, 0, 0]: sys.exit('Need pip version >= 9, please upgrade pip in your python environment')\")"
4 | python setup.py sdist
5 | pip install `ls dist/bluepymm-*.tar.gz` --upgrade
6 | test: install_tox
7 | tox
8 | test3: install_tox
9 | tox -e py3-unit-functional
10 | unit3: install_tox
11 | tox -e py3-unit
12 | func3: install_tox
13 | tox -e py3-functional
14 | install_tox:
15 | pip install tox
16 | tox_clean:
17 | rm -rf .tox
18 | clean:
19 | rm -rf bluepymm.egg-info
20 | rm -rf dist
21 | find . -name '*.pyc' -delete
22 | rm -rf tests/examples/simple1/tmp
23 | rm -rf tests/examples/simple1/tmp_git
24 | rm -rf tests/examples/simple1/output
25 | rm -rf tests/examples/simple1/output_megate
26 | rm -rf tests/examples/simple1/hoc
27 | rm -rf tests/tmp
28 | rm -rf docs/build
29 | rm -rf build
30 | mkdir tests/tmp
31 | simple1_git:
32 | cd tests/examples/simple1; python build_git.py
33 | autopep8: clean
34 | pip install autopep8
35 | find bluepymm -name '*.py' -exec autopep8 -i '{}' \;
36 | doc:
37 | pip install -q sphinx sphinx-autobuild sphinx_rtd_theme -I
38 | sphinx-apidoc -o docs/source bluepymm
39 | cd docs; $(MAKE) clean; $(MAKE) html
40 | docpdf:
41 | pip install sphinx sphinx-autobuild -I
42 | cd docs; $(MAKE) clean; $(MAKE) latexpdf
43 | docopen: doc
44 | open docs/build/html/index.html
45 | toxbinlinks:
46 | cd ${TOX_ENVBINDIR}; find $(TOX_NRNBINDIR) -type f -exec ln -sf \{\} . \;
47 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | |banner|
2 |
3 | BluePyMM
4 | ========
5 |
6 | +----------------+------------+
7 | | Latest Release | |pypi| |
8 | +----------------+------------+
9 | | Documentation | |docs| |
10 | +----------------+------------+
11 | | License | |license| |
12 | +----------------+------------+
13 | | Build Status | |tests| |
14 | +----------------+------------+
15 | | Coverage | |coverage| |
16 | +----------------+------------+
17 | | Citation | |zenodo| |
18 | +----------------+------------+
19 | | Gitter | |gitter| |
20 | +----------------+------------+
21 |
22 | Introduction
23 | ------------
24 |
25 |
26 | When building a network simulation, biophysically detailed electrical models (e-models) need to be tested for every morphology that is possibly used in the circuit.
27 |
28 | E-models can e.g. be obtained using `BluePyOpt <https://github.com/BlueBrain/BluePyOpt>`_ by data-driven model parameter optimisation.
29 | Developing e-models can take a lot of time and computing resources. Therefore, these models are not reoptimized for every morphology in the network.
30 | Instead we want to test if an existing e-model matches that particular morphology 'well enough'.
31 |
32 | This process is called Cell Model Management (MM). It takes as input a morphology release, a circuit recipe and a set of e-models with some extra information.
33 | Next, it finds all possible (morphology, e-model)-combinations (me-combos) based on e-type, m-type, and layer as described by the circuit recipe, and calculates the scores for every combination.
34 | Finally, it writes out the resulting accepted me-combos to a database, and produces a report with information on the number of matches.
35 |
36 | Citation
37 | --------
38 |
39 | When you use this BluePyMM software for your research, we ask you to cite the following publications (this includes poster presentations):
40 |
41 | .. code-block::
42 |
43 | @article{bluepymm,
44 | title={BluePyMM},
45 | DOI={10.5281/zenodo.8146238},
46 |      url={https://doi.org/10.5281/zenodo.8146238},
47 | abstractNote={BluePyMM is a software built to do Cell Model Management (MM). It takes as input a morphology release, a circuit recipe and a set of e-models with some extra information. Next, it finds all possible (morphology, e-model)-combinations (me-combos) based on e-type, m-type, and layer as described by the circuit recipe, and calculates the scores for every combination. Finally, it writes out the resulting accepted me-combos to a database, and produces a report with information on the number of matches.},
48 | publisher={Zenodo},
49 | author={Van Geit, Werner and
50 | Vanherpe, Liesbeth and
51 | Rössert, Christian and
52 | Gevaert, Michael and
53 | Courcol, Jean-Denis and
54 | King, James Gonzalo and
55 | Jaquier, Aurélien},
56 | year={2023},
57 | month={Jul}
58 | }
59 |
60 | Support
61 | -------
62 |
63 | We are providing support using a chat channel on `Gitter <https://gitter.im/bluebrain/bluepymm>`_.
64 |
65 | Requirements
66 | ------------
67 |
68 | * `Python 3.9+ <https://www.python.org/downloads/>`_
69 | * `Neuron 7.4+ <http://neuron.yale.edu/>`_
70 | * `eFEL eFeature Extraction Library <https://github.com/BlueBrain/eFEL>`_
71 | * `BluePyOpt <https://github.com/BlueBrain/BluePyOpt>`_
72 | * `NumPy <http://www.numpy.org>`_
73 | * `pandas <http://pandas.pydata.org/>`_
74 | * `matplotlib <https://matplotlib.org/>`_
75 | * `sh <https://pypi.org/project/sh/>`_
76 | * `ipyparallel <https://pypi.org/project/ipyparallel/>`_
77 | * `lxml <https://pypi.org/project/lxml/>`_
78 | * `h5py <https://pypi.org/project/h5py/>`_
79 | * `pyyaml <https://pypi.org/project/PyYAML/>`_
80 |
81 | All of the requirements except for `Neuron` are automatically installed with bluepymm.
82 | The decision on how to install `Neuron` is left to the user.
83 |
84 | One simple way of installing Neuron is through pip
85 |
86 | .. code-block:: bash
87 |
88 | pip install NEURON
89 |
90 | Neuron can also be installed from the source and used by bluepymm provided that it is compiled with Python support.
91 |
92 |
93 | Installation
94 | ------------
95 |
96 |
97 | .. code-block:: bash
98 |
99 | pip install bluepymm
100 |
101 | NOTES:
102 |
103 | * Make sure you are using the latest version of pip (at least >9.0). Otherwise the ipython dependency will fail to install correctly.
104 | * Make sure you are using a new version of git (at least >=1.8). Otherwise some exceptions might be raised by the versioneer module.
105 |
106 | Quick Start
107 | -----------
108 |
109 | An IPython notebook with a simple test example can be found in:
110 |
111 | https://github.com/BlueBrain/BluePyMM/blob/master/notebook/BluePyMM.ipynb
112 |
113 | API documentation
114 | -----------------
115 | The API documentation can be found on `ReadTheDocs <https://bluepymm.readthedocs.io/en/latest/>`_.
116 |
117 | License
118 | -------
119 |
120 | BluePyMM is licensed under the LGPL, unless noted otherwise, e.g., for external
121 | dependencies. See files COPYING and COPYING.lesser for the full license.
122 |
123 | Funding
124 | -------
125 | This work has been partially funded by the European Union Seventh Framework Program (FP7/2007-2013) under grant agreement no. 604102 (HBP),
126 | the European Union’s Horizon 2020 Framework Programme for Research and Innovation under the Specific Grant Agreement No. 720270, 785907
127 | (Human Brain Project SGA1/SGA2) and by the EBRAINS research infrastructure, funded from the European Union’s Horizon 2020 Framework
128 | Programme for Research and Innovation under the Specific Grant Agreement No. 945539 (Human Brain Project SGA3).
129 | This project/research was supported by funding to the Blue Brain Project, a research center of the École polytechnique fédérale de Lausanne (EPFL),
130 | from the Swiss government’s ETH Board of the Swiss Federal Institutes of Technology.
131 |
132 | Copyright (c) 2016-2024 Blue Brain Project/EPFL
133 |
134 | .. |pypi| image:: https://img.shields.io/pypi/v/bluepymm.svg
135 | :target: https://pypi.org/project/bluepymm/
136 | :alt: latest release
137 | .. |docs| image:: https://readthedocs.org/projects/bluepymm/badge/?version=latest
138 | :target: https://bluepymm.readthedocs.io/en/latest/
139 | :alt: latest documentation
140 | .. |license| image:: https://img.shields.io/pypi/l/bluepymm.svg
141 | :target: https://github.com/BlueBrain/bluepymm/blob/master/LICENSE.txt
142 | :alt: license
143 | .. |tests| image:: https://github.com/BlueBrain/BluePyMM/workflows/Build/badge.svg?branch=master
144 | :target: https://github.com/BlueBrain/BluePyMM/actions
145 | :alt: Actions build status
146 | .. |coverage| image:: https://codecov.io/github/BlueBrain/BluePyMM/coverage.svg?branch=master
147 | :target: https://codecov.io/gh/BlueBrain/bluepymm
148 | :alt: coverage
149 | .. |gitter| image:: https://badges.gitter.im/Join%20Chat.svg
150 | :target: https://gitter.im/bluebrain/bluepymm
151 | :alt: gitter
152 | .. |zenodo| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.8146238.svg
153 | :target: https://doi.org/10.5281/zenodo.8146238
154 | :alt: DOI
155 |
156 | ..
157 | The following image is also defined in the index.rst file, as the relative path is
158 | different, depending from where it is sourced.
159 | The following location is used for the github README
160 | The index.rst location is used for the docs README; index.rst also defined an end-marker,
161 | to skip content after the marker 'substitutions'.
162 |
163 | .. substitutions
164 | .. |banner| image:: docs/source/logo/BluePyMMBanner.png
165 |
--------------------------------------------------------------------------------
/bluepymm/__init__.py:
--------------------------------------------------------------------------------
1 | """Init"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 | import bluepymm.legacy # NOQA
23 |
24 | from .main import run # NOQA
25 |
--------------------------------------------------------------------------------
/bluepymm/legacy/__init__.py:
--------------------------------------------------------------------------------
1 | """Create legacy .hoc files"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | from .create_hoc_files import main # NOQA
24 |
--------------------------------------------------------------------------------
/bluepymm/legacy/create_hoc_files.py:
--------------------------------------------------------------------------------
1 | """Create hoc files"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 | import sys
23 | import os
24 | import argparse
25 | import multiprocessing
26 | import csv
27 |
28 |
29 | from bluepymm import tools, prepare_combos
30 |
31 |
def get_parser():
    """Build and return the command-line argument parser.

    The only positional argument is the path to the configuration file.
    """
    arg_parser = argparse.ArgumentParser(
        description='Create legacy .hoc files')
    arg_parser.add_argument('conf_filename')
    return arg_parser
38 |
39 |
def add_full_paths(config, directory):
    """Add full paths based on given directory to values of given config if the
    resulting path is valid.

    Only string values are considered; a value is replaced by the joined path
    only when that path points to an existing file or directory. Non-string
    values and non-existing candidate paths are left untouched.

    Args:
        config: dictionary, modified in place
        directory: string used to complete paths

    Returns:
        The same dictionary with completed paths.
    """
    for key, value in config.items():
        if isinstance(value, str):
            test_path = os.path.join(directory, value)
            # Removed leftover debug print of every candidate path, which
            # polluted stdout for each string-valued config entry.
            if os.path.isdir(test_path) or os.path.isfile(test_path):
                config[key] = test_path
    return config
58 |
59 |
def load_combinations_dict(mecombo_emodel_path):
    """Read the me-combo file into a dictionary keyed on combo name.

    Args:
        mecombo_emodel_path: path to a tab-separated file with me-combo data;
            the header must contain a 'combo_name' column

    Returns:
        A dictionary mapping each combo name to its row (as a dict).
    """
    with open(mecombo_emodel_path) as combo_file:
        rows = csv.DictReader(combo_file, delimiter='\t')
        return {entry['combo_name']: entry for entry in rows}
73 |
74 |
def run_create_and_write_hoc_file(emodel, setup_dir, hoc_dir, emodel_params,
                                  template, morph_path,
                                  model_name):
    """Run create_and_write_hoc_file in isolated environment.

    A single-use worker process (maxtasksperchild=1) runs the task.
    NOTE(review): presumably this isolates interpreter state built up while
    writing the hoc file (e.g. by NEURON) — confirm with the callee.

    Args:
        See create_and_write_hoc_file.
    """
    pool = multiprocessing.pool.Pool(1, maxtasksperchild=1)
    try:
        pool.apply(
            prepare_combos.prepare_emodel_dirs.create_and_write_hoc_file,
            (emodel, setup_dir, hoc_dir, emodel_params, template,
             morph_path, model_name))
    finally:
        # Always tear the worker down, even when the task raises; the
        # original leaked the pool (and its child process) on exceptions.
        pool.terminate()
        pool.join()
90 |
91 |
def create_hoc_files(combinations_dict, emodels_dir, final_dict, template,
                     hoc_dir):
    """Create a .hoc file for every combination in a given database.

    Args:
        combinations_dict: Dictionary with e-model - morphology combinations.
        emodels_dir: Directory containing all e-model data as used by the
                     application 'bluepymm'.
        final_dict: Dictionary with e-model parameters.
        template: Template to be used to create .hoc files.
        hoc_dir: Directory where all created .hoc files will be written.
    """
    for combo_name, combo in combinations_dict.items():
        print('Working on combination {}'.format(combo_name))
        emodel = combo['emodel']
        run_create_and_write_hoc_file(
            emodel,
            os.path.join(emodels_dir, emodel),
            hoc_dir,
            final_dict[emodel]['params'],
            template,
            '{}.asc'.format(combo['morph_name']),
            combo_name)
118 |
119 |
def main(arg_list):
    """Entry point: parse arguments, load configuration, create .hoc files."""

    # Parse the command line and resolve relative paths in the configuration
    # against the directory of the configuration file itself.
    args = get_parser().parse_args(arg_list)
    config_dir = os.path.abspath(os.path.dirname(args.conf_filename))
    config = add_full_paths(tools.load_json(args.conf_filename), config_dir)

    # Gather the inputs: me-combos, e-model parameters, e-model directories.
    combinations_dict = load_combinations_dict(
        config['mecombo_emodel_filename'])
    final_dict = tools.load_json(config['final_json_path'])
    emodels_dir = config['emodels_tmp_dir']

    # Make sure the output directory exists before writing the .hoc files.
    tools.makedirs(config['hoc_output_dir'])

    create_hoc_files(combinations_dict, emodels_dir, final_dict,
                     config['template'], config['hoc_output_dir'])
141 |
142 |
143 | if __name__ == '__main__':
144 | main(sys.argv[1:])
145 |
--------------------------------------------------------------------------------
/bluepymm/main.py:
--------------------------------------------------------------------------------
1 | """Main"""
2 |
3 | from __future__ import print_function
4 |
5 | """
6 | Copyright (c) 2018, EPFL/Blue Brain Project
7 |
8 | This file is part of BluePyMM
9 |
10 | This library is free software; you can redistribute it and/or modify it under
11 | the terms of the GNU Lesser General Public License version 3.0 as published
12 | by the Free Software Foundation.
13 |
14 | This library is distributed in the hope that it will be useful, but WITHOUT
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
16 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
17 | details.
18 |
19 | You should have received a copy of the GNU Lesser General Public License
20 | along with this library; if not, write to the Free Software Foundation, Inc.,
21 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22 | """
23 |
24 | import sys
25 | import argparse
26 |
27 | from bluepymm import prepare_combos, run_combos, select_combos, validate_output
28 |
29 |
def get_parser():
    """Build the top-level argument parser with one subparser per action."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(help='actions', dest='action')

    # Each stage of the pipeline registers its own sub-command.
    for module in (prepare_combos, run_combos, select_combos,
                   validate_output):
        module.add_parser(subparsers)

    return parser
41 |
42 |
def run(arg_list):
    """Run BluePyMM: print the banner and dispatch the chosen sub-command."""

    print('\n######################################')
    print('# Blue Brain Python Model Management #')
    print('######################################\n')

    args = get_parser().parse_args(arg_list)
    action = args.action

    if action == 'prepare':
        prepare_combos.prepare_combos(
            conf_filename=args.conf_filename,
            continu=args.continu,
            n_processes=args.n_processes)
    elif action == 'run':
        run_combos.run_combos(
            conf_filename=args.conf_filename,
            ipyp=args.ipyp,
            ipyp_profile=args.ipyp_profile,
            n_processes=args.n_processes)
    elif action == 'select':
        select_combos.select_combos(
            conf_filename=args.conf_filename,
            n_processes=args.n_processes)
    elif action == 'validate':
        validate_output.validate_output(conf_filename=args.conf_filename)
66 |
67 |
def main():
    """Console entry point: forward the process command-line args to run()."""
    run(sys.argv[1:])
71 |
--------------------------------------------------------------------------------
/bluepymm/prepare_combos/__init__.py:
--------------------------------------------------------------------------------
1 | """Init prepare combos"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 | from .main import prepare_combos, add_parser # NOQA
23 |
--------------------------------------------------------------------------------
/bluepymm/prepare_combos/main.py:
--------------------------------------------------------------------------------
1 | """ Create database of possible me-combinations."""
2 |
3 | from __future__ import print_function
4 |
5 | """
6 | Copyright (c) 2018, EPFL/Blue Brain Project
7 |
8 | This file is part of BluePyMM
9 |
10 | This library is free software; you can redistribute it and/or modify it under
11 | the terms of the GNU Lesser General Public License version 3.0 as published
12 | by the Free Software Foundation.
13 |
14 | This library is distributed in the hope that it will be useful, but WITHOUT
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
16 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
17 | details.
18 |
19 | You should have received a copy of the GNU Lesser General Public License
20 | along with this library; if not, write to the Free Software Foundation, Inc.,
21 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22 | """
23 |
24 |
25 | import os
26 |
27 | from bluepymm import tools
28 | from . import prepare_emodel_dirs as prepare_dirs
29 | from . import create_mm_sqlite
30 |
31 |
def prepare_emodels(conf_dict, continu, scores_db_path, n_processes):
    """Prepare emodels

    Converts the e-model input to the BluePyMM layout, loads the e-model
    dictionaries, prepares a directory per e-model and, unless continuing a
    previous run, creates the sqlite database of all me-combinations.

    Args:
        conf_dict: configuration dictionary (parsed json config)
        continu: boolean; when True, skip work already done by a previous run
        scores_db_path: path at which the scores sqlite database is created
        n_processes: number of worker processes (None lets the callee decide)

    Returns:
        A tuple (final_dict, emodel_dirs): the e-model parameter dictionary
        and a dict mapping e-model names to their prepared directories.
    """

    tmp_dir = conf_dict['tmp_dir']
    emodels_dir = os.path.abspath(os.path.join(tmp_dir, 'emodels'))

    # Convert e-models input to BluePyMM file structure
    emodels_in_repo = prepare_dirs.check_emodels_in_repo(conf_dict)
    tmp_emodels_dir = prepare_dirs.convert_emodel_input(emodels_in_repo,
                                                        conf_dict,
                                                        continu)

    # Get information from emodels repo
    print('Getting final emodels dict')
    final_dict, emodel_etype_map, opt_dir = prepare_dirs.get_emodel_dicts(
        tmp_emodels_dir, conf_dict['final_json_path'],
        conf_dict['emodel_etype_map_path'])

    # Use the hoc template from the config when given, otherwise fall back to
    # the packaged neurodamus SBO template.
    if "template" in conf_dict.keys():
        hoc_template = os.path.abspath(conf_dict["template"])
    else:
        base_dir = os.path.abspath(os.path.dirname(__file__))
        template_dir = os.path.join(base_dir, '../templates')
        hoc_template = os.path.join(
            template_dir, 'cell_template_neurodamus_sbo.jinja2')
        hoc_template = os.path.abspath(hoc_template)
    print('Preparing emodels in %s' % emodels_dir)
    emodels_hoc_dir = os.path.abspath(conf_dict['emodels_hoc_dir'])
    # Clone the emodels repo and prepare the dirs for all the emodels
    emodel_dirs = prepare_dirs.prepare_emodel_dirs(
        final_dict, emodel_etype_map, emodels_dir, opt_dir, emodels_hoc_dir,
        emodels_in_repo, hoc_template, continu=continu,
        n_processes=n_processes)

    if not continu:
        print('Creating sqlite db at %s' % scores_db_path)
        skip_repaired_exemplar = conf_dict.get('skip_repaired_exemplar', False)
        morph_dir = conf_dict['morph_path']
        rep_morph_dir = conf_dict['rep_morph_path']
        # The unrepaired morphology path is optional.
        unrep_morph_dir = conf_dict.get('unrep_morph_path', None)
        print('Using repaired exemplar morph path: %s' % rep_morph_dir)
        print('Using unrepaired exemplar morph path: %s' % unrep_morph_dir)

        # The combos can come either from a circuit mvd3 file or from a
        # recipe, but never both.
        if 'circuitmvd3_path' in conf_dict:
            if 'recipe_path' in conf_dict:
                raise ValueError('Impossible to specify both recipe_path '
                                 'and circuitmvd3_path in config file')
            circuitmvd3_path = conf_dict['circuitmvd3_path']

            create_mm_sqlite.create_mm_sqlite_circuitmvd3(
                scores_db_path,
                circuitmvd3_path,
                morph_dir,
                rep_morph_dir,
                unrep_morph_dir,
                emodel_etype_map,
                final_dict,
                emodel_dirs,
                skip_repaired_exemplar=skip_repaired_exemplar)
        else:
            recipe_filename = conf_dict['recipe_path']

            # Create a sqlite3 db with all the combos
            create_mm_sqlite.create_mm_sqlite(
                scores_db_path,
                recipe_filename,
                morph_dir,
                rep_morph_dir,
                unrep_morph_dir,
                emodel_etype_map,
                final_dict,
                emodel_dirs,
                skip_repaired_exemplar=skip_repaired_exemplar)

    return final_dict, emodel_dirs
107 |
108 |
def prepare_combos(conf_filename, continu, n_processes=None):
    """Prepare the me-combination database described by a config file.

    Args:
        conf_filename: path to the json configuration file
        continu: boolean, True to continue from a previous run
        n_processes: number of processes, None lets the callees decide
    """

    print('Reading configuration at %s' % conf_filename)
    conf_dict = tools.load_json(conf_filename)

    final_dict, emodel_dirs = prepare_emodels(
        conf_dict, continu, os.path.abspath(conf_dict['scores_db']),
        n_processes)

    # Persist intermediate results so the later 'run' step can pick them up.
    # TODO: gather all output business here?
    output_dir = conf_dict['output_dir']
    tools.makedirs(output_dir)
    tools.write_json(output_dir, 'final.json', final_dict)
    tools.write_json(output_dir, 'emodel_dirs.json', emodel_dirs)
125 |
126 |
def add_parser(action):
    """Register the 'prepare' sub-command on the given subparsers object."""

    sub = action.add_parser(
        'prepare',
        help='Create and prepare database with me-combinations')
    sub.add_argument('conf_filename')
    sub.add_argument(
        '--continu', action='store_true',
        help='continue from previous run')
    sub.add_argument(
        '--n_processes', type=int, help='number of processes')
138 |
--------------------------------------------------------------------------------
/bluepymm/run_combos/__init__.py:
--------------------------------------------------------------------------------
1 | """Init run combos"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | from .main import add_parser, run_combos # NOQA
24 |
--------------------------------------------------------------------------------
/bluepymm/run_combos/main.py:
--------------------------------------------------------------------------------
1 | """Run combinations and calculate scores."""
2 |
3 | from __future__ import print_function
4 |
5 | """
6 | Copyright (c) 2018, EPFL/Blue Brain Project
7 |
8 | This file is part of BluePyMM
9 |
10 | This library is free software; you can redistribute it and/or modify it under
11 | the terms of the GNU Lesser General Public License version 3.0 as published
12 | by the Free Software Foundation.
13 |
14 | This library is distributed in the hope that it will be useful, but WITHOUT
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
16 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
17 | details.
18 |
19 | You should have received a copy of the GNU Lesser General Public License
20 | along with this library; if not, write to the Free Software Foundation, Inc.,
21 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22 | """
23 |
24 |
25 | import os
26 |
27 | from bluepymm import tools
28 | from . import calculate_scores
29 |
30 |
def add_parser(action):
    """Register the 'run' sub-command on the given subparsers object."""
    sub = action.add_parser(
        'run',
        help='Calculate scores of me-combinations')
    sub.add_argument('conf_filename',
                     help='path to configuration file')
    sub.add_argument('--ipyp', action='store_true',
                     help='Use ipyparallel')
    sub.add_argument('--ipyp_profile',
                     help='Path to ipyparallel profile')
    sub.add_argument('--timeout',
                     help='Timeout for ipyparallel clients')
    sub.add_argument('--n_processes', type=int,
                     help='number of processes')
46 |
47 |
def run_combos_from_conf(conf_dict, ipyp=None, ipyp_profile=None, timeout=10,
                         n_processes=None):
    """Calculate scores for all combos described by a conf dictionary.

    Reads 'final.json' and 'emodel_dirs.json' from the configured output
    directory (written by the 'prepare' step) and runs score calculation.
    """
    output_dir = conf_dict['output_dir']
    final_dict = tools.load_json(os.path.join(output_dir, 'final.json'))
    emodel_dirs = tools.load_json(
        os.path.join(output_dir, 'emodel_dirs.json'))
    scores_db_path = os.path.abspath(conf_dict['scores_db'])

    # Apical points are used unless the configuration disables them.
    use_apical_points = conf_dict.get('use_apical_points', True)

    print('Calculating scores')
    calculate_scores.calculate_scores(
        final_dict,
        emodel_dirs,
        scores_db_path,
        use_ipyp=ipyp,
        ipyp_profile=ipyp_profile,
        timeout=timeout,
        use_apical_points=use_apical_points,
        n_processes=n_processes)
77 |
78 |
def run_combos(conf_filename, ipyp=None, ipyp_profile=None, n_processes=None,
               timeout=10):
    """Run combos.

    Args:
        conf_filename: path to the json configuration file
        ipyp: use ipyparallel when truthy
        ipyp_profile: path to the ipyparallel profile
        n_processes: number of processes, None lets the callee decide
        timeout: timeout for ipyparallel clients (default 10). Previously
            this setting could not be passed through to
            run_combos_from_conf even though the CLI defines --timeout.
    """

    print('Reading configuration at %s' % conf_filename)
    conf_dict = tools.load_json(conf_filename)

    run_combos_from_conf(conf_dict, ipyp, ipyp_profile, timeout=timeout,
                         n_processes=n_processes)
87 |
--------------------------------------------------------------------------------
/bluepymm/select_combos/__init__.py:
--------------------------------------------------------------------------------
1 | """Init select combos"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | from .main import select_combos, add_parser # NOQA
24 |
--------------------------------------------------------------------------------
/bluepymm/select_combos/main.py:
--------------------------------------------------------------------------------
1 | """Analyse scores"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | # pylint: disable=R0914, C0325, W0640
24 | # pylama: ignore=E402
25 |
26 | import os
27 |
28 | from bluepymm import tools
29 |
30 | from . import sqlite_io, reporting, megate_output
31 | from . import process_megate_config as proc_config
32 |
33 |
def select_combos(conf_filename, n_processes):
    """Load the json configuration file and run the combo selection."""
    conf_dict = tools.load_json(conf_filename)
    select_combos_from_conf(conf_dict, n_processes)
40 |
41 |
def select_combos_from_conf(conf_dict, n_processes=None):
    """Compare scores of me-combinations to thresholds, select successful
    combinations, and write results out to file.

    Args:
        conf_dict: configuration dictionary (parsed json configuration)
        n_processes: integer number of processes, `None` will use all of them
    """
    scores_db_filename = conf_dict['scores_db']
    pdf_filename = conf_dict['pdf_filename']
    output_dir = conf_dict['output_dir']

    print('Reading configuration files')
    # read skip features
    to_skip_patterns, to_skip_features = \
        proc_config.read_to_skip_features(
            conf_dict)

    # read megate thresholds
    megate_patterns, megate_thresholds = \
        proc_config.read_megate_thresholds(
            conf_dict)

    # optional: keep only the best percentage of combos
    select_perc_best = conf_dict.get('select_perc_best', None)

    print('Reading tables from sqlite')
    # read score tables
    scores, score_values = sqlite_io.read_and_process_sqlite_score_tables(
        scores_db_filename)

    print('Checking if all combos have run')
    tools.check_all_combos_have_run(scores, scores_db_filename)

    print('Start creation of ext_neurondb')
    # create final database and write report
    ext_neurondb = \
        reporting.create_final_db_and_write_report(
            pdf_filename,
            to_skip_features,
            to_skip_patterns,
            megate_thresholds,
            megate_patterns,
            conf_dict.get('skip_repaired_exemplar', False),
            conf_dict.get('check_opt_scores', True),
            scores, score_values,
            conf_dict.get('plot_emodels_per_morphology', False),
            output_dir,
            select_perc_best,
            n_processes=n_processes)
    print('Wrote pdf to %s' % os.path.abspath(pdf_filename))

    # write output files (extneurondb.dat and mecombo_emodel.tsv)
    compliant = conf_dict.get('make_names_neuron_compliant', False)
    extneurondb_path, mecombo_emodel_path = \
        megate_output.save_megate_results(
            ext_neurondb,
            output_dir,
            sort_key='combo_name',
            make_names_neuron_compliant=compliant)

    emodels_hoc_path = conf_dict['emodels_hoc_dir']

    # write the release json that points at all produced artifacts
    megate_output.write_mecomboreleasejson(
        output_dir,
        emodels_hoc_path,
        extneurondb_path,
        mecombo_emodel_path)
109 |
110 |
def add_parser(action):
    """Register the 'select' sub-command on the given subparsers object."""
    sub = action.add_parser(
        'select', help='Select feasible me-combinations')
    sub.add_argument('conf_filename')
    sub.add_argument(
        '--n_processes', type=int, help='number of processes')
119 |
--------------------------------------------------------------------------------
/bluepymm/select_combos/megate_output.py:
--------------------------------------------------------------------------------
1 | """BluePyMM megate output."""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | # pylint: disable=R0914, C0325, W0640
24 |
25 |
26 | import os
27 | from bluepymm import tools
28 | from . import table_processing
29 |
30 |
31 | def _write_extneurondbdat(extneurondb, filename):
32 | """Write extneurondb.dat"""
33 | pure_extneuron_db = extneurondb.copy()
34 |
35 | # Select the correct columns
36 | column_order = ['morph_name', 'layer', 'fullmtype', 'etype', 'combo_name']
37 | # pure_extneuron_db = pure_extneuron_db[column_order]
38 | pure_extneuron_db.to_csv(
39 | filename,
40 | sep=' ',
41 | columns=column_order,
42 | index=False,
43 | header=False)
44 |
45 |
def save_megate_results(extneurondb, output_dir,
                        extneurondb_filename='extneurondb.dat',
                        mecombo_emodel_filename='mecombo_emodel.tsv',
                        sort_key=None,
                        make_names_neuron_compliant=False,
                        extra_value_errors=True):
    """Write results of megating to two files.

    Args:
        extneurondb: pandas.DataFrame with result of me-gating
        output_dir: path to output directory
        extneurondb_filename: filename of extended neuron database. The columns
                              of this file are ordered as 'morph_name',
                              'layer', 'fullmtype', 'etype', 'combo_name'.
                              Values are separated by a space. Default filename
                              is 'extneurondb.dat'.
        mecombo_emodel_filename: filename of 'mecombo_emodel' file. Values are
                                 separated with a tab. Default filename is
                                 'mecombo_emodel.tsv'.
        sort_key: key to sort database in ascending order before writing out to
                  file. Default is None.
        make_names_neuron_compliant: boolean indicating whether the combo name
                                     should be made NEURON-compliant. Default
                                     is False. If set to True, a log file with
                                     the conversion info is written out to
                                     <output_dir>/log_neuron_compliance.csv
        extra_value_errors: boolean, when True a warning is printed for rows
                            whose 'holding_current' or 'threshold_current' is
                            null. Default is True.

    Returns:
        A tuple (extneurondb_path, mecombo_emodel_path) with the paths of the
        two files written.
    """
    tools.makedirs(output_dir)

    if make_names_neuron_compliant:
        log_filename = 'log_neuron_compliance.csv'
        log_path = os.path.join(output_dir, log_filename)
        table_processing.process_combo_name(extneurondb, log_path)

    if sort_key is not None:
        extneurondb = extneurondb.sort_values(sort_key).reset_index(drop=True)

    extneurondb_path = os.path.join(output_dir, extneurondb_filename)
    _write_extneurondbdat(extneurondb, extneurondb_path)
    print(
        'Wrote extneurondb.dat to {}'.format(
            os.path.abspath(extneurondb_path)))

    mecombo_emodel_path = os.path.join(output_dir, mecombo_emodel_filename)

    # Warn (don't fail, for now) about combos missing current values.
    if extra_value_errors:
        for extra_values_key in ['holding_current', 'threshold_current']:
            null_rows = extneurondb[extra_values_key].isnull()
            if null_rows.sum() > 0:
                # TODO reenable this for release !
                # raise ValueError(
                #    "There are rows with None for "
                #    "holding current: %s" % str(
                #        extneurondb[null_rows]))
                print("WARNING ! There are rows with None for "
                      "holding current: %s" % str(extneurondb[null_rows]))

    extneurondb.to_csv(
        mecombo_emodel_path,
        columns=[
            'morph_name',
            'layer',
            'fullmtype',
            'etype',
            'emodel',
            'combo_name',
            'threshold_current',
            'holding_current'],
        sep='\t',
        index=False)
    print(
        'Wrote mecombo_emodel tsv to {}'.format(
            os.path.abspath(mecombo_emodel_path)))

    return extneurondb_path, mecombo_emodel_path
121 |
122 |
def write_mecomboreleasejson(
        output_dir,
        emodels_hoc_path,
        extneurondb_path,
        mecombo_emodel_path):
    """Write a json file containing info about the release artifacts."""

    # Collect absolute paths of every artifact belonging to the release.
    release = {
        'version': '1.0',
        'output_paths': {
            'emodels_hoc': os.path.abspath(emodels_hoc_path),
            'extneurondb.dat': os.path.abspath(extneurondb_path),
            'mecombo_emodel.tsv': os.path.abspath(mecombo_emodel_path),
        },
    }

    tools.write_json(
        output_dir,
        'mecombo_release.json',
        release)

    release_path = os.path.abspath(
        os.path.join(output_dir, 'mecombo_release.json'))
    print('Wrote mecombo_release json to %s' % release_path)
145 |
--------------------------------------------------------------------------------
/bluepymm/select_combos/process_megate_config.py:
--------------------------------------------------------------------------------
1 | """Process megate configuration file."""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 | # pylint: disable=R0914, C0325, W0640
23 |
24 |
25 | import re
26 |
27 |
def join_regex(list_regex):
    """Create a regular expression that matches any of the given patterns.

    Each pattern is anchored with a trailing '$' (added when not already
    present) and the results are combined as grouped alternatives.

    Args:
        list_regex: non-empty list of regular expression strings

    Returns:
        A compiled regular expression.
    """
    # Bug fix: the original only assigned the anchored pattern inside the
    # "no trailing '$'" branch, so a pattern that already ended with '$'
    # raised NameError (first iteration) or silently re-appended the
    # previous pattern, dropping this one.
    anchored = [regex if regex.endswith('$') else regex + '$'
                for regex in list_regex]
    return re.compile('(' + ')|('.join(anchored) + ')')
40 |
41 |
def read_to_skip_features(conf_dict):
    """Parse features to skip from configuration and return list of compiled
    regular expressions.

    Args:
        conf_dict: dictionary, value of conf_dict['to_skip_features'] is
                   processed if available.

    Returns:
        A tuple (list of compiled patterns, list of feature strings)
    """
    feature_strings = conf_dict.get('to_skip_features', [])
    compiled_patterns = [re.compile(entry) for entry in feature_strings]
    return compiled_patterns, feature_strings
58 |
59 |
def read_megate_thresholds(conf_dict):
    """Parse megate thresholds from configuration and return list of compiled
    regular expressions.

    Args:
        conf_dict: dictionary, value of conf_dict['megate_thresholds'] is
                   processed if available.

    Returns:
        A tuple (list of pattern dicts, list of raw threshold dicts)

    Raises:
        ValueError when a threshold entry contains an unknown key.
    """
    megate_thresholds = conf_dict.get('megate_thresholds', [])
    valid_keys = {'emodel', 'etype', 'fullmtype',
                  'features', 'megate_threshold'}

    megate_patterns = []
    for threshold_dict in megate_thresholds:
        # Reject entries carrying keys we do not understand.
        for key in threshold_dict:
            if key not in valid_keys:
                raise ValueError(
                    'Invalid key in megate thresholds: %s in %s' %
                    (key, threshold_dict))

        pattern = {
            'megate_feature_threshold': {
                'megate_threshold': threshold_dict['megate_threshold'],
                'features': join_regex(threshold_dict['features']),
            },
        }
        # Missing selectors default to match-anything patterns.
        for key in ('emodel', 'fullmtype', 'etype'):
            if key in threshold_dict:
                pattern[key] = join_regex(threshold_dict[key])
            else:
                pattern[key] = re.compile('.*$')

        megate_patterns.append(pattern)

    return megate_patterns, megate_thresholds
96 |
--------------------------------------------------------------------------------
/bluepymm/select_combos/sqlite_io.py:
--------------------------------------------------------------------------------
1 | """BluePyMM select_combos sqlite input"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 | # pylint: disable=R0914, C0325, W0640
23 |
24 |
25 | import pandas
26 | import sqlite3
27 |
28 |
def read_and_process_sqlite_score_tables(scores_sqlite_filename):
    """Read score and score values tables from score sqlite database.

    Args:
        scores_sqlite_filename: path to sqlite database

    Returns:
        A tuple of two pandas.DataFrames, as loaded from the tables 'scores'
        and 'score_values' respectively. If present, the column 'index' is
        dropped from score_values, so that every remaining column corresponds
        to a score.

    Raises:
        Exception: if the number of rows in the 'scores' table does not equal
            the number of rows in the 'score_values' table.
    """
    print('Reading scores and score values from {} ...'.format(
        scores_sqlite_filename))
    # NOTE: sqlite3's context manager only wraps a transaction; it does NOT
    # close the connection. Close explicitly to avoid leaking file handles.
    conn = sqlite3.connect(scores_sqlite_filename)
    try:
        scores = pandas.read_sql('SELECT * FROM scores', conn)
        score_values = pandas.read_sql('SELECT * FROM score_values', conn)
    finally:
        conn.close()

    if len(score_values.index) != len(scores.index):
        raise Exception("Score and score values tables don't have same number"
                        " of entries!")

    # every column should correspond to a score.
    if 'index' in score_values.columns.values:
        score_values.drop(labels=['index'], axis=1, inplace=True)

    return scores, score_values
60 |
--------------------------------------------------------------------------------
/bluepymm/templates/cell_template_neurodamus.jinja2:
--------------------------------------------------------------------------------
1 | /*
2 | {%- if banner %}
3 | {{banner}}
4 | {%- endif %}
5 | */
6 | {load_file("stdrun.hoc")}
7 | {load_file("import3d.hoc")}
8 |
9 | {%- if global_params %}
10 | /*
11 | * Check that global parameters are the same as with the optimization
12 | */
13 | proc check_parameter(/* name, expected_value, value */){
14 | strdef error
15 | if($2 != $3){
16 | sprint(error, "Parameter %s has different value %f != %f", $s1, $2, $3)
17 | execerror(error)
18 | }
19 | }
20 | proc check_simulator() {
21 | {%- for param, value in global_params.items() %}
22 | check_parameter("{{param}}", {{value}}, {{param}})
23 | {%- endfor %}
24 | }
25 | {%- endif %}
26 | {%- if ignored_global_params %}
27 | /* The following global parameters were set in BluePyOpt
28 | {%- for param, value in ignored_global_params.items() %}
29 | * {{param}} = {{value}}
30 | {%- endfor %}
31 | */
32 | {%- endif %}
33 |
34 | begintemplate {{template_name}}
35 | public init, morphology, geom_nseg_fixed, geom_nsec, getCell, getCCell, setCCell, gid, getCell
36 | public channel_seed, channel_seed_set
37 | public connect2target, clear, ASCIIrpt
38 | public soma, dend, apic, axon, myelin, getThreshold
39 | create soma[1], dend[1], apic[1], axon[1], myelin[1]
40 | public nSecAll, nSecSoma, nSecApical, nSecBasal, nSecMyelinated, nSecAxonalOrig, nSecAxonal
41 | public CellRef, synHelperList, synlist
42 | objref this, CellRef, segCounts, ASCIIrpt, synHelperList, synlist
43 |
44 | public all, somatic, apical, axonal, basal, myelinated, APC
45 | objref all, somatic, apical, axonal, basal, myelinated, APC
46 |
47 |
48 | obfunc getCell(){
49 | return this
50 | }
51 |
52 | obfunc getCCell(){
53 | return CellRef
54 | }
55 | proc setCCell(){
56 | CellRef = $o1
57 | }
58 |
59 | //-----------------------------------------------------------------------------------------------
60 |
61 | /*!
62 | * When clearing the model, the circular reference between Cells and CCells must be broken so the
63 | * entity watching reference counts can work.
64 | */
65 | proc clear() { localobj nil
66 | CellRef = nil
67 | }
68 |
69 |
70 |
71 | /*!
72 | * @param $o1 NetCon source (can be nil)
73 | * @param $o2 Variable where generated NetCon will be placed
74 | */
75 | proc connect2target() { //$o1 target point process, $o2 returned NetCon
76 | soma $o2 = new NetCon(&v(1), $o1)
77 | $o2.threshold = -30
78 | }
79 |
80 |
81 | proc init(/* args: morphology_dir, morphology_name */) {
82 | all = new SectionList()
83 | apical = new SectionList()
84 | axonal = new SectionList()
85 | basal = new SectionList()
86 | somatic = new SectionList()
87 | myelinated = new SectionList()
88 |
89 | synHelperList = new List()
90 | synlist = new List()
91 |
92 | //For compatibility with BBP CCells
93 | CellRef = this
94 |
95 | forall delete_section()
96 |
97 | gid = $1
98 |
99 | if(numarg() >= 3) {
100 | load_morphology($s2, $s3)
101 | } else {
102 | {%- if morphology %}
103 | load_morphology($s2, "{{morphology}}")
104 | {%- else %}
105 | execerror("Template {{template_name}} requires morphology name to instantiate")
106 | {%- endif %}
107 | }
108 |
109 | geom_nseg()
110 | indexSections()
111 | {%- if replace_axon %}
112 | replace_axon()
113 | {%- endif %}
114 | insertChannel()
115 | biophys()
116 |
117 | // Initialize channel_seed_set to avoid accidents
118 | channel_seed_set = 0
119 | // Initialize random number generators
120 | re_init_rng()
121 | }
122 |
123 | /*!
124 | * Assign section indices to the section voltage value. This will be useful later for serializing
125 | * the sections into an array. Note, that once the simulation begins, the voltage values will revert to actual data again.
126 | *
127 | * @param $o1 Import3d_GUI object
128 | */
129 | proc indexSections() { local index
130 | index = 0
131 | forsec all {
132 | v(0.0001) = index
133 | index = index +1
134 | }
135 | }
136 |
137 | func getThreshold() { return 0.0 }
138 |
proc load_morphology(/* morphology_dir, morphology_name */) {localobj morph, import, sf, extension
    strdef morph_path
    // build "<morphology_dir>/<morphology_name>"
    sprint(morph_path, "%s/%s", $s1, $s2)

    sf = new StringFunctions()
    extension = new String()

    // NOTE(review): sscanf with %s stops at whitespace -- assumes the
    // morphology path contains no spaces
    sscanf(morph_path, "%s", extension.s)
    // keep only the last 4 characters, i.e. the ".asc"/".swc" extension
    sf.right(extension.s, sf.len(extension.s)-4)

    if(strcmp(extension.s, ".ASC") == 0 ){
        extension.s = ".asc"
        sf.left(morph_path, sf.len(morph_path)-3)
        sprint(morph_path,"%s%s",morph_path,"asc")
        print "USING LOWERCASE EXTENSION", morph_path
    }


    if( strcmp(extension.s, ".asc") == 0 ) {
        morph = new Import3d_Neurolucida3()
    } else if( strcmp(extension.s, ".swc" ) == 0) {
        morph = new Import3d_SWC_read()
    } else {
        // use an explicit format string: the extension text must not be
        // interpreted as a printf format specification
        printf("%s", extension.s)
        printf("Unsupported file format: Morphology file has to end with .asc or .swc" )
        quit()
    }

    morph.quiet = 1
    morph.input(morph_path)

    import = new Import3d_GUI(morph, 0)
    import.instantiate(this)
}
173 |
174 | /*
175 | * Assignment of mechanism values based on distance from the soma
176 | * Matches the BluePyOpt method
177 | */
proc distribute_distance(){local x localobj sl
    strdef stmp, distfunc, mech

    sl = $o1          // $o1: SectionList to apply the distribution to
    mech = $s2        // $s2: name of the mechanism/parameter to set
    distfunc = $s3    // $s3: distance-dependent expression with %f placeholders
    // measure all path distances from the middle of the soma
    this.soma[0] distance(0, 0.5)
    // turn the expression into "<secname> <mech>(<x>) = <expr>" assignments
    sprint(distfunc, "%%s %s(%%f) = %s", mech, distfunc)
    forsec sl for(x, 0) {
        // use distance(x) twice for the step distribution case, e.g. for calcium hotspot
        sprint(stmp, distfunc, secname(), x, distance(x), distance(x))
        execute(stmp)
    }
}
192 |
193 | proc geom_nseg() {
194 | this.geom_nsec() //To count all sections
195 | //TODO: geom_nseg_fixed depends on segCounts which is calculated by
196 | // geom_nsec. Can this be collapsed?
197 | this.geom_nseg_fixed(40)
198 | this.geom_nsec() //To count all sections
199 | }
200 |
201 | proc insertChannel() {
202 | {%- for location, names in channels.items() %}
203 | forsec this.{{location}} {
204 | {%- for channel in names %}
205 | insert {{channel}}
206 | {%- endfor %}
207 | }
208 | {%- endfor %}
209 | }
210 |
211 | proc biophys() {
212 | {% for loc, parameters in section_params %}
213 | forsec CellRef.{{ loc }} {
214 | {%- for param in parameters %}
215 | {{ param.name }} = {{ param.value }}
216 | {%- endfor %}
217 | }
218 | {% endfor %}
219 | {%- for location, param_name, value in range_params %}
220 | distribute_distance(CellRef.{{location}}, "{{param_name}}", "{{value}}")
221 | {%- endfor %}
222 | }
223 |
224 | func sec_count(/* SectionList */) { local nSec
225 | nSec = 0
226 | forsec $o1 {
227 | nSec += 1
228 | }
229 | return nSec
230 | }
231 |
232 | /*
233 | * Iterate over the section and compute how many segments should be allocate to
234 | * each.
235 | */
236 | proc geom_nseg_fixed(/* chunkSize */) { local secIndex, chunkSize
237 | chunkSize = $1
238 | soma area(.5) // make sure diam reflects 3d points
239 | secIndex = 0
240 | forsec all {
241 | nseg = 1 + 2*int(L/chunkSize)
242 | segCounts.x[secIndex] = nseg
243 | secIndex += 1
244 | }
245 | }
246 |
247 | /*
248 | * Count up the number of sections
249 | */
250 | proc geom_nsec() { local nSec
251 | nSecAll = sec_count(all)
252 | nSecSoma = sec_count(somatic)
253 | nSecApical = sec_count(apical)
254 | nSecBasal = sec_count(basal)
255 | nSecMyelinated = sec_count(myelinated)
256 | nSecAxonalOrig = nSecAxonal = sec_count(axonal)
257 |
258 | segCounts = new Vector()
259 | segCounts.resize(nSecAll)
260 | nSec = 0
261 | forsec all {
262 | segCounts.x[nSec] = nseg
263 | nSec += 1
264 | }
265 | }
266 |
267 | /*
268 | * Replace the axon built from the original morphology file with a stub axon
269 | */
270 | {%- if replace_axon %}
271 | {{replace_axon}}
272 | {%- endif %}
273 |
274 |
275 | {{re_init_rng}}
276 |
277 | endtemplate {{template_name}}
278 |
279 |
--------------------------------------------------------------------------------
/bluepymm/templates/cell_template_neurodamus_sbo.jinja2:
--------------------------------------------------------------------------------
1 | /*
2 | {%- if banner %}
3 | {{banner}}
4 | {%- endif %}
5 | */
6 | {load_file("stdrun.hoc")}
7 | {load_file("import3d.hoc")}
8 |
9 | {%- if global_params %}
10 | /*
11 | * Check that global parameters are the same as with the optimization
12 | */
13 | proc check_parameter(/* name, expected_value, value */){
14 | strdef error
15 | if($2 != $3){
16 | sprint(error, "Parameter %s has different value %f != %f", $s1, $2, $3)
17 | execerror(error)
18 | }
19 | }
20 | proc check_simulator() {
21 | {%- for param, value in global_params.items() %}
22 | check_parameter("{{param}}", {{value}}, {{param}})
23 | {%- endfor %}
24 | }
25 | {%- endif %}
26 | {%- if ignored_global_params %}
27 | /* The following global parameters were set in BluePyOpt
28 | {%- for param, value in ignored_global_params.items() %}
29 | * {{param}} = {{value}}
30 | {%- endfor %}
31 | */
32 | {%- endif %}
33 |
34 | begintemplate {{template_name}}
35 | public init, morphology, geom_nseg_fixed, geom_nsec, getCell, getCCell, setCCell, gid, getCell
36 | public channel_seed, channel_seed_set
37 | public connect2target, clear, ASCIIrpt
38 | public soma, dend, apic, axon, myelin, getThreshold
39 | create soma[1], dend[1], apic[1], axon[1], myelin[1]
40 | public nSecAll, nSecSoma, nSecApical, nSecBasal, nSecMyelinated, nSecAxonalOrig, nSecAxonal
41 | public CellRef, synHelperList, synlist
42 | objref this, CellRef, segCounts, ASCIIrpt, synHelperList, synlist
43 |
44 | public all, somatic, apical, axonal, basal, myelinated, APC
45 | objref all, somatic, apical, axonal, basal, myelinated, APC
46 |
47 |
48 | obfunc getCell(){
49 | return this
50 | }
51 |
52 | obfunc getCCell(){
53 | return CellRef
54 | }
55 | proc setCCell(){
56 | CellRef = $o1
57 | }
58 |
59 | //-----------------------------------------------------------------------------------------------
60 |
61 | /*!
62 | * When clearing the model, the circular reference between Cells and CCells must be broken so the
63 | * entity watching reference counts can work.
64 | */
65 | proc clear() { localobj nil
66 | CellRef = nil
67 | }
68 |
69 |
70 |
71 | /*!
72 | * @param $o1 NetCon source (can be nil)
73 | * @param $o2 Variable where generated NetCon will be placed
74 | */
75 | proc connect2target() { //$o1 target point process, $o2 returned NetCon
76 | soma $o2 = new NetCon(&v(1), $o1)
77 | $o2.threshold = -30
78 | }
79 |
80 |
81 | proc init(/* args: morphology_dir, morphology_name */) {
82 | all = new SectionList()
83 | apical = new SectionList()
84 | axonal = new SectionList()
85 | basal = new SectionList()
86 | somatic = new SectionList()
87 | myelinated = new SectionList()
88 |
89 | synHelperList = new List()
90 | synlist = new List()
91 |
92 | //For compatibility with BBP CCells
93 | CellRef = this
94 |
95 | forall delete_section()
96 |
97 | gid = $1
98 |
99 | if(numarg() >= 3) {
100 | load_morphology($s2, $s3)
101 | } else {
102 | {%- if morphology %}
103 | load_morphology($s2, "{{morphology}}")
104 | {%- else %}
105 | execerror("Template {{template_name}} requires morphology name to instantiate")
106 | {%- endif %}
107 | }
108 |
109 | geom_nseg()
110 | indexSections()
111 | {%- if replace_axon %}
112 | replace_axon()
113 | {%- endif %}
114 | insertChannel()
115 | biophys()
116 |
117 | // Initialize channel_seed_set to avoid accidents
118 | channel_seed_set = 0
119 | // Initialize random number generators
120 | re_init_rng()
121 | }
122 |
123 | /*!
124 | * Assign section indices to the section voltage value. This will be useful later for serializing
125 | * the sections into an array. Note, that once the simulation begins, the voltage values will revert to actual data again.
126 | *
127 | * @param $o1 Import3d_GUI object
128 | */
129 | proc indexSections() { local index
130 | index = 0
131 | forsec all {
132 | v(0.0001) = index
133 | index = index +1
134 | }
135 | }
136 |
137 | func getThreshold() { return 0.0 }
138 |
proc load_morphology(/* morphology_dir, morphology_name */) {localobj morph, import, sf, extension, commands, pyobj
    strdef morph_path
    sprint(morph_path, "%s/%s", $s1, $s2)
    sf = new StringFunctions()
    extension = new String()
    sscanf(morph_path, "%s", extension.s)

    // keep the last 3 characters ("asc"/"swc"/".h5") and fold upper case
    sf.right(extension.s, sf.len(extension.s)-3)
    if(strcmp(extension.s, "ASC") == 0 ){
        extension.s = "asc"
    }
    if(strcmp(extension.s, "SWC") == 0 ){
        extension.s = "swc"
    }

    if( strcmp(extension.s, "asc") == 0 ) {
        morph = new Import3d_Neurolucida3()
        morph.quiet = 1
        morph.input(morph_path)

        import = new Import3d_GUI(morph, 0)
        import.instantiate(this)
    } else if( strcmp(extension.s, "swc" ) == 0) {
        morph = new Import3d_SWC_read()
        morph.quiet = 1
        morph.input(morph_path)

        import = new Import3d_GUI(morph, 0)
        import.instantiate(this)
    } else if( strcmp(extension.s, ".h5") == 0 ) {
        // .h5 morphologies are loaded via MorphIOWrapper, which emits hoc
        // commands executed in the context of this template instance
        if(nrnpython ("from morphio_wrapper import MorphIOWrapper") == 1) {
            pyobj = new PythonObject()
            commands = pyobj.MorphIOWrapper(morph_path).morph_as_hoc()
            for i = 0, pyobj.len(commands) - 1 {
                execute(commands._[i], this)
            }
            indexSections()
            geom_nsec()
        } else {
            printf( ".h5 morphology used but cannot load 'morphio_wrapper'." )
            quit()
        }
    } else {
        // use an explicit format string: the extension text must not be
        // interpreted as a printf format specification
        printf("%s", extension.s)
        printf("Unsupported file format: Morphology file has to end with .asc, .swc or .h5" )
        quit()
    }
}
187 |
188 | /*
189 | * Assignment of mechanism values based on distance from the soma
190 | * Matches the BluePyOpt method
191 | */
192 | proc distribute_distance(){local x localobj sl
193 | strdef stmp, distfunc, mech
194 |
195 | sl = $o1
196 | mech = $s2
197 | distfunc = $s3
198 | this.soma[0] distance(0, 0.5)
199 | sprint(distfunc, "%%s %s(%%f) = %s", mech, distfunc)
200 | forsec sl for(x, 0) {
201 | // use distance(x) twice for the step distribution case, e.g. for calcium hotspot
202 | sprint(stmp, distfunc, secname(), x, distance(x), distance(x))
203 | execute(stmp)
204 | }
205 | }
206 |
207 | proc geom_nseg() {
208 | this.geom_nsec() //To count all sections
209 | //TODO: geom_nseg_fixed depends on segCounts which is calculated by
210 | // geom_nsec. Can this be collapsed?
211 | this.geom_nseg_fixed(40)
212 | this.geom_nsec() //To count all sections
213 | }
214 |
215 | proc insertChannel() {
216 | {%- for location, names in channels.items() %}
217 | forsec this.{{location}} {
218 | {%- for channel in names %}
219 | insert {{channel}}
220 | {%- endfor %}
221 | }
222 | {%- endfor %}
223 | }
224 |
225 | proc biophys() {
226 | {% for loc, parameters in section_params %}
227 | forsec CellRef.{{ loc }} {
228 | {%- for param in parameters %}
229 | {{ param.name }} = {{ param.value }}
230 | {%- endfor %}
231 | }
232 | {% endfor %}
233 | {%- for location, param_name, value in range_params %}
234 | distribute_distance(CellRef.{{location}}, "{{param_name}}", "{{value}}")
235 | {%- endfor %}
236 | }
237 |
238 | func sec_count(/* SectionList */) { local nSec
239 | nSec = 0
240 | forsec $o1 {
241 | nSec += 1
242 | }
243 | return nSec
244 | }
245 |
246 | /*
247 | * Iterate over the section and compute how many segments should be allocate to
248 | * each.
249 | */
250 | proc geom_nseg_fixed(/* chunkSize */) { local secIndex, chunkSize
251 | chunkSize = $1
252 | soma area(.5) // make sure diam reflects 3d points
253 | secIndex = 0
254 | forsec all {
255 | nseg = 1 + 2*int(L/chunkSize)
256 | segCounts.x[secIndex] = nseg
257 | secIndex += 1
258 | }
259 | }
260 |
261 | /*
262 | * Count up the number of sections
263 | */
264 | proc geom_nsec() { local nSec
265 | nSecAll = sec_count(all)
266 | nSecSoma = sec_count(somatic)
267 | nSecApical = sec_count(apical)
268 | nSecBasal = sec_count(basal)
269 | nSecMyelinated = sec_count(myelinated)
270 | nSecAxonalOrig = nSecAxonal = sec_count(axonal)
271 |
272 | segCounts = new Vector()
273 | segCounts.resize(nSecAll)
274 | nSec = 0
275 | forsec all {
276 | segCounts.x[nSec] = nseg
277 | nSec += 1
278 | }
279 | }
280 |
281 | /*
282 | * Replace the axon built from the original morphology file with a stub axon
283 | */
284 | {%- if replace_axon %}
285 | {{replace_axon}}
286 | {%- endif %}
287 |
288 |
289 | {{re_init_rng}}
290 |
291 | endtemplate {{template_name}}
292 |
293 |
--------------------------------------------------------------------------------
/bluepymm/templates/cell_template_neuron.jinja2:
--------------------------------------------------------------------------------
1 | /*
2 | {%- if banner %}
3 | {{banner}}
4 | {%- endif %}
5 | */
6 | {load_file("stdrun.hoc")}
7 | {load_file("import3d.hoc")}
8 |
9 | {%- if global_params %}
10 | /*
11 | * Check that global parameters are the same as with the optimization
12 | */
13 | proc check_parameter(/* name, expected_value, value */){
14 | strdef error
15 | if($2 != $3){
16 | sprint(error, "Parameter %s has different value %f != %f", $s1, $2, $3)
17 | execerror(error)
18 | }
19 | }
20 | proc check_simulator() {
21 | {%- for param, value in global_params.items() %}
22 | check_parameter("{{param}}", {{value}}, {{param}})
23 | {%- endfor %}
24 | }
25 | {%- endif %}
26 | {%- if ignored_global_params %}
27 | /* The following global parameters were set in BluePyOpt
28 | {%- for param, value in ignored_global_params.items() %}
29 | * {{param}} = {{value}}
30 | {%- endfor %}
31 | */
32 | {%- endif %}
33 |
34 | begintemplate {{template_name}}
35 | public init, morphology, geom_nseg_fixed, geom_nsec
36 | public soma, dend, apic, axon, myelin
37 | create soma[1], dend[1], apic[1], axon[1], myelin[1]
38 |
39 | objref this, CellRef, segCounts
40 |
41 | public all, somatic, apical, axonal, basal, myelinated, APC
42 | objref all, somatic, apical, axonal, basal, myelinated, APC
43 |
44 | obfunc getCell(){
45 | return this
46 | }
47 |
48 | proc init(/* args: morphology_dir, morphology_name */) {
49 | all = new SectionList()
50 | apical = new SectionList()
51 | axonal = new SectionList()
52 | basal = new SectionList()
53 | somatic = new SectionList()
54 | myelinated = new SectionList()
55 |
56 | //For compatibility with BBP CCells
57 | CellRef = this
58 |
59 | forall delete_section()
60 |
61 | if(numarg() >= 2) {
62 | load_morphology($s1, $s2)
63 | } else {
64 | {%- if morphology %}
65 | load_morphology($s1, "{{morphology}}")
66 | {%- else %}
67 | execerror("Template {{template_name}} requires morphology name to instantiate")
68 | {%- endif %}
69 | }
70 |
71 | geom_nseg()
72 | {%- if replace_axon %}
73 | replace_axon()
74 | {%- endif %}
75 | insertChannel()
76 | biophys()
77 | re_init_rng()
78 | }
79 |
proc load_morphology(/* morphology_dir, morphology_name */) {localobj morph, import, sf, extension
    strdef morph_path
    // build "<morphology_dir>/<morphology_name>"
    sprint(morph_path, "%s/%s", $s1, $s2)

    sf = new StringFunctions()
    extension = new String()

    // NOTE(review): sscanf with %s stops at whitespace -- assumes the
    // morphology path contains no spaces
    sscanf(morph_path, "%s", extension.s)
    // keep only the last 4 characters, i.e. the ".asc"/".swc" extension
    sf.right(extension.s, sf.len(extension.s)-4)

    if( strcmp(extension.s, ".asc") == 0 ) {
        morph = new Import3d_Neurolucida3()
    } else if( strcmp(extension.s, ".swc" ) == 0) {
        morph = new Import3d_SWC_read()
    } else {
        printf("Unsupported file format: Morphology file has to end with .asc or .swc" )
        quit()
    }

    // suppress Import3d progress output
    morph.quiet = 1
    morph.input(morph_path)

    import = new Import3d_GUI(morph, 0)
    import.instantiate(this)
}
105 |
106 | /*
107 | * Assignment of mechanism values based on distance from the soma
108 | * Matches the BluePyOpt method
109 | */
110 | proc distribute_distance(){local x localobj sl
111 | strdef stmp, distfunc, mech
112 |
113 | sl = $o1
114 | mech = $s2
115 | distfunc = $s3
116 | this.soma[0] distance(0, 0.5)
117 | sprint(distfunc, "%%s %s(%%f) = %s", mech, distfunc)
118 | forsec sl for(x, 0) {
119 | // use distance(x) twice for the step distribution case, e.g. for calcium hotspot
120 | sprint(stmp, distfunc, secname(), x, distance(x), distance(x))
121 | execute(stmp)
122 | }
123 | }
124 |
125 | proc geom_nseg() {
126 | this.geom_nsec() //To count all sections
127 | //TODO: geom_nseg_fixed depends on segCounts which is calculated by
128 | // geom_nsec. Can this be collapsed?
129 | this.geom_nseg_fixed(40)
130 | this.geom_nsec() //To count all sections
131 | }
132 |
133 | proc insertChannel() {
134 | {%- for location, names in channels.items() %}
135 | forsec this.{{location}} {
136 | {%- for channel in names %}
137 | insert {{channel}}
138 | {%- endfor %}
139 | }
140 | {%- endfor %}
141 | }
142 |
143 | proc biophys() {
144 | {% for loc, parameters in section_params %}
145 | forsec CellRef.{{ loc }} {
146 | {%- for param in parameters %}
147 | {{ param.name }} = {{ param.value }}
148 | {%- endfor %}
149 | }
150 | {% endfor %}
151 | {%- for location, param_name, value in range_params %}
152 | distribute_distance(CellRef.{{location}}, "{{param_name}}", "{{value}}")
153 | {%- endfor %}
154 | }
155 |
156 | func sec_count(/* SectionList */) { local nSec
157 | nSec = 0
158 | forsec $o1 {
159 | nSec += 1
160 | }
161 | return nSec
162 | }
163 |
164 | /*
165 | * Iterate over the section and compute how many segments should be allocate to
166 | * each.
167 | */
168 | proc geom_nseg_fixed(/* chunkSize */) { local secIndex, chunkSize
169 | chunkSize = $1
170 | soma area(.5) // make sure diam reflects 3d points
171 | secIndex = 0
172 | forsec all {
173 | nseg = 1 + 2*int(L/chunkSize)
174 | segCounts.x[secIndex] = nseg
175 | secIndex += 1
176 | }
177 | }
178 |
179 | /*
180 | * Count up the number of sections
181 | */
182 | proc geom_nsec() { local nSec
183 | nSecAll = sec_count(all)
184 | nSecSoma = sec_count(somatic)
185 | nSecApical = sec_count(apical)
186 | nSecBasal = sec_count(basal)
187 | nSecMyelinated = sec_count(myelinated)
188 | nSecAxonalOrig = nSecAxonal = sec_count(axonal)
189 |
190 | segCounts = new Vector()
191 | segCounts.resize(nSecAll)
192 | nSec = 0
193 | forsec all {
194 | segCounts.x[nSec] = nseg
195 | nSec += 1
196 | }
197 | }
198 |
199 | /*
200 | * Replace the axon built from the original morphology file with a stub axon
201 | */
202 | {%- if replace_axon %}
203 | {{replace_axon}}
204 | {%- endif %}
205 |
206 |
207 | {{re_init_rng}}
208 |
209 | endtemplate {{template_name}}
210 |
--------------------------------------------------------------------------------
/bluepymm/tools.py:
--------------------------------------------------------------------------------
1 | """BluePyMM tools"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
import contextlib
import errno
import hashlib
import importlib
import importlib.util  # ensure the util submodule is available (load_module)
import json
import multiprocessing.pool
import os
import sys
from string import digits
31 |
32 |
@contextlib.contextmanager
def cd(dir_name):
    """Context manager that temporarily switches the working directory.

    On entry the process chdirs into `dir_name`; on exit (normal or via an
    exception) the previous working directory is restored.
    """
    previous_dir = os.getcwd()
    os.chdir(dir_name)
    try:
        yield
    finally:
        os.chdir(previous_dir)
42 |
43 |
def load_json(path):
    """Read the JSON file at `path` and return the parsed content."""
    with open(path) as json_file:
        contents = json.load(json_file)
    return contents
48 |
49 |
def write_json(output_dir, output_name, config):
    """Dump `config` as indented, key-sorted JSON to output_dir/output_name.

    Returns:
        The path of the written file.
    """
    path = os.path.join(output_dir, output_name)
    with open(path, 'w') as json_file:
        json.dump(config, json_file, indent=2, sort_keys=True)
    return path
56 |
57 |
def makedirs(path):
    """mkdir -p: create `path` (and parents), don't fail when the dir exists.

    Uses os.makedirs(exist_ok=True): an already-existing *directory* is not
    an error, but if `path` exists and is not a directory a FileExistsError
    is raised (the old errno.EEXIST check silently accepted that case).

    Returns:
        The created (or pre-existing) path.
    """
    os.makedirs(path, exist_ok=True)
    return path
66 |
67 |
def check_no_null_nan_values(data, description):
    """Check whether a pandas.DataFrame contains neither None nor NaN values.

    Args:
        data: pandas.DataFrame to inspect
        description: text used to identify `data` in the error message

    Returns:
        bool: True if successful.

    Raises:
        Exception: if `data` contains at least one None or NaN value.
    """
    has_missing_values = data.isnull().values.any()
    if has_missing_values:
        raise Exception('{} contains None/NaN values.'.format(description))
    return True
80 |
81 |
def check_all_combos_have_run(database, description):
    """Verify that all entries of a given database have run.

    Args:
        database: a pandas.DataFrame with a column 'to_run'
        description: string that contains description of database

    Returns:
        True if the value of 'to_run' is False for all rows.

    Raises:
        Exception, if the database contains at least one entry where the
        value of 'to_run' is True.
    """
    combos_left_to_run = database['to_run'].any()
    if not combos_left_to_run:
        return True
    raise Exception('At least one me-combination of database "{}" has not'
                    ' been run'.format(description))
101 |
102 |
def load_module(name, path):
    """Try and load module `name` but *only* in `path`.

    Args:
        name: module name to register in sys.modules
        path: path of the source file to load the module from

    Returns:
        The loaded module, the already-imported module of the same name if
        one exists, or None if no import spec could be created for `path`.

    Based on the recipe from
    https://docs.python.org/3.6/library/importlib.html

    Requires `import importlib.util` at module level: plain
    `import importlib` does not guarantee the `util` submodule is loaded.
    """
    # Fast path: see if the module has already been imported.
    try:
        return sys.modules[name]
    except KeyError:
        pass

    spec = importlib.util.spec_from_file_location(name, path)
    if spec is None:
        return None
    module = importlib.util.module_from_spec(spec)
    # Register before executing so recursive imports can see the module.
    sys.modules[name] = module
    spec.loader.exec_module(module)
    return module
121 |
122 |
def check_compliance_with_neuron(template_name):
    """Verify that a given name is compliant with the rules for a NEURON
    template name: a name should be a non-empty alphanumeric string, and
    start with a letter. Underscores are allowed. The length should not
    exceed 50 characters.

    Returns:
        True if compliant, false otherwise.
    """
    max_len = 50
    if not template_name or len(template_name) > max_len:
        return False
    if not template_name[0].isalpha():
        return False
    # underscores are tolerated; everything else must be alphanumeric
    return template_name.replace('_', '').isalnum()
136 |
137 |
def shorten_and_hash_string(label, keep_length=40, hash_length=9):
    """Convert string to a shorter string if required.

    Args:
        label: a string to be converted
        keep_length: length of the original string to keep. Default is 40
            characters.
        hash_length: length of the hash to generate, should not be more
            than 20. Default is 9 characters.

    Returns:
        If the length of the original label is not more than the sum of
        'keep_length' and 'hash_length' plus one, the original string is
        returned. Otherwise, a string with structure <head>_<hash> is
        returned, where <head> is the first 'keep_length' characters of the
        original string and <hash> is the first 'hash_length' characters of
        a sha1 hash of the full original string.

    Raises:
        ValueError, if 'hash_length' exceeds 20.
    """

    if hash_length > 20:
        raise ValueError('Parameter hash_length should not exceed 20, '
                         ' received: {}'.format(hash_length))

    if len(label) <= keep_length + hash_length + 1:
        return label

    hash_string = hashlib.sha1(label.encode('utf-8')).hexdigest()
    return '{}_{}'.format(label[0:keep_length], hash_string[0:hash_length])
169 |
170 |
def decode_bstring(bstr_obj):
    """Decodes and returns the str object from bytes.

    Args:
        bstr_obj: the bytes string object
    Returns:
        string object if conversion is successful, input object otherwise.
    """

    try:
        # renamed local: the original shadowed the function's own name
        decoded = bstr_obj.decode()
    except (UnicodeDecodeError, AttributeError):
        # non-bytes input (no .decode) or undecodable bytes: pass through
        print("Warning: decoding of bstring failed, returning the input.")
        return bstr_obj
    return decoded
186 |
187 |
def get_neuron_compliant_template_name(name):
    """Get template name that is compliant with NEURON based on given name.

    Args:
        name: string

    Returns:
        If `name` is NEURON-compliant, the same string is returned.
        Otherwise, leading digits are stripped, hyphens are replaced by
        underscores, and the result is shortened with a hash suffix when
        too long.
    """
    if check_compliance_with_neuron(name):
        return name
    sanitized = name.lstrip(digits).replace("-", "_")
    return shorten_and_hash_string(sanitized,
                                   keep_length=40,
                                   hash_length=9)
206 |
207 |
class NestedPool(multiprocessing.pool.Pool):

    """Class that represents a MultiProcessing nested pool.

    multiprocessing normally marks pool workers as daemon processes, and
    daemonic processes are not allowed to create children. Pinning the
    workers' `daemon` flag to False lets tasks running in this pool spawn
    their own sub-processes (e.g. nested pools).
    """

    def Process(self, *args, **kwds):
        # Create the worker as usual, then swap its class for one whose
        # `daemon` attribute is a read-only-False property.
        process = super(NestedPool, self).Process(*args, **kwds)

        class NoDaemonProcess(process.__class__):

            """Class that represents a non-daemon process"""

            # pylint: disable=R0201

            @property
            def daemon(self):
                """Get daemon flag -- always False so the worker may spawn
                children."""
                return False

            @daemon.setter
            def daemon(self, value):
                """Set daemon flag -- ignored; the Pool machinery tries to
                set daemon=True on its workers."""
                pass

        # Reassigning __class__ makes attribute lookup of `daemon` hit the
        # overriding property above on this existing instance.
        process.__class__ = NoDaemonProcess

        return process
234 |
--------------------------------------------------------------------------------
/bluepymm/validate_output/__init__.py:
--------------------------------------------------------------------------------
1 | from .main import add_parser, validate_output # NOQA
2 |
--------------------------------------------------------------------------------
/bluepymm/validate_output/main.py:
--------------------------------------------------------------------------------
1 | """Analyse scores"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | # pylint: disable=R0914, C0325, W0640
24 |
25 | import bluepymm.tools as bpmmtools
26 |
27 | # from . import sqlite_io, reporting, megate_output
28 | # from . import process_megate_config as proc_config
29 |
30 |
def validate_output(conf_filename):
    """Parse conf file and validate output.

    Args:
        conf_filename: filename of configuration (.json file) that contains
            the key 'mecombo_release_path'.
    """
    # Read the main configuration file
    conf_dict = bpmmtools.load_json(conf_filename)

    # The release file lists the paths of the generated output artefacts
    mecombo_release = bpmmtools.load_json(conf_dict['mecombo_release_path'])

    print(mecombo_release)

    output_paths = mecombo_release['output_paths']
    extneurondbdat_path = output_paths['extneurondb.dat']
    mecombotsv_path = output_paths['mecombo_emodel.tsv']
    emodelshoc_path = output_paths['emodels_hoc']

    print(extneurondbdat_path, mecombotsv_path, emodelshoc_path)

    # extneurondbdat = read_extneurondb_dat(extneurondbdat_path)
49 |
50 |
def add_parser(action):
    """Register the 'validate' sub-command on the given subparsers action.

    Args:
        action: argparse subparsers action (as returned by
            ``ArgumentParser.add_subparsers``).
    """
    validate_parser = action.add_parser(
        'validate', help='Validate me-combo output')
    validate_parser.add_argument('conf_filename')
57 |
58 |
59 | '''
60 |
61 | select_combos_from_conf(conf_dict)
62 |
63 | def select_combos(conf_filename):
64 | """Parse conf file and run select combos"""
65 | # Parse configuration file
66 | conf_dict = tools.load_json(conf_filename)
67 |
68 | select_combos_from_conf(conf_dict)
69 |
70 |
71 | def select_combos_from_conf(conf_dict):
72 | """Compare scores of me-combinations to thresholds, select successful
73 | combinations, and write results out to file.
74 |
75 | Args:
76 | conf_filename: filename of configuration (.json file)
77 | """
78 | scores_db_filename = conf_dict['scores_db']
79 | pdf_filename = conf_dict['pdf_filename']
80 | output_dir = conf_dict['output_dir']
81 |
82 | # read skip features
83 | to_skip_patterns, to_skip_features = proc_config.read_to_skip_features(
84 | conf_dict)
85 |
86 | # read megate thresholds
87 | megate_patterns, megate_thresholds = proc_config.read_megate_thresholds(
88 | conf_dict)
89 |
90 | # read score tables
91 | scores, score_values = sqlite_io.read_and_process_sqlite_score_tables(
92 | scores_db_filename)
93 | tools.check_all_combos_have_run(scores, scores_db_filename)
94 |
95 | # create final database and write report
96 | ext_neurondb = reporting.create_final_db_and_write_report(
97 | pdf_filename,
98 | to_skip_features,
99 | to_skip_patterns,
100 | megate_thresholds,
101 | megate_patterns,
102 | conf_dict.get('skip_repaired_exemplar', False),
103 | conf_dict.get('check_opt_scores', True),
104 | scores, score_values,
105 | conf_dict.get('plot_emodels_per_morphology', False))
106 | print('Wrote pdf to %s' % pdf_filename)
107 |
108 | # write output files
109 | compliant = conf_dict.get('make_names_neuron_compliant', False)
110 | extneurondb_path, mecombo_emodel_path = megate_output.save_megate_results(
111 | ext_neurondb,
112 | output_dir,
113 | sort_key='combo_name',
114 | make_names_neuron_compliant=compliant)
115 |
116 | emodels_hoc_path = conf_dict['emodels_hoc_dir']
117 |
118 | megate_output.write_mecomboreleasejson(
119 | output_dir,
120 | emodels_hoc_path,
121 | extneurondb_path,
122 | mecombo_emodel_path)
123 |
124 |
125 |
126 | '''
127 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | coverage:
2 | range: "50...100"
3 |
4 | status:
5 | project:
6 | default: off
7 | unit:
8 | target: 50%
9 | flags: unit
10 | functional:
11 | target: 50%
12 | flags: functional
13 | patch: off
14 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
21 |
22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
23 |
24 | help:
 25 | 	@echo "Please use \`make <target>' where <target> is one of"
26 | @echo " html to make standalone HTML files"
27 | @echo " dirhtml to make HTML files named index.html in directories"
28 | @echo " singlehtml to make a single large HTML file"
29 | @echo " pickle to make pickle files"
30 | @echo " json to make JSON files"
31 | @echo " htmlhelp to make HTML files and a HTML help project"
32 | @echo " qthelp to make HTML files and a qthelp project"
33 | @echo " applehelp to make an Apple Help Book"
34 | @echo " devhelp to make HTML files and a Devhelp project"
35 | @echo " epub to make an epub"
36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
37 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
39 | @echo " text to make text files"
40 | @echo " man to make manual pages"
41 | @echo " texinfo to make Texinfo files"
42 | @echo " info to make Texinfo files and run them through makeinfo"
43 | @echo " gettext to make PO message catalogs"
44 | @echo " changes to make an overview of all changed/added/deprecated items"
45 | @echo " xml to make Docutils-native XML files"
46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
47 | @echo " linkcheck to check all external links for integrity"
48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
49 | @echo " coverage to run coverage check of the documentation (if enabled)"
50 |
51 | clean:
52 | rm -rf $(BUILDDIR)/*
53 |
54 | html:
55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
56 | @echo
57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
58 |
59 | dirhtml:
60 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
61 | @echo
62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
63 |
64 | singlehtml:
65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
66 | @echo
67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
68 |
69 | pickle:
70 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
71 | @echo
72 | @echo "Build finished; now you can process the pickle files."
73 |
74 | json:
75 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
76 | @echo
77 | @echo "Build finished; now you can process the JSON files."
78 |
79 | htmlhelp:
80 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
81 | @echo
82 | @echo "Build finished; now you can run HTML Help Workshop with the" \
83 | ".hhp project file in $(BUILDDIR)/htmlhelp."
84 |
85 | qthelp:
86 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
87 | @echo
88 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
89 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
90 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/eFEL.qhcp"
91 | @echo "To view the help file:"
92 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/eFEL.qhc"
93 |
94 | applehelp:
95 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
96 | @echo
97 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
98 | @echo "N.B. You won't be able to view it unless you put it in" \
99 | "~/Library/Documentation/Help or install it in your application" \
100 | "bundle."
101 |
102 | devhelp:
103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
104 | @echo
105 | @echo "Build finished."
106 | @echo "To view the help file:"
107 | @echo "# mkdir -p $$HOME/.local/share/devhelp/eFEL"
108 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/eFEL"
109 | @echo "# devhelp"
110 |
111 | epub:
112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
113 | @echo
114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
115 |
116 | latex:
117 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
118 | @echo
119 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
120 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
121 | "(use \`make latexpdf' here to do that automatically)."
122 |
123 | latexpdf:
124 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
125 | @echo "Running LaTeX files through pdflatex..."
126 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
127 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
128 |
129 | latexpdfja:
130 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
131 | @echo "Running LaTeX files through platex and dvipdfmx..."
132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
134 |
135 | text:
136 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
137 | @echo
138 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
139 |
140 | man:
141 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
142 | @echo
143 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
144 |
145 | texinfo:
146 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
147 | @echo
148 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
149 | @echo "Run \`make' in that directory to run these through makeinfo" \
150 | "(use \`make info' here to do that automatically)."
151 |
152 | info:
153 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
154 | @echo "Running Texinfo files through makeinfo..."
155 | make -C $(BUILDDIR)/texinfo info
156 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
157 |
158 | gettext:
159 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
160 | @echo
161 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
162 |
163 | changes:
164 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
165 | @echo
166 | @echo "The overview file is in $(BUILDDIR)/changes."
167 |
168 | linkcheck:
169 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
170 | @echo
171 | @echo "Link check complete; look for any errors in the above output " \
172 | "or in $(BUILDDIR)/linkcheck/output.txt."
173 |
174 | doctest:
175 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
176 | @echo "Testing of doctests in the sources finished, look at the " \
177 | "results in $(BUILDDIR)/doctest/output.txt."
178 |
179 | coverage:
180 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
181 | @echo "Testing of coverage in the sources finished, look at the " \
182 | "results in $(BUILDDIR)/coverage/python.txt."
183 |
184 | xml:
185 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
186 | @echo
187 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
188 |
189 | pseudoxml:
190 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
191 | @echo
192 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
193 |
--------------------------------------------------------------------------------
/docs/source/.gitignore:
--------------------------------------------------------------------------------
1 | /ephys/
2 | /optimisations/
3 | /deapext/
4 |
--------------------------------------------------------------------------------
/docs/source/bluepymm.prepare_combos.rst:
--------------------------------------------------------------------------------
1 | bluepymm\.prepare\_combos package
2 | =================================
3 |
4 | Submodules
5 | ----------
6 |
7 | bluepymm\.prepare\_combos\.create\_mm\_sqlite module
8 | ----------------------------------------------------
9 |
10 | .. automodule:: bluepymm.prepare_combos.create_mm_sqlite
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | bluepymm\.prepare\_combos\.main module
16 | --------------------------------------
17 |
18 | .. automodule:: bluepymm.prepare_combos.main
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | bluepymm\.prepare\_combos\.parse\_files module
24 | ----------------------------------------------
25 |
26 | .. automodule:: bluepymm.prepare_combos.parse_files
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | bluepymm\.prepare\_combos\.prepare\_emodel\_dirs module
32 | -------------------------------------------------------
33 |
34 | .. automodule:: bluepymm.prepare_combos.prepare_emodel_dirs
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 |
40 | Module contents
41 | ---------------
42 |
43 | .. automodule:: bluepymm.prepare_combos
44 | :members:
45 | :undoc-members:
46 | :show-inheritance:
47 |
--------------------------------------------------------------------------------
/docs/source/bluepymm.rst:
--------------------------------------------------------------------------------
1 | bluepymm package
2 | ================
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | bluepymm.prepare_combos
10 | bluepymm.run_combos
11 | bluepymm.select_combos
12 |
13 | Submodules
14 | ----------
15 |
16 | bluepymm\.main module
17 | ---------------------
18 |
19 | .. automodule:: bluepymm.main
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
24 | bluepymm\.tools module
25 | ----------------------
26 |
27 | .. automodule:: bluepymm.tools
28 | :members:
29 | :undoc-members:
30 | :show-inheritance:
31 |
32 |
33 | Module contents
34 | ---------------
35 |
36 | .. automodule:: bluepymm
37 | :members:
38 | :undoc-members:
39 | :show-inheritance:
40 |
--------------------------------------------------------------------------------
/docs/source/bluepymm.run_combos.rst:
--------------------------------------------------------------------------------
1 | bluepymm\.run\_combos package
2 | =============================
3 |
4 | Submodules
5 | ----------
6 |
7 | bluepymm\.run\_combos\.calculate\_scores module
8 | -----------------------------------------------
9 |
10 | .. automodule:: bluepymm.run_combos.calculate_scores
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | bluepymm\.run\_combos\.main module
16 | ----------------------------------
17 |
18 | .. automodule:: bluepymm.run_combos.main
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 |
24 | Module contents
25 | ---------------
26 |
27 | .. automodule:: bluepymm.run_combos
28 | :members:
29 | :undoc-members:
30 | :show-inheritance:
31 |
--------------------------------------------------------------------------------
/docs/source/bluepymm.select_combos.rst:
--------------------------------------------------------------------------------
1 | bluepymm\.select\_combos package
2 | ================================
3 |
4 | Submodules
5 | ----------
6 |
7 | bluepymm\.select\_combos\.main module
8 | -------------------------------------
9 |
10 | .. automodule:: bluepymm.select_combos.main
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | bluepymm\.select\_combos\.megate\_output module
16 | -----------------------------------------------
17 |
18 | .. automodule:: bluepymm.select_combos.megate_output
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | bluepymm\.select\_combos\.process\_megate\_config module
24 | --------------------------------------------------------
25 |
26 | .. automodule:: bluepymm.select_combos.process_megate_config
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | bluepymm\.select\_combos\.reporting module
32 | ------------------------------------------
33 |
34 | .. automodule:: bluepymm.select_combos.reporting
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | bluepymm\.select\_combos\.sqlite\_io module
40 | -------------------------------------------
41 |
42 | .. automodule:: bluepymm.select_combos.sqlite_io
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
47 | bluepymm\.select\_combos\.table\_processing module
48 | --------------------------------------------------
49 |
50 | .. automodule:: bluepymm.select_combos.table_processing
51 | :members:
52 | :undoc-members:
53 | :show-inheritance:
54 |
55 |
56 | Module contents
57 | ---------------
58 |
59 | .. automodule:: bluepymm.select_combos
60 | :members:
61 | :undoc-members:
62 | :show-inheritance:
63 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../../README.rst
2 | :end-before: .. substitutions
3 |
4 | .. toctree::
5 | :hidden:
6 |
7 | Home
8 | bluepymm.rst
9 |
10 | .. |banner| image:: /logo/BluePyMMBanner.png
11 |
--------------------------------------------------------------------------------
/docs/source/logo/BluePyMMBanner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BlueBrain/BluePyMM/e095fec6f185409ef7ca68d4161dbb485eed9ffb/docs/source/logo/BluePyMMBanner.png
--------------------------------------------------------------------------------
/notebook/BluePyMM.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# BluePyMM\n",
8 | "\n",
9 | "This notebook will guide you through model management with the help of BluePyMM. This run is based on the test example *simple1*, which can be found in the folder *bluepymm/tests/examples/simple1*."
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "metadata": {
16 | "collapsed": true
17 | },
18 | "outputs": [],
19 | "source": [
20 | "import bluepymm\n",
21 | "import os\n",
22 | "from pprint import pprint"
23 | ]
24 | },
25 | {
26 | "cell_type": "code",
27 | "execution_count": 2,
28 | "metadata": {
29 | "collapsed": true
30 | },
31 | "outputs": [],
32 | "source": [
33 | "test_dir = '../bluepymm/tests/examples/simple1/'"
34 | ]
35 | },
36 | {
37 | "cell_type": "markdown",
38 | "metadata": {},
39 | "source": [
40 | "Model management consists of three phases:\n",
41 | "1. [**prepare**](#prepare): processing of input data, finding all possible morphology/electrical model combinations (*me-combinations*), and preparation of a database\n",
42 | "2. [**run**](#run): run all me-combinations\n",
43 | "3. [**select**](#select): compare all me-combinations against input thresholds, select successful combinations and write them out to file; generate report"
44 | ]
45 | },
46 | {
47 | "cell_type": "markdown",
48 | "metadata": {},
49 | "source": [
50 | "## Prepare me-combinations\n",
51 | "\n",
52 | "In the preparation phase, the input data is processed, which consists of morphology releases, a set of released e-models and a recipe that describes which layers, morphology types (*m-types*), and electrical types (*e-types*) should be tested."
53 | ]
54 | },
55 | {
56 | "cell_type": "code",
57 | "execution_count": 3,
58 | "metadata": {},
59 | "outputs": [],
60 | "source": [
61 | "# Input configuration\n",
62 | "prepare_config = 'simple1_conf_prepare.json'\n",
63 | "\n",
64 | "with bluepymm.tools.cd(test_dir):\n",
65 | " config = bluepymm.tools.load_json(prepare_config)\n",
66 | "\n",
67 | "# Inspect configuration\n",
68 | "pprint(config)"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": 4,
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "with bluepymm.tools.cd(test_dir):\n",
78 | " # Check directories that could possibly clash with this run \n",
79 | " for d in [config['output_dir'], config['tmp_dir']]:\n",
80 | " if os.path.exists(d):\n",
81 | " raise Exception('Directory {} already exists'.format(os.path.abspath(d)))\n",
82 | "\n",
83 | " # Run combination preparation\n",
84 | " bluepymm.prepare_combos.main.prepare_combos(conf_filename=prepare_config, continu=False)"
85 | ]
86 | },
87 | {
88 | "cell_type": "markdown",
89 | "metadata": {
90 | "collapsed": true
91 | },
92 | "source": [
93 | "## Run me-combinations\n",
94 | "\n",
95 | "In this phase, all the me-combinations as found in the SQLite database are run. The resulting scores are saved for later processing."
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": 5,
101 | "metadata": {
102 | "collapsed": true
103 | },
104 | "outputs": [],
105 | "source": [
106 | "# Input configuration\n",
107 | "run_config = 'simple1_conf_run.json'\n",
108 | "\n",
109 | "with bluepymm.tools.cd(test_dir):\n",
110 | " config = bluepymm.tools.load_json(run_config)\n",
111 | "\n",
112 | "# Inspect configuration\n",
113 | "pprint(config)"
114 | ]
115 | },
116 | {
117 | "cell_type": "code",
118 | "execution_count": 6,
119 | "metadata": {
120 | "collapsed": true
121 | },
122 | "outputs": [],
123 | "source": [
124 | "with bluepymm.tools.cd(test_dir):\n",
125 | " # Run combinations\n",
126 | " bluepymm.run_combos.main.run_combos(conf_filename=run_config, ipyp=False, ipyp_profile=None)"
127 | ]
128 | },
129 | {
130 | "cell_type": "markdown",
131 | "metadata": {},
132 | "source": [
133 | "## Select me-combinations\n",
134 | "\n",
135 | "In the last phase, the scores of the simulated me-combinations are compared with input threshold values. The successful combinations are stored in database."
136 | ]
137 | },
138 | {
139 | "cell_type": "code",
140 | "execution_count": 7,
141 | "metadata": {
142 | "collapsed": true,
143 | "scrolled": true
144 | },
145 | "outputs": [],
146 | "source": [
147 | "# Input configuration\n",
148 | "select_config = 'simple1_conf_select.json'\n",
149 | "\n",
150 | "with bluepymm.tools.cd(test_dir):\n",
151 | " config = bluepymm.tools.load_json(select_config)\n",
152 | "\n",
153 | "# Inspect configuration\n",
154 | "pprint(config)"
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": 8,
160 | "metadata": {
161 | "collapsed": true
162 | },
163 | "outputs": [],
164 | "source": [
165 | "with bluepymm.tools.cd(test_dir):\n",
166 | " # Run combinations\n",
167 | " bluepymm.select_combos.main.select_combos(conf_filename=select_config, n_processes=1)"
168 | ]
169 | },
170 | {
171 | "cell_type": "code",
172 | "execution_count": null,
173 | "metadata": {},
174 | "outputs": [],
175 | "source": []
176 | }
177 | ],
178 | "metadata": {
179 | "kernelspec": {
180 | "display_name": "Python 3 (ipykernel)",
181 | "language": "python",
182 | "name": "python3"
183 | },
184 | "language_info": {
185 | "codemirror_mode": {
186 | "name": "ipython",
187 | "version": 3
188 | },
189 | "file_extension": ".py",
190 | "mimetype": "text/x-python",
191 | "name": "python",
192 | "nbconvert_exporter": "python",
193 | "pygments_lexer": "ipython3",
194 | "version": "3.8.3"
195 | }
196 | },
197 | "nbformat": 4,
198 | "nbformat_minor": 4
199 | }
200 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "BluePyMM",
3 | "description": "Blue Brain Python Cell Model Management",
4 | "version": "0.6.32",
5 | "scripts": {
6 | "build_doc": "echo 'bluepymm.readthedocs.io'"
7 | },
8 | "repository": {
9 | "type": "git",
10 | "url": "https://github.com/BlueBrain/BluePyMM/releases",
11 | "issuesurl": "https://github.com/BlueBrain/BluePyMM/issues"
12 | },
13 | "author": "Werner Van Geit (werner.vangeit@epfl.ch)",
14 | "contributors": ["Werner Van Geit", "Liesbeth Vanherpe", "James King", "Mike Gevaert", "Christian Roessert"],
15 | "license": "LGPL/BSD"
16 | }
17 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools >= 64", "setuptools-scm>=8.0"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "bluepymm"
7 | authors = [
8 | {name = "Blue Brain Project, EPFL", email = "werner.vangeit@epfl.ch"},
9 | ]
10 | description="Model Management Python Library (bluepymm)"
11 | readme = "README.rst"
12 | license = {file = "LICENSE.txt"}
13 | requires-python = ">= 3.9"
14 | dynamic = ["version"]
15 | dependencies = [
16 | "sh",
17 | "bluepyopt",
18 | "matplotlib",
19 | "pandas>=2.0.0",
20 | "numpy",
21 | "ipyparallel",
22 | "lxml",
23 | "h5py",
24 | "pyyaml",
25 | ]
26 | classifiers = [
27 | "Development Status :: 4 - Beta",
28 | "Environment :: Console",
29 | "Programming Language :: Python :: 3",
30 | "Operating System :: POSIX",
31 | "Topic :: Scientific/Engineering",
32 | "Topic :: Utilities",
33 | "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
34 | ]
35 | keywords = [
36 | "optimisation",
37 | "neuroscience",
38 | "BlueBrainProject"
39 | ]
40 |
41 | [project.urls]
42 | Homepage = "https://github.com/BlueBrain/BluePyMM"
43 | Source = "https://github.com/BlueBrain/BluePyMM"
44 | Repository = "https://github.com/BlueBrain/BluePyMM.git"
45 | Tracker = "https://github.com/BlueBrain/BluePyMM/issues"
46 | Documentation = "https://bluepymm.readthedocs.io/en/latest"
47 |
48 | [project.scripts]
49 | bluepymm = "bluepymm:main.main"
50 |
51 | [tool.setuptools]
52 | include-package-data = true
53 |
54 | [tool.setuptools.package-data]
55 | bluepymm = [
56 | "templates/cell_template_neuron.jinja2",
57 | "templates/cell_template_neurodamus.jinja2",
58 | "templates/cell_template_neurodamus_sbo.jinja2",
59 | ]
60 |
61 | [tool.setuptools.packages.find]
62 | exclude = ["notebook", "tests",]
63 |
64 | [tool.setuptools_scm]
65 | version_scheme = "python-simplified-semver"
66 | local_scheme = "no-local-version"
67 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | markers =
3 | unit: unit tests.
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | -e .
2 |
--------------------------------------------------------------------------------
/requirements_docs.txt:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2016-2021, EPFL/Blue Brain Project
2 | #
3 | # This file is part of BluePyOpt
4 | # This library is free software; you can redistribute it and/or modify it under
5 | # the terms of the GNU Lesser General Public License version 3.0 as published
6 | # by the Free Software Foundation.
7 | # This library is distributed in the hope that it will be useful, but WITHOUT
8 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
9 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
10 | # details.
11 | # You should have received a copy of the GNU Lesser General Public License
12 | # along with this library; if not, write to the Free Software Foundation, Inc.,
13 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
14 |
15 | sphinx>=2.0.0
16 | sphinx-bluebrain-theme
17 | sphinx-autorun
--------------------------------------------------------------------------------
/tests/.gitignore:
--------------------------------------------------------------------------------
1 | /.coverage
2 | /coverage.xml
3 | /output/
4 | /tmp/
5 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BlueBrain/BluePyMM/e095fec6f185409ef7ca68d4161dbb485eed9ffb/tests/__init__.py
--------------------------------------------------------------------------------
/tests/examples/cmvd3a/circuit_intlayers.mvd3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BlueBrain/BluePyMM/e095fec6f185409ef7ca68d4161dbb485eed9ffb/tests/examples/cmvd3a/circuit_intlayers.mvd3
--------------------------------------------------------------------------------
/tests/examples/cmvd3a/circuit_strlayers.mvd3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BlueBrain/BluePyMM/e095fec6f185409ef7ca68d4161dbb485eed9ffb/tests/examples/cmvd3a/circuit_strlayers.mvd3
--------------------------------------------------------------------------------
/tests/examples/simple1/.gitignore:
--------------------------------------------------------------------------------
1 | /tmp
2 | /tmp_git
3 | /output_megate
4 | /hoc/
5 | /output/
6 |
--------------------------------------------------------------------------------
/tests/examples/simple1/build_git.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | import os
24 | import shutil
25 | import contextlib
26 |
27 | import sh
28 |
29 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
30 |
31 |
@contextlib.contextmanager
def cd(dir_name):
    """Context manager: temporarily change the working directory.

    Switches to ``dir_name`` on entry and restores the previous working
    directory on exit, even if the body raises.
    """
    saved_cwd = os.getcwd()
    os.chdir(dir_name)
    try:
        yield
    finally:
        os.chdir(saved_cwd)
41 |
42 |
def main():
    """Build the throw-away git repository used by the test suite.

    Creates ``tmp_git/`` next to this script (removing any pre-existing
    one), commits the files shared by all e-models on the default branch,
    then creates one branch per e-model ('emodel1', 'emodel2') holding
    that model's morphology and setup code.
    """

    tmp_git_dir = os.path.join(BASE_DIR, 'tmp_git')
    git_subdir = 'subdir'
    orig_dir = os.path.join(BASE_DIR, 'data/emodels_dir')

    # Start from a clean repository directory
    if os.path.exists(tmp_git_dir):
        shutil.rmtree(tmp_git_dir)

    os.makedirs(tmp_git_dir)

    with cd(tmp_git_dir):
        sh.git('init')

        # An identity is required by `git commit`; dummy values suffice
        sh.git.config('user.name', 'dummy')
        sh.git.config('user.email', 'dummy@dummy.com')

        # Files shared by all e-models, committed on the default branch
        main_files = ['final.json', 'emodel_etype_map.json']

        os.makedirs(git_subdir)

        with cd(git_subdir):
            for filename in main_files:
                shutil.copy(os.path.join(orig_dir, git_subdir, filename), '.')
                sh.git.add(filename)

        sh.git.commit('-m', 'main')

        with cd(git_subdir):
            for emodel_n in ['1', '2']:
                # NOTE(review): assumes the default branch is named
                # 'master'; newer git versions may default to 'main' —
                # confirm on the CI git version.
                sh.git.checkout('master')
                sh.git.checkout('-b', 'emodel%s' % emodel_n)
                # Per-model files placed only on this model's branch
                model_files = [
                    "morphologies/morph%s.asc" % emodel_n,
                    "setup/evaluator.py",
                    "setup/__init__.py"]

                for filename in model_files:
                    # Recreate the sub-directory layout before copying
                    model_file_dir = os.path.dirname(filename)
                    if not os.path.exists(model_file_dir):
                        os.makedirs(model_file_dir)
                    shutil.copy(
                        os.path.join(
                            orig_dir,
                            git_subdir,
                            filename),
                        model_file_dir)

                    sh.git.add(filename)
                sh.git.commit('-m', 'emodel%s' % emodel_n)


if __name__ == '__main__':
    main()
98 |
--------------------------------------------------------------------------------
/tests/examples/simple1/cell_template.jinja2:
--------------------------------------------------------------------------------
1 | /*
2 | {%- if banner %}
3 | {{banner}}
4 | {%- endif %}
5 | */
6 | {load_file("stdrun.hoc")}
7 | {load_file("import3d.hoc")}
8 |
9 | {%- if global_params %}
10 | /*
11 | * Check that global parameters are the same as with the optimization
12 | */
13 | proc check_parameter(/* name, expected_value, value */){
14 | strdef error
15 | if($2 != $3){
16 | sprint(error, "Parameter %s has different value %f != %f", $s1, $2, $3)
17 | execerror(error)
18 | }
19 | }
20 | proc check_simulator() {
21 | {%- for param, value in global_params.items() %}
22 | check_parameter("{{param}}", {{value}}, {{param}})
23 | {%- endfor %}
24 | }
25 | {%- endif %}
26 | {%- if ignored_global_params %}
27 | /* The following global parameters were set in BluePyOpt
28 | {%- for param, value in ignored_global_params.items() %}
29 | * {{param}} = {{value}}
30 | {%- endfor %}
31 | */
32 | {%- endif %}
33 |
34 | begintemplate {{template_name}}
35 | public init, morphology, geom_nseg_fixed, geom_nsec
36 | public soma, dend, apic, axon, myelin
37 | create soma[1], dend[1], apic[1], axon[1], myelin[1]
38 |
39 | objref this, CellRef, segCounts
40 |
41 | public all, somatic, apical, axonal, basal, myelinated, APC
42 | objref all, somatic, apical, axonal, basal, myelinated, APC
43 |
44 | proc init(/* args: morphology_dir, morphology_name */) {
45 | all = new SectionList()
46 | apical = new SectionList()
47 | axonal = new SectionList()
48 | basal = new SectionList()
49 | somatic = new SectionList()
50 | myelinated = new SectionList()
51 |
52 | //For compatibility with BBP CCells
53 | CellRef = this
54 |
55 | forall delete_section()
56 |
57 | if(numarg() >= 2) {
58 | load_morphology($s1, $s2)
59 | } else {
60 | {%- if morphology %}
61 | load_morphology($s1, "{{morphology}}")
62 | {%- else %}
63 | execerror("Template {{template_name}} requires morphology name to instantiate")
64 | {%- endif %}
65 | }
66 |
67 | geom_nseg()
68 | {%- if replace_axon %}
69 | replace_axon()
70 | {%- endif %}
71 | insertChannel()
72 | biophys()
73 | re_init_rng()
74 | }
75 |
76 | proc load_morphology(/* morphology_dir, morphology_name */) {localobj morph, import, sf, extension
77 | strdef morph_path
78 | sprint(morph_path, "%s/%s", $s1, $s2)
79 |
80 | sf = new StringFunctions()
81 | extension = new String()
82 |
83 | sscanf(morph_path, "%s", extension.s)
84 | sf.right(extension.s, sf.len(extension.s)-4)
85 |
86 | if( strcmp(extension.s, ".asc") == 0 ) {
87 | morph = new Import3d_Neurolucida3()
88 | } else if( strcmp(extension.s, ".swc" ) == 0) {
89 | morph = new Import3d_SWC_read()
90 | } else {
91 | printf("Unsupported file format: Morphology file has to end with .asc or .swc" )
92 | quit()
93 | }
94 |
95 | morph.quiet = 1
96 | morph.input(morph_path)
97 |
98 | import = new Import3d_GUI(morph, 0)
99 | import.instantiate(this)
100 | }
101 |
102 | /*
103 | * Assignment of mechanism values based on distance from the soma
104 | * Matches the BluePyOpt method
105 | */
106 | proc distribute_distance(){local x localobj sl
107 | strdef stmp, distfunc, mech
108 |
109 | sl = $o1
110 | mech = $s2
111 | distfunc = $s3
112 | this.soma[0] distance(0, 0.5)
113 | sprint(distfunc, "%%s %s(%%f) = %s", mech, distfunc)
114 | forsec sl for(x, 0) {
115 | sprint(stmp, distfunc, secname(), x, distance(x))
116 | execute(stmp)
117 | }
118 | }
119 |
120 | proc geom_nseg() {
121 | this.geom_nsec() //To count all sections
122 | //TODO: geom_nseg_fixed depends on segCounts which is calculated by
123 | // geom_nsec. Can this be collapsed?
124 | this.geom_nseg_fixed(40)
125 | this.geom_nsec() //To count all sections
126 | }
127 |
128 | proc insertChannel() {
129 | {%- for location, names in channels.items() %}
130 | forsec this.{{location}} {
131 | {%- for channel in names %}
132 | insert {{channel}}
133 | {%- endfor %}
134 | }
135 | {%- endfor %}
136 | }
137 |
138 | proc biophys() {
139 | {% for loc, parameters in section_params %}
140 | forsec CellRef.{{ loc }} {
141 | {%- for param in parameters %}
142 | {{ param.name }} = {{ param.value }}
143 | {%- endfor %}
144 | }
145 | {% endfor %}
146 | {%- for location, param_name, value in range_params %}
147 | distribute_distance(CellRef.{{location}}, "{{param_name}}", "{{value}}")
148 | {%- endfor %}
149 | }
150 |
151 | func sec_count(/* SectionList */) { local nSec
152 | nSec = 0
153 | forsec $o1 {
154 | nSec += 1
155 | }
156 | return nSec
157 | }
158 |
159 | /*
160 | * Iterate over the section and compute how many segments should be allocate to
161 | * each.
162 | */
163 | proc geom_nseg_fixed(/* chunkSize */) { local secIndex, chunkSize
164 | chunkSize = $1
165 | soma area(.5) // make sure diam reflects 3d points
166 | secIndex = 0
167 | forsec all {
168 | nseg = 1 + 2*int(L/chunkSize)
169 | segCounts.x[secIndex] = nseg
170 | secIndex += 1
171 | }
172 | }
173 |
174 | /*
175 | * Count up the number of sections
176 | */
177 | proc geom_nsec() { local nSec
178 | nSecAll = sec_count(all)
179 | nSecSoma = sec_count(somatic)
180 | nSecApical = sec_count(apical)
181 | nSecBasal = sec_count(basal)
182 | nSecMyelinated = sec_count(myelinated)
183 | nSecAxonalOrig = nSecAxonal = sec_count(axonal)
184 |
185 | segCounts = new Vector()
186 | segCounts.resize(nSecAll)
187 | nSec = 0
188 | forsec all {
189 | segCounts.x[nSec] = nseg
190 | nSec += 1
191 | }
192 | }
193 |
194 | /*
195 | * Replace the axon built from the original morphology file with a stub axon
196 | */
197 | {%- if replace_axon %}
198 | {{replace_axon}}
199 | {%- endif %}
200 |
201 |
202 | {{re_init_rng}}
203 |
204 | endtemplate {{template_name}}
205 |
--------------------------------------------------------------------------------
/tests/examples/simple1/cell_template_neuron.jinja2:
--------------------------------------------------------------------------------
1 | /*
2 | {%- if banner %}
3 | {{banner}}
4 | {%- endif %}
5 | */
6 | {load_file("stdrun.hoc")}
7 | {load_file("import3d.hoc")}
8 |
9 | {%- if global_params %}
10 | /*
11 | * Check that global parameters are the same as with the optimization
12 | */
13 | proc check_parameter(/* name, expected_value, value */){
14 | strdef error
15 | if($2 != $3){
16 | sprint(error, "Parameter %s has different value %f != %f", $s1, $2, $3)
17 | execerror(error)
18 | }
19 | }
20 | proc check_simulator() {
21 | {%- for param, value in global_params.items() %}
22 | check_parameter("{{param}}", {{value}}, {{param}})
23 | {%- endfor %}
24 | }
25 | {%- endif %}
26 | {%- if ignored_global_params %}
27 | /* The following global parameters were set in BluePyOpt
28 | {%- for param, value in ignored_global_params.items() %}
29 | * {{param}} = {{value}}
30 | {%- endfor %}
31 | */
32 | {%- endif %}
33 |
34 | begintemplate {{template_name}}
35 | public init, morphology, geom_nseg_fixed, geom_nsec
36 | public soma, dend, apic, axon, myelin
37 | create soma[1], dend[1], apic[1], axon[1], myelin[1]
38 |
39 | objref this, CellRef, segCounts
40 |
41 | public all, somatic, apical, axonal, basal, myelinated, APC
42 | objref all, somatic, apical, axonal, basal, myelinated, APC
43 |
44 | proc init(/* args: morphology_dir, morphology_name */) {
45 | all = new SectionList()
46 | apical = new SectionList()
47 | axonal = new SectionList()
48 | basal = new SectionList()
49 | somatic = new SectionList()
50 | myelinated = new SectionList()
51 |
52 | //For compatibility with BBP CCells
53 | CellRef = this
54 |
55 | forall delete_section()
56 |
57 | if(numarg() >= 2) {
58 | load_morphology($s1, $s2)
59 | } else {
60 | {%- if morphology %}
61 | load_morphology($s1, "{{morphology}}")
62 | {%- else %}
63 | execerror("Template {{template_name}} requires morphology name to instantiate")
64 | {%- endif %}
65 | }
66 |
67 | geom_nseg()
68 | {%- if replace_axon %}
69 | replace_axon()
70 | {%- endif %}
71 | insertChannel()
72 | biophys()
73 | re_init_rng()
74 | }
75 |
76 | proc load_morphology(/* morphology_dir, morphology_name */) {localobj morph, import, sf, extension
77 | strdef morph_path
78 | sprint(morph_path, "%s/%s", $s1, $s2)
79 |
80 | sf = new StringFunctions()
81 | extension = new String()
82 |
83 | sscanf(morph_path, "%s", extension.s)
84 | sf.right(extension.s, sf.len(extension.s)-4)
85 |
86 | if( strcmp(extension.s, ".asc") == 0 ) {
87 | morph = new Import3d_Neurolucida3()
88 | } else if( strcmp(extension.s, ".swc" ) == 0) {
89 | morph = new Import3d_SWC_read()
90 | } else {
91 | printf("Unsupported file format: Morphology file has to end with .asc or .swc" )
92 | quit()
93 | }
94 |
95 | morph.quiet = 1
96 | morph.input(morph_path)
97 |
98 | import = new Import3d_GUI(morph, 0)
99 | import.instantiate(this)
100 | }
101 |
102 | /*
103 | * Assignment of mechanism values based on distance from the soma
104 | * Matches the BluePyOpt method
105 | */
106 | proc distribute_distance(){local x localobj sl
107 | strdef stmp, distfunc, mech
108 |
109 | sl = $o1
110 | mech = $s2
111 | distfunc = $s3
112 | this.soma[0] distance(0, 0.5)
113 | sprint(distfunc, "%%s %s(%%f) = %s", mech, distfunc)
114 | forsec sl for(x, 0) {
115 | sprint(stmp, distfunc, secname(), x, distance(x))
116 | execute(stmp)
117 | }
118 | }
119 |
120 | proc geom_nseg() {
121 | this.geom_nsec() //To count all sections
122 | //TODO: geom_nseg_fixed depends on segCounts which is calculated by
123 | // geom_nsec. Can this be collapsed?
124 | this.geom_nseg_fixed(40)
125 | this.geom_nsec() //To count all sections
126 | }
127 |
128 | proc insertChannel() {
129 | {%- for location, names in channels.items() %}
130 | forsec this.{{location}} {
131 | {%- for channel in names %}
132 | insert {{channel}}
133 | {%- endfor %}
134 | }
135 | {%- endfor %}
136 | }
137 |
138 | proc biophys() {
139 | {% for loc, parameters in section_params %}
140 | forsec CellRef.{{ loc }} {
141 | {%- for param in parameters %}
142 | {{ param.name }} = {{ param.value }}
143 | {%- endfor %}
144 | }
145 | {% endfor %}
146 | {%- for location, param_name, value in range_params %}
147 | distribute_distance(CellRef.{{location}}, "{{param_name}}", "{{value}}")
148 | {%- endfor %}
149 | }
150 |
151 | func sec_count(/* SectionList */) { local nSec
152 | nSec = 0
153 | forsec $o1 {
154 | nSec += 1
155 | }
156 | return nSec
157 | }
158 |
159 | /*
160 | * Iterate over the section and compute how many segments should be allocate to
161 | * each.
162 | */
163 | proc geom_nseg_fixed(/* chunkSize */) { local secIndex, chunkSize
164 | chunkSize = $1
165 | soma area(.5) // make sure diam reflects 3d points
166 | secIndex = 0
167 | forsec all {
168 | nseg = 1 + 2*int(L/chunkSize)
169 | segCounts.x[secIndex] = nseg
170 | secIndex += 1
171 | }
172 | }
173 |
174 | /*
175 | * Count up the number of sections
176 | */
177 | proc geom_nsec() { local nSec
178 | nSecAll = sec_count(all)
179 | nSecSoma = sec_count(somatic)
180 | nSecApical = sec_count(apical)
181 | nSecBasal = sec_count(basal)
182 | nSecMyelinated = sec_count(myelinated)
183 | nSecAxonalOrig = nSecAxonal = sec_count(axonal)
184 |
185 | segCounts = new Vector()
186 | segCounts.resize(nSecAll)
187 | nSec = 0
188 | forsec all {
189 | segCounts.x[nSec] = nseg
190 | nSec += 1
191 | }
192 | }
193 |
194 | /*
195 | * Replace the axon built from the original morphology file with a stub axon
196 | */
197 | {%- if replace_axon %}
198 | {{replace_axon}}
199 | {%- endif %}
200 |
201 |
202 | {{re_init_rng}}
203 |
204 | endtemplate {{template_name}}
205 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/emodels_dir/subdir/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | x86_64
3 | *.bk
4 | /checkpoints/
5 | /responses.pkl
6 | test.log
7 | .ipython/
8 | file.patch
9 | file_.patch
10 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/emodels_dir/subdir/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BlueBrain/BluePyMM/e095fec6f185409ef7ca68d4161dbb485eed9ffb/tests/examples/simple1/data/emodels_dir/subdir/__init__.py
--------------------------------------------------------------------------------
/tests/examples/simple1/data/emodels_dir/subdir/emodel_etype_map.json:
--------------------------------------------------------------------------------
1 | {
2 | "emodel1": {
3 | "mm_recipe": "emodel1",
4 | "etype": "etype1",
5 | "layer": ["1","str1"]
6 | },
7 | "emodel2": {
8 | "mm_recipe": "emodel2",
9 | "etype": "etype2",
10 | "layer": ["1","2"]
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/emodels_dir/subdir/final.json:
--------------------------------------------------------------------------------
1 | {
2 | "emodel1": {
3 | "main_path": ".",
4 | "seed": 2,
5 | "rank": 0,
6 | "notes": "",
7 | "branch": "emodel1",
8 | "params": {"cm": 1.0},
9 | "fitness": {"Step1.SpikeCount": 20.0},
10 | "score": 104.72906197480131,
11 | "morph_path": "morphologies/morph1.asc"
12 | },
13 | "emodel2": {
14 | "main_path": ".",
15 | "seed": 2,
16 | "rank": 0,
17 | "notes": "",
18 | "branch": "emodel2",
19 | "params": {"cm": 0.5},
20 | "fitness": {"Step1.SpikeCount": 20.0},
21 | "score": 104.72906197480131,
22 | "morph_path": "morphologies/morph2.asc"
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/emodels_dir/subdir/mechanisms/Ih.mod:
--------------------------------------------------------------------------------
1 | :Comment :
2 | :Reference : : Kole,Hallermann,and Stuart, J. Neurosci. 2006
3 |
4 | NEURON {
5 | SUFFIX Ih
6 | NONSPECIFIC_CURRENT ihcn
7 | RANGE gIhbar, gIh, ihcn
8 | }
9 |
10 | UNITS {
11 | (S) = (siemens)
12 | (mV) = (millivolt)
13 | (mA) = (milliamp)
14 | }
15 |
16 | PARAMETER {
17 | gIhbar = 0.00001 (S/cm2)
18 | ehcn = -45.0 (mV)
19 | }
20 |
21 | ASSIGNED {
22 | v (mV)
23 | ihcn (mA/cm2)
24 | gIh (S/cm2)
25 | mInf
26 | mTau
27 | mAlpha
28 | mBeta
29 | }
30 |
31 | STATE {
32 | m
33 | }
34 |
35 | BREAKPOINT {
36 | SOLVE states METHOD cnexp
37 | gIh = gIhbar*m
38 | ihcn = gIh*(v-ehcn)
39 | }
40 |
41 | DERIVATIVE states {
42 | rates()
43 | m' = (mInf-m)/mTau
44 | }
45 |
46 | INITIAL{
47 | rates()
48 | m = mInf
49 | }
50 |
51 | PROCEDURE rates(){
52 | UNITSOFF
53 | if(v == -154.9){
54 | v = v + 0.0001
55 | }
56 | mAlpha = 0.001*6.43*(v+154.9)/(exp((v+154.9)/11.9)-1)
57 | mBeta = 0.001*193*exp(v/33.1)
58 | mInf = mAlpha/(mAlpha + mBeta)
59 | mTau = 1/(mAlpha + mBeta)
60 | UNITSON
61 | }
62 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/emodels_dir/subdir/morphologies:
--------------------------------------------------------------------------------
1 | ../../morphs
--------------------------------------------------------------------------------
/tests/examples/simple1/data/emodels_dir/subdir/setup/__init__.py:
--------------------------------------------------------------------------------
1 | """Module init"""
2 |
3 | from . import evaluator # NOQA
4 |
5 | multieval = True
6 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/emodels_dir/subdir/setup/evaluator.py:
--------------------------------------------------------------------------------
1 | """Run simple cell optimisation"""
2 |
3 | import bluepyopt.ephys as ephys
4 | import bluepyopt as bpopt
5 |
6 |
class MultiEvaluator(bpopt.evaluators.Evaluator):

    """Evaluator that aggregates several CellModel evaluators.

    The combined objective list is the concatenation of each
    sub-evaluator's objectives.  The parameters are taken from the first
    sub-evaluator; all sub-evaluators are assumed to share the same
    parameter set.
    """

    def __init__(
        self, evaluators=None, sim=None,
    ):
        """Constructor

        Args:
            evaluators (list): list of CellModel evaluators
            sim: simulator instance shared by the evaluators
        """

        self.sim = sim
        self.evaluators = evaluators
        # Concatenate the objectives of all sub-evaluators, preserving
        # the order of the evaluator list.
        objectives = [
            objective
            for evaluator in self.evaluators
            for objective in evaluator.objectives
        ]

        # These are identical for all models, so take them from the first
        # evaluator.  Better solution available?
        self.param_names = self.evaluators[0].param_names
        params = self.evaluators[0].cell_model.params_by_names(
            self.param_names
        )

        super(MultiEvaluator, self).__init__(objectives, params)

    def param_dict(self, param_array):
        """Convert a flat parameter array into a {name: value} dict.

        Values are paired with self.param_names positionally.
        """
        return dict(zip(self.param_names, param_array))

    def objective_dict(self, objective_array):
        """Convert a flat objective array into a {name: value} dict.

        Raises:
            Exception: if objective_array does not have exactly one value
                per objective.
        """
        objective_names = [objective.name for objective in self.objectives]

        if len(objective_names) != len(objective_array):
            raise Exception(
                "MultiEvaluator: list given to objective_dict() "
                "has wrong number of objectives"
            )

        return dict(zip(objective_names, objective_array))

    def objective_list(self, objective_dict):
        """Convert an objective dict into a list ordered like self.objectives.

        Raises:
            KeyError: if an objective name is missing from objective_dict.
        """
        return [
            objective_dict[objective.name] for objective in self.objectives
        ]

    def evaluate_with_dicts(self, param_dict=None):
        """Run every sub-evaluator and merge their score dicts into one.

        Later evaluators overwrite duplicate keys, matching the original
        dict.update() behaviour.
        """
        scores = {}
        for evaluator in self.evaluators:
            scores.update(evaluator.evaluate_with_dicts(param_dict=param_dict))

        return scores

    def evaluate_with_lists(self, param_list=None):
        """Run evaluation with lists as input and output."""
        param_dict = self.param_dict(param_list)

        return self.objective_list(
            self.evaluate_with_dicts(param_dict=param_dict)
        )

    def evaluate(self, param_list=None):
        """Alias for evaluate_with_lists()."""
        return self.evaluate_with_lists(param_list)

    def __str__(self):
        """Human-readable summary listing the sub-evaluators."""
        content = "multi cell evaluator:\n"

        content += " evaluators:\n"
        for evaluator in self.evaluators:
            content += " %s\n" % str(evaluator)

        return content
104 |
105 |
def create(etype, altmorph=None):
    """Create a MultiEvaluator for the given e-type.

    Args:
        etype (str): e-model name ('emodel1' or 'emodel2'); selects the
            morphology file when altmorph is not given
        altmorph: optional alternative morphology; altmorph[0][1] is used
            as the morphology path
            (NOTE(review): presumably a list of (name, path) pairs —
            confirm against the caller)

    Returns:
        MultiEvaluator wrapping a single CellEvaluator for a simple
        hh-soma cell with 'cm' as the only free parameter.

    Raises:
        Exception: if etype is unknown and no altmorph is given.
    """

    # Point location at the soma centre, used for stimulus and recording
    soma_loc = ephys.locations.NrnSeclistCompLocation(
        name="soma", seclist_name="somatic", sec_index=0, comp_x=0.5
    )

    somatic_loc = ephys.locations.NrnSeclistLocation(
        "somatic", seclist_name="somatic"
    )

    hh_mech = ephys.mechanisms.NrnMODMechanism(
        name="hh", suffix="hh", locations=[somatic_loc]
    )

    # 'cm' is the single optimised parameter (cf. final.json params)
    cm_param = ephys.parameters.NrnSectionParameter(
        name="cm",
        param_name="cm",
        value=1.0,
        locations=[somatic_loc],
        bounds=[0.5, 2.0],
    )

    if altmorph:
        morph_path = altmorph[0][1]
    else:
        if etype == "emodel1":
            morph_path = "../morphologies/morph1.asc"
        elif etype == "emodel2":
            morph_path = "../morphologies/morph2.asc"
        else:
            raise Exception("Unknown emodel: %s" % etype)

    morph = ephys.morphologies.NrnFileMorphology(morph_path)

    simple_cell = ephys.models.CellModel(
        name="simple_cell", morph=morph, mechs=[hh_mech], params=[cm_param]
    )

    # Single square current step: 0.01 nA from 100 ms to 150 ms
    stim = ephys.stimuli.NrnSquarePulse(
        step_amplitude=0.01,
        step_delay=100,
        step_duration=50,
        location=soma_loc,
        total_duration=200,
    )

    rec = ephys.recordings.CompRecording(
        name="Step1.soma.v", location=soma_loc, variable="v"
    )

    protocol = ephys.protocols.SweepProtocol("Step1", [stim], [rec])

    nrn = ephys.simulators.NrnSimulator()

    # NOTE(review): feature name uses 'Spikecount' while the objective
    # below uses 'SpikeCount' (matching final.json's fitness keys) —
    # the casing mismatch appears intentional; do not "fix" it.
    efeature = ephys.efeatures.eFELFeature(
        "Step1.Spikecount",
        efel_feature_name="Spikecount",
        recording_names={"": "Step1.soma.v"},
        stim_start=100,
        stim_end=150,
        exp_mean=1,
        exp_std=0.05,
    )

    objective = ephys.objectives.SingletonObjective(
        "Step1.SpikeCount", efeature
    )

    score_calc = ephys.objectivescalculators.ObjectivesCalculator([objective])

    cell_evaluator = ephys.evaluators.CellEvaluator(
        cell_model=simple_cell,
        param_names=["cm"],
        fitness_protocols={protocol.name: protocol},
        fitness_calculator=score_calc,
        sim=nrn,
    )

    all_cell_evaluators = [cell_evaluator]

    multi_eval = MultiEvaluator(evaluators=all_cell_evaluators, sim=nrn)

    return multi_eval
190 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/morphs/apical_points_isec.json:
--------------------------------------------------------------------------------
1 | {
2 | "morph": 0
3 | }
--------------------------------------------------------------------------------
/tests/examples/simple1/data/morphs/morph1.asc:
--------------------------------------------------------------------------------
1 | ("CellBody"
2 | (Color Red)
3 | (CellBody)
4 | (10, 0, 0, 0)
5 | (0, 10, 0, 0)
6 | (-10, 0, 0, 0)
7 | (0, -10, 0, 0)
8 | (10, 0, 0, 0)
9 | );
10 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/morphs/morph2.asc:
--------------------------------------------------------------------------------
1 | ("CellBody"
2 | (Color Red)
3 | (CellBody)
4 | (10, 0, 0, 0)
5 | (0, 10, 0, 0)
6 | (-10, 0, 0, 0)
7 | (0, -10, 0, 0)
8 | (10, 0, 0, 0)
9 | );
10 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/morphs/neuronDB.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | morph1
5 | mtype1
6 |
7 | 1
8 |
9 |
10 | morph2
11 | mtype2
12 |
13 | 1
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/rep_morphs/apical_points_isec.json:
--------------------------------------------------------------------------------
1 | ../morphs/apical_points_isec.json
--------------------------------------------------------------------------------
/tests/examples/simple1/data/rep_morphs/morph1.asc:
--------------------------------------------------------------------------------
1 | ("CellBody"
2 | (Color Red)
3 | (CellBody)
4 | (10, 0, 0, 0)
5 | (0, 10, 0, 0)
6 | (-10, 0, 0, 0)
7 | (0, -10, 0, 0)
8 | (10, 0, 0, 0)
9 | );
10 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/rep_morphs/morph2.asc:
--------------------------------------------------------------------------------
1 | ("CellBody"
2 | (Color Red)
3 | (CellBody)
4 | (10, 0, 0, 0)
5 | (0, 10, 0, 0)
6 | (-10, 0, 0, 0)
7 | (0, -10, 0, 0)
8 | (10, 0, 0, 0)
9 | );
10 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/rep_morphs/neuronDB.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | morph1
5 | mtype1
6 |
7 | 1
8 |
9 |
10 | morph2
11 | mtype2
12 |
13 | 1
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/simple1_recipe.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/tests/examples/simple1/data/simple1_recipe.yaml:
--------------------------------------------------------------------------------
1 | neurons:
2 | - traits:
3 | etype: {etype1: 33.33, etype2: 66.67}
4 | layer: 1
5 | mtype: mtype1
6 | - traits:
7 | etype: {etype1: 10.00}
8 | layer: 1
9 | mtype: mtype2
10 | - traits:
11 | etype: {etype2: 100}
12 | layer: 2
13 | mtype: mtype1
14 | version: v2.0
15 |
16 |
--------------------------------------------------------------------------------
/tests/examples/simple1/output_expected/emodel_dirs.json:
--------------------------------------------------------------------------------
1 | {
2 | "emodel1": "../tmp/emodels/emodel1",
3 | "emodel2": "../tmp/emodels/emodel2"
4 | }
5 |
--------------------------------------------------------------------------------
/tests/examples/simple1/output_expected/emodels_hoc/emodel1.hoc:
--------------------------------------------------------------------------------
1 | /*
2 | Created by BluePyOpt(1.5.8) at 2017-04-28 13:01:20.865764
3 | */
4 | {load_file("stdrun.hoc")}
5 | {load_file("import3d.hoc")}
6 |
7 | begintemplate simple_cell
8 | public init, morphology, geom_nseg_fixed, geom_nsec
9 | public soma, dend, apic, axon, myelin
10 | create soma[1], dend[1], apic[1], axon[1], myelin[1]
11 |
12 | objref this, CellRef, segCounts
13 |
14 | public all, somatic, apical, axonal, basal, myelinated, APC
15 | objref all, somatic, apical, axonal, basal, myelinated, APC
16 |
17 | proc init(/* args: morphology_dir, morphology_name */) {
18 | all = new SectionList()
19 | apical = new SectionList()
20 | axonal = new SectionList()
21 | basal = new SectionList()
22 | somatic = new SectionList()
23 | myelinated = new SectionList()
24 |
25 | //For compatibility with BBP CCells
26 | CellRef = this
27 |
28 | forall delete_section()
29 |
30 | if(numarg() >= 2) {
31 | load_morphology($s1, $s2)
32 | } else {
33 | load_morphology($s1, "morph1.asc")
34 | }
35 |
36 | geom_nseg()
37 | insertChannel()
38 | biophys()
39 | re_init_rng()
40 | }
41 |
42 | proc load_morphology(/* morphology_dir, morphology_name */) {localobj morph, import, sf, extension
43 | strdef morph_path
44 | sprint(morph_path, "%s/%s", $s1, $s2)
45 |
46 | sf = new StringFunctions()
47 | extension = new String()
48 |
49 | sscanf(morph_path, "%s", extension.s)
50 | sf.right(extension.s, sf.len(extension.s)-4)
51 |
52 | if( strcmp(extension.s, ".asc") == 0 ) {
53 | morph = new Import3d_Neurolucida3()
54 | } else if( strcmp(extension.s, ".swc" ) == 0) {
55 | morph = new Import3d_SWC_read()
56 | } else {
57 | printf("Unsupported file format: Morphology file has to end with .asc or .swc" )
58 | quit()
59 | }
60 |
61 | morph.quiet = 1
62 | morph.input(morph_path)
63 |
64 | import = new Import3d_GUI(morph, 0)
65 | import.instantiate(this)
66 | }
67 |
68 | /*
69 | * Assignment of mechanism values based on distance from the soma
70 | * Matches the BluePyOpt method
71 | */
72 | proc distribute_distance(){local x localobj sl
73 | strdef stmp, distfunc, mech
74 |
75 | sl = $o1
76 | mech = $s2
77 | distfunc = $s3
78 | this.soma[0] distance(0, 0.5)
79 | sprint(distfunc, "%%s %s(%%f) = %s", mech, distfunc)
80 | forsec sl for(x, 0) {
81 | sprint(stmp, distfunc, secname(), x, distance(x))
82 | execute(stmp)
83 | }
84 | }
85 |
86 | proc geom_nseg() {
87 | this.geom_nsec() //To count all sections
88 | //TODO: geom_nseg_fixed depends on segCounts which is calculated by
89 | // geom_nsec. Can this be collapsed?
90 | this.geom_nseg_fixed(40)
91 | this.geom_nsec() //To count all sections
92 | }
93 |
94 | proc insertChannel() {
95 | forsec this.all {
96 | }
97 | forsec this.apical {
98 | }
99 | forsec this.axonal {
100 | }
101 | forsec this.basal {
102 | }
103 | forsec this.somatic {
104 | insert hh
105 | }
106 | forsec this.myelinated {
107 | }
108 | }
109 |
110 | proc biophys() {
111 |
112 | forsec CellRef.all {
113 | }
114 |
115 | forsec CellRef.apical {
116 | }
117 |
118 | forsec CellRef.axonal {
119 | }
120 |
121 | forsec CellRef.basal {
122 | }
123 |
124 | forsec CellRef.somatic {
125 | cm = 1
126 | }
127 |
128 | forsec CellRef.myelinated {
129 | }
130 |
131 | }
132 |
133 | func sec_count(/* SectionList */) { local nSec
134 | nSec = 0
135 | forsec $o1 {
136 | nSec += 1
137 | }
138 | return nSec
139 | }
140 |
141 | /*
142 | * Iterate over the section and compute how many segments should be allocate to
143 | * each.
144 | */
145 | proc geom_nseg_fixed(/* chunkSize */) { local secIndex, chunkSize
146 | chunkSize = $1
147 | soma area(.5) // make sure diam reflects 3d points
148 | secIndex = 0
149 | forsec all {
150 | nseg = 1 + 2*int(L/chunkSize)
151 | segCounts.x[secIndex] = nseg
152 | secIndex += 1
153 | }
154 | }
155 |
156 | /*
157 | * Count up the number of sections
158 | */
159 | proc geom_nsec() { local nSec
160 | nSecAll = sec_count(all)
161 | nSecSoma = sec_count(somatic)
162 | nSecApical = sec_count(apical)
163 | nSecBasal = sec_count(basal)
164 | nSecMyelinated = sec_count(myelinated)
165 | nSecAxonalOrig = nSecAxonal = sec_count(axonal)
166 |
167 | segCounts = new Vector()
168 | segCounts.resize(nSecAll)
169 | nSec = 0
170 | forsec all {
171 | segCounts.x[nSec] = nseg
172 | nSec += 1
173 | }
174 | }
175 |
176 | /*
177 | * Replace the axon built from the original morphology file with a stub axon
178 | */
179 |
180 |
181 |
182 | func hash_str() {localobj sf strdef right
183 | sf = new StringFunctions()
184 |
185 | right = $s1
186 |
187 | n_of_c = sf.len(right)
188 |
189 | hash = 0
190 | char_int = 0
191 | for i = 0, n_of_c - 1 {
192 | sscanf(right, "%c", & char_int)
193 | hash = (hash * 31 + char_int) % (2 ^ 31 - 1)
194 | sf.right(right, 1)
195 | }
196 |
197 | return hash
198 | }
199 |
200 | proc re_init_rng() {localobj sf
201 | strdef full_str, name
202 |
203 | sf = new StringFunctions()
204 |
205 |
206 | }
207 |
208 |
209 | endtemplate simple_cell
--------------------------------------------------------------------------------
/tests/examples/simple1/output_expected/emodels_hoc/emodel2.hoc:
--------------------------------------------------------------------------------
1 | /*
2 | Created by BluePyOpt(1.5.8) at 2017-04-28 13:01:20.862237
3 | */
4 | {load_file("stdrun.hoc")}
5 | {load_file("import3d.hoc")}
6 |
7 | begintemplate simple_cell
8 | public init, morphology, geom_nseg_fixed, geom_nsec
9 | public soma, dend, apic, axon, myelin
10 | create soma[1], dend[1], apic[1], axon[1], myelin[1]
11 |
12 | objref this, CellRef, segCounts
13 |
14 | public all, somatic, apical, axonal, basal, myelinated, APC
15 | objref all, somatic, apical, axonal, basal, myelinated, APC
16 |
17 | proc init(/* args: morphology_dir, morphology_name */) {
18 | all = new SectionList()
19 | apical = new SectionList()
20 | axonal = new SectionList()
21 | basal = new SectionList()
22 | somatic = new SectionList()
23 | myelinated = new SectionList()
24 |
25 | //For compatibility with BBP CCells
26 | CellRef = this
27 |
28 | forall delete_section()
29 |
30 | if(numarg() >= 2) {
31 | load_morphology($s1, $s2)
32 | } else {
33 | load_morphology($s1, "morph2.asc")
34 | }
35 |
36 | geom_nseg()
37 | insertChannel()
38 | biophys()
39 | re_init_rng()
40 | }
41 |
42 | proc load_morphology(/* morphology_dir, morphology_name */) {localobj morph, import, sf, extension
43 | strdef morph_path
44 | sprint(morph_path, "%s/%s", $s1, $s2)
45 |
46 | sf = new StringFunctions()
47 | extension = new String()
48 |
49 | sscanf(morph_path, "%s", extension.s)
50 | sf.right(extension.s, sf.len(extension.s)-4)
51 |
52 | if( strcmp(extension.s, ".asc") == 0 ) {
53 | morph = new Import3d_Neurolucida3()
54 | } else if( strcmp(extension.s, ".swc" ) == 0) {
55 | morph = new Import3d_SWC_read()
56 | } else {
57 | printf("Unsupported file format: Morphology file has to end with .asc or .swc" )
58 | quit()
59 | }
60 |
61 | morph.quiet = 1
62 | morph.input(morph_path)
63 |
64 | import = new Import3d_GUI(morph, 0)
65 | import.instantiate(this)
66 | }
67 |
68 | /*
69 | * Assignment of mechanism values based on distance from the soma
70 | * Matches the BluePyOpt method
71 | */
72 | proc distribute_distance(){local x localobj sl
73 | strdef stmp, distfunc, mech
74 |
75 | sl = $o1
76 | mech = $s2
77 | distfunc = $s3
78 | this.soma[0] distance(0, 0.5)
79 | sprint(distfunc, "%%s %s(%%f) = %s", mech, distfunc)
80 | forsec sl for(x, 0) {
81 | sprint(stmp, distfunc, secname(), x, distance(x))
82 | execute(stmp)
83 | }
84 | }
85 |
86 | proc geom_nseg() {
87 | this.geom_nsec() //To count all sections
88 | //TODO: geom_nseg_fixed depends on segCounts which is calculated by
89 | // geom_nsec. Can this be collapsed?
90 | this.geom_nseg_fixed(40)
91 | this.geom_nsec() //To count all sections
92 | }
93 |
94 | proc insertChannel() {
95 | forsec this.all {
96 | }
97 | forsec this.apical {
98 | }
99 | forsec this.axonal {
100 | }
101 | forsec this.basal {
102 | }
103 | forsec this.somatic {
104 | insert hh
105 | }
106 | forsec this.myelinated {
107 | }
108 | }
109 |
110 | proc biophys() {
111 |
112 | forsec CellRef.all {
113 | }
114 |
115 | forsec CellRef.apical {
116 | }
117 |
118 | forsec CellRef.axonal {
119 | }
120 |
121 | forsec CellRef.basal {
122 | }
123 |
124 | forsec CellRef.somatic {
125 | cm = 0.5
126 | }
127 |
128 | forsec CellRef.myelinated {
129 | }
130 |
131 | }
132 |
133 | func sec_count(/* SectionList */) { local nSec
134 | nSec = 0
135 | forsec $o1 {
136 | nSec += 1
137 | }
138 | return nSec
139 | }
140 |
141 | /*
142 |  * Iterate over the sections and compute how many segments should be allocated
143 |  * to each.
144 | */
145 | proc geom_nseg_fixed(/* chunkSize */) { local secIndex, chunkSize
146 | chunkSize = $1
147 | soma area(.5) // make sure diam reflects 3d points
148 | secIndex = 0
149 | forsec all {
150 | nseg = 1 + 2*int(L/chunkSize)
151 | segCounts.x[secIndex] = nseg
152 | secIndex += 1
153 | }
154 | }
155 |
156 | /*
157 | * Count up the number of sections
158 | */
159 | proc geom_nsec() { local nSec
160 | nSecAll = sec_count(all)
161 | nSecSoma = sec_count(somatic)
162 | nSecApical = sec_count(apical)
163 | nSecBasal = sec_count(basal)
164 | nSecMyelinated = sec_count(myelinated)
165 | nSecAxonalOrig = nSecAxonal = sec_count(axonal)
166 |
167 | segCounts = new Vector()
168 | segCounts.resize(nSecAll)
169 | nSec = 0
170 | forsec all {
171 | segCounts.x[nSec] = nseg
172 | nSec += 1
173 | }
174 | }
175 |
176 | /*
177 | * Replace the axon built from the original morphology file with a stub axon
178 | */
179 |
180 |
181 |
182 | func hash_str() {localobj sf strdef right
183 | sf = new StringFunctions()
184 |
185 | right = $s1
186 |
187 | n_of_c = sf.len(right)
188 |
189 | hash = 0
190 | char_int = 0
191 | for i = 0, n_of_c - 1 {
192 | sscanf(right, "%c", & char_int)
193 | hash = (hash * 31 + char_int) % (2 ^ 31 - 1)
194 | sf.right(right, 1)
195 | }
196 |
197 | return hash
198 | }
199 |
200 | proc re_init_rng() {localobj sf
201 | strdef full_str, name
202 |
203 | sf = new StringFunctions()
204 |
205 |
206 | }
207 |
208 |
209 | endtemplate simple_cell
--------------------------------------------------------------------------------
/tests/examples/simple1/output_expected/final.json:
--------------------------------------------------------------------------------
1 | {
2 | "emodel1": {
3 | "branch": "emodel1",
4 | "fitness": {
5 | "Step1.SpikeCount": 20
6 | },
7 | "main_path": ".",
8 | "morph_path": "morphologies/morph1.asc",
9 | "notes": "",
10 | "params": {
11 | "cm": 1
12 | },
13 | "rank": 0,
14 | "score": 104.72906197480131,
15 | "seed": 2
16 | },
17 | "emodel2": {
18 | "branch": "emodel2",
19 | "fitness": {
20 | "Step1.SpikeCount": 20
21 | },
22 | "main_path": ".",
23 | "morph_path": "morphologies/morph2.asc",
24 | "notes": "",
25 | "params": {
26 | "cm": 0.5
27 | },
28 | "rank": 0,
29 | "score": 104.72906197480131,
30 | "seed": 2
31 | }
32 | }
--------------------------------------------------------------------------------
/tests/examples/simple1/output_expected/scores.sqlite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BlueBrain/BluePyMM/e095fec6f185409ef7ca68d4161dbb485eed9ffb/tests/examples/simple1/output_expected/scores.sqlite
--------------------------------------------------------------------------------
/tests/examples/simple1/output_megate_expected/extneurondb.dat:
--------------------------------------------------------------------------------
1 | morph1 1 mtype1 etype1 emodel1_mtype1_1_morph1
2 | morph2 1 mtype2 etype1 emodel1_mtype2_1_morph2
3 | morph1 1 mtype1 etype2 emodel2_mtype1_1_morph1
4 |
--------------------------------------------------------------------------------
/tests/examples/simple1/output_megate_expected/mecombo_emodel.tsv:
--------------------------------------------------------------------------------
1 | morph_name layer fullmtype etype emodel combo_name threshold_current holding_current
2 | morph1 1 mtype1 etype1 emodel1 emodel1_mtype1_1_morph1
3 | morph2 1 mtype2 etype1 emodel1 emodel1_mtype2_1_morph2
4 | morph1 1 mtype1 etype2 emodel2 emodel2_mtype1_1_morph1
5 |
--------------------------------------------------------------------------------
/tests/examples/simple1/simple1_conf_hoc.json:
--------------------------------------------------------------------------------
1 | {
2 | "final_json_path": "./data/emodels_dir/subdir/final.json",
3 | "emodels_tmp_dir": "./tmp/emodels",
4 | "mecombo_emodel_filename": "output_megate_expected/mecombo_emodel.tsv",
5 | "template": "cell_template_neuron.jinja2",
6 | "hoc_output_dir": "hoc"
7 | }
8 |
--------------------------------------------------------------------------------
/tests/examples/simple1/simple1_conf_prepare.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "1.0",
3 | "recipe_path": "./data/simple1_recipe.xml",
4 | "morph_path": "./data/morphs",
5 | "rep_morph_path": "./data/rep_morphs",
6 | "emodels_dir": "./data/emodels_dir",
7 | "emodels_hoc_dir": "./output/emodels_hoc",
8 | "emodel_etype_map_path": "subdir/emodel_etype_map.json",
9 | "final_json_path": "subdir/final.json",
10 | "tmp_dir": "./tmp",
11 | "scores_db": "./output/scores.sqlite",
12 | "output_dir": "./output/",
13 | "make_template_name_compatible": true
14 | }
15 |
--------------------------------------------------------------------------------
/tests/examples/simple1/simple1_conf_prepare_git.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "1.0",
3 | "recipe_path": "./data/simple1_recipe.xml",
4 | "morph_path": "./data/morphs",
5 | "rep_morph_path": "./data/rep_morphs",
6 | "emodels_repo": "tmp_git",
7 | "emodels_githash": "master",
8 | "emodels_hoc_dir": "./output/emodels_hoc",
9 | "emodel_etype_map_path": "subdir/emodel_etype_map.json",
10 | "final_json_path": "subdir/final.json",
11 | "tmp_dir": "./tmp",
12 | "scores_db": "./output/scores.sqlite",
13 | "output_dir": "./output"
14 | }
15 |
--------------------------------------------------------------------------------
/tests/examples/simple1/simple1_conf_run.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "1.0",
3 | "scores_db": "./output/scores.sqlite",
4 | "output_dir": "./output/"
5 | }
6 |
--------------------------------------------------------------------------------
/tests/examples/simple1/simple1_conf_select.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "1.0",
3 | "scores_db": "./output/scores.sqlite",
4 | "emodels_hoc_dir": "./output/emodels_hoc",
5 | "to_skip_features": [],
6 | "pdf_filename": "output_megate/megating.pdf",
7 | "megate_thresholds": [
8 | {"emodel": [".*"], "fullmtype": [".*"], "etype": [".*"], "features": [".*"], "megate_threshold": 5}
9 | ],
10 | "output_dir": "./output_megate",
11 | "check_opt_scores": true,
12 | "plot_emodels_per_morphology": true
13 | }
14 |
--------------------------------------------------------------------------------
/tests/examples/simple1/simple1_conf_select_2.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "1.0",
3 | "scores_db": "./output/scores.sqlite",
4 | "to_skip_features": [],
5 | "pdf_filename": "output_megate/megating.pdf",
6 | "megate_thresholds": [
7 | {"emodel": [".*"], "fullmtype": [".*"], "etype": [".*"], "features": [".*"], "megate_threshold": 5}
8 | ],
9 | "output_dir": "./output_megate",
10 | "skip_repaired_exemplar": false,
11 | "plot_emodels_per_morphology": true,
12 | "make_names_neuron_compliant": true
13 | }
14 |
--------------------------------------------------------------------------------
/tests/test_bluepymm.py:
--------------------------------------------------------------------------------
1 | """Test bluepymm module"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | import pytest
24 | import sh
25 |
26 |
27 | @pytest.mark.unit
28 | def test_import():
29 | """bluepymm: test importing bluepymm"""
30 | import bluepymm # NOQA
31 |
32 |
33 | @pytest.mark.unit
34 | def test_shell():
35 | """bluepymm: test running bluepymm from shell"""
36 | bluepymm_h_output = sh.bluepymm('-h')
37 | assert 'usage: bluepymm' in bluepymm_h_output
38 |
--------------------------------------------------------------------------------
/tests/test_create_mm_sqlite.py:
--------------------------------------------------------------------------------
1 | """Tests for functionality in bluepymm/prepare_combos/create_mm_sqlite.py"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | import pandas
24 | import re
25 | import os
26 | import json
27 |
28 | import pytest
29 |
30 | from bluepymm.prepare_combos import create_mm_sqlite
31 | from bluepymm import tools
32 |
33 |
34 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
35 | TEST_DIR = os.path.join(BASE_DIR, 'examples/simple1')
36 |
37 |
38 | @pytest.mark.unit
39 | def test_check_morphology_existence():
40 | """prepare_combos.create_mm_sqlite: tests check_morphology_existence"""
41 | morph_name = 'morph1.asc'
42 | morph_type = 'test'
43 | morph_dir = os.path.join(TEST_DIR, 'data/morphs', morph_name)
44 | ret = create_mm_sqlite.check_morphology_existence(morph_name, morph_type,
45 | morph_dir)
46 | assert ret
47 |
48 | morph_name = 'does_not_exist.asc'
49 | morph_dir = os.path.join(TEST_DIR, 'data/morphs', morph_name)
50 |
51 | with pytest.raises(ValueError):
52 | create_mm_sqlite.check_morphology_existence(
53 | morph_name, morph_type, morph_dir)
54 |
55 |
56 | @pytest.mark.unit
57 | def test_create_exemplar_rows_skip_repaired_exemplar():
58 | """prepare_combos.create_mm_sqlite: test create_exemplar_rows
59 | based on test example 'simple1'.
60 | """
61 | emodel = 'emodel1'
62 | final_dict = {emodel: {
63 | 'main_path': '.',
64 | 'seed': 2,
65 | 'rank': 0,
66 | 'notes': '',
67 | 'branch': 'emodel1',
68 | 'params': {'cm': 1.0},
69 | 'fitness': {'Step1.SpikeCount': 20.0},
70 | 'score': 104.72906197480131,
71 | 'morph_path': 'morphologies/morph1.asc'
72 | }}
73 | fullmtype_morph_map = {} # not used in case of skip_repaired_exemplar
74 | emodel_etype_map = {emodel: {
75 | 'mm_recipe': 'emodel1',
76 | 'etype': 'etype1',
77 | 'layer': ['1', 'str1']
78 | }}
79 | emodels = ['emodel1']
80 | emodel_dir = os.path.join(TEST_DIR, 'data/emodels_dir/subdir/')
81 | emodel_dirs = {emodel: emodel_dir}
82 | rep_morph_dir = os.path.join(TEST_DIR, 'data/morphs')
83 | skip_repaired_exemplar = True
84 |
85 | # construct expected output
86 | unrep_morph_dir = os.path.dirname(
87 | os.path.join(emodel_dirs[emodel], final_dict[emodel]['morph_path']))
88 |
89 | with tools.cd(TEST_DIR):
90 | ret = create_mm_sqlite.create_exemplar_rows(
91 | final_dict, fullmtype_morph_map, emodel_etype_map, emodels,
92 | emodel_dirs, rep_morph_dir, unrep_morph_dir,
93 | skip_repaired_exemplar)
94 |
95 | data = [(None, None, None, None, emodel_etype_map[emodel]['etype'],
96 | 'morph1', '.asc', emodel, emodel, unrep_morph_dir, None,
97 | json.dumps(final_dict[emodel]['fitness']), None, True, True,
98 | False, False),
99 | (None, None, None, None, emodel_etype_map[emodel]['etype'],
100 | 'morph1', '.asc', emodel, emodel, unrep_morph_dir, None,
101 | json.dumps(final_dict[emodel]['fitness']), None, True, True,
102 | False, True)]
103 | columns = [
104 | 'layer',
105 | 'fullmtype',
106 | 'mtype',
107 | 'msubtype',
108 | 'etype',
109 | 'morph_name',
110 | 'morph_ext',
111 | 'emodel',
112 | 'original_emodel',
113 | 'morph_dir',
114 | 'scores',
115 | 'opt_scores',
116 | 'exception',
117 | 'to_run',
118 | 'is_exemplar',
119 | 'is_repaired',
120 | 'is_original']
121 | expected_ret = pandas.DataFrame(data, columns=columns)
122 | expected_ret.sort_index(axis=1, inplace=True)
123 | ret.sort_index(axis=1, inplace=True)
124 |
125 | pandas.testing.assert_frame_equal(ret, expected_ret)
126 |
127 |
128 | @pytest.mark.unit
129 | def test_remove_morph_regex_failures():
130 | """prepare_combos.create_mm_sqlite: test remove_morph_regex_failures"""
131 | data = pandas.DataFrame([('morph1', re.compile('morph1')),
132 | ('morph2', re.compile('morph1')),
133 | ('morph3', re.compile('.*')), ],
134 | columns=['morph_name', 'morph_regex'])
135 | ret = create_mm_sqlite.remove_morph_regex_failures(data)
136 |
137 | expected_ret = pandas.DataFrame([('morph1'),
138 | ('morph3'), ],
139 | columns=['morph_name'])
140 | pandas.testing.assert_frame_equal(ret, expected_ret)
141 |
142 |
143 | @pytest.mark.unit
144 | def test_create_mm_sqlite():
145 | """prepare_combos.create_mm_sqlite: test create_mm_sqlite
146 | based on test example 'simple1'.
147 | """
148 | output_filename = 'scores.sqlite'
149 | recipe_filename = 'data/simple1_recipe.xml'
150 | morph_dir = 'data/morphs/'
151 | rep_morph_dir = 'data/rep_morphs/'
152 | emodel_dir = os.path.join(TEST_DIR, 'data/emodels_dir/subdir/')
153 | emodel_etype_map = tools.load_json(os.path.join(emodel_dir,
154 | 'emodel_etype_map.json'))
155 | final_dict = tools.load_json(os.path.join(emodel_dir, 'final.json'))
156 | emodel_dirs = {m: emodel_dir for m in ['emodel1', 'emodel2']}
157 | skip_repaired_exemplar = True
158 |
159 | with tools.cd(TEST_DIR):
160 | create_mm_sqlite.create_mm_sqlite(output_filename,
161 | recipe_filename,
162 | morph_dir,
163 | rep_morph_dir,
164 | rep_morph_dir,
165 | emodel_etype_map,
166 | final_dict,
167 | emodel_dirs,
168 | skip_repaired_exemplar)
169 | assert os.path.isfile(output_filename)
170 | # TODO: test database contents
171 |
172 | # clear output
173 | os.remove(output_filename)
174 |
--------------------------------------------------------------------------------
/tests/test_main.py:
--------------------------------------------------------------------------------
1 | """Test bluepymm main interface"""
2 |
3 | from __future__ import print_function
4 |
5 | """
6 | Copyright (c) 2018, EPFL/Blue Brain Project
7 |
8 | This file is part of BluePyMM
9 |
10 | This library is free software; you can redistribute it and/or modify it under
11 | the terms of the GNU Lesser General Public License version 3.0 as published
12 | by the Free Software Foundation.
13 |
14 | This library is distributed in the hope that it will be useful, but WITHOUT
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
16 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
17 | details.
18 |
19 | You should have received a copy of the GNU Lesser General Public License
20 | along with this library; if not, write to the Free Software Foundation, Inc.,
21 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22 | """
23 |
24 | import os
25 | import shutil
26 | import filecmp
27 |
28 | import bluepymm
29 |
30 |
31 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
32 | TEST_DATA_DIR = os.path.join(BASE_DIR, 'examples/simple1')
33 | TMP_DIR = os.path.join(BASE_DIR, 'tmp/main')
34 |
35 |
36 | def teardown_module():
37 | """Remove the temporary files."""
38 | shutil.rmtree(TMP_DIR)
39 |
40 |
41 | def _verify_emodel_json(filename, output_dir, nb_emodels):
42 | """Helper function to verify the emodel json file"""
43 | data_json = os.path.join(output_dir, filename)
44 | assert os.path.isfile(data_json)
45 | data = bluepymm.tools.load_json(data_json)
46 | assert len(data) == nb_emodels
47 | return data
48 |
49 |
50 | def _verify_prepare_combos_output(scores_db, emodels_hoc_dir, output_dir,
51 | nb_emodels):
52 | """Helper function to verify the output of the prepare combos step"""
53 | # TODO: test database contents
54 | assert os.path.isfile(scores_db)
55 |
56 | assert os.path.isdir(emodels_hoc_dir)
57 | hoc_files = os.listdir(emodels_hoc_dir)
58 | assert len(hoc_files) == nb_emodels
59 | for hoc_file in hoc_files:
60 | assert hoc_file.endswith('.hoc')
61 |
62 | _verify_emodel_json('final.json', output_dir, nb_emodels)
63 | emodel_dirs = _verify_emodel_json('emodel_dirs.json', output_dir,
64 | nb_emodels)
65 | for emodel in emodel_dirs:
66 | assert os.path.isdir(emodel_dirs[emodel])
67 |
68 |
69 | def _verify_run_combos_output(scores_db):
70 | """Helper function to verify the output of the run combos step"""
71 | assert os.path.isfile(scores_db)
72 |
73 | # TODO: test database contents
74 | # Disabled for now, there are absolute paths in db
75 | """
76 | import pandas
77 | import sqlite3
78 |
79 | scores_sqlite_filename = 'output/scores.sqlite'
80 | exp_scores_sqlite_filename = 'output_expected/scores.sqlite'
81 | with sqlite3.connect(scores_sqlite_filename) as conn:
82 | scores = pandas.read_sql('SELECT * FROM scores', conn)
83 |
84 | with sqlite3.connect(exp_scores_sqlite_filename) as conn:
85 | exp_scores = pandas.read_sql('SELECT * FROM scores', conn)
86 |
87 | if not scores.equals(exp_scores):
88 |         print("Resulting scores db: ", scores)
89 |         print("Expected scores db:", exp_scores)
90 |
91 |     assert scores.equals(exp_scores)
92 | """
93 |
94 |
95 | def _verify_select_combos_output(benchmark_dir, output_dir):
96 | """Helper function to verify output of combination selection"""
97 | files = ['extneurondb.dat', 'mecombo_emodel.tsv']
98 | matches = filecmp.cmpfiles(benchmark_dir, output_dir, files)
99 | if len(matches[0]) != len(files):
100 | print('Mismatch in files: {}'.format(matches[1]))
101 | assert len(matches[0]) == len(files)
102 |
103 |
104 | def _new_prepare_json(original_filename, test_dir):
105 | """Helper function to prepare new configuration file for prepare_combos."""
106 | config = bluepymm.tools.load_json(original_filename)
107 | config['tmp_dir'] = os.path.join(test_dir, 'tmp')
108 | config['output_dir'] = os.path.join(test_dir, 'output')
109 | config['scores_db'] = os.path.join(config['output_dir'], 'scores.sqlite')
110 | config['emodels_hoc_dir'] = os.path.join(config['output_dir'],
111 | 'emodels_hoc')
112 | return bluepymm.tools.write_json(test_dir, original_filename, config)
113 |
114 |
115 | def _new_run_json(original_filename, test_dir):
116 | """Helper function to prepare new configuration file for run_combos."""
117 | config = bluepymm.tools.load_json(original_filename)
118 | config['output_dir'] = os.path.join(test_dir, 'output')
119 | config['scores_db'] = os.path.join(config['output_dir'], 'scores.sqlite')
120 | return bluepymm.tools.write_json(test_dir, original_filename, config)
121 |
122 |
123 | def _new_select_json(original_filename, test_dir):
124 | """Helper function to prepare new configuration file for select_combos."""
125 | config = bluepymm.tools.load_json(original_filename)
126 | config['scores_db'] = os.path.join(test_dir, 'output', 'scores.sqlite')
127 | config['pdf_filename'] = os.path.join(test_dir, 'megating.pdf')
128 | config['output_dir'] = os.path.join(test_dir, 'output')
129 | return bluepymm.tools.write_json(test_dir, original_filename, config)
130 |
131 |
132 | def _test_main(test_data_dir, prepare_config_json, run_config_json,
133 | select_config_json, nb_emodels, test_dir):
134 | """Helper function to test complete BluePyMM workflow"""
135 |
136 | bluepymm.tools.makedirs(test_dir)
137 |
138 | with bluepymm.tools.cd(test_data_dir):
139 | # prepare new configuration files based on 'test_dir'
140 | prepare_config_json = _new_prepare_json(prepare_config_json, test_dir)
141 | run_config_json = _new_run_json(run_config_json, test_dir)
142 | select_config_json = _new_select_json(select_config_json, test_dir)
143 |
144 | # prepare combinations
145 | args_list = ['prepare', prepare_config_json]
146 | bluepymm.main.run(args_list)
147 |
148 | # verify prepared combinations
149 | prepare_config = bluepymm.tools.load_json(prepare_config_json)
150 | _verify_prepare_combos_output(prepare_config['scores_db'],
151 | prepare_config['emodels_hoc_dir'],
152 | prepare_config['output_dir'], nb_emodels)
153 |
154 | # run combinations
155 | args_list = ['run', run_config_json]
156 | bluepymm.main.run(args_list)
157 |
158 | # verify run combinations
159 | run_config = bluepymm.tools.load_json(run_config_json)
160 | _verify_run_combos_output(run_config['scores_db'])
161 |
162 | # select combinations
163 | args_list = ['select', select_config_json]
164 | bluepymm.main.run(args_list)
165 |
166 | # test selection output
167 | select_config = bluepymm.tools.load_json(select_config_json)
168 | _verify_select_combos_output('output_megate_expected',
169 | select_config['output_dir'])
170 |
171 |
172 | def test_main_from_dir():
173 | """bluepymm.main: test full BluePyMM workflow with plain directory input
174 | based on example simple1"""
175 | prepare_config_json = 'simple1_conf_prepare.json'
176 | run_config_json = 'simple1_conf_run.json'
177 | select_config_json = 'simple1_conf_select.json'
178 | nb_emodels = 2
179 | test_dir = os.path.join(TMP_DIR, 'test_main_from_dir')
180 |
181 | _test_main(TEST_DATA_DIR, prepare_config_json, run_config_json,
182 | select_config_json, nb_emodels, test_dir)
183 |
184 |
185 | def test_main_from_git_repo():
186 | """bluepymm.main: test full BluePyMM workflow with git repo input
187 | based on example simple1"""
188 | prepare_config_json = 'simple1_conf_prepare_git.json'
189 | run_config_json = 'simple1_conf_run.json'
190 | select_config_json = 'simple1_conf_select.json'
191 | nb_emodels = 2
192 | test_dir = os.path.join(TMP_DIR, 'test_main_from_git_repo')
193 |
194 | _test_main(TEST_DATA_DIR, prepare_config_json, run_config_json,
195 | select_config_json, nb_emodels, test_dir)
196 |
--------------------------------------------------------------------------------
/tests/test_megate_output.py:
--------------------------------------------------------------------------------
1 | """Tests for select_combos/megate_output"""
2 |
3 | from __future__ import print_function
4 |
5 | """
6 | Copyright (c) 2018, EPFL/Blue Brain Project
7 |
8 | This file is part of BluePyMM
9 |
10 | This library is free software; you can redistribute it and/or modify it under
11 | the terms of the GNU Lesser General Public License version 3.0 as published
12 | by the Free Software Foundation.
13 |
14 | This library is distributed in the hope that it will be useful, but WITHOUT
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
16 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
17 | details.
18 |
19 | You should have received a copy of the GNU Lesser General Public License
20 | along with this library; if not, write to the Free Software Foundation, Inc.,
21 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22 | """
23 |
24 | import pandas
25 | import filecmp
26 | import os
27 |
28 | import pytest
29 |
30 | import bluepymm.select_combos as select_combos
31 | from bluepymm import tools
32 |
33 |
34 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
35 | TEST_DATA_DIR = os.path.join(BASE_DIR, 'examples/simple1')
36 | TMP_DIR = os.path.join(BASE_DIR, 'tmp/megate_output')
37 |
38 |
39 | def _test_save_megate_results(data, sort_key, test_dir, compliant):
40 | # input parameters
41 | columns = ['morph_name', 'layer', 'fullmtype', 'etype', 'emodel',
42 | 'combo_name', 'threshold_current', 'holding_current']
43 | df = pandas.DataFrame(data, columns=columns)
44 |
45 | # save_megate_results
46 | select_combos.megate_output.save_megate_results(
47 | df, test_dir, sort_key=sort_key, make_names_neuron_compliant=compliant)
48 |
49 | # verify output files
50 | benchmark_dir = os.path.join(TEST_DATA_DIR, 'output_megate_expected')
51 | files = ['extneurondb.dat', 'mecombo_emodel.tsv']
52 | matches = filecmp.cmpfiles(benchmark_dir, test_dir, files)
53 | if len(matches[0]) != len(files):
54 | print('Mismatch in files: {}'.format(matches[1]))
55 | assert len(matches[0]) == len(files)
56 |
57 | if compliant:
58 | logfile_path = os.path.join(test_dir, 'log_neuron_compliance.csv')
59 | assert os.path.isfile(logfile_path)
60 |
61 |
62 | @pytest.mark.unit
63 | def test_save_megate_results_no_sort():
64 | """bluepymm.select_combos: test save_megate_results."""
65 | data = [('morph1', 1, 'mtype1', 'etype1', 'emodel1',
66 | 'emodel1_mtype1_1_morph1', '', ''),
67 | ('morph2', 1, 'mtype2', 'etype1', 'emodel1',
68 | 'emodel1_mtype2_1_morph2', '', ''),
69 | ('morph1', 1, 'mtype1', 'etype2', 'emodel2',
70 | 'emodel2_mtype1_1_morph1', '', '')]
71 | test_dir = os.path.join(TMP_DIR, 'test_save_megate_results_no_sort')
72 | tools.makedirs(test_dir)
73 | _test_save_megate_results(data, None, test_dir, False)
74 |
75 |
76 | @pytest.mark.unit
77 | def test_save_megate_results_sort():
78 | """bluepymm.select_combos: test save_megate_results sorted."""
79 | data = [('morph1', 1, 'mtype1', 'etype1', 'emodel1',
80 | 'emodel1_mtype1_1_morph1', '', ''),
81 | ('morph1', 1, 'mtype1', 'etype2', 'emodel2',
82 | 'emodel2_mtype1_1_morph1', '', ''),
83 | ('morph2', 1, 'mtype2', 'etype1', 'emodel1',
84 | 'emodel1_mtype2_1_morph2', '', '')]
85 | test_dir = os.path.join(TMP_DIR, 'test_save_megate_results_sort')
86 | tools.makedirs(test_dir)
87 | _test_save_megate_results(data, 'combo_name', test_dir, False)
88 |
89 |
90 | @pytest.mark.unit
91 | def test_save_megate_results_compliant():
92 | """bluepymm.select_combos: test save_megate_results neuron compliant."""
93 | data = [('morph1', 1, 'mtype1', 'etype1', 'emodel1',
94 | 'emodel1_mtype1_1_morph1', '', ''),
95 | ('morph2', 1, 'mtype2', 'etype1', 'emodel1',
96 | 'emodel1_mtype2_1_morph2', '', ''),
97 | ('morph1', 1, 'mtype1', 'etype2', 'emodel2',
98 | 'emodel2_mtype1_1_morph1', '', '')]
99 | test_dir = os.path.join(TMP_DIR, 'test_save_megate_results_compliant')
100 | tools.makedirs(test_dir)
101 | _test_save_megate_results(data, None, test_dir, True)
102 |
--------------------------------------------------------------------------------
/tests/test_prepare_combos.py:
--------------------------------------------------------------------------------
1 | """Test bluepymm/prepare_combos"""
2 |
3 | from __future__ import print_function
4 |
5 | """
6 | Copyright (c) 2018, EPFL/Blue Brain Project
7 |
8 | This file is part of BluePyMM
9 |
10 | This library is free software; you can redistribute it and/or modify it under
11 | the terms of the GNU Lesser General Public License version 3.0 as published
12 | by the Free Software Foundation.
13 |
14 | This library is distributed in the hope that it will be useful, but WITHOUT
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
16 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
17 | details.
18 |
19 | You should have received a copy of the GNU Lesser General Public License
20 | along with this library; if not, write to the Free Software Foundation, Inc.,
21 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22 | """
23 |
24 | import os
25 |
26 | from bluepymm import tools, prepare_combos
27 |
28 |
29 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
30 | TEST_DATA_DIR = os.path.join(BASE_DIR, 'examples/simple1')
31 | TMP_DIR = os.path.join(BASE_DIR, 'tmp/prepare_combos')
32 |
33 |
34 | def _verify_emodel_json(filename, output_dir, nb_emodels):
35 | """Helper function to verify emodel json output"""
36 | data_json = os.path.join(output_dir, filename)
37 | assert os.path.isfile(data_json)
38 | data = tools.load_json(data_json)
39 | assert len(data) == nb_emodels
40 | return data
41 |
42 |
43 | def _verify_prepare_combos_output(scores_db, emodels_hoc_dir, output_dir,
44 | nb_emodels):
45 | """Helper function to verify output of prepare combos"""
46 | # TODO: test database contents
47 | assert os.path.isfile(scores_db)
48 |
49 | assert os.path.isdir(emodels_hoc_dir)
50 | hoc_files = os.listdir(emodels_hoc_dir)
51 | assert len(hoc_files) == nb_emodels
52 | for hoc_file in hoc_files:
53 | assert hoc_file.endswith('.hoc')
54 |
55 | _verify_emodel_json('final.json', output_dir, nb_emodels)
56 | emodel_dirs = _verify_emodel_json('emodel_dirs.json', output_dir,
57 | nb_emodels)
58 | for emodel in emodel_dirs:
59 | assert os.path.isdir(emodel_dirs[emodel])
60 |
61 |
def _prepare_config_json(original_filename, test_dir):
    """Load a config template and point all output paths into test_dir.

    Returns the path of the newly written configuration file.
    """
    config = tools.load_json(original_filename)
    output_dir = os.path.join(test_dir, 'output')
    config['tmp_dir'] = os.path.join(test_dir, 'tmp')
    config['output_dir'] = output_dir
    config['scores_db'] = os.path.join(output_dir, 'scores.sqlite')
    config['emodels_hoc_dir'] = os.path.join(output_dir, 'emodels_hoc')
    tools.makedirs(test_dir)
    return tools.write_json(test_dir, 'config.json', config)
72 |
73 |
def _test_prepare_combos(test_data_dir, config_template_path, nb_emodels,
                         test_dir):
    """Run prepare_combos on a test configuration and verify its output."""
    with tools.cd(test_data_dir):
        # write a configuration file whose paths point into test_dir
        config_path = _prepare_config_json(config_template_path, test_dir)

        # prepare the morphology/e-model combinations
        prepare_combos.main.prepare_combos(conf_filename=config_path,
                                           continu=False)

        # check the generated files and directories
        config = tools.load_json(config_path)
        _verify_prepare_combos_output(config['scores_db'],
                                      config['emodels_hoc_dir'],
                                      config['output_dir'], nb_emodels)
90 |
91 |
def test_prepare_combos_from_dir():
    """bluepymm.prepare_combos: test prepare_combos with plain directory input
    based on example simple1
    """
    test_dir = os.path.join(TMP_DIR, 'test_prepare_combos_from_dir')
    _test_prepare_combos(TEST_DATA_DIR, 'simple1_conf_prepare.json',
                         nb_emodels=2, test_dir=test_dir)
102 |
103 |
def test_prepare_combos_from_git_repo():
    """bluepymm.prepare_combos: test prepare_combos with git repo input
    based on example simple1
    """
    test_dir = os.path.join(TMP_DIR, 'test_prepare_combos_from_git_repo')
    _test_prepare_combos(TEST_DATA_DIR, 'simple1_conf_prepare_git.json',
                         nb_emodels=2, test_dir=test_dir)
114 |
--------------------------------------------------------------------------------
/tests/test_process_megate_config.py:
--------------------------------------------------------------------------------
1 | """Test process_megate_config"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | import re
24 |
25 | import pytest
26 |
27 | from bluepymm.select_combos import process_megate_config as proc_config
28 |
29 |
@pytest.mark.unit
def test_join_regex():
    """select_combos.process_megate_config: test join_regex"""
    patterns = ['one', '.*', 'three']
    # each pattern is anchored with '$' and or-ed together
    expected = re.compile('(one$)|(.*$)|(three$)')
    assert proc_config.join_regex(patterns) == expected
37 |
38 |
def _test_read_to_skip_features(skip_features, conf_dict):
    """Check read_to_skip_features output against the expected features."""
    patterns, features = proc_config.read_to_skip_features(conf_dict)
    assert features == skip_features
    # one compiled pattern per feature, in the same order
    assert patterns == [re.compile(feature) for feature in skip_features]
45 |
46 |
@pytest.mark.unit
def test_read_to_skip_features():
    """select_combos.process_megate_config: test read_to_skip_features"""
    # (expected features, configuration dict) pairs
    cases = [
        ([], {'to_skip_features': []}),
        ([], {}),
        (['test'], {'to_skip_features': ['test']}),
        (['.*'], {'to_skip_features': ['.*']}),
    ]
    for skip_features, conf_dict in cases:
        _test_read_to_skip_features(skip_features, conf_dict)
66 |
67 |
68 | '''
69 | # Disabling this test for now because it is unstable (give stochastic results)
70 | @pytest.mark.unit
71 | def test_read_megate_thresholds():
72 | """select_combos.process_megate_config: test read_megate_thresholds"""
73 |
74 | # all keys present
75 | test_dict = {'megate_thresholds': [
76 | {'emodel': ['test1'], 'fullmtype': ['test2'], 'etype': ['test3'],
77 | 'features': ['.*'], 'megate_threshold': 5}]}
78 | ret_patterns, ret_thresholds = proc_config.read_megate_thresholds(
79 | test_dict)
80 | expected_patterns = [
81 | {'megate_feature_threshold':
82 | {'megate_threshold': 5, 'features': proc_config.join_regex(['.*'])},
83 | 'emodel': proc_config.join_regex(['test1']), 'fullmtype':
84 | proc_config.join_regex(['test2']), 'etype': proc_config.join_regex(
85 | ['test3'])}]
86 | nt.assert_list_equal(ret_thresholds, test_dict['megate_thresholds'])
87 | nt.assert_equal(len(ret_patterns), len(expected_patterns))
88 | nt.assert_dict_equal(ret_patterns[0], expected_patterns[0])
89 |
90 | # key 'fullmtype' not present
91 | test_dict = {'megate_thresholds': [
92 | {'emodel': ['test1'], 'etype': ['test3'], 'features': ['.*'],
93 | 'megate_threshold': 5}]}
94 | ret_patterns, ret_thresholds = proc_config.read_megate_thresholds(
95 | test_dict)
96 | expected_patterns = [
97 | {'megate_feature_threshold':
98 | {'megate_threshold': 5, 'features': proc_config.join_regex(['.*'])},
99 | 'emodel': proc_config.join_regex(['test1']), 'fullmtype':
100 | re.compile('.*'), 'etype': proc_config.join_regex(['test3'])}]
101 | nt.assert_list_equal(ret_thresholds, test_dict['megate_thresholds'])
102 | nt.assert_equal(len(ret_patterns), len(expected_patterns))
103 | nt.assert_dict_equal(ret_patterns[0], expected_patterns[0])
104 | '''
105 |
--------------------------------------------------------------------------------
/tests/test_reporting.py:
--------------------------------------------------------------------------------
1 | """Tests for select_combos/reporting.py"""
2 |
3 | from __future__ import print_function
4 |
5 | """
6 | Copyright (c) 2018, EPFL/Blue Brain Project
7 |
8 | This file is part of BluePyMM
9 |
10 | This library is free software; you can redistribute it and/or modify it under
11 | the terms of the GNU Lesser General Public License version 3.0 as published
12 | by the Free Software Foundation.
13 |
14 | This library is distributed in the hope that it will be useful, but WITHOUT
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
16 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
17 | details.
18 |
19 | You should have received a copy of the GNU Lesser General Public License
20 | along with this library; if not, write to the Free Software Foundation, Inc.,
21 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22 | """
23 |
24 |
25 | import os
26 | import pandas
27 |
28 | import matplotlib.pyplot as plt
29 | from matplotlib.backends.backend_pdf import PdfPages
30 | import pytest
31 |
32 | from bluepymm import select_combos
33 |
34 |
35 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
36 | TMP_DIR = os.path.join(BASE_DIR, 'tmp/')
37 |
38 |
@pytest.mark.unit
def test_pdf_file():
    """select_combos.reporting: test pdf_file"""
    path = os.path.join(TMP_DIR, 'test_report.pdf')
    with select_combos.reporting.pdf_file(path) as pp:
        # a freshly opened report has no pages yet
        assert pp.get_pagecount() == 0
    assert os.path.exists(path)
47 |
48 |
def _get_pdf_file(filename):
    """Helper function to open a PDF file in the temporary directory.

    Creates the temporary directory if needed and returns an open
    PdfPages object; the caller is responsible for closing it.
    """
    path = os.path.join(TMP_DIR, filename)
    # exist_ok avoids the race-prone check-then-create pattern
    os.makedirs(TMP_DIR, exist_ok=True)
    return PdfPages(path)
55 |
56 |
@pytest.mark.unit
def test_add_plot_to_report():
    """select_combos.reporting: test add_plot_to_report"""

    def _make_figure(title):
        """Create a matplotlib figure with the given title."""
        figure = plt.figure()
        plt.title(title)
        return figure

    with _get_pdf_file('test_add_plot_to_report.pdf') as pp:
        select_combos.reporting.add_plot_to_report(pp, _make_figure, 'test')
69 |
70 |
@pytest.mark.unit
def test_plot_dict():
    """select_combos.reporting: test plot_dict"""
    title = 'test_title'
    fig = select_combos.reporting.plot_dict({'test': 1}, title)
    # the figure's axes must carry the requested title
    assert fig.get_axes()[0].get_title() == title
    plt.close()
79 |
80 |
@pytest.mark.unit
def test_plot_stacked_bars():
    """select_combos.reporting: test plot_stacked_bars"""
    frame = pandas.DataFrame({'data': [1, 2, 3]})
    xlabel, ylabel, title = 'test_xlabel', 'test_ylabel', 'test_title'
    fig = select_combos.reporting.plot_stacked_bars(frame, xlabel, ylabel,
                                                    title, 'C0')
    # the axes must carry the requested labels and title
    axes = fig.get_axes()[0]
    assert axes.get_xlabel() == xlabel
    assert axes.get_ylabel() == ylabel
    assert axes.get_title() == title
    plt.close()
96 |
97 |
@pytest.mark.unit
def test_plot_morphs_per_feature_for_emodel():
    """select_combos.reporting: test plot_morphs_per_feature_for_emodel"""
    emodel = 'emodel1'
    test_data = pandas.DataFrame({'passed': [True, False, True]})
    test_data_2 = pandas.DataFrame({'scores': [1, 2, 3]})
    fig = select_combos.reporting.plot_morphs_per_feature_for_emodel(
        emodel, test_data, test_data_2)
    assert emodel in fig.get_axes()[0].get_title()
    # close the figure to avoid leaking open matplotlib figures across tests
    # (consistent with the other plotting tests in this file)
    plt.close()
107 |
108 |
@pytest.mark.unit
def test_plot_morphs_per_mtype_for_emodel():
    """select_combos.reporting: test plot_morphs_per_mtype_for_emodel"""
    emodel = 'emodel1'
    mtypes = pandas.DataFrame({'mtypes': ['mtype1', 'mtype2', 'mtype1']})
    test_scores = pandas.DataFrame({'Passed all': [True, False, True],
                                    'mtypes': ['mtype1', 'mtype2', 'mtype1']})
    fig = select_combos.reporting.plot_morphs_per_mtype_for_emodel(
        emodel, mtypes['mtypes'], test_scores)
    assert emodel in fig.get_axes()[0].get_title()
    # close the figure to avoid leaking open matplotlib figures across tests
    # (consistent with the other plotting tests in this file)
    plt.close()
119 |
120 |
@pytest.mark.unit
def test_create_morphology_label():
    """select_combos.reporting: test create_morphology_label"""
    data = pandas.DataFrame({'morph_name': ['morph1', 'morph2'],
                             'fullmtype': ['mtype1', 'mtype2'],
                             'etype': ['etype1', 'etype2']})
    label = select_combos.reporting.create_morphology_label(data)
    # the label is built from the first row only
    assert label == 'morph1 (mtype1, etype1)'
130 |
131 |
@pytest.mark.unit
def test_plot_emodels_per_morphology():
    """select_combos.reporting: test plot_emodels_per_morphology"""
    data = pandas.DataFrame({'is_exemplar': False, 'morph_name': 'morph1',
                             'exception': None, 'fullmtype': 'mtype1',
                             'etype': 'etype1'}, index=[0])
    final_db = pandas.DataFrame({'morph_name': 'morph1'}, index=[0])
    fig = select_combos.reporting.plot_emodels_per_morphology(data, final_db)
    assert 'morphology' in fig.get_axes()[0].get_title()
    # close the figure to avoid leaking open matplotlib figures across tests
    # (consistent with the other plotting tests in this file)
    plt.close()
141 |
142 |
@pytest.mark.unit
def test_plot_emodels_per_metype():
    """select_combos.reporting: test plot_emodels_per_metype"""
    data = pandas.DataFrame({'is_exemplar': False, 'morph_name': 'morph1',
                             'exception': None, 'fullmtype': 'mtype1',
                             'etype': 'etype1'}, index=[0])
    final_db = pandas.DataFrame({'morph_name': 'morph1', 'fullmtype': 'mtype1',
                                 'etype': 'etype1'}, index=[0])
    fig = select_combos.reporting.plot_emodels_per_metype(data, final_db)
    assert 'me-type' in fig.get_axes()[0].get_title()
    # close the figure to avoid leaking open matplotlib figures across tests
    # (consistent with the other plotting tests in this file)
    plt.close()
153 |
--------------------------------------------------------------------------------
/tests/test_run_combos.py:
--------------------------------------------------------------------------------
1 | """Test bluepymm/run_combos"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 | import os
23 | import shutil
24 |
25 | from bluepymm import tools, run_combos
26 |
27 |
28 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
29 | TEST_DATA_DIR = os.path.join(BASE_DIR, 'examples/simple1')
30 | TMP_DIR = os.path.join(BASE_DIR, 'tmp/run_combos')
31 |
32 |
def _verify_run_combos_output(scores_db):
    """Helper function to verify output run combos

    Checks that the scores database file exists at the given path.
    """
    # TODO: test database contents
    assert os.path.isfile(scores_db)
37 |
38 |
def test_run_combos():
    """bluepymm.run_combos: test run_combos based on example simple1"""
    with tools.cd(TEST_DATA_DIR):
        # set up input data from the expected output of the prepare step
        shutil.copytree('output_expected', TMP_DIR)
        config = tools.load_json('simple1_conf_run.json')
        config['scores_db'] = os.path.join(TMP_DIR, 'scores.sqlite')
        config['output_dir'] = TMP_DIR

        # calculate scores for all combinations and verify the output
        run_combos.main.run_combos_from_conf(config)
        _verify_run_combos_output(config['scores_db'])

        # run again with apical point usage disabled
        config['use_apical_points'] = False
        run_combos.main.run_combos_from_conf(config)
        _verify_run_combos_output(config['scores_db'])
60 |
--------------------------------------------------------------------------------
/tests/test_select_combos.py:
--------------------------------------------------------------------------------
1 | """Test bluepymm/select_combos"""
2 |
3 | from __future__ import print_function
4 |
5 | """
6 | Copyright (c) 2018, EPFL/Blue Brain Project
7 |
8 | This file is part of BluePyMM
9 |
10 | This library is free software; you can redistribute it and/or modify it under
11 | the terms of the GNU Lesser General Public License version 3.0 as published
12 | by the Free Software Foundation.
13 |
14 | This library is distributed in the hope that it will be useful, but WITHOUT
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
16 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
17 | details.
18 |
19 | You should have received a copy of the GNU Lesser General Public License
20 | along with this library; if not, write to the Free Software Foundation, Inc.,
21 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22 | """
23 |
24 | import os
25 | import shutil
26 | import filecmp
27 |
28 | from bluepymm import tools, select_combos
29 |
30 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
31 | TEST_DATA_DIR = os.path.join(BASE_DIR, 'examples/simple1')
32 | TMP_DIR = os.path.join(BASE_DIR, 'tmp/select_combos')
33 |
34 |
35 | def _verify_output(benchmark_dir, output_dir):
36 | """Helper function to verify output of combination selection"""
37 | files = ['extneurondb.dat', 'mecombo_emodel.tsv']
38 | matches = filecmp.cmpfiles(benchmark_dir, output_dir, files)
39 | if len(matches[0]) != len(files):
40 | print('Mismatch in files: {}'.format(matches[1]))
41 | assert len(matches[0]) == len(files)
42 | assert os.path.exists(os.path.join(output_dir, 'mecombo_release.json'))
43 |
44 |
def _config_select_combos(config_template_path, tmp_dir):
    """Load a select_combos config template and point its paths at tmp_dir."""
    # copy the expected output of the run step as input data
    shutil.copytree('output_expected', tmp_dir)

    # build the configuration dict
    config = tools.load_json(config_template_path)
    config['scores_db'] = os.path.join(tmp_dir, 'scores.sqlite')
    config['pdf_filename'] = os.path.join(tmp_dir, 'megating.pdf')
    config['output_dir'] = os.path.join(tmp_dir, 'output')
    config['emodels_hoc_dir'] = os.path.join(tmp_dir, 'output/emodels_hoc')
    return config
57 |
58 |
def _test_select_combos(test_data_dir, tmp_dir, config_template_path,
                        benchmark_dir, n_processes=None):
    """Run select_combos on a test configuration and verify its output."""
    with tools.cd(test_data_dir):
        # prepare input data and configuration
        config = _config_select_combos(config_template_path, tmp_dir)

        # run the combination selection
        select_combos.main.select_combos_from_conf(config, n_processes)

        # compare results against the benchmark directory
        _verify_output(benchmark_dir, config['output_dir'])
71 |
72 |
def test_select_combos():
    """bluepymm.select_combos: test select_combos based on example simple1"""
    tmp_dir = os.path.join(TMP_DIR, 'test_select_combos')
    _test_select_combos(TEST_DATA_DIR, tmp_dir, 'simple1_conf_select.json',
                        'output_megate_expected', n_processes=1)
81 |
82 |
def test_select_combos_2():
    """bluepymm.select_combos: test select_combos based on example simple1 bis
    """
    tmp_dir = os.path.join(TMP_DIR, 'test_select_combos_2')
    _test_select_combos(TEST_DATA_DIR, tmp_dir, 'simple1_conf_select_2.json',
                        'output_megate_expected')
92 |
--------------------------------------------------------------------------------
/tests/test_sqlite_io.py:
--------------------------------------------------------------------------------
1 | """Tests for functionality in bluepymm/select_combos/sqlite_io.py"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 |
23 | import sqlite3
24 | import os
25 | import pandas
26 |
27 | import pytest
28 |
29 | from bluepymm.select_combos import sqlite_io
30 | from bluepymm import tools
31 |
32 |
33 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
34 | TMP_DIR = os.path.join(BASE_DIR, 'tmp/test_sqlite_io')
35 |
36 |
37 | def _create_database(test_dir, filename, scores, score_values):
38 | """Helper function to create test database."""
39 | path = os.path.join(test_dir, filename)
40 | with sqlite3.connect(path) as conn:
41 | scores.to_sql('scores', conn, if_exists='replace', index=False)
42 | score_values.to_sql('score_values', conn, if_exists='replace')
43 | return path
44 |
45 |
@pytest.mark.unit
def test_read_and_process_sqlite_score_tables():
    """select_combos.sqlite_io: test read_and_process_sqlite_score_tables"""
    # create a database with one row per table
    scores = pandas.DataFrame({'test': 1}, index=[0])
    score_values = pandas.DataFrame({'value': 2}, index=[0])
    test_dir = os.path.join(
        TMP_DIR, 'test_read_and_process_sqlite_score_tables')
    tools.makedirs(test_dir)
    path = _create_database(test_dir, 'test_db.sql', scores, score_values)

    # read the database back
    ret_scores, ret_score_values = \
        sqlite_io.read_and_process_sqlite_score_tables(path)

    # the stored index column must have been stripped
    assert 'index' not in ret_score_values.columns.values

    pandas.testing.assert_frame_equal(ret_scores, scores)
    pandas.testing.assert_frame_equal(ret_score_values, score_values)
68 |
69 |
@pytest.mark.unit
def test_read_and_process_sqlite_score_tables_error():
    """select_combos.sqlite_io: test read_and_process_sqlite_score_tables excep
    """
    # tables with different numbers of rows: 'scores' has two entries,
    # 'score_values' only one
    scores = pandas.DataFrame({'test': [1, 3]})
    score_values = pandas.DataFrame({'value': [2]})
    test_dir = os.path.join(
        TMP_DIR, 'test_read_and_process_sqlite_score_tables_error')
    tools.makedirs(test_dir)
    path = _create_database(test_dir, 'test_db_error.sql', scores,
                            score_values)

    # the incompatible row counts must raise an exception
    with pytest.raises(Exception):
        sqlite_io.read_and_process_sqlite_score_tables(path)
88 |
--------------------------------------------------------------------------------
/tests/test_tools.py:
--------------------------------------------------------------------------------
1 | """Test tools module"""
2 |
3 | """
4 | Copyright (c) 2018, EPFL/Blue Brain Project
5 |
6 | This file is part of BluePyMM
7 |
8 | This library is free software; you can redistribute it and/or modify it under
9 | the terms of the GNU Lesser General Public License version 3.0 as published
10 | by the Free Software Foundation.
11 |
12 | This library is distributed in the hope that it will be useful, but WITHOUT
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
15 | details.
16 |
17 | You should have received a copy of the GNU Lesser General Public License
18 | along with this library; if not, write to the Free Software Foundation, Inc.,
19 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 | """
21 |
22 | import os
23 | import pandas
24 | from string import digits
25 |
26 | import pytest
27 |
28 | from bluepymm import tools
29 |
30 | BASE_DIR = os.path.abspath(os.path.dirname(__file__))
31 | EXAMPLES = os.path.join(BASE_DIR, 'examples')
32 | TMP_DIR = os.path.join(BASE_DIR, 'tmp/tools')
33 |
34 |
@pytest.mark.unit
def test_cd():
    """bluepymm.tools: test cd"""
    original_dir = os.getcwd()
    with tools.cd(EXAMPLES):
        # inside the context the working directory is switched
        assert os.getcwd() == EXAMPLES
    # on exit the previous working directory is restored
    assert os.getcwd() == original_dir
44 |
45 |
@pytest.mark.unit
def test_json():
    """bluepymm.tools: test load_json and write_json"""
    config = {'test': ['1', 'two']}

    tools.makedirs(TMP_DIR)
    written_path = tools.write_json(TMP_DIR, 'tmp.json', config)
    assert written_path == os.path.join(TMP_DIR, 'tmp.json')
    # reading the file back must give the original data
    assert tools.load_json(written_path) == config
58 |
59 |
@pytest.mark.unit
def test_makedirs():
    """bluepymm.tools: test makedirs"""
    new_dir = os.path.join(TMP_DIR, 'make_dir')
    tools.makedirs(new_dir)
    assert os.path.isdir(new_dir)

    # creating an already existing directory must not raise
    tools.makedirs(new_dir)
    assert os.path.isdir(new_dir)

    # other errors are propagated: an empty path raises OSError
    with pytest.raises(OSError):
        tools.makedirs('')
75 |
76 |
@pytest.mark.unit
def test_check_no_null_nan_values():
    """bluepymm.tools: test check_no_null_nan_values"""
    # fully populated frame passes the check
    data = pandas.DataFrame({'A': [1, 3], 'B': [2, 4]})
    assert tools.check_no_null_nan_values(data, 'test')
82 |
83 |
@pytest.mark.unit
def test_check_no_null_nan_values_nan():
    """bluepymm.tools: test check_no_null_nan_values with nan"""
    # a NaN entry must make the check raise
    data = pandas.DataFrame({'A': [1, 3], 'B': [float('nan'), 4]})
    with pytest.raises(Exception):
        tools.check_no_null_nan_values(data, 'test')
90 |
91 |
@pytest.mark.unit
def test_check_no_null_nan_values_none():
    """bluepymm.tools: test check_no_null_nan_values with None"""
    # a None entry must make the check raise
    data = pandas.DataFrame({'A': [1, None], 'B': [2, 4]})
    with pytest.raises(Exception):
        tools.check_no_null_nan_values(data, 'test')
98 |
99 |
@pytest.mark.unit
def test_check_all_combos_have_run():
    """bluepymm.tools: test check_all_combos_have_run"""
    field = [1, 2, 3]

    # nothing left to run -> check passes
    data = pandas.DataFrame({'to_run': [False] * 3, 'field': field})
    assert tools.check_all_combos_have_run(data, 'test')

    # all combos still to run -> check raises
    data = pandas.DataFrame({'to_run': [True] * 3, 'field': field})
    with pytest.raises(Exception):
        tools.check_all_combos_have_run(data, 'test')

    # some combos still to run -> check raises
    data = pandas.DataFrame({'to_run': [False, True, False], 'field': field})
    with pytest.raises(Exception):
        tools.check_all_combos_have_run(data, 'test')
116 |
117 |
@pytest.mark.unit
def test_load_module():
    """bluepymm.tools: test load_module"""
    module_dir = os.path.join(EXAMPLES, 'simple1/data/emodels_dir/subdir/')

    # load a package via its __init__.py and use one of its submodules
    setup = tools.load_module(
        'setup', os.path.join(module_dir, 'setup/__init__.py'))
    setup.evaluator.create('emodel1')

    # load a single module file directly
    evaluator = tools.load_module(
        'evaluator', os.path.join(module_dir, 'setup', 'evaluator.py'))
    evaluator.create('emodel1')
135 |
136 |
@pytest.mark.unit
def test_check_compliance_with_neuron():
    """bluepymm.tools: test check compliance with neuron template name rules"""
    # empty, leading digit, hyphen, over-long: all rejected
    not_compliant = [
        '', '1test', 'test-test',
        'testtesttesttesttesttesttesttesttesttesttesttesttesttesttes']
    for name in not_compliant:
        assert not tools.check_compliance_with_neuron(name)

    # underscores, digits in the middle, capitals: all accepted
    for name in ['test_tesT', 'test123test', 'Test']:
        assert tools.check_compliance_with_neuron(name)
148 |
149 |
@pytest.mark.unit
def test_shorten_and_hash_string():
    """bluepymm.tools: test convert string"""
    label = 'testtesttesttesttesttesttesttesttest'
    # with default settings this label is returned unchanged
    assert tools.shorten_and_hash_string(label) == label

    keep_length = 3
    hash_length = 20
    shortened = tools.shorten_and_hash_string(label, keep_length=keep_length,
                                              hash_length=hash_length)
    assert shortened != label
    # result layout: kept prefix + '_' + hash
    assert len(shortened) == keep_length + 1 + hash_length
    assert shortened[:keep_length] == label[:keep_length]
    assert shortened[keep_length] == '_'

    # a hash_length of 21 is rejected
    with pytest.raises(ValueError):
        tools.shorten_and_hash_string(label, keep_length, 21)
169 |
170 |
@pytest.mark.unit
def test_get_neuron_compliant_template_name():
    """bluepymm.tools: test get neuron-compliant template name"""

    def _converted(name):
        """Convert name and check the result is neuron-compliant."""
        result = tools.get_neuron_compliant_template_name(name)
        assert tools.check_compliance_with_neuron(result)
        return result

    # an already compliant name passes through unchanged
    assert tools.check_compliance_with_neuron('test')
    assert _converted('test') == 'test'

    # leading digits are stripped and hyphens replaced by underscores
    name = '123test-test'
    assert not tools.check_compliance_with_neuron(name)
    assert _converted(name) == name.lstrip(digits).replace('-', '_')

    # over-long names are made compliant as well
    name = 'testtesttesttesttesttesttesttesttesttesttesttesttesttesttest'
    assert not tools.check_compliance_with_neuron(name)
    _converted(name)
190 |
191 |
@pytest.mark.unit
def test_decode_bstring():
    """bluepymm.tools test the bstring decoding function."""
    # byte strings are decoded to str
    assert tools.decode_bstring(b"this is a byte string") == \
        "this is a byte string"

    # regular strings are returned unchanged
    plain = "this is a string"
    assert tools.decode_bstring(plain) == plain
201 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py3-{unit,functional,style}
3 | minversion = 4
4 | [gh-actions]
5 | python =
6 | 3.9: py3
7 | 3.10: py3
8 | 3.11: py3
9 | 3.12: py3
10 |
11 | [testenv]
12 | envdir =
13 | py3{9,10,11,12,}{-unit,-functional,-style}: {toxworkdir}/py3
14 | docs: {toxworkdir}/docs
15 | deps =
16 | NEURON-nightly
17 | pytest
18 | pytest-cov
19 | pycodestyle
20 | coverage
21 | sh
22 | allowlist_externals =
23 | make
24 | find
25 | passenv = https_proxy
26 | setenv =
27 | TOX_NRNDIR={envdir}/.tox-neuronpy
28 | TOX_ENVBINDIR={envbindir}
29 | TOX_NRNBINDIR=../.tox-neuronpy/local/x86_64/bin/
30 | PYTHONPATH={env:TOX_NRNDIR}/local/lib/python:{env:TOX_NRNDIR}/local/lib64/python
31 | commands =
32 | make clean
33 |
34 | make simple1_git
35 | style: pycodestyle --ignore=E402,W503,W504 bluepymm tests
36 |
37 | unit: pytest --cov-append --cov-report=xml --cov-config=.coveragerc --cov=bluepymm tests -vx -m unit
38 | functional: pytest --cov-append --cov-report=xml --cov-config=.coveragerc --cov=bluepymm tests -vx -m "not unit"
39 |
40 | [testenv:docs]
41 | basepython = python3.10
42 | changedir = docs
43 | deps =
44 | sphinx
45 | sphinx-bluebrain-theme
46 | commands = make html SPHINXOPTS=-W
47 | allowlist_externals = make
48 |
--------------------------------------------------------------------------------