├── .all-contributorsrc ├── .flake8 ├── .github ├── ISSUE_TEMPLATE │ └── bug_report.md └── workflows │ ├── prs.yaml │ └── publish.yml ├── .gitignore ├── .mypy.ini ├── CONTRIBUTING.md ├── README.md ├── apt.txt ├── build-helper.sh ├── extra-text └── extra-text.md ├── requirements-book.txt ├── requirements.txt ├── scripts ├── contributors │ └── get_contributors.py ├── lems │ ├── Readme.md │ ├── asttemplates.py │ ├── regensourceannotations.sh │ ├── requirements.txt │ ├── sourceannotations.xml │ └── xml2md.py └── schemas │ ├── asttemplates.py │ ├── component-list.json │ ├── generate-jupyter-ast.py │ └── requirements.txt └── source ├── 404.md ├── Devdocs ├── DevSOP.md ├── Devdocs.md ├── InteractionOtherBits.md ├── ReleaseProcess.md └── UpdatingStandard.md ├── Events ├── 2012-Edinburgh.md ├── 202103-Harmony.md ├── 202107-CNS2021.md ├── 202108-INCF-Training-Week.md ├── 202109-COMBINE.md ├── 202204-Harmony.md ├── 20220630-CNS2022.md ├── 202404-Harmony.md ├── 202407-CNS2024.md ├── Events.md └── PastEvents.md ├── Landing.md ├── NeuroMLOrg ├── Board.md ├── BoardHistory.md ├── BoardMeetingReports.md ├── CoC.md ├── CommunicationChannels.md ├── Contributors.md ├── Funding.md ├── History.md ├── OutreachTraining.md ├── Repositories.md ├── ScientificCommittee.md └── Standards.md ├── Reference ├── Glossary.md ├── references.bib └── zBibliography.md ├── Userdocs ├── Conventions.md ├── ConvertingModels.md ├── CreatingNeuroMLModels.md ├── ExtendingNeuroMLv2.md ├── FAQ.md ├── FindingNeuroMLModels.md ├── GetNeuroML.md ├── GettingStarted.md ├── HDF5.md ├── ImportingMorphologyFiles.md ├── IzhikevichNetworkExample.md ├── LEMS.md ├── LEMSExample1.md ├── LEMSExample2.md ├── LEMSExample3.md ├── LEMSExample4.md ├── LEMSExample5.md ├── LEMSExample6.md ├── LEMSExample7.md ├── LEMSExample8.md ├── LEMSOverview.md ├── LEMSSchema.md ├── LEMSSimulation.md ├── LEMS_elements │ ├── DefiningComponents.md │ ├── Definingcomponenttypes.md │ ├── Dynamics.md │ ├── Geometry.md │ ├── Modelstructure.md │ ├── 
Procedure.md │ ├── Simulation.md │ ├── Structure.md │ └── Unitsanddimensions.md ├── LEMS_examples │ ├── elecdims.xml │ ├── ex2dims.xml │ ├── example1.xml │ ├── example2.xml │ ├── example5.xml │ ├── example7.xml │ ├── example8.xml │ ├── hhaltgate.xml │ ├── hhcell.xml │ ├── hhchannel.xml │ ├── hhmodels.xml │ ├── lems_example4.png │ ├── lems_example6.png │ ├── lems_example7.png │ ├── misciaf.xml │ └── spikegenerators.xml ├── MissionAndAims.md ├── MultiCompartmentOLMexample.md ├── NML2_examples │ ├── .ipynb_checkpoints │ │ └── GettingStartedNotebook-checkpoint.ipynb │ ├── FergusonEtAl2015_PYR3.nwb │ ├── FittedIzhFergusonPyr3.net.nml │ ├── HH_example_cell.nml │ ├── HH_example_k_channel.nml │ ├── HH_example_k_channel_1.png │ ├── HH_example_k_channel_2.png │ ├── HH_example_leak_channel.nml │ ├── HH_example_na_channel.nml │ ├── HH_example_na_channel_1.png │ ├── HH_example_na_channel_2.png │ ├── HH_example_net.nml │ ├── HH_example_net.png │ ├── HH_single_compartment.ipynb │ ├── HH_single_compartment_example_sim-i.png │ ├── HH_single_compartment_example_sim-iden.png │ ├── HH_single_compartment_example_sim-v.png │ ├── IzNet-1.gv.png │ ├── IzNet.gv.png │ ├── IzhikevichNetwork.ipynb │ ├── LEMS_HH_single_compartment_example_sim.xml │ ├── LEMS_example-izhikevich2007cell-sim.xml │ ├── LEMS_example-single-izhikevich2007cell-sim.xml │ ├── LEMS_example_izhikevich2007network_sim.png │ ├── LEMS_example_izhikevich2007network_sim.xml │ ├── LEMS_fitted_izhikevich_sim.xml │ ├── LEMS_lorenz.xml │ ├── LEMS_olm_example_sim.xml │ ├── LorenzLems.py │ ├── NeuroML-DB.ipynb │ ├── OLM.ipynb │ ├── SingleNeuron.ipynb │ ├── SingleNeuron.png │ ├── TuneIzhFergusonPyr3.net.nml │ ├── example-single-izhikevich2007cell-sim-v.png │ ├── example_izhikevich2007network_sim-spikes.png │ ├── fitted_izhikevich_fitness.png │ ├── fitted_izhikevich_hist.png │ ├── fitted_izhikevich_output.png │ ├── fitted_izhikevich_pyelectro_analysis_results.txt │ ├── fitted_izhikevich_scatter.png │ ├── 
fitted_izhikevich_screenshot_nwbexplorer.png │ ├── fitted_izhikevich_sim-exp-v.png │ ├── fitted_izhikevich_sim-model-v.png │ ├── hh-single-compartment.py │ ├── izhikevich-network.py │ ├── izhikevich-single-neuron.py │ ├── izhikevich2007_network.nml │ ├── izhikevich2007_single_cell_network.nml │ ├── lems_sim │ │ ├── LEMS_SimulationExample.xml │ │ ├── NML2_SingleCompHHCell.nml │ │ └── create_lems.py │ ├── matplotlibrc │ ├── olm-example.py │ ├── olm-example │ │ ├── HCNolm.channel.nml │ │ ├── Kdrfast.channel.nml │ │ ├── KvAolm.channel.nml │ │ ├── Nav.channel.nml │ │ └── leak_chan.channel.nml │ ├── olm.cell.nml │ ├── olm.cell.png │ ├── olm.cell.xy.png │ ├── olm.hoc │ ├── olm_example_net.nml │ ├── olm_example_sim_seg0_axon0-v.png │ ├── olm_example_sim_seg0_soma0-v.png │ ├── olm_example_sim_seg1_axon0-v.png │ ├── olm_example_sim_seg1_soma0-v.png │ ├── single_hh_cell_network.gv.png │ ├── single_olm_cell_network.gv.png │ └── tune-izhikevich.py ├── NeuroMLv1.md ├── NeuroMLv2.md ├── NeuroMLv2AndLEMS.md ├── OptimisingNeuroMLModels.md ├── Paths.md ├── Provenance.md ├── Publications.md ├── QuantitiesAndRecording.md ├── Schemas │ ├── Cells.md │ ├── Channels.md │ ├── Index.md │ ├── Inputs.md │ ├── Networks.md │ ├── NeuroMLCoreCompTypes.md │ ├── NeuroMLCoreDimensions.md │ ├── NeuroMLDocument.md │ ├── PyNN.md │ ├── Simulation.md │ └── Synapses.md ├── SimulatingNeuroMLModels.md ├── SingleCompartmentHHExample.md ├── SingleNeuronExample.md ├── Software │ ├── MatLab.md │ ├── NeuroML_API.md │ ├── NeuroMLlite.md │ ├── Software.md │ ├── SupportingTools.md │ ├── Tools │ │ ├── Approaches.md │ │ ├── Arbor.md │ │ ├── Brian.md │ │ ├── EDEN.md │ │ ├── MOOSE.md │ │ ├── N2A.md │ │ ├── NEST.md │ │ ├── NEURON.md │ │ ├── NetPyNE.md │ │ ├── PyNN.md │ │ └── SWC.md │ ├── jLEMS.md │ ├── jNeuroML.md │ ├── libNeuroML.md │ ├── pyLEMS.md │ └── pyNeuroML.md ├── Specification.md ├── TestingNeuroMLModels.md ├── UnitsAndDimensions.md ├── Usage.md ├── ValidatingNeuroMLModels.md ├── VisualisingCells.md ├── 
VisualisingChannels.md ├── VisualisingNeuroMLModels.md └── Walkthroughs │ ├── RayEtAl2020 │ ├── Conversion.md │ ├── OMV.md │ ├── RayEtAl2020.md │ ├── Setup.md │ └── scripts │ │ ├── cellmorph2nml.py │ │ └── postprocess_cells.py │ └── Walkthroughs.md ├── _config.yml ├── _static ├── NeuroML2012 │ ├── DWaltemath_sed-ml_edinburgh2012_.pdf │ ├── JKozloski_NeuroML_workshop_2012.pdf │ ├── MHull_NineML.pdf │ ├── NeuroLexNIFupdate_3-13-12.ppt │ ├── NleNovere_NeuroML-COMBINE.pdf │ ├── Open_Worm_03-13-12.ppt │ ├── PGleeson_NeuroMLIntro2012.ppt │ ├── RCannon_ModellingIonChannels.pdf │ ├── RTomsett_LargeScaleCorticalModel.ppt │ ├── SKeating_libsbml-and-sbml.pdf │ └── YleFranc_CNO.pdf ├── files │ ├── 20231122-ACNet.webm │ ├── 20231122-HL23PYR.webm │ ├── NeuroMLEditorialBoardMeeting2014.pdf │ ├── NeuroMLEditorialBoardMeeting2015.pdf │ ├── NeuroMLEditorialBoardMeeting2016.pdf │ ├── NeuroMLEditorialBoardMeeting2018.pdf │ ├── NeuroMLEditorialBoardMeeting2019.pdf │ ├── NeuroMLWorkshop2009.pdf │ ├── NeuroMLWorkshop2010.pdf │ ├── NeuroMLWorkshop2011.pdf │ └── neuroml-documentation.pdf └── zcustom.css ├── _toc.yml └── images ├── 20231122-ACNet.png ├── Acnet-LEMS.png ├── Acnet-matrix-1.png ├── Acnet-matrix-2.png ├── Acnet-matrix-3.png ├── Acnet-matrix-4.png ├── Acnet-matrix-5.png ├── Acnet-medium-graph-level1.png ├── Acnet-medium-graph-level5.png ├── Acnet-medium.net.png ├── Acnet-medium.povray.png ├── Board ├── ankur.png ├── boris.jpg ├── padraig2.jpeg ├── salva.png ├── sotirios.jpg └── subhasis.jpg ├── Figure6a.png ├── Funders ├── EUS_200px.gif ├── bbsrc.gif ├── incf.png ├── mrc.jpg ├── nih.gif ├── nsf.gif └── wtlogo.png ├── GGN-vispy.png ├── GGN.png ├── Ih-combined.png ├── KC-NEURON.png ├── KC-NeuroML.png ├── MorphologyNeuroML2.png ├── NML-DB.png ├── NaTa.png ├── NeuroML2_LEMS_Overview_web.svg ├── OSB.png ├── OSBv1.png ├── ScientificCommittee ├── andrew.jpg ├── angus.png ├── avrama.jpg ├── bhalla.png ├── cgunay.JPG ├── hugo.png ├── lyle.png ├── michael.png ├── robertmcdougal.png └── 
rsz_crook.jpg ├── Steady_state(s)_of_activation_variables_in_nas_at_6.3_degC.png ├── Steady_state(s)_of_activation_variables_of_kv_from_kv.channel.nml_at_6.3_degC.png ├── Time_Course(s)_of_activation_variables_of_kv_from_kv.channel.nml_at_6.3_degC.png ├── Time_course(s)_of_activation_variables_in_nas_at_6.3_degC.png ├── crook2007-morphml-figure1.png ├── cvapp.png ├── favicon.ico ├── izhikevich-binder.png ├── izhikevich-google.png ├── izhikevich-livecode.png ├── izhikevich-rocket-options.png ├── izhikevich-rocket.png ├── jupyter-download.png ├── jupyterbook-issue.png ├── lems-figure2.png ├── lems-neuroml2.png ├── lems_nml_files.png ├── libneuroml.png ├── logo-large.png ├── logo.png ├── neuromldb-channel-analysis.png ├── nml-db-morphology.png ├── nmllite-example.png ├── olm-cell-fi.png ├── olm-cell-subthresholdVi.png ├── olm-cell-voltage-traces.png ├── osb-channel-analysis.png ├── osb-conversion.png ├── osb-morphology.png ├── pynml-channelanalysis.png ├── pynml-plotmorph-1.png ├── pynml-plotmorph-2.png ├── pynml_jnml.svg ├── slider ├── combine.png ├── endorsed.png ├── moose_mod.png ├── openworm2-mod.png └── osbnivo_mod2.png ├── test_morphology_plot_2d_Cell_497232312_cell_nml_xy.png └── tools ├── arbor.png ├── biosimulators.png ├── brian2.png ├── catmaid.png ├── cx3d.png ├── genesis.png ├── geppetto.png ├── lfpy.png ├── mdf.png ├── moose.jpg ├── myokit.png ├── nest-logo.png ├── netpyne.png ├── neuroconstruct.png ├── neuron.png ├── neuronland.png ├── neuronvisio.png ├── openworm.png ├── pynn.png ├── trakem2.png ├── trees.png └── tvb.png /.all-contributorsrc: -------------------------------------------------------------------------------- 1 | { 2 | "projectName": "Documentation", 3 | "projectOwner": "NeuroML", 4 | "repoType": "github", 5 | "repoHost": "https://github.com", 6 | "files": [ 7 | "README.md" 8 | ], 9 | "imageSize": 100, 10 | "commit": true, 11 | "commitConvention": "angular", 12 | "contributors": [ 13 | { 14 | "login": "pgleeson", 15 | "name": "Padraig 
Gleeson", 16 | "avatar_url": "https://avatars.githubusercontent.com/u/1556687?v=4", 17 | "profile": "http://www.opensourcebrain.org/", 18 | "contributions": [ 19 | "code", 20 | "content", 21 | "data", 22 | "doc", 23 | "example", 24 | "ideas", 25 | "infra", 26 | "maintenance", 27 | "question", 28 | "research", 29 | "test", 30 | "tutorial", 31 | "review" 32 | ] 33 | }, 34 | { 35 | "login": "sanjayankur31", 36 | "name": "Ankur Sinha", 37 | "avatar_url": "https://avatars.githubusercontent.com/u/102575?v=4", 38 | "profile": "https://ankursinha.in/", 39 | "contributions": [ 40 | "code", 41 | "content", 42 | "data", 43 | "doc", 44 | "example", 45 | "ideas", 46 | "infra", 47 | "maintenance", 48 | "question", 49 | "research", 50 | "test", 51 | "tutorial", 52 | "review" 53 | ] 54 | }, 55 | { 56 | "login": "borismarin", 57 | "name": "Boris Marin", 58 | "avatar_url": "https://avatars.githubusercontent.com/u/3452783?v=4", 59 | "profile": "https://github.com/borismarin", 60 | "contributions": [ 61 | "code", 62 | "content", 63 | "review" 64 | ] 65 | }, 66 | { 67 | "login": "shayan823", 68 | "name": "Shayan Shafquat", 69 | "avatar_url": "https://avatars.githubusercontent.com/u/16141677?v=4", 70 | "profile": "https://github.com/shayan823", 71 | "contributions": [ 72 | "code", 73 | "content" 74 | ] 75 | }, 76 | { 77 | "login": "ramcdougal", 78 | "name": "Robert A McDougal", 79 | "avatar_url": "https://avatars.githubusercontent.com/u/6668090?v=4", 80 | "profile": "https://github.com/ramcdougal", 81 | "contributions": [ 82 | "code", 83 | "content" 84 | ] 85 | }, 86 | { 87 | "login": "erik-ski", 88 | "name": "erik-ski", 89 | "avatar_url": "https://avatars.githubusercontent.com/u/108799727?v=4", 90 | "profile": "https://github.com/erik-ski", 91 | "contributions": [ 92 | "bug", 93 | "review" 94 | ] 95 | } 96 | ], 97 | "contributorsPerLine": 7, 98 | "linkToUsage": true, 99 | "commitType": "docs" 100 | } 101 | -------------------------------------------------------------------------------- 
/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E501 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: 'T: bug' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **Page on which issue is** 14 | Which page is the issue on? (URL) 15 | 16 | **Other relevant information** 17 | - Web browser in use: 18 | - Operating system: 19 | -------------------------------------------------------------------------------- /.github/workflows/prs.yaml: -------------------------------------------------------------------------------- 1 | name: Test build pull requests 2 | 3 | on: 4 | pull_request: 5 | branches: [ main, master, development ] 6 | 7 | jobs: 8 | build: 9 | 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Set up Python 3.9 15 | uses: actions/setup-python@v4 16 | with: 17 | python-version: 3.9 18 | - name: Set up JDK 11 for jNeuroML etc 19 | uses: actions/setup-java@v3 20 | with: 21 | java-version: 11 22 | distribution: 'temurin' 23 | - name: Install dependencies 24 | run: | 25 | python -m pip install --upgrade pip 26 | # pip install -r requirements.txt 27 | pip install -r requirements-book.txt 28 | sudo apt-get install build-essential 29 | - name: Disable building jupyter-books 30 | run: | 31 | sed -i "s/execute_notebooks.*/execute_notebooks: 'off'/" source/_config.yml 32 | - name: Run jupyter-book 33 | run: | 34 | jupyter-book clean ./source 35 | jupyter-book build ./source 36 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: 
-------------------------------------------------------------------------------- 1 | name: Publish 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | schedule: 7 | - cron: "37 22 * * 1,3,5" 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Set up Python 3.11 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: 3.11 20 | - name: Set up JDK 11 for jNeuroML etc 21 | uses: actions/setup-java@v4 22 | with: 23 | java-version: 11 24 | distribution: 'temurin' 25 | - name: Install dependencies 26 | run: | 27 | python -m pip install --upgrade pip 28 | # pip install -r requirements.txt 29 | pip install -r requirements-book.txt 30 | sudo apt-get update -y 31 | sudo apt-get install build-essential -y 32 | sudo apt-get install texlive-latex-recommended texlive-latex-extra texlive-fonts-recommended texlive-fonts-extra texlive-xetex latexmk imagemagick -y 33 | - name: Update buf_size in texmf.cnf 34 | run: | 35 | kpsewhich -a texmf.cnf 36 | grep -i "^buf_size" `kpsewhich -a texmf.cnf` 37 | - name: Disable building jupyter-books 38 | run: | 39 | sed -i "s/execute_notebooks.*/execute_notebooks: 'off'/" source/_config.yml 40 | - name: Run jupyter-book 41 | run: | 42 | jupyter-book clean ./source 43 | jupyter-book build ./source --builder pdflatex 44 | mv ./source/_build/latex/neuroml-documentation.pdf ./source/_static/files/ 45 | jupyter-book build ./source 46 | - name: Update locations in 404 page 47 | run: | 48 | sed -i 's|src="\([[:alnum:]_]\)|src="/\1|g' ./source/_build/html/404.html 49 | sed -i 's|href="\([[:alnum:]_]\)|href="/\1|g' ./source/_build/html/404.html 50 | # if we also replaced "http.." 
with "/http..", undo that 51 | sed -i 's|href="/http|href="http|g' ./source/_build/html/404.html 52 | sed -i 's|src="/http|src="http|g' ./source/_build/html/404.html 53 | - name: Import to GitHub pages 54 | run: | 55 | ghp-import -c "docs.neuroml.org" -n -p -f ./source/_build/html 56 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .venv/ 2 | source/_build/* 3 | tags.* 4 | /bin 5 | /build 6 | /lib 7 | /pyvenv.cfg 8 | .ipynb_checkpoints/ 9 | *.gv 10 | *.mod 11 | x86_64/ 12 | *.dat 13 | *nrn.py 14 | __pycache__ 15 | /regen.sh 16 | /source/Userdocs/NML2_examples/lems_sim/LEMS_sim1.xml 17 | /source/Userdocs/NML2_examples/lems_sim/ex.spikes 18 | /source/Userdocs/NML2_examples/lems_sim/report.txt 19 | NT_*/ 20 | .mypy_cache/ 21 | /source/Userdocs/NML2_examples/NMLCH000008.channel.nml 22 | /source/Userdocs/NML2_examples/TestCell.net.nml 23 | /source/Userdocs/NML2_examples/complex_cell.hoc 24 | /source/Userdocs/NML2_examples/iv_complex_cell.net.nml 25 | /source/Userdocs/NML2_examples/NMLCH000007.channel.nml 26 | /source/Userdocs/NML2_examples/NMLCH000110.channel.nml 27 | /source/Userdocs/NML2_examples/NMLCH000113.channel.nml 28 | /source/Userdocs/NML2_examples/LEMS_Test_NaTa_t.xml 29 | /source/Userdocs/NML2_examples/LEMS_Test_SKv3_1.xml 30 | /source/Userdocs/NML2_examples/LEMS_iv_complex_cell.xml 31 | arm64 32 | /source/Userdocs/NML2_examples/Ca.channel.nml 33 | /source/Userdocs/NML2_examples/CaDynamics_E2_NML2.nml 34 | /source/Userdocs/NML2_examples/Ca_LVAst.channel.nml 35 | /source/Userdocs/NML2_examples/Exc.spikes 36 | /source/Userdocs/NML2_examples/Ih.channel.nml 37 | /source/Userdocs/NML2_examples/Im.channel.nml 38 | /source/Userdocs/NML2_examples/Inh.spikes 39 | /source/Userdocs/NML2_examples/K_Pst.channel.nml 40 | /source/Userdocs/NML2_examples/K_Tst.channel.nml 41 | /source/Userdocs/NML2_examples/LEMS_NML_DB_network_sim.xml 42 | 
/source/Userdocs/NML2_examples/LEMS_iv_bAC217_L23_MC_40be3bf0e8_0_0.xml 43 | /source/Userdocs/NML2_examples/LEMS_iv_novel_cell.xml 44 | /source/Userdocs/NML2_examples/NMLCH000015.channel.nml 45 | /source/Userdocs/NML2_examples/NMLCL000109.nml.zip 46 | /source/Userdocs/NML2_examples/NML_DB_Net.gv.png 47 | /source/Userdocs/NML2_examples/NML_DB_network.net.nml 48 | /source/Userdocs/NML2_examples/NaTa_t.channel.nml 49 | /source/Userdocs/NML2_examples/NaTs2_t.channel.nml 50 | /source/Userdocs/NML2_examples/Nap_Et2.channel.nml 51 | /source/Userdocs/NML2_examples/SK_E2.channel.nml 52 | /source/Userdocs/NML2_examples/SKv3_1.channel.nml 53 | /source/Userdocs/NML2_examples/TestCell.cell.nml 54 | /source/Userdocs/NML2_examples/bAC217_L23_MC_40be3bf0e8_0_0.cell.nml 55 | /source/Userdocs/NML2_examples/bAC217_L23_MC_40be3bf0e8_0_0.hoc 56 | /source/Userdocs/NML2_examples/iv_bAC217_L23_MC_40be3bf0e8_0_0.net.nml 57 | /source/Userdocs/NML2_examples/iv_novel_cell.net.nml 58 | /source/Userdocs/NML2_examples/novel_cell.hoc 59 | /source/Userdocs/NML2_examples/pas.channel.nml 60 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports = True 3 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contribution guidelines 2 | 3 | - Jupyter-book supports [multiple content types](https://jupyterbook.org/file-types/index.html). Their flavour of Markdown is preferred. 4 | - Please start each sentence on a new line in the documentation. This allows 5 | for better diffs and pull requests. 6 | 7 | 8 | ## Building docs locally 9 | 10 | The documentation currently uses [Jupyter-book](https://jupyterbook.org/). 
11 | 12 | Please note that Jupyterbook does not yet support Sphinx v4, and Sphinx v3 does not run correctly with Python 3.10. 13 | Therefore, until Jupyterbook is updated to support Sphinx v4, please use Python `<= 3.9` for your virtual environment. 14 | 15 | To build the documentation locally, to test before opening Pull Requests for example, a virtual environment can be used: 16 | 17 | ``` 18 | 19 | # Create a new virtual environment 20 | $ python3 -m venv ./.venv 21 | # Activate the virtual environment 22 | $ source .venv/bin/activate 23 | # Install the necessary Python packages 24 | $ pip install -r requirements-book.txt 25 | # Build the docs 26 | $ jupyter-book build ./source 27 | # This will create the HTML files in ./source/_build/html 28 | ``` 29 | 30 | 31 | To deactivate the virtual environment: 32 | 33 | ``` 34 | 35 | $ deactivate 36 | ``` 37 | 38 | More information on Python virtual environments can be found in the Python documentation [here](https://docs.python.org/3.9/library/venv.html). 39 | 40 | ## Publishing the book 41 | 42 | The book is published using GitHub pages, using the `ghp-import` tool. 43 | 44 | ``` 45 | 46 | # Use ghp-import 47 | $ ghp-import -n -p -f ./source/_build/html 48 | ``` 49 | 50 | This will import the HTML files built by `jupyter-book` to the `gh-pages` branch. 51 | More information on this can be found in the [official documentation](https://jupyterbook.org/publish/gh-pages.html). 52 | 53 | A helper script `./build-helper.sh` is present in the repository to assist with these steps. 54 | 55 | 56 | ## Updating schema pages 57 | 58 | The schema pages are generated using the script provide in the scripts/schemas directory. 59 | This uses a copy of the NeuroML2 repository to parse the XML core type definitions to generate the myAST docs using Jinja templates. 60 | It also goes through the examples to extract XML example snippets. 
61 | Finally, it inspects the libNeuroML Python API to include constructor definitions where they are available. 62 | 63 | Please remember to commit the newly generated schema doc files after running the script. 64 | -------------------------------------------------------------------------------- /apt.txt: -------------------------------------------------------------------------------- 1 | default-jre 2 | graphviz 3 | xdg-utils 4 | -------------------------------------------------------------------------------- /requirements-book.txt: -------------------------------------------------------------------------------- 1 | # To build the docs using jupyter-book 2 | jupyter-book>=0.13 3 | matplotlib 4 | numpy 5 | ghp-import 6 | sphinx-jupyterbook-latex 7 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # For Jupyter notebooks, on mybinder etc. 2 | pyneuroml 3 | neuromllite 4 | libNeuroML 5 | NEURON 6 | pyelectro 7 | -------------------------------------------------------------------------------- /scripts/lems/Readme.md: -------------------------------------------------------------------------------- 1 | Generate the LEMS element documentation page. 2 | 3 | We use the `sourceannotations.xml` file that is generated from the LEMS/LEMS repository here. 4 | We simply parse the XML to write markdown. 
5 | -------------------------------------------------------------------------------- /scripts/lems/asttemplates.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Templates for LEMS ast generator 4 | 5 | File: scripts/lems/asttemplates.py 6 | 7 | Copyright 2023 NeuroML contributors 8 | """ 9 | 10 | import textwrap 11 | from jinja2 import Environment 12 | 13 | env = Environment() 14 | 15 | 16 | """Template for the page header""" 17 | page_header = env.from_string(textwrap.dedent( 18 | """ 19 | (lemsschema:page:{{ section_data[0]|lower|replace(" ", "_") }}_)= 20 | # {{ section_data[0] }} 21 | 22 | {% if section_data[1] | length > 0 -%} 23 | **{{- section_data[1] -}}** 24 | 25 | --- 26 | {%- endif %} 27 | 28 | Schema against which LEMS based on these should be valid: [LEMS_v{{ lems_version }}.xsd](https://github.com/LEMS/LEMS/tree/{{ lems_branch }}/Schemas/LEMS/LEMS_v{{ lems_version }}.xsd). 29 | Generated on {{ lems_date }} from [this](https://github.com/LEMS/LEMS/commit/{{ lems_commit }}) commit. 30 | Please file any issues or questions at the [issue tracker here](https://github.com/LEMS/LEMS/issues). 
31 | 32 | --- 33 | """)) 34 | 35 | 36 | """Template for element types""" 37 | elementtype = env.from_string(textwrap.dedent( 38 | """ 39 | (lemsschema:{{ et['@name']|lower|replace(" ", "_") }}_)= 40 | ## {{ et['@name'] }} 41 | 42 | {{ et.Info| default("") }} 43 | 44 | """ 45 | )) 46 | 47 | """Template for property""" 48 | prop = env.from_string(textwrap.dedent( 49 | """ 50 | ````{tab-item} Properties 51 | ```{csv-table} 52 | :widths: 1, 2, 7 53 | :width: 100% 54 | :delim: $ 55 | 56 | {% for prop in props -%} 57 | **{{ prop['@name'] }}**$ {{ prop['@type'] }}$ {{ prop['#text'] }} 58 | {% endfor %} 59 | ``` 60 | ```` 61 | """ 62 | 63 | )) 64 | 65 | """Template for list property""" 66 | listprop = env.from_string(textwrap.dedent( 67 | """ 68 | ````{tab-item} can contain these elements 69 | ```{csv-table} 70 | :widths: 2, 8 71 | :width: 100% 72 | :delim: $ 73 | 74 | {% for prop in lprops -%} 75 | **{{ prop['@name'] }}**$ {ref}`lemsschema:{{ prop['@type'] | lower|replace(" ", "_") }}_` 76 | {% endfor %} 77 | ``` 78 | ```` 79 | """ 80 | 81 | )) 82 | 83 | schema_quote = env.from_string(textwrap.dedent( 84 | """ 85 | ````{tab-item} Schema 86 | ```{code-block} xml 87 | {{ schemadoc }} 88 | ``` 89 | ```` 90 | """ 91 | )) 92 | 93 | examples = env.from_string(textwrap.dedent( 94 | """ 95 | {% if lemsexamples|length > 0 %} 96 | ````{tab-item} {{ title }}: XML 97 | {% for e in lemsexamples -%} 98 | ```{code-block} xml 99 | {{ e|trim }} 100 | ``` 101 | {% endfor -%} 102 | ```` 103 | {%- endif -%} 104 | """ 105 | )) 106 | -------------------------------------------------------------------------------- /scripts/lems/regensourceannotations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2023 NeuroML contributors 4 | # File : scripts/lems/regensourceannotations.sh 5 | # 6 | # Regenerate sourceannotations.xml, and then generate ast. 
7 | # 8 | 9 | echo "Regenerating sourceannotations.xml" 10 | 11 | curdir=$(pwd) 12 | tempdir=$(mktemp --directory) 13 | pushd ${tempdir} 14 | 15 | git clone --branch development https://github.com/LEMS/LEMS.git --single-branch --depth 4 16 | git clone --branch development https://github.com/LEMS/jLEMS.git --single-branch --depth 4 17 | 18 | pushd jLEMS 19 | mvn install 20 | popd 21 | 22 | pushd LEMS/docgeneration 23 | ant -p 24 | ant 25 | ls -alt html 26 | popd 27 | 28 | popd 29 | 30 | cp "${tempdir}/LEMS/docgeneration/extractedannotations/sourceannotations.xml" . -v 31 | 32 | echo "Regenerating AST sources" 33 | python3 ./xml2md.py 34 | -------------------------------------------------------------------------------- /scripts/lems/requirements.txt: -------------------------------------------------------------------------------- 1 | lxml 2 | xmltodict 3 | jinja2 4 | -------------------------------------------------------------------------------- /scripts/schemas/requirements.txt: -------------------------------------------------------------------------------- 1 | libneuroml 2 | pylems 3 | jinja2 4 | lxml 5 | -------------------------------------------------------------------------------- /source/404.md: -------------------------------------------------------------------------------- 1 | # Page not found 2 | 3 | Sorry, the page you were looking could not be found. 4 | Please use the search function to look for information in the documentation. 5 | For any issues, please {ref}`contact us `. 6 | -------------------------------------------------------------------------------- /source/Devdocs/Devdocs.md: -------------------------------------------------------------------------------- 1 | (devdocs:overview)= 2 | # Overview 3 | 4 | This section will contain information for those who wish to **contribute to the development** of the NeuroML standard and associated tools. 5 | 6 | An overview of the NeuroML **release process** can be found {ref}`here `. 
7 | 8 | The relationship of NeuroML to a number of other tools and standards in computational neuroscience, 9 | and the practical steps taken thus far to ensure interoperability, can be found {ref}`here `. 10 | 11 | The following project Kanban boards are used to consolidate issues: 12 | 13 | - [NeuroML](https://github.com/orgs/NeuroML/projects/4/views/1): for all repositories under the NeuroML GitHub organization 14 | - [LEMS](https://github.com/orgs/LEMS/projects/2/views/1): for all repositories under the LEMS GitHub organization 15 | - [NeuralEnsemble](https://github.com/orgs/NeuralEnsemble/projects/1/views/1): for all NeuroML related repositories in the Neural Ensemble GitHub organization 16 | -------------------------------------------------------------------------------- /source/Devdocs/InteractionOtherBits.md: -------------------------------------------------------------------------------- 1 | (devdocs:interaction)= 2 | # Interaction with other languages and standards 3 | 4 | ```{admonition} Needs work 5 | TODO: Add more information to each of these 6 | ``` 7 | 8 | (devdocs:interaction:pynn)= 9 | ## PyNN 10 | 11 | [https://github.com/NeuroML/NeuroML2/issues/73](https://github.com/NeuroML/NeuroML2/issues/73) 12 | 13 | (devdocs:interaction:sbml)= 14 | ## SBML 15 | 16 | [https://github.com/OpenSourceBrain/SBMLShowcase](https://github.com/OpenSourceBrain/SBMLShowcase) 17 | 18 | 19 | (devdocs:interaction:sonata)= 20 | ## Sonata 21 | 22 | [https://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1007696](https://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1007696) 23 | 24 | 25 | (devdocs:interaction:nineml)= 26 | ## NineML & SpineML 27 | 28 | [https://github.com/OpenSourceBrain/NineMLShowcase](https://github.com/OpenSourceBrain/NineMLShowcase) 29 | 30 | 31 | (devdocs:interaction:mdf)= 32 | ## ModECI MDF 33 | 34 | [http://www.modeci.org/](http://www.modeci.org/) 35 | 36 | (devdocs:interaction:swc)= 37 | ## SWC 38 | 39 | 
http://www.neuronland.org/NLMorphologyConverter/MorphologyFormats/SWC/Spec.html 40 | http://www.neuromorpho.org/myfaq.jsp 41 | -------------------------------------------------------------------------------- /source/Devdocs/ReleaseProcess.md: -------------------------------------------------------------------------------- 1 | (devdocs:release)= 2 | # Release Process 3 | 4 | ## Overview 5 | 6 | In general, work is carried out in the **development** branches of the [main NeuroML repositories](https://github.com/NeuroML/.github/blob/main/testsheet/README.md) 7 | and these are merged to **master** branches on a new major release, e.g. move from NeuroML v2.1 to v2.2. 8 | 9 | A single page showing the **status of the automated test** as well as any **open Pull Requests** on all of the core NeuroML repositories can be found [here](https://github.com/NeuroML/.github/blob/main/testsheet/README.md). 10 | 11 | ## Steps for new major release 12 | 13 | These are the steps required for a new release of the NeuroML development tools. 
14 | 15 | | Task | Version this was last done | 16 | | --- | --- | 17 | | Commit final stable work in development branches | v2.3 | 18 | | Make releases (not just tag - generates DOI) previous development versions of individual repos | v2.3 | 19 | | Increment all version numbers - to distinguish release from previous development version | v2.3 | 20 | | Test all development branches - rerun GitHub Actions at least once | v2.3 | 21 | | Recheck all READMEs & docs | v2.3 | 22 | | Run & check [test.py](https://github.com/NeuroML/NeuroML2/blob/master/test.py) in NeuroML2 repo | v2.3 | 23 | | Check through issues for closed & easily closable ones | v2.3 | 24 | | Update version in {ref}`documentation pages ` | v2.3 | 25 | | Update [HISTORY.md](https://github.com/NeuroML/NeuroML2/blob/master/HISTORY.md) in NeuroML2 | v2.3 | 26 | | pylems: Update README; Merge to master; Tag release; Release to pip | v2.3 | 27 | | libNeuroML: Update README; Retest; Merge to master; Tag release; Release to pip; Check [installation docs](https://libneuroml.readthedocs.org/en/latest/install.html) | v2.3 | 28 | | pyNeuroML: Update Readme; Tag release; Release to pip | v2.3 | 29 | | NeuroMLlite: Update Readme; Tag release; Release to pip | v2.3 | 30 | | Java repositories ({ref}`jNeuroML `, org.neuroml.* etc.): Merge development to master; Tag releases | v2.3 | 31 | | Rebuild jNeuroML & commit to [jNeuroMLJar](https://sourceforge.net/p/neuroml/code/HEAD/tree/jNeuroMLJar/) and use latest for [jNeuroML for OMV](https://github.com/OpenSourceBrain/osb-model-validation/blob/master/omv/engines/getjnml.py#L8) | v2.3 | 32 | | Add new binary release on [https://github.com/NeuroML/jNeuroML/releases](https://github.com/NeuroML/jNeuroML/releases) | v2.3 | 33 | | Update version used in [neuroConstruct](https://github.com/NeuralEnsemble/neuroConstruct) | v2.3 | 34 | | Update docs on [http://docs.neuroml.org](https://docs.neuroml.org) | v2.3 | 35 | | Update version on [COMBINE 
website](https://github.com/combine-org/combine-org.github.io/blob/master/content/authors/NeuroML/_index.md) | v2.2 | 36 | | ANNOUNCE (mailing list, Twitter) | v2.2 | 37 | | Increment version numbers in all development branches | v2.3 | 38 | | DOI on [Zenodo](https://doi.org/10.5281/zenodo.593108) | v2.3 | 39 | | Update NeuroML [milestones](https://github.com/NeuroML/NeuroML2/milestones) | v2.2 | 40 | | New release of [neuroConstruct](https://github.com/NeuralEnsemble/neuroConstruct/releases) | v2.3 | 41 | | Test toolchain on Windows... | v2.0 | 42 | -------------------------------------------------------------------------------- /source/Events/202103-Harmony.md: -------------------------------------------------------------------------------- 1 | (neuromlevents:202103harmony)= 2 | # March 2021: NeuroML hackathon at HARMONY 2021 3 | 4 | ```{admonition} Registration for the COMBINE initiative's HARMONY 2021 meeting is free. 5 | :class: tip 6 | Register for the COMBINE: HARMONY 2021 meeting [here](http://co.mbine.org/events/HARMONY_2021). 7 | Registration is free. 8 | ``` 9 | 10 | We will be running 3 online NeuroML hackathon sessions during the upcoming [COMBINE: HARMONY 2021](http://co.mbine.org/events/HARMONY_2021) meeting on 23-25th March. 11 | The general theme of the sessions will be: **learn to build, visualise, analyse and simulate your models using NeuroML**. 12 | 13 | (neuromlevents:202103harmony:why)= 14 | ## Why take part? 
15 | 16 | These hackathons will give members of the neuroscience community the chance to: 17 | 18 | - Get high level introductions to the NeuroML language and tool chain 19 | - Meet the NeuroML core development team and editors 20 | - Find out the latest information on which simulators/applications support NeuroML 21 | - Open, discuss and work on issues related to converting your model to NeuroML, or supporting NeuroML in your simulator 22 | - Learn how to share your models with the community 23 | 24 | (neuromlevents:202103harmony:when)= 25 | ## Times and dates 26 | 27 | All sessions will be online and take place over 3 hours (9am-noon Pacific; 12-3pm EST time; 4-7pm UK/UTC; 5-8pm CET, 9:30pm-12:30am IST; note non-standard US/EU time differences that week). 28 | The broad focus of each of the sessions (dependent on interests of attendees) is: 29 | 30 | - Tues 23rd March: Introduction to NeuroML, general questions about usage 31 | - Wed 24th March: Detailed cell/conductance based models (e.g. converting channels to NeuroML) 32 | - Thurs 25th March: Abstract/point neuron networks including PyNN interactions 33 | 34 | (neuromlevents:202103harmony:registration)= 35 | ## Registration 36 | 37 | To take part in the hackathon, please register [here](http://co.mbine.org/events/HARMONY_2021) for the HARMONY meeting (registration is free). 38 | You will get sent details to access the [agenda](https://harmony2021.sched.com), which will have links to the Zoom sessions for each of the days. 39 | 40 | (neuromlevents:202103harmony:prepare)= 41 | ## Open an issue beforehand! 42 | While it will be possible to raise and discuss new issues at the hackathons, it will be easier to manage and plan work/discussions if you open an issue with a description of the problem you are trying to address at: [https://github.com/NeuroML/NeuroML2/issues](https://github.com/NeuroML/NeuroML2/issues). 
43 | 44 | (neuromlevents:202103harmony:slack)= 45 | ## Slack 46 | To aid communication with the community during (and after) the meeting, we have a **Slack channel** for NeuroML related discussions. 47 | Please contact [Padraig Gleeson](mailto:p_DOT_gleeson_AT_ucl.ac.uk) for an invite. 48 | 49 | We look forward to working with the community to drive further uptake of NeuroML compliant models and tools! 50 | -------------------------------------------------------------------------------- /source/Events/202107-CNS2021.md: -------------------------------------------------------------------------------- 1 | (neuromlevents:202107cns2021)= 2 | # July 2021: NeuroML tutorial at CNS*2021 3 | 4 | ```{admonition} Register for the 30th Annual meeting of the Organization for Computational Neurosciences (OCNS). 5 | :class: tip 6 | Register for the CNS*2021 [here](http://www.cnsorg.org/cns-2021). 7 | ``` 8 | 9 | We will be running a half day tutorial at the 30th annual meeting of the Organization for Computational Neurosciences (OCNS): [CNS*2021](https://www.cnsorg.org/cns-2021). 10 | 11 | The goal of the tutorial is to teach users to: **build, visualise, analyse and simulate models using NeuroML**. 12 | 13 | (neuromlevents:202107cns2021:why)= 14 | ## Why take part? 15 | 16 | This tutorial is aimed at new and current NeuroML users. We will start with a quick introduction to the NeuroML standard and the associated software ecosystem, after which we will proceed to conduct hands-on sessions to show how one can build computational models with NeuroML. 17 | 18 | (neuromlevents:202107cns2021:when)= 19 | ## Times and dates 20 | 21 | - [Friday 2nd July 1500UTC](https://www.timeanddate.com/worldclock/fixedtime.html?msg=NeuroML+tutorial+at+CNS%2A2021&iso=20210702T11&p1=179&ah=3). 22 | 23 | (neuromlevents:202107cns2021:registration)= 24 | ## Registration 25 | 26 | To take part in the tutorial, please register [here](https://www.cnsorg.org/cns-2021) for the CNS*2021 meeting. 
27 | 28 | (neuromlevents:202107cns2021:prepare)= 29 | ## Pre-requisites 30 | 31 | The sessions will make use of the NeuroML Python tools. 32 | Please follow the documentation to install them on your system if you wish to use them locally: 33 | 34 | - {ref}`PyNeuroML ` 35 | - {ref}`libNeuroML ` 36 | 37 | You can also use the interactive Jupyter notebooks from the documentation if you prefer ([example](https://docs.neuroml.org/Userdocs/NML2_examples/SingleNeuron.html)). These can be run on Binder and Google Collab in your web browser and do not require you to install anything locally on your computer. 38 | 39 | 40 | (neuromlevents:202107cns2021:slack)= 41 | ## Slack 42 | 43 | To aid communication with the community during (and after) the meeting, we have a **Slack channel** for NeuroML related discussions. 44 | Please contact [Padraig Gleeson](mailto:p_DOT_gleeson_AT_ucl.ac.uk) for an invite. 45 | 46 | You can also contact the NeuroML community using one of our other {ref}`channels `. 47 | 48 | We look forward to working with the community to drive further uptake of NeuroML compliant models and tools! 49 | -------------------------------------------------------------------------------- /source/Events/202108-INCF-Training-Week.md: -------------------------------------------------------------------------------- 1 | (neuromlevents:202108incf)= 2 | # August 2021: NeuroML tutorial at INCF Training Weeks 3 | 4 | A NeuroML tutorial will be held at the [Virtual INCF Neuroinformatics Training Weeks 2021](https://www.incf.org/virtual-incf-neuroinformatics-training-week-2021). 5 | 6 | This tutorial is intended for members of the research community interested in learning more about how NeuroML and its related technologies facilitates the standardization, sharing, and collaborative development of models. 
7 | 8 | (neuromlevents:202108incf:when)= 9 | ## Times and dates 10 | 11 | This tutorial will be offered twice during the Neuroinformatics Training Week: session 1 is targeted to participants residing in Europe, Africa, and the Americas while session 2 is targeted to participants residing in Asia and Australia. 12 | 13 | Session 1: 14 | - Dates: 23 Aug 2021 15 | - Time: : 11:00-15:00 EDT / 17:00-21:00 CEST 16 | 17 | Session 2 18 | - Dates: 26 Aug 2021 19 | - Time: 09:00-13:00 CEST / 16:00-20:00 JST / 17:00-21:00 AEST 20 | 21 | (neuromlevents:202108incf:who)= 22 | ## Target audience 23 | 24 | Anyone who is already familiar with computational modelling, but is keen to standardise, share and collaboratively develop their models. 25 | 26 | (neuromlevents:202108incf:agenda)= 27 | ## Agenda 28 | 29 | ### Part 1: Introduction to NeuroML 30 | 31 | - Overview of NeuroML 32 | - Introduce the NeuroML tool chain 33 | - Introduce main documentation 34 | - Related technologies and initiatives 35 | 36 | ### Part 2: Hands on demonstrations of building and using NeuroML models 37 | 38 | - Izhikevich neuron hands on tutorial 39 | - Spiking neuron network tutorial 40 | - Single compartment HH neuron tutorial 41 | - Multi compartmental HH neuron tutorial 42 | -------------------------------------------------------------------------------- /source/Events/202109-COMBINE.md: -------------------------------------------------------------------------------- 1 | (neuromlevents:202109combine)= 2 | # October 2021: NeuroML development workshop at COMBINE meeting 3 | 4 | ```{admonition} Registration for the COMBINE 2021 meeting is free. 5 | :class: tip 6 | Register for the COMBINE 2021 meeting [here](https://combine-org.github.io/events/). 7 | Registration is free. 8 | ``` 9 | 10 | A NeuroML development workshop will be held as part of the [annual COMBINE meeting in October 2021](https://combine-org.github.io/events/). 
11 | 12 | The general theme of the workshop is to discuss the current status of the NeuroML standard and the complete software ecosystem, and future development plans. 13 | 14 | (neuromlevents:202109combine:when)= 15 | ## Times and dates 16 | 17 | - 13 October 2021 18 | - 8-11am PDT/11-2pm EST/4-7pm UK/5-8pm CET/8:30-11:30 IST 19 | 20 | 21 | (neuromlevents:202109combine:who)= 22 | ## Target audience 23 | Everyone that is involved/interested in developing tools that use/integrate with NeuroML is encouraged to join. 24 | 25 | Please register for the COMBINE meeting (free of charge) to receive access to the complete schedule of the meeting, including links to the various virtual meetings/sessions. 26 | 27 | ## Agenda/minutes 28 | 29 | The agenda/minutes for the meeting can be found [here](https://docs.google.com/document/d/1rZp6fggUe1vlo5fYK-CiUP__fdJV8xYg-wrkpIp0dHk/edit). 30 | -------------------------------------------------------------------------------- /source/Events/202204-Harmony.md: -------------------------------------------------------------------------------- 1 | (neuromlevents:202104harmony)= 2 | # April 2022: NeuroML development workshop at HARMONY 2022 3 | 4 | ```{admonition} Registration for the COMBINE initiative's HARMONY 2022 meeting is free. 5 | :class: tip 6 | Please register for the COMBINE HARMONY 2022 meeting [here](https://combine-org.github.io/events/) if you are coming to our NeuroML workshop. 7 | Registration for HARMONY is free. 8 | ``` 9 | 10 | We will be running a NeuroML development workshop during the upcoming [COMBINE network's HARMONY 2022](https://combine-org.github.io/events/) meeting on Thurs 28 April 2022. 11 | This will be an opportunity for anyone interested in developing NeuroML or adding support for the format to their application to talk about their work and hear about other developments. 
12 | 13 | (neuromlevents:202104harmony:agenda)= 14 | ## Agenda 15 | 16 | The agenda for the meeting can be found [here](https://docs.google.com/document/d/1GOkmylIlLH3dbxB2Pmis7VxFPzipstLBdXn5uBu4rv8/edit). 17 | 18 | (neuromlevents:202104harmony:when)= 19 | ## Times and dates 20 | 21 | The workshop will take place on **Thus 28 April 2022** at 15:00-18:00 UTC ([converter](https://www.timeanddate.com/worldclock/converter.html?iso=20220427T150000&p1=136&p2=1440&p3=137&p4=179&p5=37&p6=176&p7=248)). 22 | 23 | (neuromlevents:202104harmony:registration)= 24 | ## Registration 25 | 26 | To take part in the workshop, please register [here](https://combine-org.github.io/events/) for the HARMONY meeting (registration is free). 27 | 28 | You will get sent details to access the HARMONY agenda, which will have links to the **Zoom session for the NeuroML workshop**. 29 | 30 | (neuromlevents:202104harmony:prepare)= 31 | ## Open an issue beforehand! 32 | While it will be possible to raise and discuss new issues at the workshop, it will be easier to manage and plan work/discussions if you open an issue with a description of the problem you are trying to address at: [https://github.com/NeuroML/NeuroML2/issues](https://github.com/NeuroML/NeuroML2/issues). 33 | 34 | (neuromlevents:202104harmony:slack)= 35 | ## Slack 36 | To aid communication with the community during (and after) the meeting, we have a **Slack channel** for NeuroML related discussions. 37 | Please contact [Padraig Gleeson](mailto:p_DOT_gleeson_AT_ucl.ac.uk) for an invite. 38 | 39 | We look forward to working with the community to drive further uptake of NeuroML compliant models and tools! 
40 | -------------------------------------------------------------------------------- /source/Events/20220630-CNS2022.md: -------------------------------------------------------------------------------- 1 | (neuromlevents:202206cns)= 2 | # June 2022: NeuroML tutorial at CNS*2022 satellite tutorials 3 | 4 | An online NeuroML tutorial will be held at the [CNS*2022 satellite tutorials](https://ocns.github.io/SoftwareWG/pages/software-wg-satellite-tutorials-at-cns-2022.html). 5 | Registration for the satellite tutorials is free, but required. 6 | 7 | This tutorial is intended for members of the research community interested in learning more about how NeuroML and its related technologies facilitates the standardization, sharing, and collaborative development of models. 8 | 9 | (neuromlevents:202206cns:when)= 10 | ## Times and dates 11 | 12 | - Dates: June 30, 2022 13 | - Time: : [1400--1700 UTC](https://www.timeanddate.com/worldclock/fixedtime.html?iso=20220630T14) 14 | 15 | 16 | (neuromlevents:202206cns:who)= 17 | ## Target audience 18 | 19 | Anyone who is already familiar with computational modelling, but is keen to standardise, share and collaboratively develop their models. 20 | 21 | (neuromlevents:202206cns:where)= 22 | ## Where 23 | 24 | The tutorial be done online via Zoom and will make use of the [Open Source Brain v2](https://v2.opensourcebrain.org) integrated web research platform. 25 | 26 | Please register for the [CNS*2022 satellite tutorials](https://ocns.github.io/SoftwareWG/pages/software-wg-satellite-tutorials-at-cns-2022.html) to receive the Zoom links. 
27 | 28 | (neuromlevents:202206cns:agenda)= 29 | ## Agenda 30 | 31 | ### Part 1: Introduction to NeuroML 32 | 33 | - Overview of NeuroML 34 | - Introduce the NeuroML tool chain 35 | - Introduce main documentation 36 | - Related technologies and initiatives 37 | 38 | ### Part 2: Hands on demonstrations of building and using NeuroML models 39 | 40 | - Izhikevich neuron hands on tutorial 41 | - Spiking neuron network tutorial 42 | - Single compartment HH neuron tutorial 43 | - Multi compartmental HH neuron tutorial 44 | -------------------------------------------------------------------------------- /source/Events/202404-Harmony.md: -------------------------------------------------------------------------------- 1 | (neuromlevents:202404harmony)= 2 | # April 2024: NeuroML hackathon at HARMONY 2024 3 | 4 | ```{admonition} Registration for the COMBINE initiative's HARMONY 2024 meeting is free. 5 | :class: tip 6 | Please register for the COMBINE HARMONY 2024 meeting [here](https://co.mbine.org/author/harmony-2024/) if you are coming to our NeuroML workshop. 7 | Registration for HARMONY is free. 8 | ``` 9 | 10 | We will be running a NeuroML workshop during the upcoming [COMBINE network's HARMONY 2024](https://co.mbine.org/author/harmony-2024/) meeting on Tues 9th April 2024 in London, entitled: 11 | 12 | **NeuroML hackathon: convert your neuron and network models to open, standardised, reusable formats** 13 | 14 | This will be an opportunity for developers of models in computational neuroscience to get an introduction to the aims and structure of NeuroML, a guide to the tools available for building/converting their models to NeuroML, and to receive hands on help with expressing their models (or other published models they are interested in) in NeuroML format, making them more open, accessible and reusable. 
15 | 16 | (neuromlevents:202404harmony:agenda)= 17 | ## Agenda 18 | 19 | More details to follow 20 | 21 | (neuromlevents:202404harmony:when)= 22 | ## Times and dates 23 | 24 | 25 | More details to follow 26 | 27 | (neuromlevents:202404harmony:registration)= 28 | ## Registration 29 | 30 | To take part in the workshop, please register [here](https://co.mbine.org/author/harmony-2024/) for the HARMONY meeting (registration is free). 31 | 32 | 33 | (neuromlevents:202404harmony:prepare)= 34 | ## Open an issue beforehand! 35 | While it will be possible to raise and discuss new issues at the workshop, it will be easier to manage and plan work/discussions if you open an issue with a description of the problem you are trying to address at: [https://github.com/NeuroML/NeuroML2/issues](https://github.com/NeuroML/NeuroML2/issues). 36 | 37 | (neuromlevents:202404harmony:slack)= 38 | ## Slack 39 | To aid communication with the community during (and after) the meeting, we have a **Slack channel** for NeuroML related discussions. 40 | Please contact [Padraig Gleeson](mailto:p_DOT_gleeson_AT_ucl.ac.uk) for an invite. 41 | 42 | We look forward to working with the community to drive further uptake of NeuroML compliant models and tools! 43 | -------------------------------------------------------------------------------- /source/Events/Events.md: -------------------------------------------------------------------------------- 1 | (neuromlevents:main)= 2 | # NeuroML outreach and events 3 | 4 | The NeuroML community organises regular training and outreach events. 
5 | Recent meetings are listed below (please see the individual pages for more details): 6 | 7 | - {ref}`July 2024: NeuroML tutorial at CNS 2024 ` 8 | - {ref}`April 2024: NeuroML hackathon at HARMONY 2024 ` 9 | - {ref}`June 2022: NeuroML tutorial at CNS*2022 satellite tutorials ` 10 | - {ref}`April 2022: NeuroML development workshop at HARMONY 2022 ` 11 | - {ref}`October 2021: NeuroML development workshop at COMBINE meeting ` 12 | - {ref}`August 2021: NeuroML tutorial at INCF Training Weeks ` 13 | - {ref}`July 2021: NeuroML tutorial at CNS*2021 ` 14 | - {ref}`March 2021: NeuroML hackathon at HARMONY 2021 ` 15 | -------------------------------------------------------------------------------- /source/Events/PastEvents.md: -------------------------------------------------------------------------------- 1 | (neuromlevents:past)= 2 | # Past NeuroML Events 3 | 4 | A number of developer workshops and editorial board meetings have been held since 2008 to coordinate and promote the work of the NeuroML community. These are listed {ref}`here `. 5 | 6 | There has been significant NeuroML involvement also at the meetings organised by the Open Source Brain initiative. See [here](https://www.opensourcebrain.org/docs#Meetings) for more details. 7 | -------------------------------------------------------------------------------- /source/NeuroMLOrg/CoC.md: -------------------------------------------------------------------------------- 1 | (coc)= 2 | # Code of Conduct 3 | 4 | Everyone is welcome in the NeuroML community. 5 | We request everyone interacting on the NeuroML channels in any capacity to treat each other respectfully. 6 | Please: 7 | 8 | - act in good faith 9 | - be friendly, welcoming, respectful, and patient 10 | - be mindful and considerate 11 | - be open; prefer, use, and promote Open Science practices. 12 | 13 | If you experience or become aware of behaviour that does not adhere to the Code of Conduct, please contact the moderators of the channel/event you are in. 
14 | -------------------------------------------------------------------------------- /source/NeuroMLOrg/CommunicationChannels.md: -------------------------------------------------------------------------------- 1 | (contact)= 2 | # Getting in touch 3 | 4 | We're happy to talk with users, developers and modellers about using NeuroML in their work. 5 | 6 | (contact:ml)= 7 | ## Mailing list 8 | 9 | For announcements, general discussion, queries, and troubleshooting related to NeuroML please use the mailing list: [https://lists.sourceforge.net/lists/listinfo/neuroml-technology](https://lists.sourceforge.net/lists/listinfo/neuroml-technology). 10 | 11 | (contact:chat)= 12 | ## Chat channels 13 | 14 | Gitter/Matrix chat channels for queries are also available. 15 | One can access them via either Gitter or Matrix/Element. 16 | 17 | - [Gitter](https://gitter.im/NeuroML/community) 18 | - [Matrix/Element](https://matrix.to/#/!EQLdKYsJxEfGHAybdP:gitter.im?via=gitter.im&via=matrix.org) 19 | 20 | Please note that activity in these rooms depends on time zones and the availability of community members. 21 | So, if you do not get a response soon, please post to the mailing list listed above or file an issue on GitHub as noted below. 22 | 23 | (contact:issues)= 24 | ## Issues related to the libraries or specification 25 | 26 | - Please file general issues related to NeuroML at the [NeuroML/NeuroML2 repository on GitHub](https://github.com/NeuroML/NeuroML2/issues). 27 | - Please file issues related to LEMS and jLEMS at the [LEMS/jLEMS repository on GitHub](https://github.com/LEMS/jLEMS/issues). 28 | - Additionally, please file issues related to the different NeuroML core tools at their individual {ref}`GitHub repositories `. 29 | 30 | (contact:social)= 31 | ## Social media 32 | 33 | You can follow NeuroML related updates on Twitter at [@NeuroML](https://twitter.com/NeuroML). 
34 | -------------------------------------------------------------------------------- /source/NeuroMLOrg/Funding.md: -------------------------------------------------------------------------------- 1 | (neuromlorg:funding)= 2 | # Funding and Acknowledgements 3 | 4 | The NeuroML effort has been made possible by funding from research councils in the UK, EU, and the USA. 5 | 6 | 7 |
8 | 9 | 10 | 19 | 24 | 25 | 26 | 35 | 40 | 41 | 42 | 51 | 56 | 57 | 58 | 67 | 72 | 73 | 74 | 83 | 88 | 89 | 90 | 99 | 104 | 105 | 106 | 116 | 121 | 122 |
11 | 12 | ```{image} ../images/Funders/mrc.jpg 13 | --- 14 | alt: UK Medical Research Council 15 | align: center 16 | --- 17 | ``` 18 | 20 | 21 | [UK Medical Research Council](http://www.mrc.ac.uk/) 22 | 23 |
27 | 28 | ```{image} ../images/Funders/bbsrc.gif 29 | --- 30 | alt: UK Biotechnology and Biological Sciences Research Council 31 | align: center 32 | --- 33 | ``` 34 | 36 | 37 | [UK Biotechnology and Biological Sciences Research Council](http://www.bbsrc.ac.uk/) 38 | 39 |
43 | 44 | ```{image} ../images/Funders/nih.gif 45 | --- 46 | alt: National Institutes of Health 47 | align: center 48 | --- 49 | ``` 50 | 52 | 53 | [National Institutes of Health](http://www.nimh.nih.gov/) 54 | 55 |
59 | 60 | ```{image} ../images/Funders/EUS_200px.gif 61 | --- 62 | alt: EU Synapse Project 63 | align: center 64 | --- 65 | ``` 66 | 68 | 69 | [EU Synapse Project](http://www.eusynapse.mpg.de/) 70 | 71 |
75 | 76 | ```{image} ../images/Funders/nsf.gif 77 | --- 78 | alt: National Science Foundation 79 | align: center 80 | --- 81 | ``` 82 | 84 | 85 | [National Science Foundation](http://nsf.gov/) 86 | 87 |
91 | 92 | ```{image} ../images/Funders/incf.png 93 | --- 94 | alt: International Neuroinformatics Coordinating Facility 95 | align: center 96 | --- 97 | ``` 98 | 100 | 101 | [International Neuroinformatics Coordinating Facility](http://incf.org/) 102 | 103 |
107 | 108 | ```{image} ../images/Funders/wtlogo.png 109 | --- 110 | alt: Wellcome 111 | align: center 112 | width: 30% 113 | --- 114 | ``` 115 | 117 | 118 | [Wellcome](http://www.wellcome.ac.uk/) 119 | 120 |
123 |
124 | -------------------------------------------------------------------------------- /source/NeuroMLOrg/Standards.md: -------------------------------------------------------------------------------- 1 | (initiative:overview_standards)= 2 | # Overview of standards in neuroscience 3 | 4 | In biology, several community standards have been developed to describe experimental data (e.g. Brain Imaging Data Structure [BIDS](https://bids.neuroimaging.io/), Neurodata Without Borders [NWB](https://nwb.org)) and computational models (e.g. Systems Biology Markup Language [SBML](https://sbml.org), [CellML](https://cellml.org), Scalable Open Network Architecture TemplAte [SONATA](https://github.com/AllenInstitute/sonata), [PyNN](https://neuralensemble.org/docs/PyNN/), and Neural Open Markup Language (NeuroML)). 5 | These standards have enabled open and interoperable ecosystems of software applications, libraries, and databases to emerge, facilitating the sharing of research outputs, an endeavour encouraged by a growing number of funding agencies and scientific journals. 6 | 7 | (initiative:overview_standards:neuroml)= 8 | ## NeuroML as a standard 9 | 10 | NeuroML is an international, collaborative initiative to develop a language for describing detailed models of neural systems, which will serve as a standard data format for defining and exchanging descriptions of neuronal cell and network models. 11 | 12 | (initiative:overview_standards:neuroml:incf)= 13 | ### Endorsed INCF standard 14 | 15 | The mission of INCF is to promote the uptake of FAIR data management practices in neuroscience through the development of standards and best practices that support open, FAIR, and citable neuroscience. 16 | INCF also provides training on how standards and best practices facilitate reproducibility and enable the sharing of data and code. 17 | 18 | NeuroML is an [INCF endorsed standard](https://www.incf.org/sbp/neuroml). 
19 | 20 | (initiative:overview_standards:neuroml:combine)= 21 | ### COMBINE standard 22 | 23 | The "COmputational Modeling in BIology NEtwork" (COMBINE) is an initiative to coordinate the development of the various community standards and formats for computational models. 24 | By doing so, it is expected that the federated projects will develop a set of interoperable and non-overlapping standards covering all aspects of modelling in biology. 25 | The global COMBINE effort is led by the COMBINE Coordination Board. 26 | 27 | Building on the experience of mature projects, which already have stable specifications, software support, user-base and community governance, COMBINE will help foster or support fledgling efforts aimed at filling gaps or new needs. 28 | As those efforts mature, they may become part of the core set of COMBINE standards. 29 | 30 | One of the initial activities of COMBINE is to coordinate the organization of scientific and technical events common to several standards. 31 | Those events, as others related to our field of research are gathered in a calendar. 32 | 33 | NeuroML is a [COMBINE official standard](http://co.mbine.org/standards/neuroml). 34 | -------------------------------------------------------------------------------- /source/Reference/Glossary.md: -------------------------------------------------------------------------------- 1 | # Glossary 2 | 3 | - XML: Extensible Markup Language (XML) is a markup language that defines a set of rules for encoding documents in a format that is both human-readable and machine-readable. 
(Read full entry on [Wikipedia](https://en.wikipedia.org/wiki/XML)) 4 | -------------------------------------------------------------------------------- /source/Reference/zBibliography.md: -------------------------------------------------------------------------------- 1 | # Bibliography 2 | 3 | ```{bibliography} ./references.bib 4 | :all: 5 | ``` 6 | -------------------------------------------------------------------------------- /source/Userdocs/Conventions.md: -------------------------------------------------------------------------------- 1 | (userdocs:conventions)= 2 | # Conventions 3 | 4 | This page documents various conventions in use in NeuroML. 5 | 6 | (userdocs:conventions:underscores)= 7 | ## Prefer underscores instead of spaces 8 | 9 | In general, please prefer underscores `_` instead of spaces wherever possible, in filenames and ids. 10 | 11 | (userdocs:conventions:nmlid)= 12 | ## Component IDs: NmlId 13 | 14 | Some Components take an `id` parameter of type `NmlId` to set an ID for them. 15 | They can then be referred to using their IDs when constructing paths and so on. 16 | 17 | IDs of type `NmlId` in NeuroML are strings and have certain constraints: 18 | 19 | - they **must** start with an alphabet (either small or capital) or an underscore 20 | - they may include alphabets, both small and capital letters, numbers and underscores 21 | 22 | IDs are also checked during validation, so if an ID does not follow these constraints, the validation will throw an error. 
23 | 24 | 25 | (userdocs:conventions:files)= 26 | ## File naming 27 | 28 | When naming different NeuroML files, we suggest the following suffixes: 29 | 30 | - `channel.nml` for NeuroML files describing ion channels, for example: `Na.channel.nml` 31 | - `cell.nml` for NeuroML files describing cells, for example: `hh.cell.nml` 32 | - `synapse.nml` for NeuroML files describing synapses, for example: `AMPA.synapse.nml` 33 | - `net.nml` for NeuroML files describing networks of cells, for example: `excitatory.net.nml` 34 | 35 | For LEMS files that describe simulations of NeuroML models ({ref}`"LEMS Simulation files" `), we suggest that: 36 | 37 | - file names start with the `LEMS_` prefix, 38 | - file names end in `xml` 39 | 40 | For example `LEMS_HH_Simulation.xml`. 41 | 42 | ```{figure} ../images/lems_nml_files.png 43 | :alt: LEMS Simulation file and NeuroML file 44 | :align: center 45 | :scale: 24 % 46 | 47 | Typical organisation for a NeuroML simulation. The main NeuroML model is specified in a file with the network (`*.net.nml`), which can include/point to files containing individual synapses (`*.synapse.nml`) or cell files (`*.cell.nml`). If the latter are conductance based, they may include external channel files (`*.channel.nml`). The main LEMS Simulation file only needs to include the network file, and tools for running simulations of the model refer to just this LEMS file. Exceptions to these conventions are frequent and simulations will run perfectly well with all the elements inside the main LEMS file, but using this scheme will maximise reusability of model elements. 
48 | 49 | ``` 50 | 51 | (userdocs:conventions:segments)= 52 | ## Neuron segments 53 | 54 | When naming segments in multi-compartmental neuron models, we suggest the following prefixes: 55 | 56 | - `axon_` for axonal segments 57 | - `dend_` for dendritic segments 58 | - `soma_` for somatic segments 59 | 60 | There are 3 specific recommended names for segment groups which contain **ALL** of the somatic, dendritic or axonal segments 61 | 62 | - `axon_group` for the group of all axonal segments 63 | - `dendrite_group` for the group of all dendritic segments 64 | - `soma_group` for the group of all somatic segments 65 | 66 | Ideally every segment should be a member of one and only one of these groups. 67 | -------------------------------------------------------------------------------- /source/Userdocs/CreatingNeuroMLModels.md: -------------------------------------------------------------------------------- 1 | (userdocs:creating_models)= 2 | # Creating NeuroML models 3 | 4 | There are 3 main ways of developing a new model of a neuronal system in NeuroML 5 | 6 | **1) Reuse elements from previous NeuroML models** 7 | 8 | There are an increasing number of resources where you can find and analyse previously developed NeuroML models to use as the basis for a new model. See {ref}`here ` for details. 9 | 10 | (userdocs:creating_models:from_scratch)= 11 | **2) Writing models from scratch using Python NeuroML tools** 12 | 13 | The toolchain around NeuroML means that it is possible to create a model in NeuroML format from the start. Please see the {ref}`Getting Started with NeuroML section ` for quick examples on how you can use {ref}`pyNeuroML ` to create NeuroML models and run them. 
14 | 15 | (userdocs:creating_models:from_published)= 16 | **3) Convert a published model developed in a simulator specific format to NeuroML** 17 | 18 | Most computational models used in publications are released in the particular format used by the authors during their research, often in a general purpose simulator like {ref}`NEURON `. Many of these can be found on [ModelDB](https://senselab.med.yale.edu/ModelDB/default). Converting one of these to NeuroML format will mean that all further developments/modifications of the model will be standards compliant, and will give access to all of the NeuroML compliant tools for visualising/analysing/optimising/sharing the model, as well as providing multiple options for executing the model across multiple simulators. 19 | 20 | The next page is a **step by step guide** to creating a new NeuroML model based on an existing published model, verifying its behaviour, and sharing it with the community on the Open Source Brain platform. 21 | -------------------------------------------------------------------------------- /source/Userdocs/FAQ.md: -------------------------------------------------------------------------------- 1 | (userdocs:faq)= 2 | # Frequently asked questions (FAQ) 3 | 4 | ```{admonition} Please help improve the FAQ. 5 | :class: note 6 | This page lists some commonly asked questions related to NeuroML. 7 | Please [open issues](https://github.com/NeuroML/Documentation/issues) to add more entries to this FAQ. 8 | ``` 9 | 10 | (userdocs:faq:zero_length_segments)= 11 | ## 1. Are length 0 segments allowed in NeuroML? 12 | 13 | Discussion link: https://github.com/NeuroML/NeuroML2/issues/115 14 | 15 | There are a lot of SWC reconstructions which have adjacent points, which would get converted to zero length segments. 16 | This shouldn't be an issue for most visualisation applications, so no need for them to say that they can't visualise the cell if they see it's invalid. 
17 | 18 | The `jnml -validate` option could throw a warning when it sees these segments, but currently doesn't (it could be added [here](https://github.com/NeuroML/org.neuroml.model/blob/development/src/main/java/org/neuroml/model/util/NeuroML2Validator.java#L199)). 19 | 20 | For individual simulators, they could have an issue with this, if they map each segment to a compartment (as Moose might), but for NEURON using cables/sections with multiple segments, it shouldn't matter as long as the section does not consist of just that one segment. 21 | 22 | So ideally it should be the application which loads the NeuroML in (or the conversion/export code) which decides whether this is an issue. 23 | 24 | (userdocs:faq:pyneuroml_or_neuroml)= 25 | ## 2. What is the difference between reader/writer methods in pyNeuroML and libNeuroML? 26 | 27 | Both {ref}`libNeuroML ` and {ref}`pyNeuroML ` include methods that can read and write NeuroML files. 28 | However, they are not the same. 29 | 30 | libNeuroML is the low-level Python API for working with NeuroML. 31 | The loaders/writers included here can therefore read/write NeuroML files. 32 | However, these are "low level" functions and do not include additional features. 33 | 34 | The readers/writers in pyNeuroML use these low-level functions from libNeuroML but also run other checks and include other features. 35 | 36 | So: 37 | 38 | - [pyneuroml.io.read_neuroml2_file](https://pyneuroml.readthedocs.io/en/latest/pyneuroml.io.html#pyneuroml.io.read_neuroml2_file) should be preferred over [neuroml.loaders.read_neuroml2_file](https://libneuroml.readthedocs.io/en/latest/userdocs/loaders.html#neuroml.loaders.read_neuroml2_file): it also allows pre-loading validation checks, and it also handles morphologies referenced in other files. 
39 | - [pyneuroml.io.write_neuroml2_file](https://pyneuroml.readthedocs.io/en/latest/pyneuroml.io.html#pyneuroml.io.write_neuroml2_file) should be preferred over [neuroml.writers.NeuroMLWriter.write](https://libneuroml.readthedocs.io/en/latest/userdocs/writers.html#neuroml.writers.NeuroMLWriter.write): it also validates the file after writing it. 40 | -------------------------------------------------------------------------------- /source/Userdocs/GetNeuroML.md: -------------------------------------------------------------------------------- 1 | (userdocs:get_neuroml)= 2 | # Get NeuroML 3 | 4 | While one can use Jupyter Notebooks on different platforms ([Binder](https://binder.org)/[Open Source Brain v2](https://v2.opensourcebrain.org)/[Google Colab](https://colab.research.google.com)) to work with NeuroML models (for example, the tutorials in this documentation can mostly be run on Jupyter Notebooks), for certain use cases, it may be preferable/necessary to install the stack on our own computers. 5 | One such use case, for example, is when one needs to run large scale simulations that require supercomputers/clusters to simulate. 
6 | 7 | ## What you need 8 | 9 | The NeuroML stack is written primarily in Python and Java, and so requires: 10 | 11 | - a [supported](https://devguide.python.org/versions/), working [Python](https://www.python.org/downloads/) installation 12 | - a working Java Runtime Environment (JRE) 13 | 14 | The software stack is currently [tested on](https://github.com/NeuroML/jNeuroML/blob/master/.github/workflows/ci.yml#L19C15-L19C44): 15 | 16 | - Python versions: 3.8--3.12 (3.11 is preferred) 17 | - Java versions 8, 11, 16, 17, 19 on these [operating systems (OS)](https://github.com/actions/runner-images): Ubuntu 22.04 ("ubuntu-latest"), MacOS 14 Arm 64 ("macos-latest"), Windows 2019 ("windows-2019") 18 | 19 | 20 | Once you have these programming languages installed, all you need to do is install {ref}`pyNeuroML `, and that will install the other parts of the NeuroML software stack for you too. 21 | Please see the {ref}`pyNeuroML ` page for more details. 22 | -------------------------------------------------------------------------------- /source/Userdocs/GettingStarted.md: -------------------------------------------------------------------------------- 1 | (userdocs:getting_started_neuroml)= 2 | # Getting started with NeuroML 3 | 4 | The best way to understand NeuroML is to work through NeuroML examples to see how they are constructed and what they can do. 5 | We present below a set of step-by-step guides to illustrate how models are written and simulated using NeuroML. 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 |
        
15 | 16 | 17 | | Link to guide | Description | Model life cycle stages | 18 | | :------ | ----------- | ----------------------- | 19 | | | **Introductory guides** || 20 | | {ref}`Guide 1 ` | Create and simulate a simple regular spiking Izhikevich neuron in NeuroML | Create, Validate, Simulate | 21 | | {ref}`Guide 2 `| Create a network of two synaptically connected populations of Izhikevich neurons | Create, Validate, Visualise, Simulate | 22 | | {ref}`Guide 3 `| Build and simulate a single compartment Hodgkin-Huxley neuron | Create, Validate, Visualise, Simulate | 23 | | {ref}`Guide 4 `| Create and simulate a multi compartment hippocampal OLM neuron | Create, Validate, Visualise, Simulate | 24 | | | **Advanced guides** || 25 | | [Guide 5](https://docs.neuroml.org/Userdocs/NML2_examples/NeuroML-DB.html) | Create novel NeuroML models from components on NeuroML-DB | Reuse, Create, Validate, Simulate | 26 | | {ref}`Guide 6 ` | Optimise/fit NeuroML models to experimental data | Create, Validate, Simulate, Fit | 27 | | {ref}`Guide 7 `| Extend NeuroML by creating a novel model type in LEMS | Create, Simulate | 28 | | | **Step by step walkthroughs** || 29 | | {ref}`Guide 8 `| Guide to converting cell models to NeuroML and sharing them on Open Source Brain | Create, Validate, Simulate, Share | 30 | | {ref}`Guide 9 `| Conversion of Ray et al 2020 {cite}`Ray2020` to NeuroML | Create, Validate, Visualise, Simulate, Extend using LEMS | 31 | 32 | 33 | You do not need to install any software on your computers to run many of the examples above. 34 | These examples are followed by a [Jupyter notebook](https://jupyter.org/index.html) for you to experiment with inside your browser ({ref}`more info `). 
35 | -------------------------------------------------------------------------------- /source/Userdocs/HDF5.md: -------------------------------------------------------------------------------- 1 | (userdocs:hdf5)= 2 | # HDF5 support 3 | 4 | The XML serializations of large NeuroML models can be prohibitive to store. 5 | For such cases, NeuroML also includes support for saving models in the binary [HDF5](https://www.hdfgroup.org/solutions/hdf5) format via the [NeuroMLHdf5Writer in libNeuroML](https://libneuroml.readthedocs.io/en/stable/userdocs/writers.html#neuroml.writers.NeuroMLHdf5Writer). The same format can be exported also from the Java API ([example](https://github.com/NeuroML/org.neuroml.model/blob/master/src/test/java/org/neuroml/model/test/HDF5Test.java)). 6 | 7 | The format of the export is documented below: 8 | 9 | 10 | - {ref}`Network ` is exported as a `network` HDF5 group with `id`, `notes`, and the `temperature` (optional) stored as attributes. 11 | - {ref}`Population ` is exported as a group with id `population_` with `id`, `component`, `size`, `type`, and `property` tags stored as attributes. 12 | - If the population is a {ref}`population list ` that includes {ref}`instances ` of cells, the locations of cells (x, y, z), these are stored in a 3 column table ("chunked array") with a row per instance. 13 | 14 | - {ref}`Projection ` is exported as a group with id `project_` with `id`, `type`, `presynapticPopulation`, `postSynapticPopulation`, `synapse` as attributes. 15 | - {ref}`Connection ` and {ref}`ConnectionWD ` elements in projections are stored as rows in a table with the first two columns as the `pre_cell_id` and `post_cell_id` respectively, and the successive columns for the necessary attributes. 16 | 17 | - {ref}`ElectricalProjection ` is exported similar to Projection with the {ref}`ElectricalConnection `, {ref}`ElectricalConnectionInstance `, and {ref}`ElectricalConnectionInstanceW ` entries stored in tables. 
18 | - {ref}`ContinuousProjection ` is exported similar to Projection with the {ref}`ContinuousConnection `, {ref}`ContinuousConnectionInstance `, and {ref}`ContinuousConnectionInstanceW ` entries stored in tables. 19 | - {ref}`InputList ` is exported similar to Projection with the {ref}`Input `, and {ref}`InputW ` entries stored in tables. 20 | 21 | 22 | For more details, the source code of these export functions can be seen [here in the libNeuroML repository](https://github.com/NeuralEnsemble/libNeuroML/blob/2d8112178d8d82b07a20f8395ec22a23a6323a6c/neuroml/nml/helper_methods.py#L2548) and [here in org.neuroml.model](https://github.com/NeuroML/org.neuroml.model/blob/master/src/main/java/org/neuroml/model/util/hdf5/NeuroMLHDF5Writer.java). 23 | 24 | HDF5 NeuroML files can be read and processed by `jnml` and `pynml` in the same way as XML files (see [here](https://github.com/OpenSourceBrain/OpenCortex/tree/master/examples/HDF5) for LEMS Simulation file examples which reference HDF5 NeuroML models). 25 | -------------------------------------------------------------------------------- /source/Userdocs/LEMSExample2.md: -------------------------------------------------------------------------------- 1 | (userdocs:lemsexample2)= 2 | # Example 2: tidying up example 1 3 | 4 | This models is the same as in example 1, except that the definitions have been split out into several self-contained files. 5 | 6 | The main file, included below, uses the Include element to include definitions from other files. 7 | Each file is only read once, even if several files include it. 8 | Because some of these files, such as the HH channel definitions, are intended to be used on their own, they include all the dimension definitions they need. 9 | These may also occur in other files with the same dimension names. 10 | This is fine as long as the dimensions being declared are the same. 11 | An error will be reported if a new definition is supplied that changes any of the values. 
12 | The same applies for Unit definitions. 13 | For other element types names and ids must be unique. 14 | An id or name can't appear twice, even if the content of the elements is the same. 15 | 16 | # Main model 17 | This defines a few components, then a network that uses them and a simulation to run it all. The HHCell component refers to channel types coming from the included hhmodels.xml file which in turn depends on hhcell.xml and hhchannel.xml. 18 | ```{literalinclude} ./LEMS_examples/example2.xml 19 | ---- 20 | language: xml 21 | ---- 22 | ``` 23 | 24 | # Included files 25 | 26 | ```{literalinclude} ./LEMS_examples/ex2dims.xml 27 | ---- 28 | language: xml 29 | ---- 30 | ``` 31 | 32 | The file hhchannel.xml contains complete definitions of a fairly general HH-style channel model with any number of gates based on the three standard types used in the original HH work. 33 | 34 | 35 | ```{literalinclude} ./LEMS_examples/hhchannel.xml 36 | ---- 37 | language: xml 38 | ---- 39 | ``` 40 | As mentioned in example1, the numerics are too feeble to cope with this gate definition though, so a change of variables is employed instead: 41 | 42 | ```{literalinclude} ./LEMS_examples/hhaltgate.xml 43 | ---- 44 | language: xml 45 | ---- 46 | ``` 47 | 48 | The file hhcell.xml defines a simple cell model with some populations of HH channels. 49 | 50 | 51 | ```{literalinclude} ./LEMS_examples/hhcell.xml 52 | ---- 53 | language: xml 54 | ---- 55 | ``` 56 | 57 | A couple of spike generators. 58 | 59 | ```{literalinclude} ./LEMS_examples/spikegenerators.xml 60 | ---- 61 | language: xml 62 | ---- 63 | ``` 64 | 65 | And now the components themselves. 66 | These are the standard HH sodium and potassium channels (as used in Rallpack3). 67 | 68 | 69 | ```{literalinclude} ./LEMS_examples/hhmodels.xml 70 | ---- 71 | language: xml 72 | ---- 73 | ``` 74 | 75 | Some miscellaneous iaf models. 
76 | 77 | ```{literalinclude} ./LEMS_examples/misciaf.xml 78 | ---- 79 | language: xml 80 | ---- 81 | ``` 82 | 83 | Finally, a small collection of dimension definitions useful for things like the miscellaneous iaf cell definitions. 84 | 85 | 86 | ```{literalinclude} ./LEMS_examples/elecdims.xml 87 | ---- 88 | language: xml 89 | ---- 90 | ``` 91 | -------------------------------------------------------------------------------- /source/Userdocs/LEMSExample3.md: -------------------------------------------------------------------------------- 1 | (userdocs:lemsexample3)= 2 | # Example 3: Connection dependent synaptic components 3 | 4 | In many models, a synapse is only created where a connection exists. 5 | This means that the model of the receiving cell should only declare that particular types of synapse can be added to it, not the actual synapse sub-components themselves. 6 | 7 | Not much is needed beyond the elements described in example 1 except for some extensions to the component that declares the connectivity and a new child element in the component that the synapses are attached to. 8 | The full example is shown below. 9 | The synapse type includes an EventPort just like the previously defined cell type. 10 | The cell type however includes a new child element: Attachments defined as: 11 | 12 | ```{code-block} xml 13 | 14 | ``` 15 | 16 | This operates rather like the Children element except that when a component is defined using this type the sub-elements are not included in the component definition. 17 | Instead it indicates that instances of components of the particular type may be attached later when the model is actually run. 
18 | -------------------------------------------------------------------------------- /source/Userdocs/LEMSExample8.md: -------------------------------------------------------------------------------- 1 | (userdocs:lemsexample8)= 2 | # Example 8: Regimes in Dynamics definitions 3 | 4 | This example introduces the Regime, Transition and OnEntry elements within a Dynamics block. 5 | Rather than having a single state instance, the entity can be on one of the defined regimes at any given time. 6 | The Transition element occurring inside a condition block serves to move it from one regime to another. 7 | The OnEntry block inside a regime can contain initialization directives that apply each time the entity enters that regime. 8 | 9 | ```{code-block} xml 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | ``` 50 | 51 | Full listing: 52 | ```{literalinclude} ./LEMS_examples/example8.xml 53 | ---- 54 | language: xml 55 | ---- 56 | ``` 57 | -------------------------------------------------------------------------------- /source/Userdocs/LEMSSchema.md: -------------------------------------------------------------------------------- 1 | (userdocs:lemsschema)= 2 | # LEMS 3 | 4 | The current version of the LEMS specification is 0.7.6 and the schema for this can be seen [here](https://github.com/LEMS/LEMS/blob/master/Schemas/LEMS/LEMS_v0.7.6.xsd). 5 | The following figure, taken from Cannon et al. 2014 ({cite}`Cannon2014`) shows the structure of LEMS models. 6 | The following pages give details of all the elements that are included in LEMS. 7 | For examples on LEMS, and using LEMS to extend NeuroML, please see the relevant sections in the documentation. 
8 | 9 | ```{figure} ../images/lems-figure2.png 10 | :alt: Structure of LEMS models 11 | :align: center 12 | :width: 80% 13 | 14 | *(A)* Models in LEMS are specified using ComponentType definitions with nested 15 | Dynamics elements. Any Parameter or StateVariable declaration must refer to a 16 | Dimension element defined at the top level. A Component element sets parameter 17 | values for a particular instance of a ComponentType. Each Parameter value must 18 | refer to one of the Unit elements defined at the top level. The Dynamics 19 | element supports continuous time systems defined in terms of first order 20 | differential equations, and event driven processing as specified by the various 21 | "On. . ." elements. Multiple Regimes, each with independent TimeDerivative 22 | expressions can be defined, along with the rules to transition between them. 23 | *(B)* Example of a ComponentType, the passive channel model from Figure 1. 24 | *(C)* The XML equivalent of the ComponentType (top) and Component (bottom) for this 25 | model. *(D)* Defining containment in LEMS, using Child (exactly one sub element 26 | of the given type) or Children (zero or multiple copies). **(E)** Extension in 27 | LEMS. Extending ComponentTypes inherit the structure of the base type. Example 28 | Components in XML are shown in *(D,E)*. 29 | 30 | ``` 31 | 32 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_elements/DefiningComponents.md: -------------------------------------------------------------------------------- 1 | 2 | (lemsschema:page:defining_components_)= 3 | # Defining Components 4 | 5 | 6 | 7 | Schema against which LEMS based on these should be valid: [LEMS_v0.7.6.xsd](https://github.com/LEMS/LEMS/tree/master/Schemas/LEMS/LEMS_v0.7.6.xsd). 8 | Generated on 18/06/24 from [this](https://github.com/LEMS/LEMS/commit/fd7b30eceb6735ac343745c8f6992bdde72b248b) commit. 
9 | Please file any issues or questions at the [issue tracker here](https://github.com/LEMS/LEMS/issues). 10 | 11 | --- 12 | 13 | (lemsschema:component_)= 14 | ## Component 15 | 16 | 17 | 18 | `````{tab-set} 19 | ````{tab-item} Properties 20 | ```{csv-table} 21 | :widths: 1, 2, 7 22 | :width: 100% 23 | :delim: $ 24 | 25 | **id**$ String$ 26 | **name**$ String$ Name by which the component was declared - this shouldn't be accessible. 27 | **declaredType**$ String$ Name by which the component was declared - this shouldn't be accessible. 28 | **type**$ String$ 29 | **eXtends**$ String$ 30 | 31 | ``` 32 | ```` 33 | 34 | ````{tab-item} can contain these elements 35 | ```{csv-table} 36 | :widths: 2, 8 37 | :width: 100% 38 | :delim: $ 39 | 40 | **insertions**$ {ref}`lemsschema:insertion_` 41 | **components**$ {ref}`lemsschema:component_` 42 | **abouts**$ {ref}`lemsschema:about_` 43 | **metas**$ {ref}`lemsschema:meta_` 44 | 45 | ``` 46 | ```` 47 | 48 | ````{tab-item} Schema 49 | ```{code-block} xml 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | ``` 58 | ```` 59 | 60 | 61 | ````{tab-item} Usage: XML 62 | ```{code-block} xml 63 | 64 | ``` 65 | ```{code-block} xml 66 | 67 | ``` 68 | ```{code-block} xml 69 | 70 | ``` 71 | ```{code-block} xml 72 | 73 | ``` 74 | ```{code-block} xml 75 | 76 | ``` 77 | ```` 78 | ````` -------------------------------------------------------------------------------- /source/Userdocs/LEMS_elements/Geometry.md: -------------------------------------------------------------------------------- 1 | 2 | (lemsschema:page:geometry_)= 3 | # Geometry 4 | 5 | 6 | 7 | Schema against which LEMS based on these should be valid: [LEMS_v0.7.6.xsd](https://github.com/LEMS/LEMS/tree/master/Schemas/LEMS/LEMS_v0.7.6.xsd). 8 | Generated on 18/06/24 from [this](https://github.com/LEMS/LEMS/commit/fd7b30eceb6735ac343745c8f6992bdde72b248b) commit. 9 | Please file any issues or questions at the [issue tracker here](https://github.com/LEMS/LEMS/issues). 
10 | 11 | --- 12 | 13 | (lemsschema:geometry_)= 14 | ## Geometry 15 | 16 | Specifies the geometrical interpretation of the properties of components realizing this ComponentType. 17 | 18 | `````{tab-set} 19 | ````{tab-item} can contain these elements 20 | ```{csv-table} 21 | :widths: 2, 8 22 | :width: 100% 23 | :delim: $ 24 | 25 | **frustums**$ {ref}`lemsschema:frustum_` 26 | **solids**$ {ref}`lemsschema:solid_` 27 | **skeletons**$ {ref}`lemsschema:skeleton_` 28 | 29 | ``` 30 | ```` 31 | ````` 32 | (lemsschema:frustum_)= 33 | ## Frustum 34 | 35 | 36 | 37 | 38 | (lemsschema:solid_)= 39 | ## Solid 40 | 41 | 42 | 43 | 44 | (lemsschema:location_)= 45 | ## Location 46 | 47 | 48 | 49 | 50 | (lemsschema:skeleton_)= 51 | ## Skeleton 52 | 53 | 54 | 55 | `````{tab-set} 56 | ````{tab-item} can contain these elements 57 | ```{csv-table} 58 | :widths: 2, 8 59 | :width: 100% 60 | :delim: $ 61 | 62 | **scalarFields**$ {ref}`lemsschema:scalarfield_` 63 | 64 | ``` 65 | ```` 66 | ````` 67 | (lemsschema:scalarfield_)= 68 | ## ScalarField 69 | 70 | 71 | 72 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_elements/Procedure.md: -------------------------------------------------------------------------------- 1 | 2 | (lemsschema:page:procedure_)= 3 | # Procedure 4 | 5 | 6 | 7 | Schema against which LEMS based on these should be valid: [LEMS_v0.7.6.xsd](https://github.com/LEMS/LEMS/tree/master/Schemas/LEMS/LEMS_v0.7.6.xsd). 8 | Generated on 18/06/24 from [this](https://github.com/LEMS/LEMS/commit/fd7b30eceb6735ac343745c8f6992bdde72b248b) commit. 9 | Please file any issues or questions at the [issue tracker here](https://github.com/LEMS/LEMS/issues). 
10 | 11 | --- 12 | 13 | (lemsschema:procedure_)= 14 | ## Procedure 15 | 16 | 17 | 18 | `````{tab-set} 19 | ````{tab-item} can contain these elements 20 | ```{csv-table} 21 | :widths: 2, 8 22 | :width: 100% 23 | :delim: $ 24 | 25 | **statements**$ {ref}`lemsschema:statement_` 26 | 27 | ``` 28 | ```` 29 | ````` 30 | (lemsschema:equilibrate_)= 31 | ## Equilibrate 32 | 33 | 34 | 35 | 36 | (lemsschema:foreachcomponent_)= 37 | ## ForEachComponent 38 | 39 | 40 | 41 | `````{tab-set} 42 | ````{tab-item} can contain these elements 43 | ```{csv-table} 44 | :widths: 2, 8 45 | :width: 100% 46 | :delim: $ 47 | 48 | **statements**$ {ref}`lemsschema:statement_` 49 | 50 | ``` 51 | ```` 52 | ````` 53 | (lemsschema:print_)= 54 | ## Print 55 | 56 | 57 | 58 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/elecdims.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/ex2dims.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/example2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/example7.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 
| 12 | 13 | 14 | 15 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/hhaltgate.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/hhcell.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/hhchannel.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/hhmodels.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 
-------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/lems_example4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/LEMS_examples/lems_example4.png -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/lems_example6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/LEMS_examples/lems_example6.png -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/lems_example7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/LEMS_examples/lems_example7.png -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/misciaf.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /source/Userdocs/LEMS_examples/spikegenerators.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /source/Userdocs/MissionAndAims.md: -------------------------------------------------------------------------------- 1 | 
(missions_and_aims)= 2 | # Mission and Aims 3 | 4 | Computational models, based on detailed neuroanatomical and electrophysiological data, are heavily used as an aid for understanding the nervous system. 5 | NeuroML is an international, collaborative initiative to develop a language for describing detailed models of neural systems, which will serve as a 6 | standard data format for defining and exchanging descriptions of neuronal cell and network models. 7 | 8 | NeuroML specifications are developed by the {ref}`NeuroML Editorial Board ` and overseen by its {ref}`Scientific Committee `. 9 | NeuroML is [endorsed by the INCF](https://www.incf.org/sbp/neuroml), and is also an official [COMBINE standard](http://co.mbine.org/standards/neuroml). 10 | 11 | The NeuroML project community develops an [XML (eXtensible Markup Language)](https://en.wikipedia.org/wiki/XML) based description language where [XML Schemas](https://www.w3schools.com/xml/schema_intro.asp) are used to define model specifications. 12 | The community also develops and maintains a number of libraries (in {ref}`Python, Java and other languages `) to facilitate use of these specifications. 
13 | 14 | The **aims of the NeuroML initiative** are: 15 | 16 | - To create specifications for an XML-based language that describes the biophysics, anatomy and network architecture of neuronal systems at multiple scales 17 | - To facilitate the exchange of complex neuronal models between researchers, allowing for greater transparency and accessibility of models 18 | - To promote software tools which support NeuroML and support the development of new software and databases for neural modeling 19 | - To encourage researchers with models within the scope of NeuroML to exchange and publish their models in this format 20 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/FergusonEtAl2015_PYR3.nwb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/FergusonEtAl2015_PYR3.nwb -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/FittedIzhFergusonPyr3.net.nml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_cell.nml: -------------------------------------------------------------------------------- 1 | 2 | HH cell 3 | 4 | 5 | 6 | 7 | A single compartment HH cell 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_k_channel.nml: -------------------------------------------------------------------------------- 1 | 2 | k channel for HH neuron 3 | 4 | Potassium channel for HH cell 5 | 6 | n gate for k channel 7 | 8 | 9 | 10 | 11 | 12 
| -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_k_channel_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/HH_example_k_channel_1.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_k_channel_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/HH_example_k_channel_2.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_leak_channel.nml: -------------------------------------------------------------------------------- 1 | 2 | leak channel for HH neuron 3 | 4 | Leak conductance 5 | 6 | 7 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_na_channel.nml: -------------------------------------------------------------------------------- 1 | 2 | Na channel for HH neuron 3 | 4 | Sodium channel for HH cell 5 | 6 | m gate for na channel 7 | 8 | 9 | 10 | 11 | h gate for na channel 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_na_channel_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/HH_example_na_channel_1.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_na_channel_2.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/HH_example_na_channel_2.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_net.nml: -------------------------------------------------------------------------------- 1 | 2 | HH cell network 3 | 4 | 5 | Simple pulse generator 6 | 7 | 8 | 9 | A population for our cell 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_example_net.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/HH_example_net.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_single_compartment_example_sim-i.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/HH_single_compartment_example_sim-i.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_single_compartment_example_sim-iden.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/HH_single_compartment_example_sim-iden.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/HH_single_compartment_example_sim-v.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/HH_single_compartment_example_sim-v.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/IzNet-1.gv.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/IzNet-1.gv.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/IzNet.gv.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/IzNet.gv.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/LEMS_HH_single_compartment_example_sim.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/LEMS_example-izhikevich2007cell-sim.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/LEMS_example-single-izhikevich2007cell-sim.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 
-------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/LEMS_example_izhikevich2007network_sim.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/LEMS_example_izhikevich2007network_sim.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/LEMS_example_izhikevich2007network_sim.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/LEMS_fitted_izhikevich_sim.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/LEMS_lorenz.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/LEMS_olm_example_sim.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 
-------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/LorenzLems.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import lems.api as lems 4 | from lems.base.util import validate_lems 5 | 6 | model = lems.Model() 7 | 8 | model.add(lems.Dimension(name="time", t=1)) 9 | model.add(lems.Unit(name="second", symbol="s", dimension="time", power=1)) 10 | model.add(lems.Unit(name="milli second", symbol="ms", dimension="time", power=-3)) 11 | 12 | lorenz = lems.ComponentType(name="lorenz1963", description="The Lorenz system is a simplified model for atomspheric convection, derived from the Navier Stokes equations") 13 | model.add(lorenz) 14 | 15 | lorenz.add(lems.Parameter(name="sigma", dimension="none", description="Prandtl Number")) 16 | lorenz.add(lems.Parameter(name="beta", dimension="none", description="Also named b elsewhere")) 17 | lorenz.add(lems.Parameter(name="rho", dimension="none", description="Related to the Rayleigh number, also named r elsewhere")) 18 | 19 | 20 | lorenz.add(lems.Parameter(name="x0", dimension="none")) 21 | lorenz.add(lems.Parameter(name="y0", dimension="none")) 22 | lorenz.add(lems.Parameter(name="z0", dimension="none")) 23 | 24 | lorenz.add(lems.Exposure(name="x", dimension="none")) 25 | lorenz.add(lems.Exposure(name="y", dimension="none")) 26 | lorenz.add(lems.Exposure(name="z", dimension="none")) 27 | 28 | lorenz.add(lems.Constant(name="sec", value="1s", dimension="time")) 29 | 30 | lorenz.dynamics.add(lems.StateVariable(name="x", dimension="none", exposure="x")) 31 | lorenz.dynamics.add(lems.StateVariable(name="y", dimension="none", exposure="y")) 32 | lorenz.dynamics.add(lems.StateVariable(name="z", dimension="none", exposure="z")) 33 | 34 | lorenz.dynamics.add(lems.TimeDerivative(variable="x", value="( sigma * (y - x)) / sec")) 35 | lorenz.dynamics.add(lems.TimeDerivative(variable="y", value="( rho * x - y - x * 
z ) / sec")) 36 | lorenz.dynamics.add(lems.TimeDerivative(variable="z", value="( x * y - beta * z) / sec")) 37 | 38 | onstart = lems.OnStart() 39 | onstart.add(lems.StateAssignment(variable="x", value="x0")) 40 | onstart.add(lems.StateAssignment(variable="y", value="y0")) 41 | onstart.add(lems.StateAssignment(variable="z", value="z0")) 42 | lorenz.dynamics.add(onstart) 43 | 44 | 45 | model.add(lems.Component(id_="lorenzCell", type_=lorenz.name, sigma="10", 46 | beta="2.67", rho="28", x0="1.0", y0="1.0", z0="1.0")) 47 | 48 | file_name = "LEMS_lorenz.xml" 49 | model.export_to_file(file_name) 50 | 51 | 52 | validate_lems(file_name) 53 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/SingleNeuron.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/SingleNeuron.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/TuneIzhFergusonPyr3.net.nml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/example-single-izhikevich2007cell-sim-v.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/example-single-izhikevich2007cell-sim-v.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/example_izhikevich2007network_sim-spikes.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/example_izhikevich2007network_sim-spikes.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/fitted_izhikevich_fitness.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/fitted_izhikevich_fitness.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/fitted_izhikevich_hist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/fitted_izhikevich_hist.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/fitted_izhikevich_output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/fitted_izhikevich_output.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/fitted_izhikevich_scatter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/fitted_izhikevich_scatter.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/fitted_izhikevich_screenshot_nwbexplorer.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/fitted_izhikevich_screenshot_nwbexplorer.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/fitted_izhikevich_sim-exp-v.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/fitted_izhikevich_sim-exp-v.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/fitted_izhikevich_sim-model-v.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/fitted_izhikevich_sim-model-v.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/izhikevich2007_single_cell_network.nml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/lems_sim/LEMS_SimulationExample.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/lems_sim/create_lems.py: -------------------------------------------------------------------------------- 1 | from pyneuroml.lems import LEMSSimulation 2 | 3 | ls = LEMSSimulation('sim1', 500, 0.05, 'net1') 4 | ls.include_neuroml2_file('NML2_SingleCompHHCell.nml') 5 | 6 | 
ls.create_display('display0', "Voltages", "-90", "50") 7 | ls.add_line_to_display('display0', "v", "hhpop[0]/v", "1mV", "#ffffff") 8 | 9 | ls.create_output_file('Volts_file', "v.dat") 10 | ls.add_column_to_output_file('Volts_file', 'v', "hhpop[0]/v") 11 | 12 | ls.save_to_file() 13 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/matplotlibrc: -------------------------------------------------------------------------------- 1 | font.size: 16 2 | axes.titlesize: 16 3 | axes.labelsize: 16 4 | xtick.major.size: 16 5 | ytick.major.size: 16 6 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm-example/HCNolm.channel.nml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | NeuroML file containing a single ion channel 5 | 6 | 7 | 8 | Hyperpolarization-activated, CN-gated h channel description (from Lawrence 2006) 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm-example/Kdrfast.channel.nml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | NeuroML file containing a single ion channel 5 | 6 | 7 | 8 | Fast delayed rectifier K+ channel description (from Yuen and Durand 1991, modeled by Marianne Bezaire) 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm-example/KvAolm.channel.nml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | NeuroML file containing a single ion channel 5 | 6 | 7 | 8 | A-type K+ channel description (from Zhang and McBain 1995, Martina 1998, 
Warman 1994 - modeled by Marianne Bezaire) 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm-example/Nav.channel.nml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | NeuroML file containing a single ion channel 5 | 6 | 7 | 8 | Na+ channel description (modeled by Marianne Bezaire) 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm-example/leak_chan.channel.nml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | NeuroML file containing a single passive Channel description 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm.cell.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/olm.cell.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm.cell.xy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/olm.cell.xy.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm_example_net.nml: -------------------------------------------------------------------------------- 1 | 2 | OLM cell network 3 | 4 | 5 | Simple pulse 
generator 6 | 7 | 8 | 9 | A population for our cell 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm_example_sim_seg0_axon0-v.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/olm_example_sim_seg0_axon0-v.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm_example_sim_seg0_soma0-v.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/olm_example_sim_seg0_soma0-v.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm_example_sim_seg1_axon0-v.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/olm_example_sim_seg1_axon0-v.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/olm_example_sim_seg1_soma0-v.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/olm_example_sim_seg1_soma0-v.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/single_hh_cell_network.gv.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/single_hh_cell_network.gv.png -------------------------------------------------------------------------------- /source/Userdocs/NML2_examples/single_olm_cell_network.gv.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/Userdocs/NML2_examples/single_olm_cell_network.gv.png -------------------------------------------------------------------------------- /source/Userdocs/NeuroMLv1.md: -------------------------------------------------------------------------------- 1 | (userdocs:neuromlv1)= 2 | # NeuroML v1 3 | 4 | ```{warning} 5 | NeuroML v1.x is deprecated. This page is maintained for archival purposes only. 6 | 7 | Please use {ref}`NeuroML v2 `. 8 | 9 | {ref}`neuroConstruct ` can be used for converting NeuroML v1 models into NeuroML v2. 10 | ``` 11 | 12 | There are three Levels of compliance to the NeuroML v1 specifications: 13 | 14 | ## Level 1 15 | 16 | - [Metadata v1.8.1](https://github.com/NeuroML/org.neuroml1.model/blob/master/src/main/resources/NeuroML1Schemas/Level1/Metadata_v1.8.1.xsd) 17 | - [MorphML v1.8.1](https://github.com/NeuroML/org.neuroml1.model/blob/master/src/main/resources/NeuroML1Schemas/Level1/MorphML_v1.8.1.xsd) 18 | 19 | Any Level 1 NeuroML v1 file will also be compliant to [this schema](https://github.com/NeuroML/org.neuroml1.model/blob/master/src/main/resources/NeuroML1Schemas/Level1/NeuroML_Level1_v1.8.1.xsd). 
20 | 21 | ## Level 2 22 | 23 | - [Biophysics v1.8.1](https://github.com/NeuroML/org.neuroml1.model/blob/master/src/main/resources/NeuroML1Schemas/Level2/Biophysics_v1.8.1.xsd) 24 | - [ChannelML v1.8.1](https://github.com/NeuroML/org.neuroml1.model/blob/master/src/main/resources/NeuroML1Schemas/Level2/ChannelML_v1.8.1.xsd) 25 | 26 | Any Level 1 or Level 2 NeuroML v1 file will also be compliant to [this schema](https://github.com/NeuroML/org.neuroml1.model/blob/master/src/main/resources/NeuroML1Schemas/Level2/NeuroML_Level2_v1.8.1.xsd). 27 | 28 | ## Level 3 29 | 30 | - [NetworkML v1.8.1](https://github.com/NeuroML/org.neuroml1.model/blob/master/src/main/resources/NeuroML1Schemas/Level3/NetworkML_v1.8.1.xsd) 31 | 32 | Any Level 1 or Level 2 or Level 3 NeuroML v1 file will also be compliant to [this schema](https://github.com/NeuroML/org.neuroml1.model/blob/master/src/main/resources/NeuroML1Schemas/Level3/NeuroML_Level3_v1.8.1.xsd). 33 | 34 | These files are archived in [this GitHub repository](https://github.com/NeuroML/org.neuroml1.model/tree/master/src/main/resources/NeuroML1Schemas). 35 | -------------------------------------------------------------------------------- /source/Userdocs/QuantitiesAndRecording.md: -------------------------------------------------------------------------------- 1 | (userdocs:quantitiesandrecording)= 2 | # Quantities and recording 3 | 4 | In LEMS and NeuroML, `quantities` from all `exposures` and all `events` can be recorded by referring to them using {ref}`paths `. 5 | For examples, please see the {ref}`Getting Started with NeuroML ` section. 6 | 7 | (userdocs:quantitiesandrecording:events)= 8 | ## Recording events 9 | 10 | In NeuroML, all `event`s can be recorded to files declared using the {ref}`EventOutputFile ` component. 11 | Once an `EventOutputFile` has been declared, events to record can be selected using the {ref}`EventSelection ` component. 
12 | 13 | {ref}`pyNeuroML ` provides the [create_event_output_file](https://pyneuroml.readthedocs.io/en/development/pyneuroml.lems.html?highlight=add_selection_to_event_output_file#pyneuroml.lems.LEMSSimulation.LEMSSimulation.create_event_output_file) function to create a `EventOutputFile` to record `events` to, and the [add_selection_to_event_output_file](https://pyneuroml.readthedocs.io/en/development/pyneuroml.lems.html?highlight=add_selection_to_event_output_file#pyneuroml.lems.LEMSSimulation.LEMSSimulation.add_selection_to_event_output_file) function to record `events` to the declared data file(s). 14 | 15 | (userdocs:quantitiesandrecording:exposures)= 16 | ## Recording quantities from exposures 17 | In NeuroML, all `quantities` can be recorded to files declared using the {ref}`OutputFile ` component. 18 | Once the `OutputFile` has been declared, `quantities` to record can be selected using the {ref}`OutputColumn ` component. 19 | 20 | {ref}`pyNeuroML ` provides the [create_output_file](https://pyneuroml.readthedocs.io/en/development/pyneuroml.lems.html?highlight=add_selection_to_event_output_file#pyneuroml.lems.LEMSSimulation.LEMSSimulation.create_event_output_file) function to create a `OutputFile` to record `quantities` to, and the [add_column_to_output_file](https://pyneuroml.readthedocs.io/en/development/pyneuroml.lems.html?highlight=add_selection_to_event_output_file#pyneuroml.lems.LEMSSimulation.LEMSSimulation.create_event_output_file) function to select `quantities` to record to the declared data file(s). 21 | -------------------------------------------------------------------------------- /source/Userdocs/Software/MatLab.md: -------------------------------------------------------------------------------- 1 | (matlab)= 2 | # MatLab NeuroML Toolbox 3 | 4 | The NeuroML 2 Toolbox for MATLAB facilitates access to the Java NeuroML 2 API functionality ({ref}`jNeuroML `) directly within Matlab. 
5 | 6 | (neuromlmatlab:quickstart)= 7 | ## Quick start 8 | 9 | Please install jNeuroML following the instructions provided {ref}`here `. 10 | Run Matlab and run the `prefdir` command to find the location of your preferences folder. 11 | Create a file `javaclasspath.txt` within that folder containing, on a single line, the full path to the `jNeuroML--jar-with-dependencies.jar` from jNeuroML. 12 | 13 | Restart Matlab, and you will be able to access jNeuroML classes. 14 | You can test your setup by validating an example file: 15 | 16 | ```{code-block} 17 | import org.neuroml.model.util.NeuroML2Validator 18 | file = java.io.File('/full/path/to/model.nml'); 19 | validator = NeuroML2Validator(); 20 | validator.validateWithTests(file); 21 | disp(validator.getValidity()) 22 | ``` 23 | 24 | (neuromlmatlab:docs)= 25 | ## Documentation 26 | 27 | Please refer to the {ref}`jNeuroML documentation ` for information on the Java NeuroML API. 28 | Examples on using the Matlab toolbox are available [here](https://github.com/NeuroML/NeuroMLToolbox/blob/master/examples/run_examples.m). 29 | 30 | (neuromlmatlab:gethelp)= 31 | ## Getting help 32 | 33 | For any questions regarding the NeuroML Matlab toolbix, please open an issue on the GitHub issue tracker [here](https://github.com/NeuroML/NeuroMLToolbox/issues). 34 | Any bugs and feature requests can also be filed there. 35 | 36 | You can also use any of the {ref}`communication channels of the NeuroML community `. 37 | 38 | (neuromlmatlab:development)= 39 | ## Development 40 | 41 | The NeuroML Matlab toolbox is developed on GitHub at [https://github.com/NeuroML/NeuroMLToolbox](https://github.com/NeuroML/NeuroMLToolbox). 42 | -------------------------------------------------------------------------------- /source/Userdocs/Software/NeuroML_API.md: -------------------------------------------------------------------------------- 1 | (neuromlc++)= 2 | # NeuroML C++ API 3 | 4 | A C++ API for NeuroML. 
5 | 6 | (neuromlc++:quickstart)= 7 | ## Quick start 8 | 9 | The C++ API is generated from the {ref}`NeuroML specification ` using the [CodeSynthesis XSD XML Schema to C++ data binding compiler](https://www.codesynthesis.com/products/xsd/). 10 | The C++ API needs to be compiled from source. 11 | Please refer to the instructions in the [Readme document](https://github.com/NeuroML/NeuroML_API/blob/master/README.md) for instructions on building and installing the API. 12 | 13 | (neuromlc++:docs)= 14 | ## Documentation 15 | 16 | For information on the generated C++ structure, please see the [XSD user manual](http://www.codesynthesis.com/projects/xsd/documentation/cxx/tree/manual/). 17 | 18 | (neuromlc++:api_docs)= 19 | ### API documentation 20 | 21 | API documentation for the C++ API can be found [here](https://neuroml.github.io/NeuroML_API/). 22 | It can also be generated while building the API from source, as documented in the [Readme](https://github.com/NeuroML/NeuroML_API/blob/master/README.md). 23 | 24 | (neuromlc++:gethelp)= 25 | ## Getting help 26 | 27 | For any questions regarding the C++ NeuroML API, please open an issue on the GitHub issue tracker [here](https://github.com/NeuroML/NeuroML_API/issues). 28 | Any bugs and feature requests can also be filed there. 29 | 30 | You can also use any of the {ref}`communication channels of the NeuroML community `. 31 | 32 | (neuromlc++:development)= 33 | ## Development 34 | 35 | The C++ NeuroML API is developed on GitHub at [https://github.com/NeuroML/NeuroML_API](https://github.com/NeuroML/NeuroML_API) under the [MIT license](https://github.com/NeuroML/NeuroML_API/blob/master/License.txt). 
36 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Software.md: -------------------------------------------------------------------------------- 1 | (userdocs:software)= 2 | # Software and Tools 3 | 4 | ## Core NeuroML Tools 5 | 6 | The NeuroML initiative supports **a core set of libraries** (mainly in Python and Java) to enable the creation/validation/analysis/simulation of NeuroML models as well as to facilitate adding support for the language to other applications. 7 | 8 | ```{figure} ../../images/pynml_jnml.svg 9 | :alt: jNeuroML and pyNeuroML 10 | :align: center 11 | :width: 500px 12 | 13 | Relationship between {ref}`jLEMS `, {ref}`jNeuroML `, the {ref}`NeuroML 2 LEMS definitions `, {ref}`libNeuroML `, {ref}`pyLEMS ` and {ref}`pyNeuroML `. 14 | 15 | ``` 16 | 17 | ### Python based applications 18 | 19 | For most users, {ref}`pyNeuroML ` will provide all of the key functionality for building, validating, simulating, visualising, and converting NeuroML 2 and LEMS models. It builds on {ref}`libNeuroML ` and {ref}`pyLEMS ` and bundles all of the functionality of {ref}`jNeuroML ` to provide access to this through a Python interface. 20 | 21 | 22 | ### Java based applications 23 | 24 | {ref}`jNeuroML ` (for validating, simulating and converting NeuroML 2 models) and {ref}`jLEMS ` (for simulating LEMS models) are the key applications 25 | created in Java for supporting NeuroML 2/LEMS. 26 | 27 | ### NeuroML support in other languages 28 | 29 | There are preliminary APIs for using NeuroML in {ref}`C++ ` and {ref}`MATLAB `. 30 | 31 | ## Other NeuroML supporting applications 32 | 33 | Many other simulators, applications and libraries support NeuroML. See {ref}`here ` for more details. 34 | 35 | A number of databases and neuroinformatics initiatives support NeuroML as a core interchange format. See {ref}`here ` for more details. 
36 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/Approaches.md: -------------------------------------------------------------------------------- 1 | (userdocs:neuroml_support_approaches)= 2 | # Approaches to adding NeuroML support 3 | 4 | There are a number of ways that a neuronal simulator can add "support for NeuroML", depending on how deeply it embeds/supports the elements of the language. 5 | 6 | ## Commonly used approaches 7 | 8 | (userdocs:neuroml_support_approaches:native)= 9 | ### 1) Native support for NeuroML elements 10 | 11 | A simulator may have an equivalent internal representation of the core concepts from NeuroML2/LEMS, and so be able to natively read/write these formats. 12 | 13 | This is the approach taken in {ref}`jNeuroML ` and {ref}`EDEN `. 14 | 15 | 16 | (userdocs:neuroml_support_approaches:native_import)= 17 | ### 2) Native ability to import NeuroML elements 18 | 19 | Another approach is for simulators to natively support importing (a subset of) NeuroML models, whereby the NeuroML components are converted to the equivalent entities in the simulator's internal representation of the model. 20 | 21 | This is the approach taken in {ref}`MOOSE `, {ref}`Arbor ` and {ref}`NetPyNE `. 22 | 23 | (userdocs:neuroml_support_approaches:native_export)= 24 | ### 3) Native ability to export NeuroML elements 25 | 26 | Some simulators allow models to be created with their preferred native model description format, and then exported in valid NeuroML. 27 | 28 | This is the approach taken in {ref}`NEURON ` and {ref}`NetPyNE `. It is also possible to export {ref}`PyNN ` models to NeuroML equivalents. 29 | 30 | (userdocs:neuroml_support_approaches:mapping)= 31 | ### 4) 3rd party mapping to simulator's own format 32 | 33 | This is the approach taken in {ref}`NEURON ` via {ref}`jNeuroML `. 
34 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/Arbor.md: -------------------------------------------------------------------------------- 1 | (userdocs:arbor)= 2 | # Arbor and NeuroML 3 | 4 | ![Arbor logo](../../../images/tools/arbor.png) 5 | 6 | [Arbor](https://arbor-sim.org/) is a high performance multicompartmental neural simulation library. Addition of support for NeuroML2 and LEMS is under active development. 7 | 8 | ## Importing NeuroML into Arbor 9 | 10 | The current approach to supporting NeuroML in Arbor involves {ref}`importing NeuroML to Arbor's internal format `. 11 | 12 | See [here](https://docs.arbor-sim.org/en/stable/fileformat/neuroml.html) for Arbor's own documentation on this. It involves calling the [neuroml()](https://docs.arbor-sim.org/en/stable/python/morphology.html#arbor.neuroml) method in arbor pointing at the NeuroML file containing the cell you wish to load: 13 | 14 | ```{code-block} python 15 | nml = arbor.neuroml('mymorphology.cell.nml') 16 | 17 | ``` 18 | See [here](https://github.com/OpenSourceBrain/ArborShowcase/blob/main/NeuroML2/test_arbor.py) for a worked example of this, importing a multicompartmental cell with only a passive membrane conductance. 19 | 20 | ### Support for channels/synapses in LEMS 21 | 22 | There is work under way to allow reading of the dynamics of ion channels and synapses which are specified in LEMS into Arbor. 23 | 24 | See https://github.com/thorstenhater/nmlcc for more details. 25 | 26 | ## Network models in Arbor with NeuroMLlite 27 | 28 | There is preliminary support for building network specified in {ref}`NeuroMLlite ` format directly in Arbor. See [here](https://github.com/NeuroML/NeuroMLlite/tree/master/examples/arbor) for an example. 29 | 30 | ## Examples 31 | 32 | Example code for interactions between NeuroML models and Arbor can be found in the [Arbor Showcase](https://github.com/OpenSourceBrain/ArborShowcase) repository. 
33 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/Brian.md: -------------------------------------------------------------------------------- 1 | (userdocs:brian)= 2 | # Brian and NeuroML 3 | 4 | ![Brian logo](../../../images/tools/brian2.png) 5 | 6 | [Brian](https://briansimulator.org/) is an easy to use, Python based simulator of spiking networks. 7 | 8 | ## Converting NeuroML model to Brian 9 | 10 | {ref}`jNeuroML ` or {ref}`pyNeuroML ` can be used to convert NeuroML2/LEMS models to [Brian version 2](https://github.com/brian-team/brian2). This involves pointing at a {ref}`LEMS Simulation file ` describing what to simulate, and using the `-brian2` option: 11 | 12 | ```{code-block} console 13 | # Using jnml 14 | jnml -brian2 15 | 16 | # Using pynml 17 | pynml -brian2 18 | ``` 19 | 20 | This command generates a Python script (a file ending in `_brian2.py`) which can be run in Python and will simulate the model and plot/save the results, as outlined in the {ref}`LEMS Simulation file `. 21 | 22 | Notes: 23 | 24 | - Only single compartment cells can be converted to Brian format so far. While there is support in Brian for multicompartmental cell simulation, this is not yet covered in the jNeuroML based export. 25 | - There has been support for converting NeuroML models to Brian v1 (using `-brian`), but since this version of Brian is deprecated, and only supports Python 2, this export is no longer actively developed. 26 | - There is limited support for executing networks of cells in Brian, and the most likely route for adding this functionality is via {ref}`NeuroMLlite `. 27 | 28 | 29 | ## Examples 30 | 31 | Example code for interactions between NeuroML models and Brian can be found [here](https://github.com/OpenSourceBrain/BrianShowcase). 
32 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/EDEN.md: -------------------------------------------------------------------------------- 1 | (userdocs:eden)= 2 | # EDEN and NeuroML 3 | 4 | 5 | [EDEN](https://gitlab.com/neurocomputing-lab/Inferior_OliveEMC/eden) is a recently developed simulation engine which incorporates native NeuroML 2 support from the start. 6 | 7 | Initial tests of using EDEN with NeuroML models and example code can be found [here](https://github.com/OpenSourceBrain/EDENShowcase). 8 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/MOOSE.md: -------------------------------------------------------------------------------- 1 | (userdocs:moose)= 2 | # MOOSE and NeuroML 3 | 4 | 5 | [MOOSE](https://moose.ncbs.res.in/) is the Multiscale Object-Oriented Simulation Environment. It is the base and numerical core for large, detailed multi-scale simulations that span computational neuroscience and systems biology. It is based on a complete reimplementation of the GENESIS 2 core. 6 | 7 | Some tests of using MOOSE with NeuroML models and example code can be found in the [MOOSE Showcase](https://github.com/OpenSourceBrain/MOOSEShowcase) repository. 
8 | 9 | 10 | ## Simulating NeuroML models in MOOSE 11 | 12 | 13 | You can export NeuroML models to the MOOSE simulator format using {ref}`jNeuroML ` or {ref}`pyNeuroML `, pointing at a {ref}`LEMS Simulation file ` describing what to simulate, and using the `-moose` option: 14 | 15 | ```{code-block} console 16 | # Using jnml 17 | jnml -moose 18 | 19 | # Using pynml 20 | pynml -moose 21 | ``` 22 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/N2A.md: -------------------------------------------------------------------------------- 1 | (userdocs:n2a)= 2 | # N2A and NeuroML 3 | 4 | "Neurons to Algorithms" (N2A) is a language for modeling neural systems, along with a software tool for editing models and simulating them 5 | 6 | See [https://github.com/sandialabs/n2a/wiki/Backend%20LEMS](https://github.com/sandialabs/n2a/wiki/Backend%20LEMS) for information on the interactions between NeuroML/LEMS and N2A. 7 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/NEST.md: -------------------------------------------------------------------------------- 1 | (userdocs:nest)= 2 | # NEST and NeuroML 3 | 4 | ![NEST logo](../../../images/tools/nest-logo.png) 5 | 6 | NEST is a simulator for spiking neural network models that focuses on the dynamics, size and structure of neural systems rather than on the exact morphology of individual neurons. The development of NEST is coordinated by the NEST Initiative. 7 | 8 | NEST is ideal for networks of spiking neurons of any size, for example: 9 | 10 | - Models of information processing e.g. in the visual or auditory cortex of mammals, 11 | - Models of network activity dynamics, e.g. laminar cortical networks or balanced random networks, 12 | - Models of learning and plasticity. 
13 | 14 | See [https://github.com/OpenSourceBrain/NESTShowcase](https://github.com/OpenSourceBrain/NESTShowcase) for examples of usage of NeuroML and NEST. 15 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/NEURON.md: -------------------------------------------------------------------------------- 1 | (userdocs:neuron)= 2 | # NEURON and NeuroML 3 | 4 | ![NEURON logo](../../../images/tools/neuron.png) 5 | 6 | [NEURON](http://www.neuron.yale.edu/neuron) is a widely used simulation environment and is one of the main target platforms for a standard facilitating exchange of neuronal models. 7 | 8 | (userdocs:neuron:simulating)= 9 | ## Simulating NeuroML models in NEURON 10 | 11 | {ref}`jNeuroML ` or {ref}`pyNeuroML ` can be used to convert NeuroML2/LEMS models to NEURON. This involves pointing at a {ref}`LEMS Simulation file ` describing what to simulate, and using the `-neuron` option: 12 | 13 | ```{code-block} console 14 | # Simulate the model using NEURON with python/hoc/mod files generated by jNeuroML 15 | jnml -neuron -run 16 | 17 | # Simulate the model using NEURON with python/hoc/mod files generated by pyNeuroML 18 | pynml -neuron -run 19 | ``` 20 | 21 | These commands generate a PyNeuron script and run it (a file ending in `_nrn.py`). 22 | So you must have NEURON installed on your system, with its Python bindings (PyNeuron). 23 | Skipping the `-run` flag will generate the Python script but will not run it: you can run it manually later. 24 | Adding `-nogui` will suppress the NEURON graphical elements/menu opening and just run the model in NEURON in the background 25 | 26 | You can also run LEMS simulations using the NEURON simulator using the {ref}`pyNeuroML ` API: 27 | 28 | ```{code-block} python 29 | from pyneuroml.pynml import run_lems_with_jneuroml_neuron 30 | 31 | ... 
32 | 33 | run_lems_with_jneuroml_neuron(lems_file_name) 34 | ``` 35 | 36 | (userdocs:neuron:envvar)= 37 | ## Setting the NEURON_HOME environment variable 38 | 39 | Since it is possible to install multiple versions of NEURON in different places, the NeuroML tools need to be told where the NEURON tools are. 40 | To do this, they look at the `NEURON_HOME` environment variable. 41 | This needs to hold the path to where the binary (`bin`) folder holding the NEURON tools such as `nrniv` are located. 42 | On Linux like systems, one can use `which` to find these tools and set the variable: 43 | 44 | ``` {code-block} bash 45 | $ which nrniv 46 | ~/.local/share/virtualenvs/neuroml-311-dev/bin/nrniv 47 | 48 | $ export NEURON_HOME="~/.local/share/virtualenvs/neuroml-311-dev/" 49 | ``` 50 | 51 | One can combine these commands together also: 52 | 53 | ``` {code-block} bash 54 | $ export NEURON_HOME="$(dirname $(dirname $(which nrniv)))" 55 | ``` 56 | 57 | (userdocs:neuron:neuroconstruct)= 58 | ## Using neuroConstruct 59 | 60 | NEURON simulations can also be generated from NeuroML model components by {ref}`neuroConstruct `, but most of this functionality is related to {ref}`NeuroML v1 `. 61 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/NetPyNE.md: -------------------------------------------------------------------------------- 1 | (userdocs:netpyne)= 2 | # NetPyNE and NeuroML 3 | 4 | ![NetPyNE logo](../../../images/tools/netpyne.png) 5 | 6 | [NetPyNE](http://netpyne.org) is a Python package to facilitate the development, simulation, parallelization, analysis, and optimization of biological neuronal networks using the NEURON simulator. NetPyNE can import from and export to NeuroML. NetPyNE also provides a web based [Graphical User Interface](https://github.com/MetaCell/NetPyNE-UI/wiki). 
7 | 8 | ## Importing NeuroML into NetPyNE 9 | 10 | An example of how to import a network in NeuroML into NetPyNE can be found [here](https://github.com/Neurosim-lab/netpyne/blob/development/examples/NeuroMLImport/SimpleNet_import.py). 11 | 12 | ## Exporting NeuroML from NetPyNE 13 | 14 | An example of how to export a network built using NetPyNE to NeuroML can be found [here](https://github.com/OpenSourceBrain/NetPyNEShowcase/blob/master/NetPyNE/HHSmall/HH_export.py). 15 | 16 | ## Running NetPyNE on OSBv2 17 | 18 | Building and running NetPyNE models will be a core feature of Open Source Brain v2.0. See [here](https://docs.opensourcebrain.org/OSBv2/NetPyNE.html) for more details. 19 | 20 | ## NeuroMLlite 21 | 22 | NetPyNE is also a key target for cross simulator network creation using {ref}`NeuroMLlite `. There are ongoing plans for greater alignment between formats used for network specification in NetPyNE and NeuroMLlite. 23 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/PyNN.md: -------------------------------------------------------------------------------- 1 | (userdocs:pynn)= 2 | # PyNN and NeuroML 3 | 4 | ![PyNN logo](../../../images/tools/pynn.png) 5 | 6 | 7 | [PyNN](http://neuralensemble.org/PyNN/) is a Python package for simulator independent specification of neuronal network models. Model code can be developed using the PyNN API and then run using [NEURON](http://www.neuron.yale.edu/neuron/), [NEST](https://nest-simulator.org/) or [Brian](https://briansimulator.org/). The developed model also can be stored as a NeuroML document. 8 | 9 | The latest version of {ref}`neuroConstruct ` can be used to generate executable scripts for PyNN based simulators based on NeuroML components, although the majority of multicompartmental conductance based models which are available in neuroConstruct are outside the current scope of the PyNN API. 
10 | 11 | See [https://github.com/OpenSourceBrain/PyNNShowcase](https://github.com/OpenSourceBrain/PyNNShowcase) for examples of usage of NeuroML and PyNN. 12 | 13 | More info on the latest support for running NeuroML models in PyNN and vice versa can be found [here](https://github.com/NeuroML/NeuroML2/issues/73). 14 | 15 | PyNN is also a key target for cross simulator network creation using {ref}`NeuroMLlite `. 16 | -------------------------------------------------------------------------------- /source/Userdocs/Software/Tools/SWC.md: -------------------------------------------------------------------------------- 1 | (userdocs:swc)= 2 | # SWC and NeuroML 3 | 4 | The SWC format was developed to cover most of the information common between Neurolucida, NEURON, and GENESIS formats. 5 | It is used by resources such as NeuroMorpho.org. 6 | 7 | Information on the SWC format can be found in the [NeuroMorpho FAQ](http://neuromorpho.org/myfaq.jsp) under the "What is SWC format" entry. 8 | 9 | Recommended applications for converting SWC into NeuroML are CVApp and neuroConstruct (see below). 10 | 11 | ## Tools 12 | 13 | A number of tools support conversion of SWC to NeuroML. 14 | 15 | (userdocs:cvapp:tools:cvapp)= 16 | ### CVApp 17 | 18 | [CVApp](https://github.com/NeuroML/Cvapp-NeuroMorpho.org) is a standalone Java tool that can visualize SWC files (for example from [NeuroMorpho.org](https://neuromorpho.org)) and export them into NeuroML2. 19 | 20 | ```{figure} ../../../images/cvapp.png 21 | :alt: Screenshot of CVApp 22 | :align: center 23 | :width: 80% 24 | 25 | Screenshot of CVApp 26 | ``` 27 | 28 | One can select "NeuroMLv2" from the "Save As" drop down box to export the loaded reconstruction to NeuroML. 29 | 30 | (userdocs:cvapp:tools:neuroconstruct)= 31 | ### neuroConstruct 32 | 33 | {ref}`neuroConstruct ` includes functionality to interactively convert CVapp (SWC) files to NeuroML2. 
34 | Please see the [neuroConstruct documentation](http://www.neuroconstruct.org/docs/import.html) for more information. 35 | -------------------------------------------------------------------------------- /source/Userdocs/Software/jLEMS.md: -------------------------------------------------------------------------------- 1 | (jlems)= 2 | # jLEMS 3 | 4 | jLEMS is an interpreter for the Low Entropy Model Specification language written in Java. 5 | ```{admonition} jLEMS is the reference implementation of LEMS 6 | :class: dropdown 7 | jLEMS was developed by Robert Cannon when the LEMS language was being devised and serves as the key reference for how to implement/interpret the language. 8 | ``` 9 | 10 | (jlems:quickstart)= 11 | ## Quick start 12 | 13 | Since jLEMS is included in {ref}`jNeuroML `, it does not need to be installed separately. 14 | Please follow the instructions on installing jNeuroML provided {ref}`here `. 15 | 16 | Please see the {ref}`development section below ` for information on building the jLEMS interpreter from source. 17 | 18 | (jlems:docs)= 19 | ## Documentation 20 | 21 | Detailed documentation on LEMS is maintained [here](http://lems.github.io/LEMS/). 22 | For more information on LEMS, please also see Cannon et al. ({cite}`Cannon2014`) 23 | 24 | (jlems:gethelp)= 25 | ## Getting help 26 | 27 | For any questions regarding jLEMS, please open an issue on the GitHub issue tracker [here](https://github.com/LEMS/jLEMS/issues). 28 | Any bugs and feature requests can also be filed there. 29 | 30 | You can also use any of the {ref}`communication channels of the NeuroML community `. 31 | 32 | (jlems:development)= 33 | ## Development 34 | 35 | jLEMS is developed on GitHub at [https://github.com/LEMS/jLEMS](https://github.com/LEMS/jLEMS) under the [MIT license](https://github.com/LEMS/jLEMS/blob/master/LICENSE). 36 | The repository contains the complete source code along with instructions on building/installing jLEMS. 
37 | -------------------------------------------------------------------------------- /source/Userdocs/Specification.md: -------------------------------------------------------------------------------- 1 | (userdocs:specification)= 2 | # Schema/Specification 3 | 4 | ```{admonition} NeuroML v2.3 is the current stable release of the language, and is described below. 5 | For an overview of the various releases of the language see: {ref}`A brief history of NeuroML `. 6 | ``` 7 | 8 | We've briefly seen the XML representation of NeuroML models and simulations in the {ref}`Getting Started ` tutorials. 9 | Here, we dive a little deeper into the underlying details of NeuroML. 10 | 11 | XML itself does not define a set of standard tags: any tags may be used as long as the resultant document is [well-formed](https://en.wikipedia.org/wiki/Well-formed_document). 12 | Therefore, NeuroML defines a standard set of XML elements (the tags and attributes which specify the model and parameters, e.g. ``) that may be used in NeuroML documents: the NeuroML [XML Schema Definition](https://en.wikipedia.org/wiki/XML_Schema_(W3C)). 13 | This is referred to as the NeuroML *schema* or the NeuroML *specification*. 14 | 15 | As the wiki page says: 16 | ```{epigraph} 17 | XSD (XML Schema Definition), a recommendation of the World Wide Web Consortium (W3C), specifies how to formally describe the elements in an Extensible Markup Language (XML) document. It can be used by programmers to verify each piece of item content in a document, to assure it adheres to the description of the element it is placed in. 18 | ``` 19 | 20 | This gives us an idea of the advantages of using an XML based system. 21 | All NeuroML models must use these pre-defined tags/components---this is what we check for when we {ref}`validate NeuroML models `. 22 | A valid NeuroML model is said to adhere to the NeuroML schema. 23 | 24 | ```{admonition} Purpose of the NeuroML specification/schema. 
25 | :class: note 26 | The NeuroML schema/specification defines the structure of a valid NeuroML document. The {ref}`core NeuroML tools ` adhere to this specification and can read/write/interpret the language correctly. 27 | ``` 28 | 29 | In the next section, we learn more about the NeuroML 2 schema, and see how the dynamics of the NeuroML 2 entities are defined in LEMS. 30 | -------------------------------------------------------------------------------- /source/Userdocs/TestingNeuroMLModels.md: -------------------------------------------------------------------------------- 1 | (userdocs:testing_validating_models)= 2 | # Testing/validating NeuroML Models 3 | 4 | Models described in NeuroML can be run across multiple simulators, and it is essential that the activity (e.g. spike times) of the models is as close as possible across all of these independently developed platforms. 5 | 6 | It is also important to validate that the behaviour of a given NeuroML model reproduces some recorded aspect of the biological equivalent. 7 | 8 | (userdocs:testing_models)= 9 | ## Testing behaviour of NeuroML models across simulators 10 | 11 | This type of testing addresses the question: **Does a given NeuroML model produce the same results when run across multiple simulators?** 12 | 13 | (userdocs:testing_models:omv)= 14 | ### OMV - Open Source Brain Model Validation framework 15 | 16 | The OSB Model Validation framework was originally developed as an automated model validation package for [Open Source Brain](http://www.opensourcebrain.org) projects, which can be used for testing model behaviour across many [simulation engines](https://github.com/OpenSourceBrain/osb-model-validation/tree/master/omv/engines) both: 17 | 18 | - on your local machine when developing models 19 | - on [GitHub Actions](https://github.com/features/actions), to ensure tests pass on every commit. 
20 | 21 | This framework has been used to test the 30+ NeuroML and PyNN models described in the [Open Source Brain paper (Gleeson et al. 2019)](https://www.cell.com/neuron/fulltext/S0896-6273(19)30444-1), and [many more](https://github.com/OpenSourceBrain/.github/blob/main/testsheet/README.md). 22 | 23 | See https://github.com/OpenSourceBrain/osb-model-validation for more details. 24 | 25 | (userdocs:validating_models_bio)= 26 | ## Validating that NeuroML models reproduce biological activity 27 | 28 | This type of testing addresses the question: **How well does a given NeuroML model replicate the activity as seen in real neurons/channels/networks?** 29 | 30 | ### SciUnit/NeuronUnit 31 | 32 | [SciUnit](https://scidash.org/sciunit.html) is a Python framework for test-driven validation of scientific models, and [NeuronUnit](https://scidash.org/neuronunit.html) 33 | is a package based on this for data-driven validation of neuron and ion channel models. See also [SciDash](https://scidash.org/) for more information. 34 | 35 | Interactive Jupyter notebooks for running NeuronUnit examples can be found in [this repository](https://github.com/scidash/neuronunit/tree/master/docs). 36 | 37 | TODO: Add details on using [SciUnit](https://scidash.org/sciunit.html) and [NeuronUnit](https://scidash.org/neuronunit.html) with NeuroML models. 38 | -------------------------------------------------------------------------------- /source/Userdocs/UnitsAndDimensions.md: -------------------------------------------------------------------------------- 1 | (userdocs:unitsanddimensions)= 2 | # Units and dimensions 3 | 4 | Support for dimensional quantities is a fundamental (and essential) feature of NeuroML, backed up by support for units and dimensions in LEMS. 5 | 6 | The basic rules are: 7 | 8 | - specify the **dimensions** of quantities in LEMS 9 | - use compatible **units** defined in the NeuroML schema in NeuroML models. 
10 | 11 | The main motivation for this is that fundamental expressions for defining a model are independent of any particular units. 12 | For example, Ohm's law, **V = I * R** relates to quantities with dimensions voltage, current and resistance, not millivolts, picoamps, ohms, etc. 13 | 14 | Users can therefore use a wide range of commonly used units for each dimension defined in the {ref}`standard unit and dimension definitions ` of NeuroML 2 without worrying about conversion factors. 15 | 16 | Additionally, please keep in mind that: 17 | 18 | - all quantities are saved and {ref}`recorded ` in SI Units 19 | - when plotting data using NeuroML/LEMS using the {ref}`Line ` component, users can use the `scale` parameter to convert quantities to other units. 20 | -------------------------------------------------------------------------------- /source/Userdocs/Walkthroughs/RayEtAl2020/RayEtAl2020.md: -------------------------------------------------------------------------------- 1 | (userdocs:walkthroughs:rayetal2020)= 2 | # Converting Ray et al 2020 to NeuroML 3 | 4 | This section documents the conversion of Ray et al 2020 {cite}`Ray2020`, which was originally implemented in NEURON, to NeuroML. 5 | It broadly follows the steps outlined in the {ref}`converting models ` section. 6 | 7 | For any queries, please contact Ankur Sinha on any of the NeuroML channels. 8 | 9 | -------------------------------------------------------------------------------- /source/Userdocs/Walkthroughs/RayEtAl2020/Setup.md: -------------------------------------------------------------------------------- 1 | # Setting up 2 | 3 | ## Step 1) Find the original model code 4 | 5 | The original code is published on [ModelDB](https://modeldb.science/262670). 
6 | 7 | ## Step 2) Create GitHub and Open Source Brain accounts for sharing the code 8 | 9 | ### 2a) Sign up to GitHub and Open Source Brain 10 | 11 | We signed in to GitHub and OSBv1 12 | 13 | ### 2b) Create GitHub repository 14 | 15 | ModelDB provides GitHub repositories for all its models now. 16 | This model is available on GitHub here: https://github.com/ModelDBRepository/262670. 17 | The Open Source Brain (OSB) organization on GitHub also keeps a "fork" of these repositories to allow users to easily add them to both Open Source Brain v1 and v2. 18 | This fork is here, and is the one that we will work with: https://github.com/OpenSourceBrain/262670. 19 | 20 | For the conversion, I (Ankur) created a fork of this repository with a new branch to work in: https://github.com/sanjayankur31/262670. 21 | A pull request work flow was used to submit converted bits back to the repository. 22 | 23 | The first step was to re-organise the code to prepare it for conversion. 24 | All the existing code was moved to a new NEURON folder, and a new NeuroML2 folder set up to store the NeuroML version. 25 | 26 | ### 2c) Create Open Source Brain project 27 | 28 | A new project was created on OSBv1 and linked to the OSB repository: https://v1.opensourcebrain.org/projects/locust-mushroom-body. 29 | 30 | 31 | -------------------------------------------------------------------------------- /source/Userdocs/Walkthroughs/RayEtAl2020/scripts/cellmorph2nml.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Convert cell morphology to NeuroML. 4 | 5 | We only export morphologies here. We add the biophysics manually. 6 | 7 | File: NeuroML2/scripts/cell2nml.py 8 | """ 9 | 10 | import os 11 | import sys 12 | 13 | import pyneuroml 14 | from pyneuroml.neuron import export_to_neuroml2 15 | from neuron import h 16 | 17 | 18 | def main(acell): 19 | """Main runner method. 
20 | 21 | :param acell: name of cell 22 | :returns: None 23 | 24 | """ 25 | loader_hoc_file = f"{acell}_loader.hoc" 26 | loader_hoc_file_txt = """ 27 | /*load_file("nrngui.hoc")*/ 28 | load_file("stdrun.hoc") 29 | xopen("../../NEURON/mb/cell_templates/GGN_20170309_sc.hoc") 30 | objref cell 31 | cell = new GGN_20170309_sc() 32 | """ 33 | 34 | with open(loader_hoc_file, 'w') as f: 35 | print(loader_hoc_file_txt, file=f) 36 | 37 | export_to_neuroml2(loader_hoc_file, f"{acell}.morph.cell.nml", 38 | includeBiophysicalProperties=False, validate=False) 39 | 40 | os.remove(loader_hoc_file) 41 | # Note--a couple of diameters are 0.0, modified to 0.001 to validate the 42 | # model 43 | 44 | 45 | if __name__ == "__main__": 46 | if len(sys.argv) != 2: 47 | print("This script only accepts one argument.") 48 | sys.exit(1) 49 | main(sys.argv[1]) 50 | -------------------------------------------------------------------------------- /source/Userdocs/Walkthroughs/Walkthroughs.md: -------------------------------------------------------------------------------- 1 | (userdocs:walkthroughs)= 2 | # Walk throughs 3 | 4 | This chapter documents a number of real-world tasks for users to refer to. 5 | -------------------------------------------------------------------------------- /source/_config.yml: -------------------------------------------------------------------------------- 1 | # Book settings 2 | # Learn more at https://jupyterbook.org/customize/config.html 3 | 4 | title: "NeuroML Documentation" # Leave empty: logo says NeuroML 5 | author: "NeuroML contributors" 6 | copyright: "2025" # Copyright year to be placed in the footer 7 | logo: images/logo.png 8 | 9 | # List bibtex source file 10 | bibtex_bibfiles: 11 | - Reference/references.bib 12 | 13 | # Force re-execution of notebooks on each build. 
14 | # See https://jupyterbook.org/content/execute.html 15 | execute: 16 | execute_notebooks: auto 17 | 18 | # Define the name of the latex output file for PDF builds 19 | latex: 20 | latex_documents: 21 | targetname: neuroml-documentation.tex 22 | use_jupyterbook_latex: true 23 | latex_engine: pdflatex 24 | 25 | # Information about where the book exists on the web 26 | repository: 27 | url: https://github.com/NeuroML/Documentation # Online location of your book 28 | path_to_book: source # Optional path to your book, relative to the repository root 29 | branch: main # Which branch of the repository should be used when creating links (optional) 30 | 31 | # Add GitHub buttons to your book 32 | # See https://jupyterbook.org/customize/config.html#add-a-link-to-your-repository 33 | html: 34 | use_issues_button: true 35 | use_repository_button: true 36 | use_edit_page_button: true 37 | favicon: images/favicon.ico 38 | google_analytics_id: "" # To be added 39 | home_page_in_navbar: false 40 | 41 | 42 | # Binder bits 43 | # Configure your Binder links, such as the URL of the BinderHub. 44 | launch_buttons: 45 | binderhub_url: "https://mybinder.org" 46 | colab_url: "https://colab.research.google.com" 47 | notebook_interface: "classic" 48 | thebe: true 49 | 50 | # Extra parsing options 51 | parse: 52 | myst_enable_extensions: # default extensions to enable in the myst parser. 
See https://myst-parser.readthedocs.io/en/latest/using/syntax-optional.html 53 | - amsmath 54 | - colon_fence 55 | - deflist 56 | - dollarmath 57 | - html_admonition 58 | - html_image 59 | - linkify 60 | - replacements 61 | - smartquotes 62 | - substitution 63 | myst_url_schemes : [mailto, http, https] 64 | 65 | sphinx: 66 | config: 67 | pygments_style: "tango" 68 | latex_elements: # https://github.com/mcmtroffaes/sphinxcontrib-bibtex/issues/276#issuecomment-1102154800 69 | preamble: | 70 | % make phantomsection empty inside figures 71 | \usepackage{etoolbox} 72 | \AtBeginEnvironment{figure}{\pretocmd{\hyperlink}{\protect}{}{}} 73 | fontpkg: | 74 | % https://github.com/sphinx-doc/sphinx/issues/10347#issuecomment-1647984313 75 | \setmainfont{FreeSerif}[ 76 | UprightFont = *, 77 | ItalicFont = *Italic, 78 | BoldFont = *Bold, 79 | BoldItalicFont = *BoldItalic 80 | ] 81 | \setsansfont{FreeSans}[ 82 | UprightFont = *, 83 | ItalicFont = *Oblique, 84 | BoldFont = *Bold, 85 | BoldItalicFont = *BoldOblique, 86 | ] 87 | \setmonofont{FreeMono}[ 88 | UprightFont = *, 89 | ItalicFont = *Oblique, 90 | BoldFont = *Bold, 91 | BoldItalicFont = *BoldOblique, 92 | ] 93 | -------------------------------------------------------------------------------- /source/_static/NeuroML2012/DWaltemath_sed-ml_edinburgh2012_.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/DWaltemath_sed-ml_edinburgh2012_.pdf -------------------------------------------------------------------------------- /source/_static/NeuroML2012/JKozloski_NeuroML_workshop_2012.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/JKozloski_NeuroML_workshop_2012.pdf 
-------------------------------------------------------------------------------- /source/_static/NeuroML2012/MHull_NineML.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/MHull_NineML.pdf -------------------------------------------------------------------------------- /source/_static/NeuroML2012/NeuroLexNIFupdate_3-13-12.ppt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/NeuroLexNIFupdate_3-13-12.ppt -------------------------------------------------------------------------------- /source/_static/NeuroML2012/NleNovere_NeuroML-COMBINE.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/NleNovere_NeuroML-COMBINE.pdf -------------------------------------------------------------------------------- /source/_static/NeuroML2012/Open_Worm_03-13-12.ppt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/Open_Worm_03-13-12.ppt -------------------------------------------------------------------------------- /source/_static/NeuroML2012/PGleeson_NeuroMLIntro2012.ppt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/PGleeson_NeuroMLIntro2012.ppt -------------------------------------------------------------------------------- /source/_static/NeuroML2012/RCannon_ModellingIonChannels.pdf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/RCannon_ModellingIonChannels.pdf -------------------------------------------------------------------------------- /source/_static/NeuroML2012/RTomsett_LargeScaleCorticalModel.ppt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/RTomsett_LargeScaleCorticalModel.ppt -------------------------------------------------------------------------------- /source/_static/NeuroML2012/SKeating_libsbml-and-sbml.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/SKeating_libsbml-and-sbml.pdf -------------------------------------------------------------------------------- /source/_static/NeuroML2012/YleFranc_CNO.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/NeuroML2012/YleFranc_CNO.pdf -------------------------------------------------------------------------------- /source/_static/files/20231122-ACNet.webm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/20231122-ACNet.webm -------------------------------------------------------------------------------- /source/_static/files/20231122-HL23PYR.webm: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/20231122-HL23PYR.webm -------------------------------------------------------------------------------- /source/_static/files/NeuroMLEditorialBoardMeeting2014.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/NeuroMLEditorialBoardMeeting2014.pdf -------------------------------------------------------------------------------- /source/_static/files/NeuroMLEditorialBoardMeeting2015.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/NeuroMLEditorialBoardMeeting2015.pdf -------------------------------------------------------------------------------- /source/_static/files/NeuroMLEditorialBoardMeeting2016.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/NeuroMLEditorialBoardMeeting2016.pdf -------------------------------------------------------------------------------- /source/_static/files/NeuroMLEditorialBoardMeeting2018.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/NeuroMLEditorialBoardMeeting2018.pdf -------------------------------------------------------------------------------- /source/_static/files/NeuroMLEditorialBoardMeeting2019.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/NeuroMLEditorialBoardMeeting2019.pdf -------------------------------------------------------------------------------- /source/_static/files/NeuroMLWorkshop2009.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/NeuroMLWorkshop2009.pdf -------------------------------------------------------------------------------- /source/_static/files/NeuroMLWorkshop2010.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/NeuroMLWorkshop2010.pdf -------------------------------------------------------------------------------- /source/_static/files/NeuroMLWorkshop2011.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/NeuroMLWorkshop2011.pdf -------------------------------------------------------------------------------- /source/_static/files/neuroml-documentation.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/_static/files/neuroml-documentation.pdf -------------------------------------------------------------------------------- /source/_static/zcustom.css: -------------------------------------------------------------------------------- 1 | /* Wrap code blocks */ 2 | pre, .highlight-python, .highlight-xml, .highlight { 3 | white-space : pre-wrap !important; /*for Mozilla*/ 4 | word-wrap: break-word !important; /*for IE*/ 5 | overflow: visible !important; 6 | overflow-x: visible 
!important; 7 | overflow-y: auto !important; 8 | max-height: 30em !important; 9 | } 10 | 11 | /* For highlights in search results 12 | * https://github.com/executablebooks/jupyter-book/issues/1244#issuecomment-867181414 13 | */ 14 | dt:target, span.highlighted { 15 | background-color: #fbe54e; 16 | } 17 | -------------------------------------------------------------------------------- /source/images/20231122-ACNet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/20231122-ACNet.png -------------------------------------------------------------------------------- /source/images/Acnet-LEMS.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-LEMS.png -------------------------------------------------------------------------------- /source/images/Acnet-matrix-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-matrix-1.png -------------------------------------------------------------------------------- /source/images/Acnet-matrix-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-matrix-2.png -------------------------------------------------------------------------------- /source/images/Acnet-matrix-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-matrix-3.png 
-------------------------------------------------------------------------------- /source/images/Acnet-matrix-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-matrix-4.png -------------------------------------------------------------------------------- /source/images/Acnet-matrix-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-matrix-5.png -------------------------------------------------------------------------------- /source/images/Acnet-medium-graph-level1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-medium-graph-level1.png -------------------------------------------------------------------------------- /source/images/Acnet-medium-graph-level5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-medium-graph-level5.png -------------------------------------------------------------------------------- /source/images/Acnet-medium.net.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-medium.net.png -------------------------------------------------------------------------------- /source/images/Acnet-medium.povray.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Acnet-medium.povray.png -------------------------------------------------------------------------------- /source/images/Board/ankur.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Board/ankur.png -------------------------------------------------------------------------------- /source/images/Board/boris.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Board/boris.jpg -------------------------------------------------------------------------------- /source/images/Board/padraig2.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Board/padraig2.jpeg -------------------------------------------------------------------------------- /source/images/Board/salva.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Board/salva.png -------------------------------------------------------------------------------- /source/images/Board/sotirios.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Board/sotirios.jpg -------------------------------------------------------------------------------- /source/images/Board/subhasis.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Board/subhasis.jpg -------------------------------------------------------------------------------- /source/images/Figure6a.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Figure6a.png -------------------------------------------------------------------------------- /source/images/Funders/EUS_200px.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Funders/EUS_200px.gif -------------------------------------------------------------------------------- /source/images/Funders/bbsrc.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Funders/bbsrc.gif -------------------------------------------------------------------------------- /source/images/Funders/incf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Funders/incf.png -------------------------------------------------------------------------------- /source/images/Funders/mrc.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Funders/mrc.jpg -------------------------------------------------------------------------------- /source/images/Funders/nih.gif: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Funders/nih.gif -------------------------------------------------------------------------------- /source/images/Funders/nsf.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Funders/nsf.gif -------------------------------------------------------------------------------- /source/images/Funders/wtlogo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Funders/wtlogo.png -------------------------------------------------------------------------------- /source/images/GGN-vispy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/GGN-vispy.png -------------------------------------------------------------------------------- /source/images/GGN.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/GGN.png -------------------------------------------------------------------------------- /source/images/Ih-combined.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Ih-combined.png -------------------------------------------------------------------------------- /source/images/KC-NEURON.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/KC-NEURON.png -------------------------------------------------------------------------------- /source/images/KC-NeuroML.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/KC-NeuroML.png -------------------------------------------------------------------------------- /source/images/MorphologyNeuroML2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/MorphologyNeuroML2.png -------------------------------------------------------------------------------- /source/images/NML-DB.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/NML-DB.png -------------------------------------------------------------------------------- /source/images/NaTa.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/NaTa.png -------------------------------------------------------------------------------- /source/images/OSB.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/OSB.png -------------------------------------------------------------------------------- /source/images/OSBv1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/OSBv1.png -------------------------------------------------------------------------------- /source/images/ScientificCommittee/andrew.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/andrew.jpg -------------------------------------------------------------------------------- /source/images/ScientificCommittee/angus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/angus.png -------------------------------------------------------------------------------- /source/images/ScientificCommittee/avrama.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/avrama.jpg -------------------------------------------------------------------------------- /source/images/ScientificCommittee/bhalla.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/bhalla.png -------------------------------------------------------------------------------- /source/images/ScientificCommittee/cgunay.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/cgunay.JPG -------------------------------------------------------------------------------- /source/images/ScientificCommittee/hugo.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/hugo.png -------------------------------------------------------------------------------- /source/images/ScientificCommittee/lyle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/lyle.png -------------------------------------------------------------------------------- /source/images/ScientificCommittee/michael.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/michael.png -------------------------------------------------------------------------------- /source/images/ScientificCommittee/robertmcdougal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/robertmcdougal.png -------------------------------------------------------------------------------- /source/images/ScientificCommittee/rsz_crook.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/ScientificCommittee/rsz_crook.jpg -------------------------------------------------------------------------------- /source/images/Steady_state(s)_of_activation_variables_in_nas_at_6.3_degC.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Steady_state(s)_of_activation_variables_in_nas_at_6.3_degC.png -------------------------------------------------------------------------------- /source/images/Steady_state(s)_of_activation_variables_of_kv_from_kv.channel.nml_at_6.3_degC.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Steady_state(s)_of_activation_variables_of_kv_from_kv.channel.nml_at_6.3_degC.png -------------------------------------------------------------------------------- /source/images/Time_Course(s)_of_activation_variables_of_kv_from_kv.channel.nml_at_6.3_degC.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Time_Course(s)_of_activation_variables_of_kv_from_kv.channel.nml_at_6.3_degC.png -------------------------------------------------------------------------------- /source/images/Time_course(s)_of_activation_variables_in_nas_at_6.3_degC.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/Time_course(s)_of_activation_variables_in_nas_at_6.3_degC.png -------------------------------------------------------------------------------- /source/images/crook2007-morphml-figure1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/crook2007-morphml-figure1.png -------------------------------------------------------------------------------- /source/images/cvapp.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/cvapp.png -------------------------------------------------------------------------------- /source/images/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/favicon.ico -------------------------------------------------------------------------------- /source/images/izhikevich-binder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/izhikevich-binder.png -------------------------------------------------------------------------------- /source/images/izhikevich-google.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/izhikevich-google.png -------------------------------------------------------------------------------- /source/images/izhikevich-livecode.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/izhikevich-livecode.png -------------------------------------------------------------------------------- /source/images/izhikevich-rocket-options.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/izhikevich-rocket-options.png -------------------------------------------------------------------------------- /source/images/izhikevich-rocket.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/izhikevich-rocket.png -------------------------------------------------------------------------------- /source/images/jupyter-download.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/jupyter-download.png -------------------------------------------------------------------------------- /source/images/jupyterbook-issue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/jupyterbook-issue.png -------------------------------------------------------------------------------- /source/images/lems-figure2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/lems-figure2.png -------------------------------------------------------------------------------- /source/images/lems-neuroml2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/lems-neuroml2.png -------------------------------------------------------------------------------- /source/images/lems_nml_files.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/lems_nml_files.png -------------------------------------------------------------------------------- /source/images/libneuroml.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/libneuroml.png -------------------------------------------------------------------------------- /source/images/logo-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/logo-large.png -------------------------------------------------------------------------------- /source/images/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/logo.png -------------------------------------------------------------------------------- /source/images/neuromldb-channel-analysis.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/neuromldb-channel-analysis.png -------------------------------------------------------------------------------- /source/images/nml-db-morphology.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/nml-db-morphology.png -------------------------------------------------------------------------------- /source/images/nmllite-example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/nmllite-example.png -------------------------------------------------------------------------------- /source/images/olm-cell-fi.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/olm-cell-fi.png -------------------------------------------------------------------------------- /source/images/olm-cell-subthresholdVi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/olm-cell-subthresholdVi.png -------------------------------------------------------------------------------- /source/images/olm-cell-voltage-traces.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/olm-cell-voltage-traces.png -------------------------------------------------------------------------------- /source/images/osb-channel-analysis.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/osb-channel-analysis.png -------------------------------------------------------------------------------- /source/images/osb-conversion.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/osb-conversion.png -------------------------------------------------------------------------------- /source/images/osb-morphology.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/osb-morphology.png -------------------------------------------------------------------------------- 
/source/images/pynml-channelanalysis.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/pynml-channelanalysis.png -------------------------------------------------------------------------------- /source/images/pynml-plotmorph-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/pynml-plotmorph-1.png -------------------------------------------------------------------------------- /source/images/pynml-plotmorph-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/pynml-plotmorph-2.png -------------------------------------------------------------------------------- /source/images/slider/combine.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/slider/combine.png -------------------------------------------------------------------------------- /source/images/slider/endorsed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/slider/endorsed.png -------------------------------------------------------------------------------- /source/images/slider/moose_mod.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/slider/moose_mod.png -------------------------------------------------------------------------------- 
/source/images/slider/openworm2-mod.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/slider/openworm2-mod.png -------------------------------------------------------------------------------- /source/images/slider/osbnivo_mod2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/slider/osbnivo_mod2.png -------------------------------------------------------------------------------- /source/images/test_morphology_plot_2d_Cell_497232312_cell_nml_xy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/test_morphology_plot_2d_Cell_497232312_cell_nml_xy.png -------------------------------------------------------------------------------- /source/images/tools/arbor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/arbor.png -------------------------------------------------------------------------------- /source/images/tools/biosimulators.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/biosimulators.png -------------------------------------------------------------------------------- /source/images/tools/brian2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/brian2.png 
-------------------------------------------------------------------------------- /source/images/tools/catmaid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/catmaid.png -------------------------------------------------------------------------------- /source/images/tools/cx3d.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/cx3d.png -------------------------------------------------------------------------------- /source/images/tools/genesis.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/genesis.png -------------------------------------------------------------------------------- /source/images/tools/geppetto.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/geppetto.png -------------------------------------------------------------------------------- /source/images/tools/lfpy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/lfpy.png -------------------------------------------------------------------------------- /source/images/tools/mdf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/mdf.png 
-------------------------------------------------------------------------------- /source/images/tools/moose.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/moose.jpg -------------------------------------------------------------------------------- /source/images/tools/myokit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/myokit.png -------------------------------------------------------------------------------- /source/images/tools/nest-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/nest-logo.png -------------------------------------------------------------------------------- /source/images/tools/netpyne.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/netpyne.png -------------------------------------------------------------------------------- /source/images/tools/neuroconstruct.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/neuroconstruct.png -------------------------------------------------------------------------------- /source/images/tools/neuron.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/neuron.png 
-------------------------------------------------------------------------------- /source/images/tools/neuronland.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/neuronland.png -------------------------------------------------------------------------------- /source/images/tools/neuronvisio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/neuronvisio.png -------------------------------------------------------------------------------- /source/images/tools/openworm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/openworm.png -------------------------------------------------------------------------------- /source/images/tools/pynn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/pynn.png -------------------------------------------------------------------------------- /source/images/tools/trakem2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/trakem2.png -------------------------------------------------------------------------------- /source/images/tools/trees.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/trees.png 
-------------------------------------------------------------------------------- /source/images/tools/tvb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NeuroML/Documentation/b68058298acb586c34edd58ab1673cc0c06c22dd/source/images/tools/tvb.png --------------------------------------------------------------------------------