├── docs_gh_pages
│   ├── genindex.rst
│   ├── _static
│   │   ├── css
│   │   │   └── style.css
│   │   ├── IBL_data.png
│   │   └── 03b_tuto_matlab_cluster.png
│   ├── docs_external
│   │   ├── alf_intro.md
│   │   └── ibl_viewer.md
│   ├── _templates
│   │   ├── style.css.txt
│   │   ├── autosummary
│   │   │   └── module.rst
│   │   ├── custom-class-template.rst
│   │   └── custom-module-template.rst
│   ├── scripts
│   │   ├── one_setup.py
│   │   ├── myavi_to_png.py
│   │   ├── gh_push.sh
│   │   ├── permalinks_check.py
│   │   └── execute_notebooks.py
│   ├── requirements-docs.txt
│   ├── public_docs
│   │   ├── information_contact.md
│   │   ├── data_release_pilot.md
│   │   ├── dataset_overview.md
│   │   └── public_introduction.md
│   ├── templates
│   │   ├── docs_example_py.py
│   │   ├── colab_template.ipynb
│   │   └── docs_example_ipynb.ipynb
│   ├── Makefile
│   ├── 010_api_reference.rst
│   ├── loading_examples.rst
│   ├── make.bat
│   ├── 06_examples.rst
│   ├── atlas_examples.rst
│   ├── 02_installation.md
│   ├── 09_contribution.md
│   ├── documentation_contribution_guidelines.md
│   ├── index.rst
│   ├── README.md
│   ├── make_script.py
│   └── conf.py
├── README.md
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── usage-question.md
│   │   ├── extraction-issues.md
│   │   ├── feature_request.md
│   │   ├── bug_report.md
│   │   └── postmortem_report.md
│   └── workflows
│       ├── main.yaml
│       ├── build_docs.yml
│       └── deploy_docs.yml
├── requirements.txt
├── LICENSE
└── .gitignore
/docs_gh_pages/genindex.rst:
--------------------------------------------------------------------------------
1 | Detailed Index
2 | ==============
3 |
--------------------------------------------------------------------------------
/docs_gh_pages/_static/css/style.css:
--------------------------------------------------------------------------------
1 | .highlight {
2 | background: #f5f5f5;
3 | }
--------------------------------------------------------------------------------
/docs_gh_pages/docs_external/alf_intro.md:
--------------------------------------------------------------------------------
1 | ```{include} ../../../ONE/one/alf/README.md
2 | ```
3 |
--------------------------------------------------------------------------------
/docs_gh_pages/_templates/style.css.txt:
--------------------------------------------------------------------------------
1 | .highlight {
2 | background: #ffffff !important;
3 | }
4 |
5 |
--------------------------------------------------------------------------------
/docs_gh_pages/_static/IBL_data.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/iblenv/HEAD/docs_gh_pages/_static/IBL_data.png
--------------------------------------------------------------------------------
/docs_gh_pages/docs_external/ibl_viewer.md:
--------------------------------------------------------------------------------
1 | ```{include} ../../../iblviewer-repo/README.md
2 | :relative-docs: assests/
3 | :relative-images:
4 | ```
--------------------------------------------------------------------------------
/docs_gh_pages/_static/03b_tuto_matlab_cluster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/iblenv/HEAD/docs_gh_pages/_static/03b_tuto_matlab_cluster.png
--------------------------------------------------------------------------------
/docs_gh_pages/scripts/one_setup.py:
--------------------------------------------------------------------------------
1 | # Log in to the public OpenAlyx database without prompting (used by the docs build workflows)
2 | from one.api import ONE
3 | pw = 'international'
4 | one = ONE(base_url='https://openalyx.internationalbrainlab.org', password=pw, silent=True)
--------------------------------------------------------------------------------
/docs_gh_pages/_templates/autosummary/module.rst:
--------------------------------------------------------------------------------
1 | {{ fullname }}
2 | {{ underline }}
3 |
4 | .. automodule:: {{ fullname }}
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
--------------------------------------------------------------------------------
/docs_gh_pages/scripts/myavi_to_png.py:
--------------------------------------------------------------------------------
1 | import nbformat
2 | from pathlib import Path
3 |
4 | file_path = Path('C:/Users/Mayo/iblenv/ibllib-repo/examples/one/histology/docs_find_nearby_trajectories.ipynb')
5 |
6 | # Read the notebook so its cells can be inspected and modified
7 | nb = nbformat.read(str(file_path), as_version=4)
--------------------------------------------------------------------------------
/docs_gh_pages/requirements-docs.txt:
--------------------------------------------------------------------------------
1 | ipyevents
2 | jupyter
3 | nbsphinx
4 | nbsphinx-link
5 | myst_parser
6 | pandoc
7 | sphinx >=3.1.2
8 | sphinx-copybutton
9 | sphinx-gallery
10 | sphinx_rtd_theme
11 | sphinx_reredirects
12 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # IBLENV installation guide
2 | Unified environment and issue tracker for IBL GitHub repositories.
3 |
4 | ## Installation
5 |
6 | Installation instructions can be found on the main documentation page [here](http://docs.internationalbrainlab.org/02_installation.html).
7 |
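8 | As a quick start for data access only, the pip route from the [installation guide](http://docs.internationalbrainlab.org/02_installation.html) boils down to:
9 |
10 | ```bash
11 | pip install ONE-api ibllib
12 | ```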
--------------------------------------------------------------------------------
/docs_gh_pages/public_docs/information_contact.md:
--------------------------------------------------------------------------------
1 | # Getting Help: Information and Troubleshooting
2 | - Issues with the data? Post an issue with the tag `ibl` here:
3 | - Alternatively, post an issue here:
4 | - General questions about the datasets or publications? Email: [info@internationalbrainlab.org](mailto:info@internationalbrainlab.org)
--------------------------------------------------------------------------------
/docs_gh_pages/templates/docs_example_py.py:
--------------------------------------------------------------------------------
1 | """
2 | Title of your example
3 | =====================
4 | Brief description of example should go here. This example shows how to structure the docstring for
5 | the title and description so that it is rendered correctly during building of documentation
6 | """
7 |
8 | # Author: Mayo
9 | # Code should follow
10 | import numpy as np
11 | import pandas as pd
12 |
13 | # etc etc
14 |
--------------------------------------------------------------------------------
/docs_gh_pages/scripts/gh_push.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Script to push html doc files to gh pages
3 | # Clone the gh-pages branch to local documentation directory
4 | git clone -b gh-pages https://github.com/int-brain-lab/iblenv.git gh-pages
5 | cd gh-pages
6 |
7 | # Copy everything from output of build into gh-pages branch
8 | cp -R ../_build/html/* ./
9 |
10 | # Add and commit all changes
11 | git add -A .
12 | git commit -m "$1";
13 |
14 | # Push the changes
15 | git push -q origin gh-pages
16 |
17 | # Leave gh-pages repo and delete
18 | cd ../
19 | rm -rf gh-pages
20 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/usage-question.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Usage question
3 | about: Ask 'how to' questions related to product usage
4 | title: "[Usage question] - Add your issue title here"
5 | labels: question
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the question you have**
11 | A clear and concise description of the question you have. Ex. How can I do [...] ?
12 |
13 | **Is your question related to a specific product?**
14 | Add links to relevant repositories or documents if so.
15 |
16 | **Additional context**
17 | Add any other context or screenshots about the usage question here.
18 |
--------------------------------------------------------------------------------
/docs_gh_pages/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = ibllib
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | apptools >=4.5.0
2 | boto3
3 | click
4 | colorcet
5 | colorlog
6 | cython
7 | dataclasses
8 | datajoint
9 | flake8
10 | globus-sdk
11 | graphviz
12 | h5py
13 | ibl-neuropixel
14 | ibllib
15 | iblutil
16 | jupyter
17 | jupyterlab
18 | matplotlib
19 | mtscomp
20 | nbformat
21 | numba
22 | numpy
23 | opencv-python # macOS 10.13 and prior are incompatible with modern versions of opencv
24 | ONE-api
25 | pandas
26 | phylib
27 | pillow
28 | plotly
29 | pyarrow
30 | pyflakes >= 2.4.0
31 | pynrrd
32 | pyopengl
33 | PyQt5
34 | pyqtgraph
35 | pytest
36 | requests
37 | scikits-bootstrap
38 | scikit-learn
39 | scipy >=1.4.1
40 | seaborn
41 | SimpleITK
42 | soundfile
43 | sphinx_gallery
44 | statsmodels
45 | tqdm
46 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/extraction-issues.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Extraction issues
3 | about: Create a report on data extraction issues
4 | title: "[EXTRACT]"
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **EID / PID**
11 | Session EID / probe ID and path, e.g.
12 | - 09156021-9a1d-4e1d-ae59-48cbde3c5d42 hausserlab/PL015/2022-02-22/001
13 | (it can be a list if you have multiple failures)
14 |
15 | **What task has failed, with what error message**
16 | e.g. EphysSynchPulses failed for all the above sessions with error `gnagna` and I do not know what I can do about it, please advise.
17 |
18 | **Background on the recording**
19 | Is this session a dud? Is it worth trying to extract it? Has anything unusual happened during the recording?
20 |
--------------------------------------------------------------------------------
/docs_gh_pages/010_api_reference.rst:
--------------------------------------------------------------------------------
1 | API Reference
2 | ######################
3 |
 4 | .. autosummary::
 5 |    :toctree: _autosummary
 6 |    :template: custom-module-template.rst
 7 |    :recursive:
 8 |
 9 |    ibllib
10 |    brainbox
11 |    iblatlas
12 |    iblutil
13 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: "[Feature request] - Add a title to your issue here"
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/docs_gh_pages/_templates/custom-class-template.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline}}
2 |
3 | .. currentmodule:: {{ module }}
4 |
5 | .. autoclass:: {{ objname }}
6 | :members:
7 | :show-inheritance:
8 |
9 | {% block methods %}
10 | {% if methods %}
11 | .. rubric:: {{ _('Methods') }}
12 |
13 | .. autosummary::
14 | :nosignatures:
15 | {% for item in methods %}
16 | {%- if not item.startswith('_') %}
17 | ~{{ name }}.{{ item }}
18 | {%- endif -%}
19 | {%- endfor %}
20 | {% endif %}
21 | {% endblock %}
22 |
23 | {% block attributes %}
24 | {% if attributes %}
25 | .. rubric:: {{ _('Attributes') }}
26 |
27 | .. autosummary::
28 | {% for item in attributes %}
29 | ~{{ name }}.{{ item }}
30 | {%- endfor %}
31 | {% endif %}
32 | {% endblock %}
--------------------------------------------------------------------------------
/docs_gh_pages/loading_examples.rst:
--------------------------------------------------------------------------------
1 | Loading Data
2 | ============
3 |
4 | Below is a list of examples showing how to load different types of IBL data.
5 |
6 | .. toctree::
7 | :maxdepth: 1
8 |
9 | notebooks_external/loading_trials_data
10 | notebooks_external/loading_wheel_data
11 | notebooks_external/loading_spikesorting_data
12 | loading_examples/loading_spike_waveforms
13 | notebooks_external/loading_passive_data
14 | notebooks_external/loading_ephys_data
15 | notebooks_external/loading_raw_ephys_data
16 | notebooks_external/loading_video_data
17 | notebooks_external/loading_raw_video_data
18 | notebooks_external/loading_widefield_data
19 | notebooks_external/loading_multi_photon_imaging_data
20 | notebooks_external/loading_raw_mesoscope_data
21 | notebooks_external/loading_photometry_data
--------------------------------------------------------------------------------
/docs_gh_pages/templates/colab_template.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "Untitled0.ipynb",
7 | "provenance": [],
8 | "collapsed_sections": []
9 | },
10 | "kernelspec": {
11 | "name": "python3",
12 | "display_name": "Python 3"
13 | }
14 | },
15 | "cells": [
16 | {
17 | "cell_type": "code",
18 | "metadata": {
19 | "id": "HzRwehxFukh_",
20 | "colab_type": "code",
21 | "colab": {}
22 | },
23 | "source": [
24 | "!pip install ibllib\n",
25 | "from google.colab import drive\n",
26 | "drive.mount('/content/drive')\n",
27 | "!cp drive/My\\ Drive/params/.one_params ../root"
28 | ],
29 | "execution_count": null,
30 | "outputs": []
31 | }
32 | ]
33 | }
--------------------------------------------------------------------------------
/docs_gh_pages/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=one_ibl
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | echo.installed, then set the SPHINXBUILD environment variable to point
21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | echo.may add the Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/docs_gh_pages/06_examples.rst:
--------------------------------------------------------------------------------
1 | Examples
2 | ========
3 |
4 | Below is a list of short examples using public IBL data.
5 |
6 | .. toctree::
7 | :maxdepth: 1
8 |
9 | notebooks_external/docs_get_training_status
10 | notebooks_external/docs_get_rms_data
11 | notebooks_external/docs_get_power_spectrum_data
12 | notebooks_external/docs_compute_drift
13 | notebooks_external/docs_load_spike_sorting
14 | notebooks_external/docs_raw_data_decompress
15 | notebooks_external/docs_scatter_raster_plot
16 | notebooks_external/docs_explore_passive
17 | notebooks_external/docs_get_first_pass_map_sessions
18 | notebooks_external/docs_find_nearby_trajectories
19 | notebooks_external/docs_find_dist_neighbouring_region
20 | notebooks_external/docs_visualize_session_coronal_tilted
21 | notebooks_external/docs_visualization3D_subject_channels
22 | notebooks_external/docs_access_DLC
23 | notebooks_external/docs_load_video
24 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: "[Bug report] - Add a title to your issue here"
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/.github/workflows/main.yaml:
--------------------------------------------------------------------------------
1 | name: CI
2 | on:
3 | push:
4 | branches: [ master ]
5 | pull_request:
6 | branches: [ master ]
7 |
8 | jobs:
9 | incubator:
10 | name: build (${{ matrix.python-version }}, ${{ matrix.os }})
11 | runs-on: ${{ matrix.os }}
12 | strategy:
13 | max-parallel: 3
14 | matrix:
15 | os: ["ubuntu-latest", "macos-latest", "windows-latest"]
16 | python-version: ["3.12"]
17 | steps:
18 | - name: Checkout branch
19 | uses: actions/checkout@v3
20 |
21 | - uses: conda-incubator/setup-miniconda@v3.0.3
22 | with:
23 | auto-update-conda: true
24 | python-version: ${{ matrix.python-version }}
25 |
26 | - name: Install all packages
27 | shell: bash -l {0}
28 | run: |
29 | conda activate test
30 | cd ..
31 | git clone https://github.com/int-brain-lab/iblapps.git
32 | pip install --editable iblapps
33 | cd iblenv
34 | pip install --requirement requirements.txt
35 | echo "----- pip list -----"
36 | pip list
37 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 International Brain Laboratory
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/docs_gh_pages/templates/docs_example_ipynb.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Title of your example\n",
8 | "Brief description of example should go here. This example shows how to structure the introduction for\n",
9 | "the title and description so that it is rendered correctly during building of documentation"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": null,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "# Author: Mayo\n",
19 | "# Code should follow\n",
20 | "import numpy as np\n",
21 | "import pandas as pd\n",
22 | "\n",
23 | "# etc etc"
24 | ]
25 | }
26 | ],
27 | "metadata": {
28 | "kernelspec": {
29 | "display_name": "Python [conda env:ibl_docs] *",
30 | "language": "python",
31 | "name": "conda-env-ibl_docs-py"
32 | },
33 | "language_info": {
34 | "codemirror_mode": {
35 | "name": "ipython",
36 | "version": 3
37 | },
38 | "file_extension": ".py",
39 | "mimetype": "text/x-python",
40 | "name": "python",
41 | "nbconvert_exporter": "python",
42 | "pygments_lexer": "ipython3",
43 | "version": "3.7.7"
44 | }
45 | },
46 | "nbformat": 4,
47 | "nbformat_minor": 4
48 | }
49 |
--------------------------------------------------------------------------------
/docs_gh_pages/_templates/custom-module-template.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline}}
2 |
3 | .. automodule:: {{ fullname }}
4 |
5 | {% block attributes %}
6 | {% if attributes %}
7 | .. rubric:: Module attributes
8 |
9 | .. autosummary::
10 | :toctree:
11 | {% for item in attributes %}
12 | {{ item }}
13 | {%- endfor %}
14 | {% endif %}
15 | {% endblock %}
16 |
17 | {% block functions %}
18 | {% if functions %}
19 | .. rubric:: {{ _('Functions') }}
20 |
21 | .. autosummary::
22 | :nosignatures:
23 | {% for item in functions %}
24 | {{ item }}
25 | {%- endfor %}
26 | {% endif %}
27 | {% endblock %}
28 |
29 | {% block classes %}
30 | {% if classes %}
31 | .. rubric:: {{ _('Classes') }}
32 |
33 | .. autosummary::
34 | :nosignatures:
35 | {% for item in classes %}
36 | {{ item }}
37 | {%- endfor %}
38 | {% endif %}
39 | {% endblock %}
40 |
41 | {% block exceptions %}
42 | {% if exceptions %}
43 | .. rubric:: {{ _('Exceptions') }}
44 |
45 | .. autosummary::
46 | {% for item in exceptions %}
47 | {{ item }}
48 | {%- endfor %}
49 | {% endif %}
50 | {% endblock %}
51 |
52 | {% block modules %}
53 | {% if modules %}
54 | .. autosummary::
55 | :toctree:
56 | :template: custom-module-template.rst
57 | :recursive:
58 | {% for item in modules %}
59 | {{ item }}
60 | {%- endfor %}
61 | {% endif %}
62 | {% endblock %}
--------------------------------------------------------------------------------
/docs_gh_pages/public_docs/data_release_pilot.md:
--------------------------------------------------------------------------------
1 | # Data Release - Pilot Dataset
2 | The IBL has released the datasets associated with 4 Neuropixels pilot sessions.
3 |
4 | You can view the datasets in a web-browser by clicking on these links:
5 |
6 | - [churchlandlab/Subjects/CSHL049/2020-01-08/001](https://ibl.flatironinstitute.org/public/churchlandlab/Subjects/CSHL049/2020-01-08/001/)
7 | - [cortexlab/Subjects/KS023/2019-12-10/001](https://ibl.flatironinstitute.org/public/cortexlab/Subjects/KS023/2019-12-10/001/)
8 | - [hoferlab/Subjects/SWC_043/2020-09-21/001](https://ibl.flatironinstitute.org/public/hoferlab/Subjects/SWC_043/2020-09-21/001/)
9 | - [zadorlab/Subjects/CSH_ZAD_029/2020-09-19/001](https://ibl.flatironinstitute.org/public/zadorlab/Subjects/CSH_ZAD_029/2020-09-19/001/)
10 |
11 | ## Data structure and download
12 | The organisation of the data follows the standard IBL data structure.
13 |
14 | Please see
15 |
16 | - [These instructions](https://int-brain-lab.github.io/iblenv/notebooks_external/data_structure.html) to download an example dataset for one session, and get familiarised with the data structure
17 | - [These instructions](https://int-brain-lab.github.io/iblenv/notebooks_external/data_download.html) to learn how to use the ONE-api to search and download the released datasets
18 | - [These instructions](https://int-brain-lab.github.io/iblenv/loading_examples.html) to get familiarised with specific data loading functions
19 |
20 | Note:
21 | - The tag associated with this release is `2021_Q2_PreRelease`
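22 |
23 | As a minimal sketch (assuming the public OpenAlyx credentials from the instructions above), one of the
24 | released sessions can be located with the ONE-api like so:
25 |
26 | ```python
27 | from one.api import ONE
28 |
29 | one = ONE(base_url='https://openalyx.internationalbrainlab.org', password='international', silent=True)
30 | # Search for the cortexlab/KS023/2019-12-10 session listed above
31 | eids = one.search(lab='cortexlab', subject='KS023', date_range=['2019-12-10', '2019-12-10'])
32 | print(eids)
33 | ```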
--------------------------------------------------------------------------------
/docs_gh_pages/public_docs/dataset_overview.md:
--------------------------------------------------------------------------------
1 | # What is available for download?
2 |
3 | ## Collaboration-wide datasets
4 |
5 | ### [2025 Brain wide map data](../notebooks_external/2025_data_release_brainwidemap)
6 | The flagship dataset of the IBL! See the dataset [technical paper](https://doi.org/10.6084/m9.figshare.21400815).
7 |
8 | ### [2024 Reproducible ephys data](../notebooks_external/2024_data_release_repro_ephys)
9 | Data associated with the publication [Reproducibility of in vivo electrophysiological measurements in mice](https://elifesciences.org/articles/100840).
10 |
11 | ### [2020 Behavioral data for standardized behaviour task](../notebooks_external/2021_data_release_behavior)
12 | Data associated with the publication [Standardized and reproducible measurement of decision-making in mice](https://elifesciences.org/articles/63711) via ONE and Datajoint.
13 |
14 | ## Projects
15 |
16 | ### [2025 Decision-making across mouse models of Autism (Noel)](../notebooks_external/2025_data_release_autism_noel)
17 | Neuropixels recordings under the IBL decision-making task in mice harbouring mutations of _Fmr1, Cntnap2, Shank3B_.
18 | The data is associated with the publication [A common computational and neural anomaly across mouse models of autism](https://www.nature.com/articles/s41593-025-01965-8).
19 |
20 | ### [2022 Spike sorting benchmark recordings](../notebooks_external/data_release_spikesorting_benchmarks)
21 | A set of 13 insertions in different brain regions, providing a smaller brain-wide Neuropixels benchmark dataset.
22 |
23 | ### [2020 Pilot neuropixels datasets](../notebooks_external/data_release_pilot)
24 | The IBL has released a handful of pilot datasets that are available for download through ONE.
25 |
--------------------------------------------------------------------------------
/docs_gh_pages/atlas_examples.rst:
--------------------------------------------------------------------------------
1 | Atlas Examples
2 | ==============
3 |
4 | We present a set of hands-on examples to illustrate how to manipulate and visualize hierarchical brain atlas ontologies.
5 | The full package documentation can be found `here `_.
6 |
7 | Anatomical Atlases
8 | ******************
9 |
10 | Below is a list of examples using the ibllib.atlas module
11 |
12 | The Allen Mouse Brain Common Coordinate Framework: A 3D Reference Atlas. Cell. 181(4):936-953.e20. doi: 10.1016/j.cell.2020.04.007.
13 | https://www.sciencedirect.com/science/article/pii/S0092867420304025
14 |
15 | .. toctree::
16 | :maxdepth: 1
17 |
18 | notebooks_external/atlas_working_with_ibllib_atlas
19 | notebooks_external/atlas_mapping
20 | notebooks_external/atlas_plotting_scalar_on_slice
21 | notebooks_external/atlas_dorsal_cortex_flatmap
22 | notebooks_external/atlas_circular_pyramidal_flatmap
23 | notebooks_external/atlas_plotting_points_on_slice
24 | notebooks_external/atlas_swanson_flatmap
25 |
26 |
27 | Gene expression
28 | ***************
29 |
30 | .. toctree::
31 | :maxdepth: 1
32 |
33 | notebooks_external/atlas_genomics_load_agea
34 |
35 | Ng, L., Bernard, A., Lau, C. et al. An anatomic gene expression atlas of the adult mouse brain. Nat Neurosci 12, 356–362 (2009). https://doi.org/10.1038/nn.2281
36 | https://www.nature.com/articles/nn.2281
37 |
38 | Lein, E.S. et al. (2007). Genome-wide atlas of gene expression in the adult mouse brain. Nature 445: 168-176. https://doi.org/10.1038/nature05453
39 |
40 |
41 | Streamlines
42 | ***************
43 |
44 | .. toctree::
45 | :maxdepth: 1
46 |
47 | notebooks_external/atlas_streamlines
48 |
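49 | As a minimal sketch of instantiating the atlas object used throughout these examples (assuming the
50 | `iblatlas` package listed in the API reference):
51 |
52 | .. code-block:: python
53 |
54 |     from iblatlas.atlas import AllenAtlas
55 |
56 |     brain_atlas = AllenAtlas(res_um=25)  # Allen CCF volume at 25 um resolution
57 |     print(brain_atlas.regions.acronym[:10])  # a few of the region acronyms in the ontology
58 |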
--------------------------------------------------------------------------------
/docs_gh_pages/02_installation.md:
--------------------------------------------------------------------------------
1 | # Installation of IBL Unified Environment
2 | To facilitate the use of `ibllib` and `IBL-pipeline`, we have compiled all the dependencies into a unified python
3 | environment `iblenv`. In addition to these two libraries, this environment is also compatible with other visualisation
4 | tools and analysis pipelines being developed as part of the IBL.
5 |
6 | To use IBL data you will need a Python environment with Python >= 3.10; Python 3.13 is recommended.
7 | To create a new environment from scratch, install either [uv/pip](https://docs.astral.sh/uv/) or [anaconda](https://www.anaconda.com/products/distribution#download-section), then follow the instructions below to create a new Python environment (more information can also be found [here](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html)).
8 |
9 | Please follow the installation instructions below for your favourite package manager.
10 |
11 | ## UV / pip instructions
12 |
13 | Run the following commands in your terminal:
14 |
15 | ```bash
16 | uv venv --python 3.13
17 | ```
18 | Make sure to always activate this environment before installing or working with the IBL data (on Windows, the equivalent command is `.venv\Scripts\activate`):
19 | ```bash
20 | source .venv/bin/activate
21 | ```
22 |
23 | Install required packages to access the data
24 | ```shell
25 | uv pip install ONE-api
26 | uv pip install ibllib
27 | ```
28 |
29 | ## Conda instructions
30 |
31 | ### Install
32 |
33 | In your git terminal, navigate to the directory in which you want to install the IBL repositories (e.g. create a folder named
34 | something like `int-brain-lab` and work from within it). Then run the following commands:
35 |
36 | ```bash
37 | conda update -n base -c defaults conda
38 | conda create --name ibl python=3.13 --yes
39 | conda activate ibl
40 |
41 | pip install ONE-api
42 | pip install ibllib
43 | ```
44 |
45 | ### Removing an old installation
46 | The following command will completely remove an anaconda environment and all of its packages: `conda remove --name ibl --all`
47 |
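48 | ## Verifying the installation
49 | As a quick sanity check (a minimal sketch; it uses the public OpenAlyx database and its public password,
50 | the same setup used by the documentation build scripts):
51 |
52 | ```python
53 | from one.api import ONE
54 |
55 | # Connecting without errors confirms ONE-api and its dependencies are installed correctly
56 | one = ONE(base_url='https://openalyx.internationalbrainlab.org', password='international', silent=True)
57 | print(one)
58 | ```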
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/postmortem_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Postmortem report
3 | about: Used to describe a downtime event that has since been resolved
4 | title: "[Postmortem report]"
5 | labels: postmortem
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Status:** {draft|final}
11 | **Owners:** {who worked on finding the resolution}
12 |
13 | ## Summary
14 | Description: {brief description of symptoms and root cause}
15 | Component: {affected area}
16 | Date/time: {YYYY-MM-DD HH:MM}
17 | Duration: {time from initial breakage to final resolution}
18 | User impact: {who was affected by the incident}
19 |
20 | ## Timeline (all times in UTC+00:00)
21 |
22 | ### 2022-01-01
23 |
24 | 14:44 - something happened
25 | 14:45 - next thing happened **<START OF OUTAGE>**
26 |
27 | ### 2022-01-02
28 |
29 | 09:12 - another thing happened **<END OF OUTAGE>**
30 |
31 | ### Impact & root cause
32 |
33 | {a more thorough summary of the problems that the outage caused, and **without blame**, describe the root cause of the outage}
34 |
35 | ### What worked
36 |
37 | {list things that worked as expected, in a positive manner}
38 |
39 | ### Where we got lucky
40 |
41 | {list things that mitigated this incident but not because of our foresight}
42 |
43 | ### What didn't work
44 |
45 | {things that failed or prevented a quicker resolution}
46 |
47 | ## Action items for the future
48 |
49 | {each item here should have an owner}
50 |
51 | ### Prevention
52 |
53 | {things that would have prevented this failure from happening in the first place, such as input validation, pinning dependencies, etc}
54 |
55 | ### Detection
56 |
57 | {things that would have detected this failure before it became an incident, such as better testing, monitoring, etc}
58 |
59 | ### Mitigation
60 |
61 | {things that would have made this failure less serious, such as graceful degradation, better exception handling, etc}
62 |
63 | ### Process
64 |
65 | {things that would have helped us resolve this failure faster, such as documented processes and protocols, etc}
66 |
67 | ### Fixes
68 |
69 | {the fixes that were necessary to resolve this incident}
70 |
71 | ## Other
72 |
73 | {any other useful information, such as relevant logs}
--------------------------------------------------------------------------------
/docs_gh_pages/09_contribution.md:
--------------------------------------------------------------------------------
1 | # How to contribute
2 |
3 | ## Code
4 | ### Linting
5 | We use the `flake8` Python module to enforce a consistent style in the CI.
6 |
7 | ### Testing
8 | Unit testing: for Python we use the `pytest` module, with tests written in either `unittest` or `pytest` syntax.
9 | For `Matlab` we use the embedded test framework.
10 |
11 | ### Continuous Integration
12 | For production repositories such as ibllib and alyx, continuous integration is set up on Travis to install the application and run the tests on each pull request.
13 |
14 |
15 | ## Contributions and releases
16 | ### Contributions and code review
17 | The branches `develop` and `master` are protected.
18 |
19 | Contributions are developed either:
20 | - on a separate branch
21 | - on a separate fork of the repository
22 |
23 | and then merged into `develop` through a pull request.
24 |
25 |
26 | #### Practical tips:
27 | - to avoid merge conflicts, merge `develop` into your branch (or rebase your branch) before submitting the PR!
28 | - make sure your branch passes tests:
29 |     - before pushing, by running the unit tests and flake8 locally
30 |     - after pushing, by checking the continuous integration results on GitHub
31 | - go through the review process with maintainers on the pull request interface on GitHub. Remind them if it is not done in a timely manner; GitHub sends them a bazillion emails daily. A sketch of this loop is given at the end of this page.
32 |
33 |
34 | ### Branching model: gitflow
35 | The branching model is as close as possible to gitflow, i.e. one master branch, with every commit tagged with a version number, and one develop branch to integrate the various volatile feature branches.
36 | **Both develop and master should pass CI tests**
37 |
38 | ### Releasing scheme: semantic versioning
39 | For any repository with a releasing scheme (such as ibllib), we use semantic versioning with the major.minor.micro (or major.minor.patch) model: patch/micro for bugfixes; minor for augmented functionality; major for retrocompatibility breaks.
40 | Versions 0.*.* are development versions with no guarantee of retrocompatibility.
41 |
42 | It is a good practice to document the changes in a `RELEASE_NOTES.md` document at the root of the repository.
43 | NB: those notes should be geared towards users not other fellow contributors.
44 |
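45 | As a sketch of the contribution loop described under "Practical tips" above (assuming a feature branch on a clone or fork of the repository):
46 |
47 | ```shell
48 | git checkout develop && git pull      # start from an up-to-date develop
49 | git checkout -b my-feature            # create the feature branch
50 | # ... commit changes, run flake8 and the unit tests locally ...
51 | git merge develop                     # resolve merge conflicts before opening the PR
52 | git push -u origin my-feature         # push, then open a pull request into develop
53 | ```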
--------------------------------------------------------------------------------
/docs_gh_pages/scripts/permalinks_check.py:
--------------------------------------------------------------------------------
1 | # %%
2 | import requests
3 | from requests.adapters import HTTPAdapter
4 | from urllib3.util.retry import Retry
5 | import sys
6 |
7 |
8 | links = [
9 | 'https://int-brain-lab.github.io/iblenv/notebooks_external/data_release_repro_ephys',
10 | 'https://int-brain-lab.github.io/iblenv/notebooks_external/data_release_brainwidemap.html',
11 | ]
12 |
13 |
14 | def check_link(url):
15 | """
16 | Check if a URL returns a 404 or other error status code.
17 | Properly handles redirects by following them to the final destination.
18 |
19 | Args:
20 | url (str): The URL to check
21 |
22 | Returns:
23 | tuple: (is_valid, status_code, final_url) where is_valid is a boolean,
24 | status_code is the HTTP status code, and final_url is the URL after redirects
25 | """
26 | # Configure session with retry strategy
27 | session = requests.Session()
28 | retry_strategy = Retry(
29 | total=3,
30 | backoff_factor=1,
31 | status_forcelist=[429, 500, 502, 503, 504],
32 | )
33 | adapter = HTTPAdapter(max_retries=retry_strategy)
34 | session.mount("http://", adapter)
35 | session.mount("https://", adapter)
36 |
37 | try:
38 | # For redirects, we need to use GET with allow_redirects=True
39 | # to follow the redirect chain to the final destination
40 | response = session.get(url, timeout=10, allow_redirects=True)
41 |
42 | # Get the final URL after redirects
43 | final_url = response.url
44 |
45 | # Check if we have a valid response at the final destination
46 | is_valid = response.status_code < 400
47 |
48 | # If there was a redirect, report it
49 | if final_url != url:
50 | print(f"Redirect: {url} → {final_url}")
51 |
52 | return is_valid, response.status_code, final_url
53 |
54 | except requests.RequestException as e:
55 | print(f"Error checking {url}: {e}")
56 | return False, None, url
57 |
58 |
59 | # Check each link and print results
60 | print("Checking links for errors...")
61 | has_errors = False
62 |
63 | for link in links:
64 | valid, status_code, final_url = check_link(link)
65 | if valid:
66 | print(f"✓ {link} - OK ({status_code})")
67 | else:
68 | has_errors = True
69 | print(f"✗ {link} - ERROR ({status_code})")
70 |
71 | # Exit with a non-zero status so CI jobs fail when any link is broken
72 | if has_errors:
73 |     sys.exit(1)
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | # miscellaneous
132 | .DS_Store
133 | /.idea/*
134 |
--------------------------------------------------------------------------------
/.github/workflows/build_docs.yml:
--------------------------------------------------------------------------------
1 | name: Build Docs
2 | on:
3 | workflow_dispatch: # manual trigger to kick off workflow
4 | inputs:
5 | logLevel:
6 | description: "Log level"
7 | required: true
8 | default: "warning"
9 |
10 | jobs:
11 | build_docs:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - name: Free disk space
15 | run: |
16 | echo "Disk space before cleanup:"
17 | df -h
18 |
19 | sudo rm -rf /usr/local/lib/android
20 | sudo rm -rf /usr/share/dotnet
21 | sudo rm -rf /opt/ghc
22 | sudo rm -rf /usr/local/share/boost
23 | sudo rm -rf /usr/lib/jvm
24 |
25 | docker system prune -af || true
26 |
27 | echo "Disk space after cleanup:"
28 | df -h
29 | - name: Configure AWS Credentials
30 | uses: aws-actions/configure-aws-credentials@v1
31 | with:
32 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
33 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
34 | aws-region: us-east-1
35 |
36 | - name: Checkout iblenv doc build branch
37 | uses: actions/checkout@v3
38 | with:
39 | ref: docs
40 |
41 | - name: Checkout ibllib doc build branch
42 | uses: actions/checkout@v3
43 | with:
44 | repository: int-brain-lab/ibllib
45 | ref: docs
46 | path: ibllib-repo
47 |
48 | - name: Checkout ONE-api
49 | uses: actions/checkout@v3
50 | with:
51 | repository: int-brain-lab/one
52 | path: ONE
53 |
54 | - name: Checkout iblatlas
55 | uses: actions/checkout@v3
56 | with:
57 | repository: int-brain-lab/iblatlas
58 | path: iblatlas
59 |
60 |       - name: Move ibllib, ONE and iblatlas up a directory
61 | run: |
62 | mv ibllib-repo ..
63 | mv ONE ..
64 | mv iblatlas ..
65 |
66 | - name: Setup Python
67 | uses: actions/setup-python@v4
68 | with:
69 | python-version: 3.12
70 |
71 | - name: Install docs requirements
72 | run: |
73 | sudo apt-get install -y pandoc
74 | export TQDM_DISABLE=1
75 | pip install -r docs_gh_pages/requirements-docs.txt
76 | pip install -e ../ibllib-repo
77 | pip install git+https://github.com/jcouto/wfield.git
78 | pip install jupyter
79 |
80 | - name: ONE setup and build docs
81 | run: |
82 | cd docs_gh_pages
83 | python scripts/one_setup.py
84 | python make_script.py -e
85 | ls -l _build
86 |
87 | - name: Zip up documentation
88 | run: |
89 | sudo apt-get install -y zip
90 | zip -r build_zip docs_gh_pages/_build
91 |
92 | - name: Store zip file as artifacts
93 | uses: actions/upload-artifact@v4
94 | with:
95 | name: build_zip
96 | path: |
97 | build_zip.zip
98 |
99 | - name: Copy files to the production website with the AWS CLI
100 | run: |
101 | cd docs_gh_pages/_build/html
102 | aws s3 sync . s3://testdocs.internationalbrainlab.org
103 |
--------------------------------------------------------------------------------
/docs_gh_pages/public_docs/public_introduction.md:
--------------------------------------------------------------------------------
1 | # Publicly Available IBL Data
2 |
3 | ## Introduction to the IBL experiments
4 | The aim of the International Brain Laboratory (IBL) is to understand the brain functions
5 | underlying decision making. Understanding these processes is a problem with a scale and complexity
6 | that far exceed what can be tackled by any single laboratory and that demands computational theory
7 | to be interwoven with experimental design and analysis in a manner not yet achieved. To overcome these
8 | challenges, we have created a virtual laboratory, unifying a group of 22 highly experienced neuroscience
9 | groups distributed across the world.
10 |
11 | Datasets are acquired in a dozen laboratories performing experimental work (e.g. Churchland lab, Mainen lab, Zador lab).
12 | In each of these laboratories, mice are first trained in the IBL decision-making task
13 | [following a standardized training pipeline](https://elifesciences.org/articles/63711). Briefly, the mice are head-fixed
14 | in front of a screen, and have to turn a Lego wheel in order to move a visual stimulus, appearing on one
15 | side of the screen, towards its center. The complexity of the task increases as the contrast of the visual stimulus is lowered,
16 | and as the probability of the stimulus appearing on a given side varies.
17 |
18 | Various sensors are used to monitor the animal's performance and condition (e.g. a rotary encoder attached to the Lego wheel,
19 | camera(s) to view the mouse posture). Behavior data is acquired throughout the learning phase on "Training rigs" (see our
20 | [article on behavioral training](https://elifesciences.org/articles/63711) for details).
21 | Once a mouse has reached proficiency in the IBL task, it is moved to an "Ephys rig" where its brain activity is recorded from.
22 |
23 | We aim to use different brain recording modalities, so as to have complementary views on the brain activity. For example:
24 | Neuropixels, Mesoscope, fiber photometry and Widefield Imaging techniques all have their unique advantages over one another.
25 | Our most advanced project as of now is the one involving Neuropixels recordings.
26 |
27 | ### Neuropixels datasets
28 |
29 | The data consists of neurophysiological and behavior measurements acquired in mice, using Neuropixels probes.
30 | In a single recording session, up to two Neuropixels probes (typically labelled `probe00` or `probe01`)
31 | are inserted in the mouse's brain. The location of these probes in the brain will vary from mouse to mouse, as the
32 | aim of this IBL project is to tile the whole mouse brain using these probes.
33 |
34 | The data is acquired on three computers (one dedicated to acquiring the raw ephys, raw video and raw behavioral data,
35 | respectively), and saved into their corresponding folders (see sections below for details).
36 | At the end of a Neuropixels recording session, some stimuli are replayed whilst the mouse is passive
37 | (i.e. not engaged in the IBL task). The behavioral data acquired during this replay of stimuli is also saved in a dedicated folder.
38 |
39 | Once acquired, the data are centralised onto a single computer, and processed (using computationally heavy algorithms, such as
40 | DeepLabCut for tracking points on video data, or pyKilosort for detecting and sorting the spikes of cells on the ephys traces),
41 | before being sent and stored onto our centralised database (see our [article on data architecture](https://www.biorxiv.org/content/10.1101/827873v3) for details).
42 |
--------------------------------------------------------------------------------
/.github/workflows/deploy_docs.yml:
--------------------------------------------------------------------------------
1 | name: Deploy Docs
2 | on:
3 | workflow_dispatch: # manual trigger to kick off workflow
4 | inputs:
5 | logLevel:
6 | description: "Log level"
7 | required: true
8 | default: "warning"
9 |
10 | jobs:
11 | deploy_docs:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - name: Free disk space
15 | run: |
16 | echo "Disk space before cleanup:"
17 | df -h
18 |
19 | sudo rm -rf /usr/local/lib/android
20 | sudo rm -rf /usr/share/dotnet
21 | sudo rm -rf /opt/ghc
22 | sudo rm -rf /usr/local/share/boost
23 | sudo rm -rf /usr/lib/jvm
24 |
25 | docker system prune -af || true
26 |
27 | echo "Disk space after cleanup:"
28 | df -h
29 |
30 | - name: Configure AWS Credentials
31 | uses: aws-actions/configure-aws-credentials@v1
32 | with:
33 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
34 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
35 | aws-region: us-east-1
36 |
37 | - name: Checkout iblenv doc build branch
38 | uses: actions/checkout@v3
39 | with:
40 | ref: master
41 |
42 | - name: Checkout ibllib doc build branch
43 | uses: actions/checkout@v3
44 | with:
45 | repository: int-brain-lab/ibllib
46 | ref: develop
47 | path: ibllib-repo
48 |
49 | - name: Checkout ONE-api
50 | uses: actions/checkout@v3
51 | with:
52 | repository: int-brain-lab/one
53 | path: ONE
54 |
55 | - name: Checkout iblatlas
56 | uses: actions/checkout@v3
57 | with:
58 | repository: int-brain-lab/iblatlas
59 | path: iblatlas
60 |
61 |       - name: Move ibllib, ONE and iblatlas up a directory
62 | run: |
63 | mv ibllib-repo ..
64 | mv ONE ..
65 | mv iblatlas ..
66 |
67 | - name: Setup Python
68 | uses: actions/setup-python@v4
69 | with:
70 | python-version: 3.12
71 |
72 | - name: Install docs requirements
73 | run: |
74 | sudo apt-get install -y pandoc
75 | pip install -r docs_gh_pages/requirements-docs.txt
76 | pip install -e ../ibllib-repo
77 | pip install git+https://github.com/jcouto/wfield.git
78 | pip install jupyter
79 |
80 | - name: ONE setup and build docs
81 | run: |
82 | cd docs_gh_pages
83 | python scripts/one_setup.py
84 | python make_script.py -e
85 | ls -l _build
86 |
87 | - name: Clean up documentation
88 | run: |
89 | cd docs_gh_pages
90 | python make_script.py -pc
91 |
92 | - name: Commit documentation changes
93 | run: |
94 | git clone https://github.com/int-brain-lab/iblenv.git --branch gh-pages --single-branch gh-pages
95 | rm -rf gh-pages/*
96 | cp -r docs_gh_pages/_build/html/* gh-pages/
97 | cd gh-pages
98 | echo "docs.internationalbrainlab.org" > CNAME
99 | touch .nojekyll
100 | git config --local user.email "action@github.com"
101 | git config --local user.name "GitHub Action"
102 | git add .
103 | git commit -m "Update documentation" -a || true
104 |
105 | - name: Push changes
106 | uses: ad-m/github-push-action@master
107 | with:
108 | branch: gh-pages
109 | directory: gh-pages
110 | force: True
111 | github_token: ${{ secrets.GITHUB_TOKEN }}
112 |
113 |
114 | - name: Copy files to the production website with the AWS CLI
115 | run: |
116 | cd docs_gh_pages/_build/html
117 | aws s3 sync . s3://docs.internationalbrainlab.org
118 |
--------------------------------------------------------------------------------
/docs_gh_pages/documentation_contribution_guidelines.md:
--------------------------------------------------------------------------------
1 | # Overview of documentation
2 |
3 | The documentation is built locally and hosted on a GitHub Pages website at this address:
4 | https://int-brain-lab.github.io/iblenv/
5 |
6 | The website is generated using
7 | 1. The markdown files in the `./docs_gh_pages` folder
8 | 2. The python or ipython notebooks in the `./docs_gh_pages/notebooks` folder
9 | 3. The python or ipython notebooks in the ibllib repo `./examples` and `./brainbox/examples` folders
10 | 4. The docstrings in the source code of the `./ibllib`, `./alf`, `./one` and `./brainbox` folders
11 |
12 |
13 | # Contributing to documentation
14 |
15 | ### Adding examples or tutorials to the documentation
16 | Examples or tutorials should be placed in the folders (can be in sub-folders within these folders)
17 | `ibllib-repo/examples`
18 | or
19 | `ibllib-repo/brainbox/examples`
20 |
21 | They can be in either `.py` or `.ipynb` form but must have a prefix of `docs` to be included in the documentation,
22 | e.g. `docs_coronal_slice.py` or `docs_get_LFP_data.ipynb`. Each example/tutorial must start with a title and a brief
23 | description of the content. Please refer to the templates in the [templates folder](./templates) for examples of
24 | how to layout the title and description in order for it to be correctly rendered and displayed on the website.
25 |
26 | Once you have created the example/ tutorial you should link to the file in either `05_tutorials.rst` or `06_examples.rst`.
27 | The link should be made by adding a line of the form `notebooks_external/name_of_example_without_extension`, e.g.
28 | `notebooks_external/docs_coronal_slice`
29 |
30 | `notebooks_external/docs_get_LFP_data`
31 |
32 | An example implementation can be seen in the `06_examples.rst` file
33 |
34 | ## Making documentation
35 | ### Prepare build environment
36 | At the same level of the `iblenv` repository, clone and install in editable mode the repositories containing external documentation notebooks:
37 |
38 | ```shell
39 | git clone git@github.com:int-brain-lab/ibllib.git ./ibllib-repo # it expects the repository in this folder
40 | git clone git@github.com:int-brain-lab/ONE
41 | git clone git@github.com:int-brain-lab/iblatlas
42 | ```
43 |
44 | ```shell
45 | pip install -r ./docs_gh_pages/requirements-docs.txt
46 | ```
47 |
48 | ### Option 1: Only building changes to documentation
49 | If you have only made changes to the documentation (any of the files with a `.md` or `.rst` extension), you can build the
50 | documentation without running the examples. The examples previously published on the website will remain. To only
51 | build the documentation, the following command can be used
52 |
53 | ```shell
54 | cd ./docs_gh_pages
55 | python make_script.py -d
56 | ```
57 |
58 | ### Option 2: Building changes to documentation and specific examples
59 | If you want to add a new example or change a few of the existing examples, it is possible to build the documentation
60 | while only executing a few specified examples. The documentation can be built using the following command, providing
61 | the paths to your .ipynb or .py example scripts.
62 |
63 | ```shell
64 | cd ./docs_gh_pages
65 | python make_script.py -e -s
66 | ```
67 |
68 | An example would be
69 | ```shell
70 | python make_script.py -e -s C:\Users\Mayo\iblenv\ibllib-repo\brainbox\examples\docs_get_training_status.py C:\Users\Mayo\iblenv\iblenv\docs_gh_pages\notebooks\one_basics\one_basics.ipynb
71 | ```
72 |
73 | ### Option 3: Building changes to documentation and all examples
74 | If you want to rebuild the documentation and all examples you can use the following code
75 |
76 | ```shell
77 | cd ./docs_gh_pages
78 | python make_script.py -e
79 | ```
80 |
81 | ### Previewing the built documentation
82 | Once the `make_script.py` has completed a preview of the documentation can be viewed by opening
83 | `./docs_gh_pages/_build/html/index.html` in a web browser.
84 |
85 | Check that all notebooks have run without errors and that your changes have been implemented correctly! (N.B. if you have
86 | run the `make_script.py` using option 1 or 2, some or all of the examples will not have been executed; this is expected)
87 |
88 |
89 | ## Pushing changes to gh-pages
90 | Once you are happy with the built documentation, the changes can be deployed to the website by running the following
91 | command
92 |
93 | ```shell
94 | python make_script.py -gh -m "your commit message"
95 | ```
96 |
97 | ## Cleaning up your build
98 | Once your changes have been pushed to GitHub, run the following command to clean up your ibllib and iblenv
99 | directories and revert the example notebooks to their unexecuted state
100 | ```shell
101 | python make_script.py -c
102 | ```
103 |
104 |
105 |
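106 | ## Typical full workflow
107 | Putting the steps above together (a sketch; paths assume you start at the repository root):
108 |
109 | ```shell
110 | cd ./docs_gh_pages
111 | python make_script.py -e                       # build the docs, executing all examples
112 | # open _build/html/index.html and check the result
113 | python make_script.py -gh -m "commit message"  # push the built docs to gh-pages
114 | python make_script.py -c                       # clean up the build
115 | ```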
--------------------------------------------------------------------------------
/docs_gh_pages/index.rst:
--------------------------------------------------------------------------------
1 | .. one_ibl documentation master file, created by
2 | sphinx-quickstart on Fri Jul 20 17:20:00 2018.
3 |
4 | Welcome to IBL code library documentation!
5 | ##########################################
6 |
7 | IBL data structure
8 | *************************
9 | .. image:: ./_static/IBL_data.png
10 | :alt: Alyx data structure
11 |
12 | In the IBL, data acquired in laboratories spread across countries needs to be centralized into a
13 | common store, accessible from anywhere in the world, at all times.
14 | This challenge is met by the IBL data architecture, documented briefly below; a thorough description
15 | can be found in our `preprint <https://www.biorxiv.org/content/10.1101/827873v3>`_.
16 |
17 | The central store has two components:
18 |
19 | * A **Bulk Data Store** that stores **large raw data files** (e.g. raw electrophysiology and video data) as well as **pre-processed data** (e.g. results of spike sorting or video segmentation). This database is accessible through HTTP, FTP and Globus. This is known informally as the "Flatiron server" as our original data server was generously hosted by the `Flatiron Institute `_.
20 | * A **Relational Database** that stores **metadata** (e.g. information on each experiment and experimental subject) in a structured manner, together with links to the bulk data files. This database is known as `Alyx <https://github.com/cortex-lab/alyx>`_, for reasons no-one can remember. Alyx contains a web-based front-end to allow users to perform colony management and enter metadata during experiments; documentation on this front end is `here <https://alyx.readthedocs.io>`_. Information on how to connect to Alyx programmatically is `here <https://int-brain-lab.github.io/ONE/>`_.
21 |
22 | Tools to access the data
23 | *************************
24 | There are two main ways to access the data:
25 |
26 | * `ONE <https://int-brain-lab.github.io/ONE/>`_: an API that connects to the central store, allowing users to search and load data of interest from specific experiments; a minimal usage sketch is shown below.
27 | * `Datajoint <https://datajoint.com>`_: a framework to perform automated pipelined analyses on a subset of lightweight data such as behavioral choices and spike times, that allows rapid integration of data from multiple experiments and users.
28 |
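   | For orientation, a minimal ONE query might look like the sketch below (the lab name and dataset are illustrative placeholders; see the ONE documentation linked above for real usage):
   |
   | .. code-block:: python
   |
   |     from one.api import ONE
   |
   |     one = ONE()  # connects to the database configured during setup
   |     # search for experimental sessions matching some criteria
   |     eids = one.search(lab='example_lab')
   |     # list and load datasets from the first matching session
   |     datasets = one.list_datasets(eids[0])
   |     trials = one.load_dataset(eids[0], 'trials.goCue_times')
   |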
29 | The full IBL data will be publicly released when we have completed collection, preprocessing, curation, and quality control. In the meantime, a subset of curated data is publicly available.
30 |
31 | Software to analyze IBL data
32 | ****************************
33 | IBL has released a suite of tools to process and visualize our data.
34 |
35 | * `Brainbox <https://github.com/int-brain-lab/ibllib>`_: A library of analysis functions that can be used on IBL data or other neurophysiology recordings.
36 | * `IBL Viewer <https://github.com/int-brain-lab/iblviewer>`_: A simple and fast interactive visualization tool based on VTK that uses GPU-accelerated volume and surface rendering. From electrophysiological data to neuronal connectivity, this tool allows simple and effective 3D visualization for many use cases such as multi-slicing and time series (even on volumes), and can be embedded within Jupyter Lab/Notebook and Qt user interfaces.
37 |
38 | .. attention::
39 | To get all the software, including ONE, brainbox and visualization tools, install the
40 | `Unified Environment <./02_installation.html>`_. This is recommended for IBL members.
41 |
42 | .. toctree::
43 | :hidden:
44 | :caption: The Open Neurophysiology Environment
45 | :maxdepth: 1
46 |
47 | notebooks_external/one_quickstart
48 |    Full documentation website for ONE <https://int-brain-lab.github.io/ONE/>
49 | public_docs/public_introduction
50 | public_docs/dataset_overview
51 |
52 | .. toctree::
53 | :hidden:
54 | :caption: Datasets - Collaboration
55 | :maxdepth: 1
56 |
57 | notebooks_external/2025_data_release_brainwidemap
58 | notebooks_external/2024_data_release_repro_ephys
59 | notebooks_external/2021_data_release_behavior
60 |
61 | .. toctree::
62 | :hidden:
63 | :caption: Datasets - Projects
64 | :maxdepth: 1
65 |
66 | notebooks_external/2025_data_release_autism_noel
67 | notebooks_external/2025_data_release_autism_davatolhagh
68 | notebooks_external/2022_data_release_spikesorting_benchmarks
69 | public_docs/data_release_pilot
70 |
71 | .. toctree::
72 | :hidden:
73 | :caption: Exploring IBL Data
74 | :maxdepth: 1
75 |
76 | 02_installation
77 | notebooks_external/data_structure
78 | notebooks_external/data_download
79 | loading_examples
80 |
81 |
82 | .. toctree::
83 | :hidden:
84 | :caption: Miscellaneous
85 | :maxdepth: 1
86 |
87 | 09_contribution
88 | public_docs/information_contact
89 |
90 | .. toctree::
91 | :hidden:
92 | :caption: Examples & Tutorials
93 | :maxdepth: 1
94 |
95 | atlas_examples
96 | notebooks_external/docs_wheel_moves
97 | notebooks_external/docs_wheel_screen_stimulus
98 |
99 | .. toctree::
100 | :hidden:
101 | :caption: API Reference
102 | :maxdepth: 1
103 |
104 | 010_api_reference.rst
105 | genindex
106 |
--------------------------------------------------------------------------------
/docs_gh_pages/README.md:
--------------------------------------------------------------------------------
1 | # Overview of documentation
2 |
3 | The documentation is built locally and hosted on a github-pages website at this address:
4 | https://int-brain-lab.github.io/iblenv/
5 |
6 | The website is generated using
7 | 1. The markdown files in the `./docs_gh_pages` folder
8 | 2. The python or ipython notebooks in the `./docs_gh_pages/notebooks` folder
9 | 3. The python or ipython notebooks in the ibllib repo `./examples` and `./brainbox/examples` folders
10 | 4. The docstrings in the source code of the `./ibllib`, `./alf`, `./one` and `./brainbox` folders
11 |
12 |
13 | # Contributing to documentation
14 |
15 | ### Adding examples or tutorials to the documentation
16 | Examples or tutorials should be placed in the folders (can be in sub-folders within these folders)
17 | `ibllib-repo/examples`
18 | or
19 | `ibllib-repo/brainbox/examples`
20 |
21 | They can be either `.py` or `.ipynb` files but must have a prefix of `docs` to be included in the documentation,
22 | e.g. `docs_coronal_slice.py` or `docs_get_LFP_data.ipynb`. Each example/tutorial must start with a title and a brief
23 | description of the content. Please refer to the templates in the [templates folder](./templates) for examples of
24 | how to lay out the title and description in order for it to be correctly rendered and displayed on the website.
25 |
26 | Once you have created the example/tutorial you should link to the file in the appropriate `.rst` file: `index.rst`, `06_examples.rst`,
27 | `atlas_examples.rst` etc.
28 | The link should be made by adding a line of the form `notebooks_external/name_of_example_without_extension`, e.g.
29 | `notebooks_external/docs_coronal_slice`
30 |
31 | `notebooks_external/docs_get_LFP_data`
32 |
33 | An example implementation can be seen in the `06_examples.rst` file; a minimal sketch is shown below.
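   |
   | A minimal sketch of what such an entry looks like inside a toctree (the example name here is illustrative):
   |
   | ```rst
   | .. toctree::
   |    :maxdepth: 1
   |
   |    notebooks_external/docs_coronal_slice
   | ```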
34 |
35 | ### Tips to create and edit example notebooks
36 |
37 | #### Hide a cell in the documentation
38 | In the cell metadata, add the key `nbsphinx` with the value `hidden` to hide the cell in the documentation.
39 |
40 | ```json
41 | {
42 | "nbsphinx": "hidden",
43 | "trusted": false
44 | }
45 | ```
46 |
47 | #### Prevent execution of a cell in the built documentation
48 | Let's say an example uses a dataset that is too large to run during the build. A single cell can be disabled by adding the following key to the cell metadata.
49 |
50 | ```json
51 | {
52 | "ibl_execute": false
53 | }
54 | ```
55 |
56 | #### Prevent execution of the whole notebook in the built documentation
57 | If the full notebook is to be skipped, you can also set the `ibl_execute` flag to `false` in the notebook metadata, as shown below.
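   |
   | For example, the notebook-level metadata would then include the following key (alongside the kernel information that is normally present):
   |
   | ```json
   | {
   |   "ibl_execute": false
   | }
   | ```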
58 |
59 | #### Disable logging and tqdm output
60 | To have a clean output in the documentation, it is recommended to disable the logging and tqdm output in the example by adding a hidden cell at the top of the notebook.
61 | (make sure the cell metadata contains the key `nbsphinx` with the value `hidden` as specified above)
62 |
63 | ```python
64 | # Turn off logging and disable tqdm; this is a hidden cell on the docs page
65 | import os
66 | os.environ["TQDM_DISABLE"] = "1"
67 | import logging
68 |
69 | logger = logging.getLogger('ibllib')
70 | logger.setLevel(logging.CRITICAL)
71 | ```
72 |
73 | ## Making documentation using github actions
74 | Two GitHub Actions workflows are available to automate the building and the deployment of the docs. These are located in the int-brain-lab/iblenv repository and can be accessed under the Actions tab.
75 |
76 | ### Developing docs
77 |
78 | Steps:
79 | - create documentation branches called `docs` on the `ibllib` and `iblenv` repositories. The workflow will only run if the branch exists in both repos (TODO: if it doesn't exist, make the GitHub action fall back to master)
80 | - add your changes to the documentation
81 | - run the [Build docs workflow](https://github.com/int-brain-lab/iblenv/actions/workflows/build_docs.yml). To run the workflow, click on the `Run workflow` button in the top left corner and choose the branch you want to launch it from (this should normally be `docs`).
82 |
83 | After the docs build has completed successfully, your documentation will appear at this site: http://testdocs.internationalbrainlab.org.s3-website-us-east-1.amazonaws.com
84 |
85 |
86 | ### Deploying docs
87 | **WARNING: Do not run this workflow unless you have run the build docs workflow above and checked that the documentation is correct**
88 |
89 | Steps:
90 | - merge the `docs` branch into `master` on the `iblenv` repository
91 | - merge the `docs` branch into `develop` on the `ibllib` repository
92 | - run the [Deploy docs workflow](https://github.com/int-brain-lab/iblenv/actions/workflows/deploy_docs.yml). To run the workflow, click on the `Run workflow` button in the top left corner and choose the branch you want to launch it from (this should be `master`).
93 |
94 | The new docs will then be deployed to the main documentation website https://int-brain-lab.github.io/iblenv/
95 |
96 |
97 | ### Checking for permalinks
98 | As we maintain published datasets, we need to make sure the published URLs remain valid and redirect to the appropriate resources if we move the pages.
99 |
100 | #### Checking permalinks
101 | The python script in [docs_gh_pages/scripts/permalinks_check.py](docs_gh_pages/scripts/permalinks_check.py) contains our published links and will check that they still point, or redirect, to live content. It is good practice to run this after a major refactor; a minimal sketch of such a check is shown below.
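   |
   | A minimal sketch of this kind of check (illustrative only; the actual script's logic and link list may differ):
   |
   | ```python
   | import requests
   |
   | # example published permalink (illustrative)
   | permalinks = [
   |     'https://int-brain-lab.github.io/iblenv/notebooks_external/data_release_brainwidemap.html',
   | ]
   |
   | for url in permalinks:
   |     # follow redirects so a moved-but-redirected page still counts as live content
   |     response = requests.get(url, allow_redirects=True, timeout=30)
   |     assert response.ok, f'{url} returned HTTP {response.status_code}'
   | ```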
102 |
103 | #### Redirects
104 | The redirects are configured in [docs_gh_pages/conf.py](docs_gh_pages/conf.py), using the **sphinx_reredirects** package. Look for the `redirects` dictionary.
105 |
106 | ```python
107 | redirects = {
108 | "notebooks_external/data_release_brainwidemap": "2025_data_release_brainwidemap.html",
109 | "notebooks_external/data_release_repro_ephys": "2024_data_release_repro_ephys.html",
110 | }
111 | ```
112 |
113 | ## Making documentation locally
114 | ### Install dependencies to build the website locally
115 | Activate your iblenv environment first, then install the documentation dependencies on top using pip
116 | ```shell
117 | sudo apt-get install pandoc
118 | pip install -r ./docs_gh_pages/requirements-docs.txt
119 | ```
120 |
121 | ### Option 1: Only building changes to documentation
122 | If you have only made changes to the documentation (any of the files with a `.md` or `.rst` extension), you can build the
123 | documentation without running the examples. The examples previously published on the website will remain. To build
124 | only the documentation, the following command can be used
125 |
126 | ```shell
127 | cd ./docs_gh_pages
128 | python make_script.py -d
129 | ```
130 |
131 | ### Option 2: Building changes to documentation and specific examples
132 | If you want to add a new example or change a few of the existing examples, it is possible to build the documentation
133 | while executing only a few specified examples. The documentation can be built using the following command, providing
134 | the paths to your `.ipynb` or `.py` example scripts.
135 |
136 | ```shell
137 | cd ./docs_gh_pages
138 | python make_script.py -e -s
139 | ```
140 |
141 | An example would be
142 | ```shell
143 | python make_script.py -e -s C:\Users\Mayo\iblenv\ibllib-repo\brainbox\examples\docs_get_training_status.py C:\Users\Mayo\iblenv\iblenv\docs_gh_pages\notebooks\one_basics\one_basics.ipynb
144 | ```
145 |
146 | ### Option 3: Building changes to documentation and all examples
147 | If you want to rebuild the documentation and all examples, you can use the following command
148 |
150 | ```shell
150 | cd ./docs_gh_pages
151 | python make_script.py -e
152 | ```
153 |
154 | ### Previewing the built documentation
155 | Once `make_script.py` has completed, a preview of the documentation can be viewed by opening
156 | `./docs_gh_pages/_build/html/index.html` in a web browser.
157 |
158 | Check that all notebooks have run without errors and that your changes have been implemented correctly! (N.B. if you have
159 | run `make_script.py` using option 1 or 2, some or all of the examples will not have been executed; this is expected)
160 |
161 |
162 | ## Pushing changes to gh-pages
163 | Once you are happy with the built documentation, the changes can be deployed to the website by running the following
164 | command
165 |
166 | ```shell
167 | python make_script.py -gh -m "your commit message"
168 | ```
169 |
170 | ## Cleaning up your build
171 | Once your changes have been pushed to GitHub, run the following command to clean up your ibllib and iblenv
172 | directories and unexecute the example notebooks
173 | ```shell
174 | python make_script.py -c
175 | ```
176 |
177 |
178 |
179 |
--------------------------------------------------------------------------------
/docs_gh_pages/make_script.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.environ["TQDM_DISABLE"] = "1" # noqa
3 |
4 | import sys
5 | import shutil
6 | import argparse
7 | import subprocess
8 | from pathlib import Path
9 |
10 | from iblutil.util import setup_logger
11 | from scripts.execute_notebooks import process_notebooks
12 |
13 | _logger = setup_logger(name='íbllib', level=20)
14 |
15 | root = Path.cwd()
16 | scripts_path = root.joinpath('scripts')
17 |
18 | nb_path = root.joinpath('notebooks')
19 | nb_path_external = [# Path(root.parent.parent).joinpath('ibllib-repo', 'examples'),
20 | Path(root.parent.parent).joinpath('ibllib-repo', 'examples', 'loading_data'),
21 | Path(root.parent.parent).joinpath('iblatlas', 'examples'),
22 | Path(root.parent.parent).joinpath('ibllib-repo', 'examples', 'data_release'),
23 | Path(root.parent.parent).joinpath('ibllib-repo', 'examples', 'exploring_data'),
24 | Path(root.parent.parent).joinpath('ibllib-repo', 'brainbox', 'examples'),
25 | Path(root.parent.parent).joinpath('ONE', 'docs', 'notebooks')]
26 | # external_file_patterns = ['docs', 'loading', 'atlas', 'docs', 'quickstart']
27 | external_file_patterns = ['loading', 'atlas', 'data', 'data', 'docs_wheel', 'quickstart']
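   | # N.B. the two lists above are paired positionally: the pattern at index i selects which
   | # notebook filenames are included from the folder at index i of nb_path_external (see the
   | # zip calls below); the assert in make_documentation checks the lists stay the same length.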
28 |
29 |
30 | def make_documentation(execute, force, documentation, clean, specific, github, message, pre_clean, serve=False):
31 |
32 | # Clean up any nblink files
33 | nb_external_files = root.joinpath('notebooks_external').glob('*')
34 | for file in nb_external_files:
35 | os.remove(file)
36 |
37 | assert len(external_file_patterns) == len(nb_path_external)
38 | status = 0
39 | # Case where we want to rebuild all examples
40 | if execute and not specific:
41 | # Execute notebooks in docs folder
42 | #### remove the running of datajoint docs
43 | # status += process_notebooks(nb_path, execute=True, force=force)
44 | # Execute notebooks in external folders
45 | for nb_path_ext, pattern in zip(nb_path_external, external_file_patterns):
46 | status += process_notebooks(nb_path_ext, execute=True, force=force,
47 | link=True, filename_pattern=pattern)
48 | _logger.info("Finished processing notebooks")
49 |
50 | if status != 0:
51 | # One or more examples returned an error
52 | sys.exit(1)
53 | else:
54 | # If no errors make the documentation
55 | _logger.info("Cleaning up previous documentation")
56 | os.system("make clean")
57 | _logger.info("Making documentation")
58 | os.system("make html")
59 | sys.exit(0)
60 |
61 | # Case where we only want to build specific examples
62 | if execute and specific:
63 | for nb in specific:
64 | if str(nb).startswith(str(root)):
65 | status += process_notebooks(nb, execute=True, force=force)
66 | else:
67 | status += process_notebooks(nb, execute=True, force=force, link=True)
68 | _logger.info("Finished processing notebooks")
69 |
70 | # Create the link files for the other notebooks in external paths that we haven't
71 | # executed. N.B this must be run after the above commands
72 | for nb_path_ext, pattern in zip(nb_path_external, external_file_patterns):
73 | process_notebooks(nb_path_ext, execute=False, link=True, filename_pattern=pattern)
74 |
75 | if status != 0:
76 | # One or more examples returned an error
77 | sys.exit(1)
78 | else:
79 | # If no errors make the documentation
80 | _logger.info("Cleaning up previous documentation")
81 | os.system("make clean")
82 | _logger.info("Making documentation")
83 | os.system("make html")
84 | sys.exit(0)
85 |
86 | if documentation:
87 | for nb_path_ext, pattern in zip(nb_path_external, external_file_patterns):
88 | process_notebooks(nb_path_ext, execute=False, link=True, filename_pattern=pattern)
89 |
90 | _logger.info("Cleaning up previous documentation")
91 | os.system("make clean")
92 | _logger.info("Making documentation")
93 | os.system("make html")
94 | print("Documentation built successfully, to serve the site locally, run the following command:")
95 | print("python -m http.server -d ./_build/html")
96 | sys.exit(0)
97 |
98 | if pre_clean:
99 | # clean up for github but don't commit. In the examples only notebooks with an execute flag=True are kept,
100 | # the rest are deleted.
101 | # Clean up the build path regardless
102 | build_nb_path = root.joinpath('_build', 'html', 'notebooks')
103 | build_nb_external_path = root.joinpath('_build', 'html', 'notebooks_external')
104 | process_notebooks(build_nb_path, execute=False, cleanup=True, remove_gh=True)
105 | process_notebooks(build_nb_external_path, execute=False, cleanup=True, remove_gh=True)
106 |
107 | # remove the _sources folder as we don't need this
108 | build_nb_source_path = root.joinpath('_build', 'html', '_sources')
109 | if build_nb_source_path.exists():
110 | shutil.rmtree(build_nb_source_path)
111 |
112 |
113 | if github:
114 | # clean up for github. In the examples only notebooks with an execute flag=True are kept,
115 | # the rest are deleted.
116 | # Clean up the build path regardless
117 | build_nb_path = root.joinpath('_build', 'html', 'notebooks')
118 | build_nb_external_path = root.joinpath('_build', 'html', 'notebooks_external')
119 | process_notebooks(build_nb_path, execute=False, cleanup=True, remove_gh=True)
120 | process_notebooks(build_nb_external_path, execute=False, cleanup=True, remove_gh=True)
121 |
122 | # remove the _sources folder as we don't need this
123 | build_nb_source_path = root.joinpath('_build', 'html', '_sources')
124 | if build_nb_source_path.exists():
125 | shutil.rmtree(build_nb_source_path)
126 |
127 | # Need to figure out how to do this
128 | if not message:
129 | message = "commit latest documentation"
130 |
131 |         script = Path('scripts').joinpath('gh_push.sh')  # renamed to avoid shadowing the exec builtin
132 |         command = f'{script} "{message}"'
133 | print(command)
134 | subprocess.call(command, shell=True) # noqa: E605
135 |
136 | # Clean up notebooks in directory if also specified
137 | if clean:
138 | _logger.info("Cleaning up notebooks")
139 | process_notebooks(nb_path, execute=False, cleanup=True)
140 | for nb_path_ext, pattern in zip(nb_path_external, external_file_patterns):
141 | process_notebooks(nb_path_ext, execute=False, cleanup=True,
142 | filename_pattern=pattern)
143 |
144 | try:
145 | build_path = root.joinpath('_build')
146 | if build_path.exists():
147 | shutil.rmtree(build_path)
148 | except Exception as err:
149 | print(err)
150 | _logger.error('Could not remove _build directory in iblenv/docs_gh_pages, please '
151 | 'delete manually')
152 | try:
153 | autosummary_path = root.joinpath('_autosummary')
154 | if autosummary_path.exists():
155 | shutil.rmtree(autosummary_path)
156 | except Exception as err:
157 | print(err)
158 | _logger.error('Could not remove _autosummary directory in iblenv/docs_gh_pages, please'
159 | ' delete manually')
160 |
161 |
162 | if __name__ == "__main__":
163 |
164 | parser = argparse.ArgumentParser(description='Make IBL documentation')
165 |
166 | parser.add_argument('-e', '--execute', default=False, action='store_true',
167 | help='Execute notebooks')
168 | parser.add_argument('-f', '--force', default=False, action='store_true',
169 | help='Force notebook execution even if already run')
170 | parser.add_argument('-d', '--documentation', default=False, action='store_true',
171 | help='Make documentation')
172 | parser.add_argument('-s', '--specific', nargs='+', required=False,
173 | help='List of specific files to execute')
174 | parser.add_argument('-c', '--cleanup', default=False, action='store_true',
175 | help='Cleanup notebooks once documentation made')
176 | parser.add_argument('-gh', '--github', default=False, action='store_true',
177 | help='Push documentation to gh-pages')
178 | parser.add_argument('-pc', '--preclean', default=False, action='store_true',
179 | help='Clean up documentation for gh-pages')
180 | parser.add_argument('-m', '--message', default=None, required=False, type=str,
181 | help='Commit message')
182 | args = parser.parse_args()
183 | make_documentation(execute=args.execute, force=args.force, documentation=args.documentation,
184 | clean=args.cleanup, specific=args.specific, github=args.github,
185 | message=args.message, pre_clean=args.preclean)
186 |
--------------------------------------------------------------------------------
/docs_gh_pages/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Configuration file for the Sphinx documentation builder.
4 | #
5 | # This file does only contain a selection of the most common options. For a
6 | # full list see the documentation:
7 | # http://www.sphinx-doc.org/en/master/config
8 |
9 | # -- Path setup --------------------------------------------------------------
10 |
11 | # If extensions (or modules to document with autodoc) are in another directory,
12 | # add these directories to sys.path here. If the directory is relative to the
13 | # documentation root, use os.path.abspath to make it absolute, like shown here.
14 | #
15 |
16 | import sys
17 | from pathlib import Path
18 | import matplotlib
19 | matplotlib.use('agg')
20 |
21 | print(Path.cwd().parent.parent)
22 | sys.path.insert(0, str(Path.cwd().parent.parent))
23 |
24 | print('Python %s on %s' % (sys.version, sys.platform))
25 | print(sys.path)
26 |
27 | # -- Project information -----------------------------------------------------
28 |
29 | project = 'IBL Library'
30 | copyright = '2020, International Brain Laboratory'
31 | author = 'International Brain Laboratory'
32 |
33 | # The short X.Y version
34 | version = ''
35 | # The full version, including alpha/beta/rc tags
36 | release = ''
37 |
38 |
39 | # -- General configuration ---------------------------------------------------
40 |
41 | # If your documentation needs a minimal Sphinx version, state it here.
42 | #
43 | # needs_sphinx = '1.0'
44 |
45 | # Add any Sphinx extension module names here, as strings. They can be
46 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
47 | # ones.
48 | extensions = ['sphinx.ext.autodoc',
49 | 'sphinx.ext.autosummary',
50 | 'sphinx.ext.mathjax',
51 | 'sphinx.ext.githubpages',
52 | 'sphinx.ext.intersphinx',
53 | 'sphinx_copybutton',
54 | 'nbsphinx',
55 | 'nbsphinx_link',
56 | 'myst_parser',
57 | 'sphinx.ext.napoleon',
58 | 'sphinx.ext.viewcode',
59 | 'sphinx_reredirects']
60 | #'sphinx_gallery.gen_gallery']
61 |
62 | # Add any paths that contain templates here, relative to this directory.
63 | templates_path = ['_templates']
64 |
65 | # Looks for objects in external projects
66 | intersphinx_mapping = {
67 | 'one_api': ('https://int-brain-lab.github.io/ONE/', None),
68 | }
69 |
70 | redirects = {
71 | "notebooks_external/data_release_brainwidemap": "2025_data_release_brainwidemap.html",
72 | "notebooks_external/data_release_repro_ephys": "2024_data_release_repro_ephys.html",
73 | }
74 |
75 | #sphinx_gallery_conf = {
76 | # 'examples_dirs': '../../ibllib-repo/examples/one/ephys', # path to your example scripts
77 | # 'gallery_dirs': 'auto_examples', # path to where to save gallery generated output
78 | # 'filename_pattern': 'docs_',
79 | #}
80 |
81 |
82 | #autoapi_add_toctree_entry = False
83 | #autoapi_dirs = ['../../ibllib-repo/ibllib', '../../ibllib-repo/alf', '../../ibllib-repo/oneibl']
84 | # The master toctree document.
85 | master_doc = 'index'
86 |
87 | # The language for content autogenerated by Sphinx. Refer to documentation
88 | # for a list of supported languages.
89 | #
90 | # This is also used if you do content translation via gettext catalogs.
91 | # Usually you set "language" from the command line for these cases.
92 | language = None
93 |
94 | # List of patterns, relative to source directory, that match files and
95 | # directories to ignore when looking for source files.
96 | # This pattern also affects html_static_path and html_extra_path .
97 | exclude_patterns = ['_build', '_templates', 'documentation_contribution_guidelines.md',
98 | '.ipynb_checkpoints', 'templates', 'README.md', 'gh-pages']
99 |
100 | # The name of the Pygments (syntax highlighting) style to use.
101 | pygments_style = 'sphinx'
102 |
103 |
104 | # -- Options for HTML output -------------------------------------------------
105 |
106 | # The theme to use for HTML and HTML Help pages. See the documentation for
107 | # a list of builtin themes.
108 |
109 | html_theme = 'sphinx_rtd_theme'
110 |
111 |
112 | # Theme options are theme-specific and customize the look and feel of a theme
113 | # further. For a list of options available for each theme, see the
114 | # documentation.
115 | #
116 | # html_theme_options = {}
117 |
118 | # Add any paths that contain custom static files (such as style sheets) here,
119 | # relative to this directory. They are copied after the builtin static files,
120 | # so a file named "default.css" will overwrite the builtin "default.css".
121 | html_static_path = ['_static']
122 | html_css_files = ['css/style.css']
123 |
124 | # Custom sidebar templates, must be a dictionary that maps document names
125 | # to template names.
126 | #
127 | # The default sidebars (for documents that don't match any pattern) are
128 | # defined by theme itself. Builtin themes are using these templates by
129 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
130 | # 'searchbox.html']``.
131 | #
132 | html_sidebars = {}
133 |
134 |
135 | # -- Options for HTMLHelp output ---------------------------------------------
136 |
137 | # Output file base name for HTML help builder.
138 | htmlhelp_basename = 'ibllibdoc'
139 |
140 |
141 | # -- Options for LaTeX output ------------------------------------------------
142 |
143 | latex_elements = {
144 | # The paper size ('letterpaper' or 'a4paper').
145 | #
146 | # 'papersize': 'letterpaper',
147 |
148 | # The font size ('10pt', '11pt' or '12pt').
149 | #
150 | # 'pointsize': '10pt',
151 |
152 | # Additional stuff for the LaTeX preamble.
153 | #
154 | # 'preamble': '',
155 |
156 | # Latex figure (float) alignment
157 | #
158 | # 'figure_align': 'htbp',
159 | }
160 |
161 | # Grouping the document tree into LaTeX files. List of tuples
162 | # (source start file, target name, title,
163 | # author, documentclass [howto, manual, or own class]).
164 | latex_documents = [
165 | (master_doc, 'ibllib.tex', 'ibllib Documentation',
166 | 'International Brain Laboratory', 'manual'),
167 | ]
168 |
169 |
170 | # -- Options for manual page output ------------------------------------------
171 |
172 | # One entry per manual page. List of tuples
173 | # (source start file, name, description, authors, manual section).
174 | man_pages = [
175 | (master_doc, 'ibllib', 'ibllib Documentation',
176 | [author], 1)
177 | ]
178 |
179 |
180 | # -- Options for Texinfo output ----------------------------------------------
181 |
182 | # Grouping the document tree into Texinfo files. List of tuples
183 | # (source start file, target name, title, author,
184 | # dir menu entry, description, category)
185 | texinfo_documents = [
186 | (master_doc, 'ibllib', 'ibllib Documentation',
187 | author, 'ibllib', 'One line description of project.',
188 | 'Miscellaneous'),
189 | ]
190 |
191 |
192 | # -- Options for autosummary and autodoc ------------------------------------
193 | autosummary_generate = True
194 | # Don't add module names to function docs
195 | add_module_names = False
196 |
197 | autodoc_default_options = {
198 | 'members': True,
199 | 'member-order': 'bysource',
200 | 'undoc-members': True,
201 | 'show-inheritance': False
202 | }
203 |
204 |
205 | def param_line_break(app, what, name, obj, options, lines):
206 | first_param = next((i for i, j in enumerate(lines) if ':param' in j), -1)
207 | if first_param != -1:
208 | # if the first param is not preceded by a line break add one in
209 | if lines[first_param - 1] != '':
210 | lines.insert(first_param, '')
211 | return
212 |
213 |
214 | def setup(app):
215 | # Connect the autodoc-skip-member event from apidoc to the callback
216 | app.connect('autodoc-process-docstring', param_line_break)
217 |
218 | # def autodoc_skip_member_handler(app, what, name, obj, skip, options):
219 | # # Basic approach; you might want a regex instead
220 | # # TODO still makes the folder structure, need to figure out how not to do that also makes
221 | # all the private methods which we don't want
222 | # if 'test' in name.lower():
223 | # return True
224 | # else:
225 | # return False
226 | #
227 | # # Automatically called by sphinx at startup
228 | # def setup(app):
229 | # # Connect the autodoc-skip-member event from apidoc to the callback
230 | # app.connect('autodoc-skip-member', autodoc_skip_member_handler)
231 |
232 | # -- Options for nbsphinx ------------------------------------
233 |
234 | # Only use nbsphinx for formatting the notebooks i.e never execute
235 | nbsphinx_execute = 'never'
236 | # Cancel compile on errors in notebooks
237 | nbsphinx_allow_errors = False
238 | # Add cell execution out number
239 | nbsphinx_output_prompt = 'Out[%s]:'
240 | # Configuration for images
241 | nbsphinx_execute_arguments = [
242 | "--InlineBackend.figure_formats={'svg', 'pdf'}",
243 | "--InlineBackend.rc={'figure.dpi': 96}",
244 | ]
245 | plot_formats = [('png', 512)]
246 |
247 | # Add extra prolog to beginning of each .ipynb file
248 | # Add option to download notebook and link to github page
249 | # nbsphinx_prolog = r"""
250 | #
251 | # {% if env.metadata[env.docname]['nbsphinx-link-target'] %}
252 | # {% set nb_path = env.metadata[env.docname]['nbsphinx-link-target'] | dirname %}
253 | # {% set nb_name = env.metadata[env.docname]['nbsphinx-link-target'] | basename %}
254 | # {% else %}
255 | # {% set nb_name = env.doc2path(env.docname, base=None) | basename %}
256 | # {% set nb_path = env.doc2path(env.docname, base=None) | dirname %}
257 | # {% endif %}
258 | #
259 | # .. raw:: html
260 | #
261 | #
262 | #
264 | #
265 | # """
266 |
--------------------------------------------------------------------------------
/docs_gh_pages/scripts/execute_notebooks.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import logging
4 | import time
5 | import shutil
6 | import re
7 | from pathlib import Path
9 |
10 |
11 | from nbconvert.preprocessors import (ExecutePreprocessor, CellExecutionError,
12 | ClearOutputPreprocessor)
13 | import nbformat
14 | import sphinx_gallery.notebook as sph_nb
15 | import sphinx_gallery.gen_gallery as gg
16 |
17 | _logger = logging.getLogger('íbllib')
18 | IPYTHON_VERSION = 4
19 | TIMEOUT_CELLS = 1200
20 | PATH_EXTERNAL_NOTEBOOKS = Path(__file__).parent.parent.joinpath('notebooks_external')
21 |
22 | class NotebookConverter(object):
23 |
24 | def __init__(self, nb_path, output_path=None, overwrite=True, kernel_name=None):
25 | """
26 | Parameters
27 | ----------
28 | nb_path : str
29 | Path to ipython notebook
30 | output_path: str, default=None
31 | Path to where executed notebook, rst file and colab notebook will be saved. Default is
32 | to save in same directory of notebook
33 | overwrite: bool, default=True
34 | Whether to save executed notebook as same filename as unexecuted notebook or create new
35 | file with naming convention 'exec_....'. Default is to write to same file
36 | kernel_name: str
37 | Kernel to use to run notebooks. If not specified defaults to 'python3'
38 | """
39 | self.nb_path = Path(nb_path).absolute()
40 | self.nb_link_path = PATH_EXTERNAL_NOTEBOOKS
41 | os.makedirs(self.nb_link_path, exist_ok=True)
42 | self.nb = self.nb_path.parts[-1]
43 | self.nb_dir = self.nb_path.parent
44 | self.nb_name = self.nb_path.stem
45 | self.overwrite = overwrite
46 |
47 | # If no output path is specified save everything into directory containing notebook
48 | if output_path is not None:
49 | self.output_path = Path(output_path).absolute()
50 | os.makedirs(self.output_path, exist_ok=True)
51 | else:
52 | self.output_path = self.nb_dir
53 |
54 | # If overwrite is True, write the executed notebook to the same name as the notebook
55 | if self.overwrite:
56 | self.executed_nb_path = self.output_path.joinpath(self.nb)
57 | self.temp_nb_path = self.output_path.joinpath(f'executed_{self.nb}')
58 | else:
59 | self.executed_nb_path = self.output_path.joinpath(f'executed_{self.nb}')
60 |
61 | if kernel_name is not None:
62 | self.execute_kwargs = dict(timeout=TIMEOUT_CELLS, kernel_name=kernel_name, allow_errors=False)
63 | else:
64 | self.execute_kwargs = dict(timeout=TIMEOUT_CELLS, kernel_name='python3', allow_errors=False)
65 |
66 | @staticmethod
67 | def py_to_ipynb(py_path):
68 | """
69 |         Convert a python script to an ipython notebook and return the path to the
70 |         converted notebook, creating the notebook alongside the script if needed.
71 |         """
73 | nb_path = sph_nb.replace_py_ipynb(py_path)
74 | if not Path(nb_path).exists():
75 | file_conf, blocks = sph_nb.split_code_and_text_blocks(py_path)
76 | gallery_config = gg.DEFAULT_GALLERY_CONF
77 | gallery_config['first_notebook_cell'] = None
78 | example_nb = sph_nb.jupyter_notebook(blocks, gallery_config, nb_path)
79 | sph_nb.save_notebook(example_nb, nb_path)
80 | return nb_path
81 |
82 | def link(self):
83 | """
84 | Create nb_sphinx link file for notebooks external to the docs directory
85 | """
86 | link_path = os.path.relpath(self.nb_path, self.nb_link_path)
87 | link_dict = {"path": link_path}
88 | link_save_path = self.nb_link_path.joinpath(str(self.nb_name) + '.nblink')
89 |
90 | with open(link_save_path, 'w') as f:
91 | json.dump(link_dict, f)
92 |
93 | def execute(self, force=False):
94 | """
95 | Executes the specified notebook file, and writes the executed notebook to a
96 | new file.
97 | Parameters
98 | ----------
99 | force : bool, optional
100 | To force rerun notebook even if it has already been executed
101 | Returns
102 | -------
103 | executed_nb_path : str, ``None``
104 | The path to the executed notebook path, or ``None`` if ``write=False``.
105 | status: bool
106 | Whether the notebook executed without errors or not, 0 = ran without error, 1 = error
107 | """
108 |
109 | with open(self.nb_path, encoding='utf-8') as f:
110 | nb = nbformat.read(f, as_version=IPYTHON_VERSION)
111 |
112 | skip_execution = not nb['metadata'].get('ibl_execute', True)
113 | is_executed = nb['metadata'].get('docs_executed')
114 |
115 | if skip_execution:
116 |             _logger.info(f"Notebook {self.nb} in {self.nb_dir} has the 'ibl_execute' flag set to False, "
117 |                          f"skipping")
118 | status = 0
119 | elif is_executed == 'executed' and not force:
120 |             _logger.info(f"Notebook {self.nb} in {self.nb_dir} already executed, skipping; "
121 |                          f"to force execution, pass the argument -f")
122 | status = 0
123 | else:
124 |
125 | # Execute the notebook
126 | _logger.info(f"Executing notebook {self.nb} in {self.nb_dir}")
127 | t0 = time.time()
128 |
129 | clear_executor = ClearOutputPreprocessor()
130 | executor = ExecuteNotebooks(**self.execute_kwargs)
131 |
132 | # First clean up the notebook and remove any cells that have been run
133 | clear_executor.preprocess(nb, {})
134 |
135 | try:
136 | executor.preprocess(nb, {'metadata': {'path': self.nb_dir}})
137 | execute_dict = {'docs_executed': 'executed'}
138 | nb['metadata'].update(execute_dict)
139 | status = 0
140 | except CellExecutionError as err:
141 | execute_dict = {'docs_executed': 'errored'}
142 | nb['metadata'].update(execute_dict)
143 | _logger.error(f"Error executing notebook {self.nb}")
144 | _logger.error(err)
145 | status = 1
146 |
147 | _logger.info(f"Finished running notebook ({time.time() - t0})")
148 |
149 | _logger.info(f"Writing executed notebook to {self.executed_nb_path}")
150 | # Makes sure original notebook isn't left blank in case of error during writing
151 | if self.overwrite:
152 | with open(self.temp_nb_path, 'w', encoding='utf-8') as f:
153 | nbformat.write(nb, f)
154 | shutil.copyfile(self.temp_nb_path, self.executed_nb_path)
155 | os.remove(self.temp_nb_path)
156 | else:
157 | with open(self.executed_nb_path, 'w', encoding='utf-8') as f:
158 | nbformat.write(nb, f)
159 |
160 | return self.executed_nb_path, status
161 |
162 | def unexecute(self, remove_gh=False):
163 | """
164 |         Unexecutes the notebook, i.e. removes all output cells. If remove_gh=True, looks to see if the
165 |         notebook metadata contains an executed tag. If it doesn't, it means the notebook either
166 |         errored or was not run (for the case when only specific notebooks are chosen to build examples),
167 |         and the notebook is removed so the old version on the website can be used.
168 |
169 | If the notebook has the flag `ibl_execute` set to false, we do not interfere with any of the outputs
170 | """
171 | _logger.info(f"Cleaning up notebook {self.nb} in {self.nb_dir}")
172 | if not self.executed_nb_path.exists():
173 | _logger.warning(f"{self.executed_nb_path} not found, nothing to clean")
174 | return
175 |
176 | with open(self.executed_nb_path, encoding='utf-8') as f:
177 | nb = nbformat.read(f, as_version=IPYTHON_VERSION)
178 |
179 | # if the flag for automatic execution is set to false, it means the notebook has
180 | # been run manually as it may rely on large datasets, and in this case we do not interfere with outputs
181 | skip_execution = not nb['metadata'].get('ibl_execute', True)
182 | if skip_execution:
183 | return
184 |
185 | if not remove_gh:
186 | if nb['metadata'].get('docs_executed', None):
187 | nb['metadata'].pop('docs_executed')
188 |
189 | clear_executor = ClearOutputPreprocessor()
190 | clear_executor.remove_metadata_fields.add('execution')
191 | clear_executor.preprocess(nb, {})
192 |
193 | with open(self.executed_nb_path, 'w', encoding='utf-8') as f:
194 | nbformat.write(nb, f)
195 |
196 | elif remove_gh:
197 | executed_flag = nb['metadata'].get('docs_executed', None)
198 | if executed_flag != 'executed':
199 | _logger.warning(f"Notebook {self.nb} not executed or errored, "
200 | f"version already on website will be used")
201 | os.remove(self.executed_nb_path)
202 | os.remove(self.output_path.joinpath(self.nb_name + '.html'))
203 | else:
204 | _logger.info(f"Notebook {self.nb} executed, "
205 | f"new version will be uploaded to website")
206 | clear_executor = ClearOutputPreprocessor()
207 | clear_executor.preprocess(nb, {})
208 |
209 | with open(self.executed_nb_path, 'w', encoding='utf-8') as f:
210 | nbformat.write(nb, f)
211 |
212 |
213 | def process_notebooks(nbfile_or_path, execute=True, force=False, link=False, cleanup=False,
214 | filename_pattern='', remove_gh=False, **kwargs):
215 | """
216 | Execute and optionally convert the specified notebook file or directory of
217 | notebook files.
218 | Wrapper for `NotebookConverter` class that does all the file handling.
219 | Parameters
220 | ----------
221 | nbfile_or_path : str
222 | Either a single notebook filename or a path containing notebook files.
223 | execute : bool
224 | Whether or not to execute the notebooks
225 | link : bool, default = False
226 |         Whether to create an nbsphinx link file
227 | cleanup : bool, default = False
228 |         Whether to unexecute the notebook and clean up files. To clean up, set this to True and the
229 |         execute argument to False
230 | filename_pattern: str default = ''
231 | Filename pattern to look for in .py or .ipynb files to include in docs
232 | remove_gh: bool default = False
233 | Whether to remove notebook from build examples (in case where we want to use old version)
234 | **kwargs
235 |         Other keyword arguments that are passed to the 'NotebookConverter'
236 | """
237 |
238 | overall_status = 0
239 | if os.path.isdir(nbfile_or_path):
240 | # It's a path, so we need to walk through recursively and find any
241 | # notebook files
242 | for root, dirs, files in os.walk(nbfile_or_path):
243 | for name in files:
244 |
245 | _, ext = os.path.splitext(name)
246 | full_path = os.path.join(root, name)
247 |
248 | # skip checkpoints
249 | if 'ipynb_checkpoints' in full_path:
250 | if cleanup:
251 | os.remove(full_path)
252 | continue
253 | else:
254 | continue
255 |
256 | # if file has 'ipynb' extension create the NotebookConverter object
257 | if ext == '.ipynb':
258 | if re.search(filename_pattern, name):
259 | nbc = NotebookConverter(full_path, **kwargs)
260 | # Want to create the link file
261 | if link:
262 | nbc.link()
263 | # Execute the notebook
264 | if execute:
265 | _logger.info(f"Executing notebook {full_path}")
266 | _, status = nbc.execute(force=force)
267 | overall_status += status
268 | # If cleanup is true and execute is false unexecute the notebook
269 | if cleanup:
270 | _logger.info(f"Cleaning up notebook {full_path}")
271 | nbc.unexecute(remove_gh=remove_gh)
272 |
273 | # if file has 'py' extension convert to '.ipynb' and then execute
274 | elif ext == '.py':
275 | if re.search(filename_pattern, name):
276 | # See if the ipynb version already exists
277 | ipy_path = sph_nb.replace_py_ipynb(full_path)
278 | if Path(ipy_path).exists():
279 | # If it does and we want to execute, skip as it would have been
280 | # executed above already
281 | if execute:
282 | continue
283 | # If cleanup then we want to delete this file
284 | if cleanup:
285 | os.remove(ipy_path)
286 | else:
287 | # If it doesn't exist, we need to make it
288 | full_path = NotebookConverter.py_to_ipynb(full_path)
289 | nbc = NotebookConverter(full_path, **kwargs)
290 | if link:
291 | nbc.link()
292 | # Execute the notebook
293 | if execute:
294 | _, status = nbc.execute(force=force)
295 | overall_status += status
296 | # If cleanup then we want to delete this file
297 | if cleanup:
298 | os.remove(full_path)
299 | elif ext in ['.md', '.rst']:
300 | # the rst or md files are just copied over
301 | if link:
302 | shutil.copy(full_path, PATH_EXTERNAL_NOTEBOOKS.joinpath(Path(full_path).name))
303 | # if cleanup:
304 | # PATH_EXTERNAL_NOTEBOOKS.joinpath(Path(full_path).name).unlink()
305 | else:
306 | full_path = Path(nbfile_or_path)
307 | ext = full_path.suffix
308 |
309 | if ext == '.py':
310 | ipy_path = sph_nb.replace_py_ipynb(full_path)
311 | if not Path(ipy_path).exists():
312 | full_path = NotebookConverter.py_to_ipynb(full_path)
313 | else:
314 | full_path = ipy_path
315 |
316 | nbc = NotebookConverter(full_path, **kwargs)
317 | # Want to create the link file
318 | if link:
319 | nbc.link()
320 | # Execute the notebook
321 | if execute:
322 | _, status = nbc.execute(force=force)
323 | overall_status += status
324 | # If cleanup is true and execute is false, unexecute the notebook
325 | if cleanup:
326 | nbc.unexecute()
327 | if ext == '.py':
328 | os.remove(full_path)
329 |
330 | return overall_status
331 |
332 |
333 | class ExecuteNotebooks(ExecutePreprocessor):
334 |
335 | def __init__(self, **kw):
336 | super().__init__(**kw)
337 |
338 | def preprocess_cell(self, cell, resources, index):
339 |         Execute the cell only if its metadata allows it, i.e. unless `ibl_execute` is set to False.
340 |         Must return the modified cell and resource dictionary.
341 | Must return modified cell and resource dictionary.
342 |
343 | Parameters
344 | ----------
345 | cell : NotebookNode cell
346 | Notebook cell being processed
347 | resources : dictionary
348 | Additional resources used in the conversion process. Allows
349 | preprocessors to pass variables into the Jinja engine.
350 | index : int
351 | Index of the cell being processed
352 | """
353 | self._check_assign_resources(resources)
354 | if self.get_execute_meta(cell['metadata']):
355 | cell = self.execute_cell(cell, index, store_history=True)
356 | return cell, self.resources
357 |
358 | def get_execute_meta(self, metadata):
359 | return metadata.get('ibl_execute', True)
360 |
361 |
--------------------------------------------------------------------------------