├── .gitattributes ├── testbed ├── testenv1.yaml ├── testenv2.yaml ├── croot.yml └── build.sh ├── conda-recipe ├── conda_build_config.yaml ├── post-link.sh ├── pre-unlink.sh ├── post-link.bat ├── pre-unlink.bat ├── build.sh ├── bld.bat └── meta.yaml ├── requirements.txt ├── renovate.json ├── nb_conda_kernels ├── __init__.py ├── __main__.py ├── runner.py ├── install.py ├── manager.py └── _version.py ├── setup.cfg ├── MANIFEST.in ├── tests ├── js │ ├── test_notebook_default_r.js │ ├── test_notebook_env_r.js │ ├── test_notebook_default_py.js │ ├── test_notebook_env_py.js │ ├── test_notebook_root_py.js │ ├── test_notebook_basic.js │ └── _utils.js ├── conftest.py ├── test_install.py ├── test_api.py ├── test_runner.py └── test_config.py ├── setup.py ├── .gitignore ├── LICENSE ├── .github └── workflows │ └── main.yml ├── README.md └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | nb_conda_kernels/_version.py export-subst 2 | -------------------------------------------------------------------------------- /testbed/testenv1.yaml: -------------------------------------------------------------------------------- 1 | name: test_env1 2 | dependencies: 3 | - r-irkernel 4 | - ipykernel 5 | -------------------------------------------------------------------------------- /testbed/testenv2.yaml: -------------------------------------------------------------------------------- 1 | name: "t\u00c6st_env2" 2 | dependencies: 3 | - ipykernel 4 | - python=3 5 | -------------------------------------------------------------------------------- /conda-recipe/conda_build_config.yaml: -------------------------------------------------------------------------------- 1 | python: 2 | - "3.8" 3 | - "3.9" 4 | - "3.10" 5 | - "3.11" 6 | 7 | -------------------------------------------------------------------------------- /conda-recipe/post-link.sh: -------------------------------------------------------------------------------- 1 | 
"${PREFIX}/bin/python" -m nb_conda_kernels.install --enable >>"${PREFIX}/.messages.txt" 2>&1 2 | -------------------------------------------------------------------------------- /conda-recipe/pre-unlink.sh: -------------------------------------------------------------------------------- 1 | "${PREFIX}/bin/python" -m nb_conda_kernels.install --disable >>"${PREFIX}/.messages.txt" 2>&1 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | notebook 2 | jupyter_client 3 | r-irkernel 4 | requests 5 | flake8 6 | psutil 7 | pytest 8 | pytest-cov 9 | mock 10 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "github>anaconda/renovate-config" 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /nb_conda_kernels/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | from .manager import CondaKernelSpecManager 3 | from . 
import _version 4 | __version__ = _version.get_versions()['version'] 5 | -------------------------------------------------------------------------------- /testbed/croot.yml: -------------------------------------------------------------------------------- 1 | name: conda 2 | dependencies: 3 | - conda 4 | - conda-build 5 | - conda-verify 6 | - notebook 7 | - jupyter_client 8 | - ipykernel 9 | - pytest 10 | - pytest-cov 11 | - requests 12 | - mock 13 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore=W504,E501 3 | 4 | [versioneer] 5 | VCS = git 6 | style = pep440 7 | versionfile_source = nb_conda_kernels/_version.py 8 | versionfile_build = nb_conda_kernels/_version.py 9 | tag_prefix = 10 | parentdir_prefix = nb_conda_kernels- 11 | -------------------------------------------------------------------------------- /conda-recipe/post-link.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | :: Set PATH explicitly as it may not be set correctly by some versions of conda 3 | set "PATH=%PATH%;%PREFIX%\Library\bin" 4 | "%PREFIX%\python.exe" -m nb_conda_kernels.install --enable >>"%PREFIX%\.messages.txt" 2>&1 && if errorlevel 1 exit 1 5 | -------------------------------------------------------------------------------- /conda-recipe/pre-unlink.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | :: Set PATH explicitly as it may not be set correctly by some versions of conda 3 | set "PATH=%PATH%;%PREFIX%\Library\bin" 4 | "%PREFIX%\python.exe" -m nb_conda_kernels.install --disable >>"%PREFIX%\.messages.txt" 2>&1 && if errorlevel 1 exit 1 5 | -------------------------------------------------------------------------------- /nb_conda_kernels/__main__.py: -------------------------------------------------------------------------------- 1 | 
from jupyter_client import kernelspec 2 | from .manager import CondaKernelSpecManager 3 | kernelspec.KernelSpecManager = CondaKernelSpecManager 4 | 5 | from jupyter_client.kernelspecapp import KernelSpecApp # noqa 6 | 7 | KernelSpecApp.launch_instance() 8 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include *.rst 3 | include *.md 4 | include package.json 5 | 6 | recursive-include nb_conda_kernels * 7 | 8 | recursive-exclude * __pycache__ 9 | recursive-exclude * *.py[co] 10 | recursive-exclude node_modules *.* 11 | recursive-exclude * node_modules 12 | recursive-exclude * .git 13 | include versioneer.py 14 | include nb_conda_kernels/_version.py 15 | -------------------------------------------------------------------------------- /conda-recipe/build.sh: -------------------------------------------------------------------------------- 1 | $PYTHON -m pip install --no-deps --ignore-installed . 
2 | 3 | BINDIR="$PREFIX/bin" 4 | mkdir -p "$BINDIR" 5 | 6 | POST_LINK="$BINDIR/.nb_conda_kernels-post-link" 7 | PRE_UNLINK="$BINDIR/.nb_conda_kernels-pre-unlink" 8 | 9 | cp "$SRC_DIR/conda-recipe/post-link.sh" "$POST_LINK.sh" 10 | cp "$SRC_DIR/conda-recipe/pre-unlink.sh" "$PRE_UNLINK.sh" 11 | cp "$SRC_DIR/conda-recipe/post-link.bat" "$POST_LINK.bat" 12 | cp "$SRC_DIR/conda-recipe/pre-unlink.bat" "$PRE_UNLINK.bat" 13 | -------------------------------------------------------------------------------- /tests/js/test_notebook_default_r.js: -------------------------------------------------------------------------------- 1 | /* global casper */ 2 | 3 | var kernel_prefix = 'ir', 4 | kernel_suffix = '', 5 | kernel_label = 'R'; 6 | 7 | casper.notebook_test_kernel(kernel_prefix, kernel_suffix, function(){ 8 | casper.screenshot.init("default-r-kernel"); 9 | casper.viewport(1440, 900) 10 | .then(default_r_kernel_test); 11 | }); 12 | 13 | function default_r_kernel_test(){ 14 | this.screenshot("kernel_indicator_name"); 15 | this.test.assertSelectorHasText('.kernel_indicator_name', kernel_label); 16 | } 17 | -------------------------------------------------------------------------------- /tests/js/test_notebook_env_r.js: -------------------------------------------------------------------------------- 1 | /* global casper */ 2 | 3 | var kernel_prefix = 'conda-env', 4 | kernel_suffix = 'r', 5 | kernel_label = 'R'; 6 | 7 | casper.notebook_test_kernel(kernel_prefix, kernel_suffix, function(){ 8 | casper.screenshot.init("env-r-kernel"); 9 | casper.viewport(1440, 900) 10 | .then(default_python_kernel_test); 11 | }); 12 | 13 | function default_python_kernel_test(){ 14 | this.screenshot("kernel_indicator_name"); 15 | this.test.assertSelectorHasText('.kernel_indicator_name', kernel_label); 16 | } 17 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import 
setuptools 2 | import versioneer 3 | 4 | setuptools.setup( 5 | name="nb_conda_kernels", 6 | version=versioneer.get_version(), 7 | cmdclass=versioneer.get_cmdclass(), 8 | url="https://github.com/Anaconda-Platform/nb_conda_kernels", 9 | author="Continuum Analytics", 10 | description="Launch Jupyter kernels for any installed conda environment", 11 | long_description=open('README.md').read(), 12 | packages=setuptools.find_packages(), 13 | include_package_data=True, 14 | zip_safe=False 15 | ) 16 | -------------------------------------------------------------------------------- /tests/js/test_notebook_default_py.js: -------------------------------------------------------------------------------- 1 | /* global casper */ 2 | 3 | var kernel_prefix = 'python', 4 | kernel_suffix = '', 5 | kernel_label = 'Python'; 6 | 7 | casper.notebook_test_kernel(kernel_prefix, kernel_suffix, function(){ 8 | casper.screenshot.init("default-python-kernel"); 9 | casper.viewport(1440, 900) 10 | .then(default_python_kernel_test); 11 | }); 12 | 13 | function default_python_kernel_test(){ 14 | this.screenshot("kernel_indicator_name"); 15 | this.test.assertSelectorHasText('.kernel_indicator_name', kernel_label); 16 | } 17 | -------------------------------------------------------------------------------- /tests/js/test_notebook_env_py.js: -------------------------------------------------------------------------------- 1 | /* global casper */ 2 | 3 | var kernel_prefix = 'conda-env', 4 | kernel_suffix = 'py', 5 | kernel_label = 'Python'; 6 | 7 | casper.notebook_test_kernel(kernel_prefix, kernel_suffix, function(){ 8 | casper.screenshot.init("env-python-kernel"); 9 | casper.viewport(1440, 900) 10 | .then(default_python_kernel_test); 11 | }); 12 | 13 | function default_python_kernel_test(){ 14 | this.screenshot("kernel_indicator_name"); 15 | this.test.assertSelectorHasText('.kernel_indicator_name', kernel_label); 16 | } 17 | 
-------------------------------------------------------------------------------- /tests/js/test_notebook_root_py.js: -------------------------------------------------------------------------------- 1 | /* global casper */ 2 | 3 | var kernel_prefix = 'conda-root-py', 4 | kernel_suffix = '', 5 | kernel_label = 'Python [conda env:root]'; 6 | 7 | casper.notebook_test_kernel(kernel_prefix, kernel_suffix, function(){ 8 | casper.screenshot.init("root-python-kernel"); 9 | casper.viewport(1440, 900) 10 | .then(root_python_kernel_test); 11 | }); 12 | 13 | function root_python_kernel_test(){ 14 | this.screenshot("kernel_indicator_name"); 15 | this.test.assertSelectorHasText('.kernel_indicator_name', kernel_label); 16 | } 17 | -------------------------------------------------------------------------------- /conda-recipe/bld.bat: -------------------------------------------------------------------------------- 1 | %PYTHON% -m pip install --no-deps --ignore-installed . 2 | if errorlevel 1 exit 1 3 | 4 | set SCRDIR="%PREFIX%\Scripts" 5 | if not exist %SCRDIR% mkdir %SCRDIR% 6 | if errorlevel 1 exit 1 7 | 8 | POST_LINK="%SCRDIR%\.nb_conda_kernels-post-link" 9 | PRE_UNLINK="%SCRDIR%\.nb_conda_kernels-pre-unlink" 10 | 11 | copy "%SRC_DIR%\conda-recipe\post-link.bat" "%POST_LINK%.bat" || exit 1 12 | copy "%SRC_DIR%\conda-recipe\pre-unlink.bat" "%PRE_UNLINK%.bat" || exit 1 13 | copy "%SRC_DIR%\conda-recipe\post-link.sh" "%POST_LINK%.sh" || exit 1 14 | copy "%SRC_DIR%\conda-recipe\pre-unlink.sh" "%PRE_UNLINK%.sh" || exit 1 15 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from jupyter_client.kernelspec import KernelSpecManager 4 | from jupyter_client.manager import KernelManager 5 | 6 | from nb_conda_kernels.manager import CondaKernelSpecManager 7 | 8 | 9 | @pytest.fixture(scope="function") 10 | def jupyter_manager(tmp_path): 
11 | jupyter_data_dir = tmp_path / "share" / "jupyter" 12 | jupyter_data_dir.mkdir(parents=True, exist_ok=True) 13 | manager = CondaKernelSpecManager(kernelspec_path=str(tmp_path)) 14 | # Install the kernel specs 15 | manager.find_kernel_specs() 16 | 17 | return KernelSpecManager(kernel_dirs=[str(jupyter_data_dir / "kernels")]) 18 | 19 | 20 | @pytest.fixture 21 | def jupyter_kernel(jupyter_manager, request): 22 | return KernelManager( 23 | kernel_spec_manager=jupyter_manager, 24 | kernel_name=request.param, 25 | ) 26 | -------------------------------------------------------------------------------- /tests/js/test_notebook_basic.js: -------------------------------------------------------------------------------- 1 | /* global casper */ 2 | casper.dashboard_test(function(){ 3 | casper.screenshot.init("basic"); 4 | casper.viewport(1440, 900) 5 | .then(basic_test); 6 | }); 7 | 8 | function basic_test(){ 9 | var default_py = '#new-menu li[id^=kernel-python]', 10 | default_r = '#new-menu li[id=kernel-ir]', 11 | root_py = '#new-menu li[id=kernel-conda-root-py]', 12 | env_py = '#new-menu li[id^=kernel-conda-env-][id$=-py]', 13 | env_r = '#new-menu li[id^=kernel-conda-env-][id$=-r]'; 14 | 15 | return this.canSeeAndClick( 16 | "the kernel selector", "#new-buttons > .dropdown-toggle" 17 | ) 18 | .canSee("the default python kernel", default_py) 19 | .canSee("a conda env python kernel", env_py) 20 | .canSee("a conda root python kernel", root_py) 21 | .canSee("the default r kernel", default_r) 22 | .canSee("an r kernel", env_r) 23 | .canSeeAndClick("fin", "body"); 24 | } 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 
| .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .coverage.* 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | *,cover 45 | 46 | # Translations 47 | *.mo 48 | *.pot 49 | 50 | # Django stuff: 51 | *.log 52 | 53 | # Sphinx documentation 54 | docs/_build/ 55 | 56 | # PyBuilder 57 | target/ 58 | .ipynb_checkpoints/ 59 | node_modules/ 60 | screenshots/ 61 | *.xunit.xml 62 | 63 | # npm 64 | package-lock.json 65 | 66 | # PyCharm 67 | .idea/ 68 | 69 | conda/ 70 | -------------------------------------------------------------------------------- /conda-recipe/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set data = load_setup_py_data() %} 2 | 3 | package: 4 | name: nb_conda_kernels 5 | version: {{ data.get('version') }} 6 | 7 | source: 8 | path: ../ 9 | 10 | build: 11 | number: 0 12 | noarch: python 13 | 14 | requirements: 15 | host: 16 | - python 17 | - setuptools 18 | - wheel 19 | - pip 20 | run: 21 | - python >=3.6 22 | - jupyter_client >=4.2 23 | - jupyter_core 24 | - psutil 25 | run_constrained: 26 | - notebook >=5.3.0 27 | 28 | test: 29 | source_files: 30 | - setup.cfg 31 | - tests 32 | requires: 33 | - pytest 34 | - pytest-cov 35 | - ipykernel 36 | - notebook <7 37 | - requests 38 | - mock 39 | commands: 40 | - pip check 41 | - python -m nb_conda_kernels list 42 | # Skips any tests that assume the existence of the testbed 43 | - python -m pytest -v -m "not testbed" --cov=nb_conda_kernels tests 44 | 45 | about: 46 | home: https://github.com/Anaconda-Platform/nb_conda_kernels 47 | 
license: BSD 3-Clause 48 | license_file: LICENSE 49 | summary: 'Launch Jupyter kernels for any installed conda environment.' 50 | -------------------------------------------------------------------------------- /tests/test_install.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | from subprocess import check_output, call, STDOUT 5 | 6 | IS_ENABLED = 'Status: enabled' 7 | IS_DISABLED = 'Status: disabled' 8 | 9 | if sys.platform.startswith('win'): 10 | PYTHON = os.path.join(sys.prefix, 'python.exe') 11 | else: 12 | PYTHON = os.path.join(sys.prefix, 'bin', 'python') 13 | 14 | 15 | def check_command_(command, out, verbose=False, quiet=False): 16 | cmd = [PYTHON, '-m', 'nb_conda_kernels.install', '--' + command] 17 | if verbose: 18 | cmd.append('--verbose') 19 | print('Testing: {}'.format(' '.join(cmd))) 20 | output = check_output(cmd, stderr=STDOUT).decode() 21 | print('\n'.join('| ' + x for x in output.splitlines())) 22 | if not quiet: 23 | assert out in output 24 | 25 | 26 | def test_install(): 27 | call([PYTHON, '-m', 'nb_conda_kernels.install', '--enable']) 28 | for verbose in (False, True): 29 | for test in (('status', IS_ENABLED), 30 | ('disable', IS_DISABLED), 31 | ('status', IS_DISABLED), 32 | ('enable', IS_ENABLED), 33 | ('status', IS_ENABLED)): 34 | print(test) 35 | check_command_(*test, verbose=verbose, quiet=False) 36 | 37 | 38 | if __name__ == '__main__': 39 | test_install() 40 | -------------------------------------------------------------------------------- /tests/test_api.py: -------------------------------------------------------------------------------- 1 | from subprocess import check_output, CalledProcessError 2 | 3 | try: 4 | from notebook.services.kernelspecs.tests import test_kernelspecs_api 5 | except Exception: 6 | import pytest 7 | pytest.skip('Requires notebook<7', allow_module_level=True) 8 | 9 | try: 10 | from unittest.mock import patch 11 | except ImportError: 12 | 
from mock import patch # py2 13 | 14 | 15 | CONDA_INFO_ARGS = ["conda", "info", "--json"] 16 | 17 | 18 | class APITest(test_kernelspecs_api.APITest): 19 | """ Run all the upstream tests.""" 20 | pass 21 | 22 | 23 | class BadCondaAPITest(test_kernelspecs_api.APITest): 24 | @classmethod 25 | def setup_class(cls): 26 | def _mock_check_output(cmd, *args, **kwargs): 27 | if cmd == CONDA_INFO_ARGS: 28 | raise CalledProcessError("bad conda") 29 | 30 | return check_output(cmd, *args, **kwargs) 31 | 32 | cls.cond_info_patch = patch("subprocess.check_output", 33 | _mock_check_output) 34 | cls.cond_info_patch.start() 35 | super(BadCondaAPITest, cls).setup_class() 36 | 37 | @classmethod 38 | def teardown_class(cls): 39 | super(BadCondaAPITest, cls).teardown_class() 40 | cls.cond_info_patch.stop() 41 | 42 | def test_no_conda_kernels(self): 43 | model = self.ks_api.list().json() 44 | self.assertEquals( 45 | [], 46 | [name for name in model["kernelspecs"].keys() 47 | if name.startswith("conda-")] 48 | ) 49 | -------------------------------------------------------------------------------- /nb_conda_kernels/runner.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import os 4 | import sys 5 | import subprocess 6 | import locale 7 | try: 8 | from shlex import quote 9 | except ImportError: 10 | from pipes import quote 11 | 12 | 13 | def exec_in_env(conda_prefix, env_path, *command): 14 | # Run the standard conda activation script, and print the 15 | # resulting environment variables to stdout for reading. 
16 | is_current_env = env_path == sys.prefix 17 | if sys.platform.startswith('win'): 18 | if is_current_env: 19 | subprocess.Popen(list(command)).wait() 20 | else: 21 | activate = os.path.join(conda_prefix, 'Scripts', 'activate.bat') 22 | ecomm = [os.environ['COMSPEC'], '/S', '/U', '/C', '@echo', 'off', '&&', 23 | 'chcp', '65001', '&&', 'call', activate, env_path, '&&', 24 | '@echo', 'CONDA_PREFIX=%CONDA_PREFIX%', '&&',] + list(command) 25 | subprocess.Popen(ecomm).wait() 26 | else: 27 | quoted_command = [quote(c) for c in command] 28 | if is_current_env: 29 | os.execvp(quoted_command[0], quoted_command) 30 | else: 31 | activate = os.path.join(conda_prefix, 'bin', 'activate') 32 | ecomm = ". '{}' '{}' && echo CONDA_PREFIX=$CONDA_PREFIX && exec {}".format(activate, env_path, ' '.join(quoted_command)) 33 | ecomm = ['sh' if 'bsd' in sys.platform else 'bash', '-c', ecomm] 34 | os.execvp(ecomm[0], ecomm) 35 | 36 | 37 | if __name__ == '__main__': 38 | exec_in_env(*(sys.argv[1:])) 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Continuum Analytics, Inc. and contributors 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | * Neither the name of Continuum Analytics nor the names of any contributors 15 | may be used to endorse or promote products derived from this software 16 | without specific prior written permission. 
17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 21 | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 22 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 24 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 25 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 26 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 27 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF 28 | THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /testbed/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # GitHub action specific items. These are no-ops locally 4 | [ "$RUNNER_OS" == "Windows" ] && CONDA_EXE="$CONDA/Scripts/conda.exe" 5 | [ "$RUNNER_OS" == "macOS" ] && export CONDA_PKGS_DIRS=~/.pkgs 6 | 7 | # Determine the root location of the testbed 8 | cwd=$(cd $(dirname ${BASH_SOURCE[0]}) && pwd) 9 | [ $CONDA_ROOT ] || CONDA_ROOT=${cwd%/*/*}/nbckdev 10 | mkdir -p $CONDA_ROOT 11 | export CONDA_ROOT=$(cd $CONDA_ROOT && pwd) 12 | echo "Testbed location: $CONDA_ROOT" 13 | 14 | function full_deactivate { 15 | old_prefix=${CONDA_EXE%/*/*} 16 | if [ -d "$old_prefix/conda-meta" ]; then 17 | old_source=$old_prefix/etc/profile.d/conda.sh 18 | source $old_source && conda deactivate 19 | new_path=$(echo $PATH | tr ':' '\n' | grep -v "^$old_prefix/" | tr '\n' ':') 20 | export PATH=${new_path%:} 21 | fi 22 | } 23 | 24 | # Skip creation if the cached version is available 25 | if [ ! 
-d $CONDA_ROOT/conda-meta ]; then 26 | ${CONDA_EXE:-conda} env create -f $cwd/croot.yml -p $CONDA_ROOT 27 | if [[ "$RUNNER_OS" == "" && "$OS" == "Windows_NT" ]]; then 28 | conda install -y -p $CONDA_ROOT m2-bash m2-coreutils m2-filesystem 29 | fi 30 | fi 31 | 32 | full_deactivate 33 | source $CONDA_ROOT/etc/profile.d/conda.sh 34 | conda activate base 35 | 36 | if [ ! -f $CONDA_ROOT/.created ]; then 37 | conda config --prepend channels conda-forge --system 38 | pip install -e . 39 | python -m nb_conda_kernels.install --enable 40 | 41 | # We need to create additional environments to fully test the logic, 42 | # including an R kernel, a Python kernel, and environment names with at 43 | # least one non-ASCII character and one space. We also need one environment 44 | # installed in a non-default environment location. 45 | conda env create -f $cwd/testenv1.yaml 46 | conda env create -f $cwd/testenv2.yaml 47 | mkdir -p $CONDA_ROOT/ext1/ext2/env 48 | conda env create -f $cwd/testenv1.yaml -p $CONDA_ROOT/ext1/ext2/env/test_env1 49 | rm -rf $CONDA_ROOT/pkgs 50 | 51 | touch $CONDA_ROOT/.created 52 | fi 53 | 54 | # Make sure the external environment is in the environments.txt file 55 | ext_env=$CONDA_ROOT/ext1/ext2/env/test_env1 56 | if [ "$OS" == "Windows_NT" ]; then 57 | CONDA_HOME=$USERPROFILE 58 | ext_env=$(echo $ext_env | sed -E 's@^/([^/]*)@\U\1:@;s@/@\\@g') 59 | ext_env_g=^$(echo $ext_env | sed -E 's@\\@\\\\@g') 60 | else 61 | CONDA_HOME=$HOME 62 | ext_env_g=^$ext_env 63 | fi 64 | if ! 
grep -q "$ext_env_g" $CONDA_HOME/environments.txt 2>/dev/null; then 65 | mkdir -p $CONDA_HOME/.conda 66 | echo "$ext_env" >> $CONDA_HOME/.conda/environments.txt 67 | fi 68 | 69 | # Display final result 70 | echo PATH=$PATH 71 | env | grep ^CONDA 72 | conda info --envs 73 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Build and test the package 2 | on: 3 | push: 4 | branches: 5 | - master 6 | tags: 7 | - '*' 8 | pull_request: 9 | branches: 10 | - master 11 | defaults: 12 | run: 13 | shell: bash 14 | jobs: 15 | testbed: 16 | runs-on: ${{ matrix.os }} 17 | strategy: 18 | matrix: 19 | os: [ubuntu-latest,macos-latest,windows-latest] 20 | steps: 21 | - name: Retrieve the source code 22 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 23 | - id: conda-root 24 | name: Set CONDA_ROOT 25 | run: | 26 | CONDA_ROOT=$(dirname $GITHUB_WORKSPACE)/conda 27 | echo "value=$CONDA_ROOT" >> $GITHUB_OUTPUT 28 | echo "CONDA_ROOT=$CONDA_ROOT" >> $GITHUB_ENV 29 | # Use a smaller cache entry to enable a quicker exit if we 30 | # have already built the testbed. 
Any small file will do 31 | - id: cache-key 32 | name: Retrieve cache key 33 | uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4 34 | with: 35 | path: ./LICENSE 36 | key: key-${{ matrix.os }}-${{ hashFiles('testbed') }} 37 | - id: cache 38 | name: Retrieve or create the conda cache 39 | if: steps.cache-key.outputs.cache-hit != 'true' 40 | uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4 41 | with: 42 | path: ${{ steps.conda-root.outputs.value }} 43 | key: testbed-${{ matrix.os }}-${{ hashFiles('testbed') }} 44 | - name: Install Miniconda 45 | uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3 46 | if: steps.cache-key.outputs.cache-hit != 'true' 47 | with: 48 | auto-activate-base: true 49 | activate-environment: "" 50 | - name: Verify or build the testbed 51 | if: steps.cache-key.outputs.cache-hit != 'true' 52 | # The argument tells the script we are in caching mode 53 | run: testbed/build.sh 54 | build: 55 | runs-on: ubuntu-latest 56 | steps: 57 | - name: Retrieve the source code 58 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 59 | with: 60 | fetch-depth: 0 61 | - name: Build the package 62 | id: build 63 | run: | 64 | source $CONDA/etc/profile.d/conda.sh 65 | conda install conda conda-build --yes 66 | conda build conda-recipe 67 | mv $CONDA/conda-bld . 
68 | - name: Upload the build artifact 69 | uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4 70 | with: 71 | name: package-${{ github.sha }} 72 | path: conda-bld 73 | tests: 74 | runs-on: ${{ matrix.os }} 75 | needs: [build,testbed] 76 | strategy: 77 | fail-fast: false 78 | matrix: 79 | os: [macos-latest,ubuntu-latest,windows-latest] 80 | pyver: ["3.8","3.10","3.12"] 81 | steps: 82 | - name: Retrieve the source code 83 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 84 | with: 85 | fetch-depth: 0 86 | - id: conda-root 87 | name: Set CONDA_ROOT and artifact suffix 88 | run: | 89 | if [ "$RUNNER_OS" = "Windows" ]; then sfx=win; else sfx=unx; fi 90 | echo "::set-output name=suffix::$sfx" 91 | CONDA_ROOT=$(dirname $GITHUB_WORKSPACE)/conda 92 | echo "::set-output name=value::$CONDA_ROOT" 93 | echo "CONDA_ROOT=$CONDA_ROOT" >> $GITHUB_ENV 94 | echo "PACKAGE SUFFIX: $sfx" 95 | echo "CONDA_ROOT: $CONDA_ROOT" 96 | - name: Retrieve the build artfiact 97 | uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4 98 | with: 99 | name: package-${{ github.sha }} 100 | path: conda-bld 101 | - name: Retrieve the testbed 102 | uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4 103 | with: 104 | path: ${{ steps.conda-root.outputs.value }} 105 | key: testbed-${{ matrix.os }}-${{ hashFiles('testbed') }} 106 | - name: Verify the testbed 107 | run: testbed/build.sh 108 | - name: Test the package 109 | run: | 110 | source $CONDA_ROOT/etc/profile.d/conda.sh 111 | [ "$RUNNER_OS" = "Windows" ] && export PYTHONIOENCODING=UTF-8 112 | export PYTHONUNBUFFERED=1 113 | export NBVER=6 114 | [ 3.12 = ${{ matrix.pyver }} ] && export NBVER=7 115 | conda create -n testbase -c ./conda-bld nb_conda_kernels python=${{ matrix.pyver }} notebook=$NBVER pytest pytest-cov mock requests 116 | conda activate testbase 117 | python -m nb_conda_kernels list 118 | python -m pytest -v --cov=nb_conda_kernels tests 2>&1 | tee 
build.log 119 | # Because Windows refuses to preserve the error code 120 | if grep -E '^(FAILED|ERROR) ' build.log; then exit -1; fi 121 | upload: 122 | needs: tests 123 | runs-on: ubuntu-latest 124 | if: github.event_name == 'push' 125 | steps: 126 | - name: Retrieve the source code 127 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 128 | with: 129 | fetch-depth: 0 130 | - name: Retrieve the artfiact 131 | uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4 132 | with: 133 | name: package-${{ github.sha }} 134 | path: conda-bld 135 | - name: Upload to anaconda.org 136 | env: 137 | ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} 138 | run: | 139 | source $CONDA/bin/activate 140 | conda install -y anaconda-client 141 | git describe --exact-match --tags HEAD || export LABEL="--label dev" 142 | anaconda --verbose --token $ANACONDA_TOKEN upload --user jupycon $LABEL conda-bld/*/*.tar.bz2 --force 143 | - name: Clean up older artifacts 144 | uses: glassechidna/artifact-cleaner@master 145 | with: 146 | minimumAge: 86400 147 | -------------------------------------------------------------------------------- /tests/test_runner.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import io 4 | import locale 5 | import os 6 | import sys 7 | import json 8 | import tempfile 9 | import time 10 | import pytest 11 | 12 | from jupyter_client.manager import KernelManager 13 | from nb_conda_kernels.manager import RUNNER_COMMAND, CondaKernelSpecManager 14 | 15 | START_TIMEOUT = 10 16 | CMD_TIMEOUT = 3 17 | NUM_RETRIES = 10 18 | is_win = sys.platform.startswith('win') 19 | CKSM = None 20 | 21 | 22 | def _cksm(): 23 | global CKSM 24 | if CKSM is None: 25 | CKSM = CondaKernelSpecManager(conda_only=True) 26 | return CKSM 27 | 28 | 29 | old_print = print 30 | def print(x): 31 | old_print('\n'.join(json.dumps(y)[1:-1] for y in x.splitlines())) 32 | 
sys.stdout.flush() 33 | 34 | 35 | if is_win: 36 | # Create a job object and assign ourselves to it, so that 37 | # all remaining test subprocesses get killed off on completion. 38 | # This prevents AppVeyor from waiting an hour 39 | # https://stackoverflow.com/a/23587108 (and its first comment) 40 | import win32api, win32con, win32job # noqa 41 | hJob = win32job.CreateJobObject(None, "") 42 | extended_info = win32job.QueryInformationJobObject(hJob, win32job.JobObjectExtendedLimitInformation) 43 | extended_info['BasicLimitInformation']['LimitFlags'] = win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE 44 | win32job.SetInformationJobObject(hJob, win32job.JobObjectExtendedLimitInformation, extended_info) 45 | perms = win32con.PROCESS_TERMINATE | win32con.PROCESS_SET_QUOTA 46 | hProcess = win32api.OpenProcess(perms, False, os.getpid()) 47 | win32job.AssignProcessToJobObject(hJob, hProcess) 48 | 49 | 50 | def find_test_keys(): 51 | if os.environ.get('CONDA_BUILD'): 52 | # The current version of conda build manually adds the activation 53 | # directories to the PATH---and then calls the standard conda 54 | # activation script, which does it again. This frustrates conda's 55 | # ability to deactivate this environment. Most package builds are 56 | # not affected by this, but we are, because our tests need to do 57 | # environment activation and deactivation. To fix this, we remove 58 | # the duplicate PATH entries conda-build added. 
def find_test_keys():
    """Return the conda kernel spec names the test suite should exercise.

    Only conda-managed specs whose names end in ``-py`` or ``-r`` are
    returned. When running under conda-build, duplicate PATH entries
    rooted in ``sys.prefix`` are first removed so that environment
    activation/deactivation works correctly (see comment below).
    """
    if os.environ.get('CONDA_BUILD'):
        # The current version of conda build manually adds the activation
        # directories to the PATH---and then calls the standard conda
        # activation script, which does it again. This frustrates conda's
        # ability to deactivate this environment. Most package builds are
        # not affected by this, but we are, because our tests need to do
        # environment activation and deactivation. To fix this, we remove
        # the duplicate PATH entries conda-build added.
        print('BEFORE: {}'.format(os.environ['PATH']))
        # Keep the first occurrence of each sys.prefix-rooted entry;
        # entries outside sys.prefix are preserved verbatim, duplicates
        # included. (Previously done with a set.add side effect inside a
        # comprehension filter, which was hard to read and easy to break.)
        path_list = []
        seen_prefix_paths = set()
        for p in os.environ['PATH'].split(os.pathsep):
            if p.startswith(sys.prefix):
                if p in seen_prefix_paths:
                    continue
                seen_prefix_paths.add(p)
            path_list.append(p)
        os.environ['PATH'] = os.pathsep.join(path_list)
        print('AFTER: {}'.format(os.environ['PATH']))
    keys = []
    for key in _cksm().get_all_specs():
        # Every spec returned by a conda_only manager must be conda-prefixed.
        assert key.startswith('conda-')
        if key.endswith('-py') or key.endswith('-r'):
            keys.append(key)
    return keys
def call_kernel(kernel_manager, **kw):
    """Start the managed kernel, run a short probe script in it, and
    return ``(valid, outputs)``.

    The probe prints the active conda environment (``CONDA_PREFIX``) and
    the interpreter/library prefix so the caller can verify the kernel
    launched in the correct environment. Extra keyword arguments are
    forwarded to ``kernel_manager.start_kernel``. Raises AssertionError
    if no attempt succeeds within ``NUM_RETRIES``.
    """
    name = kernel_manager.kernel_name

    valid = False
    # For reasons we do not fully understand, the kernels sometimes die immediately
    # and sometimes hang in this loop. Frankly the purpose of this test is not to
    # understand why that is but to simply test that a successfully run kernel is
    # using the correct environment. So we're using a simple retry loop, and we
    # use a timeout when waiting for messages from the kernel.
    for tries in range(NUM_RETRIES):
        outputs = []
        client = None
        try:
            print('\n--- attempt {}'.format(tries + 1))
            kernel_manager.start_kernel(**kw)
            client = kernel_manager.client()
            client.start_channels()
            client.wait_for_ready(timeout=START_TIMEOUT)
            if name.endswith('-r'):
                commands = ['cat(Sys.getenv("CONDA_PREFIX"),fill=TRUE)',
                            'cat(dirname(dirname(dirname(.libPaths()))),fill=TRUE)',
                            'quit(save="no")']
            else:
                commands = ['import os, sys',
                            'print(os.environ["CONDA_PREFIX"])',
                            'print(sys.prefix)',
                            'quit']
            for command in commands:
                print('>>> {}'.format(command))
                client.execute(command)
                # Drain iopub until the kernel reports idle, collecting stdout.
                while True:
                    msg = client.get_iopub_msg(timeout=CMD_TIMEOUT)['content']
                    if msg.get('execution_state') == 'idle':
                        break
                    if msg.get('name') == 'stdout':
                        outputs.append(msg['text'].strip())
                        print(outputs[-1])
            valid = True
        except Exception:
            # BUGFIX: was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit and made hung runs impossible to
            # abort cleanly. Any failure simply triggers another retry after
            # a short pause.
            time.sleep(CMD_TIMEOUT)
        finally:
            if client is not None:
                client.stop_channels()
            if kernel_manager.is_alive():
                kernel_manager.request_shutdown()
                kernel_manager.finish_shutdown()
        if valid:
            break
    else:
        assert False, 'Did not successfully run kernel'

    return valid, outputs
@pytest.mark.parametrize("jupyter_kernel", find_test_keys(), indirect=True)
def test_jupyter_kernelspecs_runner(tmp_path, jupyter_kernel):
    """Exported kernelspecs must launch through the nb_conda_kernels runner
    and report the environment encoded in their own argv.

    ``jupyter_kernel`` is an indirect fixture (see conftest) providing a
    kernel manager for an exported kernelspec.
    """
    if sys.platform.startswith("darwin") and jupyter_kernel.kernel_name.endswith('-r'):
        pytest.xfail("R kernels on macos are failing for now")

    fake_stdout = tmp_path / "stdout.log"
    # RUNNER_COMMAND is installed in all exported kernelspecs.
    assert jupyter_kernel.kernel_spec.argv[:3] == RUNNER_COMMAND

    # argv[4] holds the target environment path; normalize for comparison.
    env_path = jupyter_kernel.kernel_spec.argv[4]
    env_path = os.path.normcase(os.path.normpath(env_path))

    with fake_stdout.open("wb") as t:  # Catch the echo emitted by the runner
        valid, outputs = call_kernel(jupyter_kernel, stdout=t)

    # The last two stdout lines are CONDA_PREFIX and the interpreter prefix.
    assert valid and len(outputs) >= 2
    for o in outputs[-2:]:
        assert os.path.normcase(os.path.normpath(o)) == env_path, (o, env_path)

    # The nb_conda_kernels runner skips activation if sys.prefix is already
    # the active environment, so the CONDA_PREFIX echo only appears when a
    # different environment was activated.
    # Unknown why, but the characters from the echo command are separated
    # with a null character on Windows, hence the replace() below.
    captured_stdout = fake_stdout.read_text().replace("\00", "")
    assert ("CONDA_PREFIX=" in captured_stdout) == (env_path.lower() != sys.prefix.lower())
    // Capture a screenshot of the notebook body, saved as
    // screenshots/<suite>/<seq>_<slug>.png (see screenshot.init below).
    root.screenshot = function(message){
        this.captureSelector([
            "screenshots/",
            _shotDir,
            "/",
            nextId(),
            "_",
            slug(message),
            ".png",
        ].join(""),
        "body"
        );
    };


    // Reset the screenshot directory and sequence counter for a new suite.
    root.screenshot.init = function(ns){
        _shotDir = ns;
        _img = 0;
    };

    // Wait until `visible` exists, assert on it, and screenshot it.
    root.canSee = function(message, visible){
        return this
            .waitUntilVisible(visible)
            .then(function(){
                this.test.assertExists(visible, "I can see " + message);
                this.screenshot(message);
            });
    };

    // Like canSee, but also clicks `click` (or `visible` when no separate
    // click target is given).
    root.canSeeAndClick = function(message, visible, click){
        return this
            .waitUntilVisible(visible)
            .then(function(){
                this.test.assertExists(click || visible, "I can see and click " + message);
                this.screenshot(message);
                this.click(click || visible);
            });
    };

    // Press the mouse at the center of `selector`, drag by the offsets in
    // `opts` (right/left/down/up, in pixels), then release — capturing a
    // screenshot at each stage.
    root.dragRelease = function(message, selector, opts){
        var it, x, y, x1, y1;
        return this.then(function(){
            it = this.getElementBounds(selector);
            x = it.left + it.width / 2;
            y = it.top + it.height / 2;
            x1 = x + (opts.right || -opts.left || 0);
            y1 = y + (opts.down || -opts.up || 0);
        })
        .then(function(){
            this.mouse.down(x, y);
        })
        .then(function(){
            this.screenshot("click " + message);
            this.mouse.move(x1, y1);
        })
        .then(function(){
            this.screenshot("drag " + message);
            this.mouse.up(x1, y1);
        })
        .then(function(){
            this.screenshot("release " + message);
        });
    };

    // Execute a minimal "Hello World" cell and append an empty cell —
    // the shared baseline exercised by every notebook test.
    root.baseline_notebook = function(){
        // the actual test
        this.set_cell_text(0, [
            'from IPython.display import Markdown',
            'Markdown("# Hello World!")'
        ].join("\n"));
        this.execute_cell_then(0);

        this.append_cell();
    };

    // Replace cell `idx` with the given lines and execute it.
    root.runCell = function(idx, lines){
        // the actual test
        this.set_cell_text(idx, lines.join("\n"));
        this.execute_cell_then(idx);
    };
    // Wrap a notebook test to reduce boilerplate: open a new notebook
    // backed by the requested kernel, run `test`, then shut the kernel
    // down and clean up the page.
    casper.notebook_test_kernel = function(kernel_prefix, kernel_suffix, test) {
        this.open_new_notebook_kernel(kernel_prefix, kernel_suffix);

        // Echo whether or not we are running this test using SlimerJS
        if (this.evaluate(function(){
            return typeof InstallTrigger !== 'undefined';   // Firefox 1.0+
        })) {
            console.log('This test is running in SlimerJS.');
            this.slimerjs = true;
        }

        // Make sure to remove the onbeforeunload callback. This callback is
        // responsible for the "Are you sure you want to quit?" type messages.
        // PhantomJS ignores these prompts, SlimerJS does not which causes hangs.
        this.then(function(){
            this.evaluate(function(){
                window.onbeforeunload = function(){};
            });
        });

        this.then(test);

        // Kill the kernel and delete the notebook.
        this.shutdown_current_kernel();
        // This is still broken but shouldn't be a problem for now.
        // this.delete_current_notebook();

        // This is required to clean up the page we just finished with. If we don't call this
        // casperjs will leak file descriptors of all the open WebSockets in that page. We
        // have to set this.page=null so that next time casper.start runs, it will create a
        // new page from scratch.
        this.then(function () {
            this.page.close();
            this.page = null;
        });

        // Run the browser automation.
        this.run(function() {
            this.test.done();
        });
    };
    // Create and open a new notebook backed by the kernel whose "New" menu
    // entry id starts with `kernel-<kernel_prefix>` (and, when given, ends
    // with `-<kernel_suffix>`), then wait until the notebook is fully
    // loaded into the IPython namespace.
    root.open_new_notebook_kernel = function (kernel_prefix, kernel_suffix) {
        // Create and open a new notebook.
        var baseUrl = this.get_notebook_server();
        this.start(baseUrl);

        this.waitFor(this.page_loaded);
        this.waitForSelector("#new-buttons > .dropdown-toggle");
        this.thenClick("#new-buttons > .dropdown-toggle");

        // Build a CSS selector matching the kernel entry in the "New" menu.
        var kernel_li_id = '[id^="kernel-' + kernel_prefix + '"]';
        if(kernel_suffix){
            kernel_li_id += '[id$="-' + kernel_suffix + '"]';
        }
        var kernel_selector = '#new-menu li' + kernel_li_id + ' a';

        this.waitForSelector(kernel_selector);
        this.thenClick(kernel_selector);

        this.screenshot("picking kernel");

        // The new notebook opens in a popup; wait for it, then navigate
        // the main page to the popup's URL so the rest of the test can
        // drive the notebook directly.
        this.waitForPopup('');

        this.withPopup('', function () {this.waitForSelector('.CodeMirror-code');});
        this.then(function () {
            this.open(this.popups[0].url);
        });
        this.waitFor(this.page_loaded);

        // Hook the log and error methods of the console, forcing them to
        // serialize their arguments before printing. This allows the
        // Objects to cross into the phantom/slimer regime for display.
        this.thenEvaluate(function(){
            var serialize_arguments = function(f, context) {
                return function() {
                    var pretty_arguments = [];
                    for (var i = 0; i < arguments.length; i++) {
                        var value = arguments[i];
                        if (value instanceof Object) {
                            var name = value.name || 'Object';
                            // Print a JSON string representation of the object.
                            // If we don't do this, [Object object] gets printed
                            // by casper, which is useless. The long regular
                            // expression reduces the verbosity of the JSON.
                            pretty_arguments.push(name + ' {' + JSON.stringify(value, null, ' ')
                                .replace(/(\s+)?({)?(\s+)?(}(\s+)?,?)?(\s+)?(\s+)?\n/g, '\n')
                                .replace(/\n(\s+)?\n/g, '\n'));
                        } else {
                            pretty_arguments.push(value);
                        }
                    }
                    f.apply(context, pretty_arguments);
                };
            };
            console.log = serialize_arguments(console.log, console);
            console.error = serialize_arguments(console.error, console);
        });

        // Make sure the kernel has started
        this.waitFor(this.kernel_running);

        // track the IPython busy/idle state
        this.thenEvaluate(function () {
            require(['base/js/namespace', 'base/js/events'], function (IPython, events) {

                events.on('kernel_idle.Kernel',function () {
                    IPython._status = 'idle';
                });
                events.on('kernel_busy.Kernel',function () {
                    IPython._status = 'busy';
                });
            });
        });

        // Because of the asynchronous nature of SlimerJS (Gecko), we need to make
        // sure the notebook has actually been loaded into the IPython namespace
        // before running any tests.
        this.waitFor(function() {
            return this.evaluate(function () {
                return IPython.notebook;
            });
        });
    };
218 | this.waitFor(function() { 219 | return this.evaluate(function () { 220 | return IPython.notebook; 221 | }); 222 | }); 223 | }; 224 | 225 | }).call(this); 226 | -------------------------------------------------------------------------------- /nb_conda_kernels/install.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | import logging 4 | import os 5 | import sys 6 | 7 | from os.path import join, abspath, exists 8 | 9 | from jupyter_core.paths import jupyter_config_path 10 | 11 | try: 12 | from notebook import __version__ as nb_version 13 | except ImportError: 14 | nb_version = '999' 15 | 16 | try: 17 | from jupyter_server.config_manager import BaseJSONConfigManager 18 | except ImportError: 19 | try: 20 | from notebook.config_manager import BaseJSONConfigManager 21 | except ImportError: 22 | raise ImportError("Must have notebook>=5.3 or jupyter_server installed") 23 | 24 | 25 | # If true, we need to add a NotebokApp entry into jupyter_config.json. 26 | # If false, we should avoid doing so, since notebook 7 and later have 27 | # removed direct support for kernel spec managers in favor of relying 28 | # on jupyter_server. 
# True when a NotebookApp entry must also be written into
# jupyter_config.json. Notebook 7+ removed direct support for kernel spec
# managers in favor of relying on jupyter_server, so only older notebooks
# need the extra key.
NEED_NOTEBOOK = int(nb_version.split('.', 1)[0]) < 7


log = logging.getLogger(__name__)


# Short aliases for the configuration keys and file names handled below.
JA = "JupyterApp"
NBA = "NotebookApp"
SA = "ServerApp"
CKSM = "nb_conda_kernels.CondaKernelSpecManager"
JKSM = "jupyter_client.kernelspec.KernelSpecManager"
KSMC = "kernel_spec_manager_class"
JC = "jupyter_config"
JNC = "jupyter_notebook_config"
ENDIS = ['disabled', 'enabled']


def shorten(path, prefix=True):
    """Abbreviate *path* for display.

    Paths under ``sys.prefix`` become ``$CONDA_PREFIX``/``%CONDA_PREFIX%``
    (only when *prefix* is true), and paths under the user's home directory
    become ``~``/``%USERPROFILE%``. Anything else is returned unchanged.
    """
    on_windows = sys.platform.startswith('win')
    # Candidate (root, replacement) pairs, tried in order; the conda
    # prefix takes precedence over the home directory when enabled.
    roots = []
    if prefix:
        roots.append((sys.prefix,
                      '%CONDA_PREFIX%' if on_windows else '$CONDA_PREFIX'))
    roots.append((os.path.expanduser('~'),
                  '%USERPROFILE%' if on_windows else '~'))
    for root, alias in roots:
        if path.startswith(root + os.sep):
            return alias + path[len(root):]
    return path
def install(enable=False, disable=False, status=None, prefix=None, path=None, verbose=False):
    """Installs the nb_conda_kernels configuration data.

    Parameters
    ----------
    enable: bool
        Enable nb_conda_kernels; that is, make the changes to
        the Jupyter notebook configuration so that it is
        available for Jupyter notebooks.
    disable: bool
        Disable nb_conda_kernels.
    status: bool
        Print the installation status, but make no changes.
        Exactly one of enable/disable/status must be supplied.

    verbose: bool
        If true, print more verbose output during operation.

    prefix: None
        The prefix of the Python environment where the Jupyter
        configuration is to be created and/or modified. It is
        equivalent to supplying
            path = join(prefix, 'etc', 'jupyter')
    path: None
        The directory where the Jupyter configuration file is
        to be created and/or modified. The name of the file is
        hardcoded to jupyter_notebook_config.json.
        Either prefix or path may be supplied, but not both. If
        neither is supplied, then the first path found in
            jupyter_core_paths.jupyter_config_path()
        whose directory is within sys.prefix will be selected. If
        there is no such path, the first path will be selected.
    """
    if verbose:
        log.setLevel(logging.DEBUG)
    verbose = log.getEffectiveLevel() == logging.DEBUG
    if status:
        log.info("Determining the status of nb_conda_kernels...")
    else:
        log.info("{}ing nb_conda_kernels...".format(ENDIS[enable][:-2].capitalize()))
    log.info("CONDA_PREFIX: {}".format(sys.prefix))

    all_paths = [abspath(p) for p in jupyter_config_path()]
    default_path = join(sys.prefix, 'etc', 'jupyter')
    search_paths = all_paths[::-1]
    # Resolve the target configuration directory: explicit path/prefix, or
    # the first search path inside sys.prefix, or the default location.
    if path or prefix:
        if prefix:
            path = join(prefix, 'etc', 'jupyter')
        path = abspath(path)
    else:
        prefix_s = sys.prefix + os.sep
        for path in search_paths:
            if path.startswith(prefix_s):
                break
        else:
            path = default_path
    if path != default_path or path not in all_paths:
        log.info("Target path: {}".format(path))
        if path not in all_paths:
            log.warning('WARNING: the configuration for the current environment\n'
                        'is not affected by the target configuration path.')
            search_paths.append(path)

    # Determine the effective configuration by going through the search path
    # in reverse order. Moving forward we will be modifying only the JupyterApp
    # key in the jupyter_config.json file. However for legacy reasons we are
    # also looking at NotebookApp keys and the jupyter_notebook_config.json file,
    # and cleaning those out as we can.
    log.debug('Configuration files:')
    fpaths = set()
    is_enabled_all = {}
    is_enabled_local = {}
    need_keys = (SA, NBA) if NEED_NOTEBOOK else (SA,)
    for path_g in search_paths:
        flag = '-' if path != path_g else ('*' if path in all_paths else 'x')
        value = ''
        for fbase in (JC, JNC):
            fpath = join(path_g, fbase + '.json')
            cfg = BaseJSONConfigManager(config_dir=path_g).get(fbase)
            dirty = False
            for key in (JA, NBA, SA):
                spec = cfg.get(key, {}).get(KSMC)
                if status or path_g != path:
                    # No changes in status mode, or if we're not in the target path
                    expected = spec
                elif enable and fbase == JC and key in need_keys:
                    # Add the spec if we are enabling, the entry point is not active,
                    # and we're using the new file (jupyter_config.json) and key (JupyterApp)
                    expected = CKSM
                else:
                    # In all other cases, clear the spec out for cleanup
                    expected = None
                if spec != expected:
                    if expected is None:
                        cfg[key].pop(KSMC)
                        if not cfg[key]:
                            cfg.pop(key)
                    else:
                        cfg.setdefault(key, {})[KSMC] = expected
                    spec = expected
                    dirty = True
                if spec:
                    if path_g in all_paths:
                        is_enabled_all[key] = spec == CKSM
                    if path_g == path:
                        is_enabled_local[key] = spec == CKSM
                else:
                    fpaths.add(join(path_g, fbase + '.json'))
            if dirty:
                # NOTE: dirty can only be True when path_g == path, but we
                # write back through path_g to make that invariant explicit.
                BaseJSONConfigManager(config_dir=path_g).set(fbase, cfg)
            if dirty or exists(fpath):
                value += '\n ' + fbase + '.json'
                if dirty:
                    value += ' (MODIFIED)'
                value += ': '
                value += '\n '.join(json.dumps(cfg, indent=2).splitlines())
        log.debug(' {} {}: {}'.format(flag, shorten(path_g), value or ''))
    is_enabled_all = all(is_enabled_all.get(k) for k in need_keys)
    is_enabled_local = all(is_enabled_local.get(k) for k in need_keys)

    if is_enabled_all != is_enabled_local:
        sev = 'WARNING' if status else 'ERROR'
        if path not in all_paths:
            # The target path does not participate in this environment's
            # configuration, so there is no conflict message to build.
            # BUGFIX: this previously read ``msg = fpaths = []``, aliasing
            # the two names to one list; combined with the unconditional
            # verbose hint below, a lone context-free "Use the --verbose
            # flag" line could be logged.
            msg = []
            fpaths = ()
        elif status:
            msg = ['{}: the local configuration of nb_conda_kernels'.format(sev),
                   'conflicts with the global configuration. Please examine']
        else:
            what = ENDIS[is_enabled_local][:-1]
            msg = ['{}: the attempt to {} nb_conda_kernels failed due to'.format(sev, what),
                   'conflicts with global configuration files. Please examine']
        if fpaths:
            msg.append('the following file{} for potential conflicts:'
                       .format('s' if len(fpaths) > 1 else ''))
            for fpath in fpaths:
                msg.append('  ' + shorten(fpath, False))
        if msg and not verbose:
            # Only suggest --verbose when there is an actual message to expand.
            msg.append('Use the --verbose flag for more information.')
        if msg:
            (log.warning if status else log.error)('\n'.join(msg))

    log.info('Status: {}'.format(ENDIS[is_enabled_all]))
    return 0 if status or is_enabled_all == is_enabled_local else 1
path to the jupyter configuration directory", 230 | action="store") 231 | parser.add_argument( 232 | "-v", "--verbose", 233 | help="Show more output", 234 | action="store_true" 235 | ) 236 | 237 | exit(install(**parser.parse_args().__dict__)) 238 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import json 4 | import os 5 | import sys 6 | 7 | try: 8 | from unittest.mock import call, patch 9 | except ImportError: 10 | from mock import call, patch # py2 11 | 12 | import pytest 13 | from traitlets.config import Config, TraitError 14 | from nb_conda_kernels.manager import RUNNER_COMMAND, CondaKernelSpecManager, _canonicalize 15 | 16 | # The testing regime for nb_conda_kernels is unique, in that it needs to 17 | # see an entire conda installation with multiple environments and both 18 | # Python and R kernels in those environments. In contrast, most conda 19 | # build sessions are by design limited in context to a single environment. 20 | # For this reason, we're doing some checks here to verify that this 21 | # global environment is ready to receive the other tests. 
old_print = print
def print(x):
    # JSON-escape each line before printing so that non-ASCII environment
    # names survive consoles with limited encodings (e.g. Windows CI).
    old_print('\n'.join(json.dumps(y)[1:-1] for y in x.splitlines()))
    sys.stdout.flush()


@pytest.mark.testbed
def test_configuration():
    """Sanity-check the conda installation surrounding the test suite.

    nb_conda_kernels tests need a full conda setup with multiple
    environments and both Python and R kernels; this verifies every
    required scenario is present before the other tests rely on them.
    """
    print('\nConda configuration')
    print('-------------------')
    spec_manager = CondaKernelSpecManager()
    conda_info = spec_manager._conda_info
    if conda_info is None:
        # BUGFIX: message previously read 'Could not find conda find conda.'
        print('ERROR: Could not find conda.')
        exit(-1)
    print(u'Current prefix: {}'.format(sys.prefix))
    print(u'Root prefix: {}'.format(conda_info['root_prefix']))
    print(u'Conda version: {}'.format(conda_info['conda_version']))
    print(u'Environments:')
    for env in conda_info['envs']:
        print(u'  - {}'.format(env))
    checks = {}
    prefix = _canonicalize(sys.prefix)
    print('Kernels included in get_all_specs')
    print('---------------------------------')
    for key, value in spec_manager.get_all_specs().items():
        # Runner-based specs carry their environment in argv[4]; anything
        # else belongs to the current prefix.
        if value['spec']['argv'][:3] == RUNNER_COMMAND:
            long_env = value['spec']['argv'][4]
            assert long_env == value['spec']['metadata']['conda_env_path']
        else:
            long_env = prefix
        print(u'  - {}: {}'.format(key, long_env))
        if key.startswith('conda-'):
            if long_env == prefix:
                checks['env_current'] = True
            if key.startswith('conda-base-'):
                checks['root_py'] = True
            if key.startswith('conda-env-'):
                if len(key.split('-')) >= 5:
                    checks['env_project'] = True
                if key.endswith('-py'):
                    checks['env_py'] = True
                if key.endswith('-r'):
                    checks['env_r'] = True
            try:
                long_env.encode('ascii')
            except UnicodeEncodeError:
                checks['env_unicode'] = True
    print('Scenarios required for proper testing')
    print('-------------------------------------')
    print(' - Python kernel in test environment: {}'.format(bool(checks.get('env_current'))))
    print(' - Python kernel in root environment: {}'.format(bool(checks.get('root_py'))))
    print(' - Python kernel in other environment: {}'.format(bool(checks.get('env_py'))))
    print(' - R kernel in non-test environment: {}'.format(bool(checks.get('env_r'))))
    print(' - Environment with non-ASCII name: {}'.format(bool(checks.get('env_unicode'))))
    print(' - External project environment: {}'.format(bool(checks.get('env_project'))))
    # In some conda build scenarios, the test environment is not returned by conda
    # in the listing of conda environments.
    if 'conda-bld' in prefix:
        checks.setdefault('env_current', False)
    # It is difficult to get AppVeyor to handle Unicode environments well, but manual testing
    # on Windows works fine. So it is likely related to the way AppVeyor captures output
    if sys.platform.startswith('win'):
        checks.setdefault('env_unicode', False)
    assert len(checks) >= 6
@pytest.mark.parametrize("kernelspec_path, user, prefix, expected", [
    # (trait value, expected `user` kwarg, expected `prefix` kwarg,
    #  expected stored trait value or exception type)
    (
        "",
        False, "", ""),  # Usually it is not allowed to write at system level
    (
        "--user",
        True, None, "--user"),
    (
        "--sys-prefix",
        False, sys.prefix, "--sys-prefix"),
    (
        os.path.dirname(__file__),
        False, os.path.dirname(__file__), os.path.dirname(__file__)),
    (
        "/dummy/path",
        False, None, TraitError),
    (
        __file__,
        False, None, TraitError),
])
def test_kernelspec_path(tmp_path, kernelspec_path, user, prefix, expected):
    """``kernelspec_path`` accepts "", --user, --sys-prefix or an existing
    directory; anything else raises TraitError at construction time. The
    accepted value drives the user/prefix kwargs of install_kernel_spec.
    """
    config = Config({"CondaKernelSpecManager": {"kernelspec_path": kernelspec_path}})
    with patch("nb_conda_kernels.manager.CondaKernelSpecManager.install_kernel_spec") as install:
        install.return_value = str(tmp_path)
        if isinstance(expected, type) and issubclass(expected, Exception):
            with pytest.raises(expected):
                CondaKernelSpecManager(config=config)
        else:
            spec_manager = CondaKernelSpecManager(config=config)
            assert spec_manager.kernelspec_path == expected
            # Exporting kernelspecs implies restricting to conda kernels only.
            assert spec_manager.conda_only == (spec_manager.kernelspec_path is not None)
            for call_ in install.call_args_list:
                assert call_[1]["user"] == user
                assert call_[1]["prefix"] == prefix
@pytest.mark.parametrize("kernel_name, expected", [
    ("not-conda-kernel", False),
    ("conda-env-dummy-cpp", True)
])
def test_remove_kernelspec(tmp_path, kernel_name, expected):
    """Only kernelspecs following the conda naming scheme may be removed
    from the managed kernelspec destination directory."""
    # An empty kernelspec_path enables exporting at the system level.
    cfg = Config({"CondaKernelSpecManager": {"kernelspec_path": ""}})

    # Plant a pre-existing kernelspec inside the (mocked) destination.
    spec_file = tmp_path / kernel_name / "kernel.json"
    spec_file.parent.mkdir()
    spec_file.write_bytes(b"{}")

    with patch("nb_conda_kernels.manager.CondaKernelSpecManager.install_kernel_spec") as install_mock, \
         patch("nb_conda_kernels.manager.CondaKernelSpecManager._get_destination_dir") as dest_mock, \
         patch("shutil.rmtree") as rmtree_mock:
        install_mock.return_value = str(tmp_path)
        dest_mock.return_value = str(tmp_path)
        CondaKernelSpecManager(config=cfg)

    # Conda-named stale specs get cleaned up; foreign specs are untouched.
    assert rmtree_mock.called == expected
@pytest.mark.parametrize("kernelspec", [
    {
        "display_name": "xpython",
        "argv": [
            "@XPYTHON_KERNELSPEC_PATH@xpython",
            "-f",
            "{connection_file}"
        ],
        "language": "python",
        "metadata": { "debugger": True }
    }
])
def test_kernel_metadata_debugger_override(monkeypatch, tmp_path, kernelspec):
    """``enable_debugger=False`` must override the ``debugger`` flag that a
    kernel advertises in its own metadata."""
    # Pretend tmp_path is a conda environment holding one kernelspec.
    spec_file = tmp_path / 'share' / 'jupyter' / 'kernels' / 'my_kernel' / 'kernel.json'
    spec_file.parent.mkdir(parents=True, exist_ok=True)
    payload = json.dumps(kernelspec)
    if sys.version_info >= (3, 0):
        spec_file.write_text(payload)
    else:
        spec_file.write_bytes(payload)

    monkeypatch.setattr(CondaKernelSpecManager, "_conda_info", {'conda_prefix': '/'})
    monkeypatch.setattr(CondaKernelSpecManager, "_all_envs",
                        lambda *args: {'env_name': str(tmp_path)})

    # By default the kernel's own metadata wins...
    default_specs = CondaKernelSpecManager()._all_specs()
    assert default_specs['conda-env-env_name-my_kernel']['metadata']['debugger'] is True

    # ...but enable_debugger=False forces the flag off.
    overridden_specs = CondaKernelSpecManager(enable_debugger=False)._all_specs()
    assert overridden_specs['conda-env-env_name-my_kernel']['metadata']['debugger'] is False
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | | [![Build and test the package](https://github.com/Anaconda-Platform/nb_conda_kernels/workflows/Build%20and%20test%20the%20package/badge.svg)](https://github.com/Anaconda-Platform/nb_conda_kernels/actions?query=workflow%3A%22Build+and+test+the+package%22) | [![Anaconda-Server Badge](https://anaconda.org/jupycon/nb_conda_kernels/badges/latest_release_date.svg)](https://anaconda.org/jupycon/nb_conda_kernels) | 2 | | --- | :-: | 3 | | [`conda install jupycon/label/dev::nb_conda_kernels`](https://anaconda.org/jupycon/nb_conda_kernels) | [![Anaconda-Server Badge](https://anaconda.org/jupycon/nb_conda_kernels/badges/version.svg)](https://anaconda.org/jupycon/nb_conda_kernels) | 4 | | [`conda install defaults::nb_conda_kernels`](https://anaconda.org/anaconda/nb_conda_kernels) | [![Anaconda-Server Badge](https://anaconda.org/anaconda/nb_conda_kernels/badges/version.svg)](https://anaconda.org/anaconda/nb_conda_kernels) | 5 | | [`conda install conda-forge::nb_conda_kernels`](https://anaconda.org/conda-forge/nb_conda_kernels) | [![Anaconda-Server Badge](https://anaconda.org/conda-forge/nb_conda_kernels/badges/version.svg)](https://anaconda.org/conda-forge/nb_conda_kernels) | 6 | 7 | # nb_conda_kernels 8 | 9 | This extension enables a [Jupyter Notebook](http://jupyter.org) 10 | or [JupyterLab](https://jupyterlab.readthedocs.io/en/stable/) 11 | application in one [conda](https://conda.io/docs/) 12 | environment to access kernels for Python, R, and other languages 13 | found in other environments. When a kernel from an external environment is selected, the kernel conda environment is 14 | automatically activated before the kernel is launched. 15 | This allows you to utilize different versions of Python, R, 16 | and other languages from a single Jupyter installation. 
17 | 18 | The package works by defining a custom `KernelSpecManager` that 19 | scans the current set of `conda` environments for kernel 20 | specifications. It dynamically modifies each `KernelSpec` 21 | so that it can be properly run from the notebook environment. 22 | When you create a new notebook, these modified kernels 23 | will be made available in the selection list. 24 | 25 | ## Installation 26 | 27 | This package is designed to be managed solely using `conda`. 28 | It should be installed in the environment from which 29 | you run Jupyter Notebook or JupyterLab. This might be your base 30 | `conda` environment, but it need not be. For instance, 31 | if the environment `notebook_env` contains the `notebook` 32 | package, then you would run 33 | 34 | ```shell 35 | conda install -n notebook_env nb_conda_kernels 36 | ``` 37 | 38 | Any _other_ environments you wish to access in your 39 | notebooks must have an appropriate kernel 40 | package installed. For instance, to access a Python 41 | environment, it must have the `ipykernel` package; e.g. 42 | 43 | ```shell 44 | conda install -n python_env ipykernel 45 | ``` 46 | 47 | To utilize an R environment, it must have the `r-irkernel` package; e.g. 48 | 49 | ```shell 50 | conda install -n r_env r-irkernel 51 | ``` 52 | 53 | For other languages, their [corresponding kernels](https://github.com/jupyter/jupyter/wiki/Jupyter-kernels) 54 | must be installed. 55 | 56 | ### Use with nbconvert, voila, papermill,... 57 | 58 | This extension works out of the box _only_ with Jupyter notebooks and 59 | JupyterLab. 60 | 61 | A new [kernel discovery system](https://github.com/jupyter/jupyter_server/pull/112) 62 | is being developed that should enable the 63 | wider Jupyter ecosystem to take advantage of these external 64 | kernels. This package will require modification to 65 | function properly in this new system. 66 | 67 | But you can activate a workaround for it to work with 68 | Jupyter Console, `nbconvert`, and other tools. 
As 69 | these tools were not designed to allow for the use of custom 70 | KernelSpecs, you can set the configuration parameter `kernelspec_path` 71 | to tell this extension to dynamically add the conda environments to 72 | the kernel list. To set it up: 73 | 74 | 1. Create a configuration file for jupyter named `jupyter_config.json` 75 |    in the folder returned by `jupyter --config-dir`. 76 | 2. Add the following configuration to install all kernel specs for the current user: 77 |    ```json 78 |    { 79 |      "CondaKernelSpecManager": { 80 |        "kernelspec_path": "--user" 81 |      } 82 |    } 83 |    ``` 84 | 3. Execute the command (or open the classical Notebook or JupyterLab UI): 85 |    ```sh 86 |    python -m nb_conda_kernels list 87 |    ``` 88 | 4. Check that the conda environment kernels are discovered by `jupyter`: 89 |    ```sh 90 |    jupyter kernelspec list 91 |    ``` 92 |    The previous command should list the same kernels as `nb_conda_kernels`. 93 | 94 | You are now all set. `nbconvert`, `voila`, `papermill`,... should find the 95 | conda environment kernels. 96 | 97 | ## Configuration 98 | 99 | This package introduces several additional configuration options: 100 | 101 | - `conda_only`: Whether to include only the kernels not visible from Jupyter normally or not (default: False except if `kernelspec_path` is set) 102 | - `env_filter`: Regex to filter environment path matching it. Default: `None` (i.e. no filter) 103 | - `kernelspec_path`: Path to install conda kernel specs to if not `None`. Default: `None` (i.e. don't install the conda environment as kernel specs for other Jupyter tools) 104 |   Possible values are: 105 |   - `""` (empty string): Install for all users 106 |   - `--user`: Install for the current user instead of system-wide 107 |   - `--sys-prefix`: Install to Python's sys.prefix 108 |   - `PREFIX`: Specify an install prefix for the kernelspec. The kernel specs will be 109 |     written in `PREFIX/share/jupyter/kernels`. 
Be careful that the PREFIX 110 |     may not be discoverable by Jupyter; set JUPYTER_DATA_DIR to force it or run 111 |     `jupyter --paths` to get the list of data directories. 112 | 113 | - `name_format`: String name format 114 |   Default: `'{language} [conda env:{environment}]'` 115 |   Available field names within the string: 116 |   - `{0}` = Language 117 |   - `{1}` = Environment name 118 |   - `{conda_kernel}` = Dynamically built kernel name for conda environment 119 |   - `{display_name}` = Kernel displayed name (as defined in the kernel spec) 120 |   - `{environment}` = Environment name (identical to `{1}`) 121 |   - `{kernel}` = Original kernel name (name of the folder containing the kernel spec) 122 |   - `{language}` = Language (identical to `{0}`) 123 | 124 | - `enable_debugger`: Override kernelspec debugger metadata 125 |   Default: None 126 |   Possible values are: 127 |   - True: Override environment kernelspec metadata and set the debugger flag to `true` 128 |   - False: Override environment kernelspec metadata and set the debugger flag to `false` 129 | 130 | In order to pass a configuration option in the command line use ```python -m nb_conda_kernels list --CondaKernelSpecManager.env_filter="regex"```, where regex is the regular expression for filtering envs; for example, "this|that|and|that" works. 131 | To set it in a jupyter config file, edit the jupyter configuration file (py or json) located in your ```jupyter --config-dir``` 132 | - for `jupyter_config.py` - add a line "c.CondaKernelSpecManager.env_filter = 'regex'" 133 | - for `jupyter_config.json` - add a json key 134 | 135 | ```json 136 | { 137 |   "CondaKernelSpecManager": { 138 |     "env_filter": "regex" 139 |   } } 140 | ``` 141 | 142 | ## Development 143 | 144 | 1. Install [Anaconda](https://www.anaconda.com/download/) or 145 |    [Miniconda](https://conda.io/miniconda.html). If you are 146 |    on Windows, make sure you have a Bash shell on your path. 147 | 148 | 2. 
Create and activate the testbed environment by running 149 | 150 | ```shell 151 | source testbed/build.sh 152 | ``` 153 | 154 | This performs the following steps: 155 | - Builds a new root conda environment in `../nbckdev`, 156 | or in `CONDA_ROOT` if that environment variable is defined. 157 | (Note that the default directory `../nbckdev` is at the same 158 | level as your copy of the repository. This is because we do 159 | not want `conda build` to try to capture the entire testbed 160 | into the build workspace.) 161 | - Installs conda-build and the necessary dependencies to 162 | locally test the package 163 | - Installs the package in development mode 164 | - Creates a set of environments that the test scripts 165 | require to fully exercise the package. 166 | - Activates the environment, including a deliberate scrubbing 167 | of variables and paths from your primary conda environment. 168 | 169 | If the environment already exists, `testbed/build.sh` will 170 | quickly exit, so it is safe to run it if you are not sure. 171 | 172 | 3. Run pytest to test the package. 173 | 174 | ```shell 175 | pytest tests 176 | ``` 177 | 178 | 4. The root environment of our testbed uses Python 3.7. If you would 179 | like to test `nb_conda_kernels` with a different Python version, 180 | create a new child environment: 181 | 182 | ```shell 183 | conda create -n ptest python=... notebook pytest pytest-cov requests mock 184 | conda install backports.functools_lru_cache # python 2 only 185 | conda activate ptest 186 | pip install -e . 
187 |    python -m nb_conda_kernels.install --enable 188 |    pytest tests 189 |    ``` 190 | 191 | ## Changelog 192 | 193 | ### 2.3.2 194 | 195 | - Added code to clear zombie child processes left behind by the `conda info` subprocess call in manager.py 196 | 197 | ### 2.3.1 198 | 199 | - Provide more options to set the display name of an environment (see [`name_format`](README.md#Configuration) setting) 200 | - Improve the runner script by activating the environment only if required 201 | - Installation script improvements 202 | - Fix GitHub CI actions 203 | 204 | ### 2.3.0 205 | 206 | - Provide a mechanism for using `nb_conda_kernels` 207 |   with tools such as `voila`, `papermill`, `nbconvert` 208 | - Preserve kernel metadata properly 209 | - Testbed improvements 210 | 211 | ### 2.2.4 212 | 213 | - Tested support for noarch packages 214 | - Windows bug fixes 215 | - Better documentation for `env_filter` 216 | - Fixes to kernel metadata 217 | 218 | ### 2.2.3 219 | 220 | - Restore compatibility with Jupyter V6 221 | - Testing and support for Python 3.8 222 | - Enhanced kernel spec metadata 223 | 224 | ### 2.2.2 225 | 226 | - Adds project name to kernel name for environments that 227 |   live outside of the default environment location 228 | - Improved runner scripts: linear execution, better handling 229 |   of environment variables 230 | - Migrate from nosetests to pytest 231 | 232 | ### 2.2.1 233 | 234 | - Put the default environment back into the conda-env list; 235 |   the redundancy is worth the elimination of confusion. 
236 | - Fix post-link scripts on windows 237 | 238 | ### 2.2.0 239 | 240 | - Perform full activation of kernel conda environments 241 | - Discover kernels from their kernel specs, enabling the use 242 | of kernels besides Python and R 243 | - Support for spaces and accented characters in environment 244 | paths, with properly validating kernel names 245 | - Configurable format for kernel display names 246 | - Remove NodeJS-based testing 247 | 248 | ### 2.1.1 249 | 250 | - move to a full conda-based approach to build and test 251 | - add support for conda 4.4 and later, which can remove `conda` from the PATH 252 | 253 | ### 2.1.0 254 | 255 | - add support for regex-based filtering of conda environments that should not appear in the list 256 | 257 | ### 2.0.0 258 | 259 | - change kernel naming scheme to leave default kernels in place 260 | 261 | ### 1.0.3 262 | 263 | - ignore build cleanup on windows due to poorly-behaved PhantomJS processes 264 | 265 | ### 1.0.2 266 | 267 | - use [Travis-CI](https://travis-ci.org/Anaconda-Platform/nb_conda_kernels) for continuous integration 268 | - use [Coveralls](https://coveralls.io/github/Anaconda-Platform/nb_conda_kernels) for code coverage 269 | - use a [conda-forge](https://github.com/conda-forge/nb_conda_kernels-feedstock) for cross-platform `conda` package building 270 | 271 | ### 1.0.1 272 | 273 | - minor build changes 274 | 275 | ### 1.0.0 276 | 277 | - update to notebook 4.2 278 | -------------------------------------------------------------------------------- /nb_conda_kernels/manager.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | import re 4 | import shutil 5 | import subprocess 6 | import threading 7 | import sys 8 | import time 9 | import glob 10 | import psutil 11 | 12 | import os 13 | from os.path import join, split, dirname, basename, abspath 14 | from traitlets import Bool, Unicode, TraitError, validate 15 | 16 | from 
jupyter_client.kernelspec import KernelSpecManager, KernelSpec, NoSuchKernel 17 | 18 | CACHE_TIMEOUT = 60 19 | 20 | CONDA_EXE = os.environ.get("CONDA_EXE", "conda") 21 | 22 | RUNNER_COMMAND = ['python', '-m', 'nb_conda_kernels.runner'] 23 | 24 | _canonical_paths = {} 25 | 26 | 27 | def _canonicalize(path): 28 | """ 29 | On case-sensitive filesystems, return the path unchanged. 30 | On case-insensitive filesystems, cache the first value of 31 | the path that we encounter, and return that for any other 32 | case variation. 33 | """ 34 | def _inode(p): 35 | try: 36 | return os.stat(p).st_ino 37 | except FileNotFoundError: 38 | return -1 39 | inode1 = _inode(path) 40 | plower = path.lower() 41 | inode2 = _inode(plower) 42 | if inode1 != inode2: 43 | return path 44 | inode3 = _inode(path.upper()) 45 | if inode3 != inode2: 46 | return path 47 | return _canonical_paths.setdefault(plower, path) 48 | 49 | 50 | class CondaKernelSpecManager(KernelSpecManager): 51 | """ A custom KernelSpecManager able to search for conda environments and 52 | create kernelspecs for them. 53 | """ 54 | base_name = Unicode("base", config=True, 55 | help="The name to give the base/root environment. " 56 | "The default is 'base', mirroring conda's naming convention. " 57 | "Historically, 'root' was used as well.") 58 | conda_only = Bool(False, config=True, 59 | help="Include only the kernels not visible from Jupyter normally. If False, any " 60 | "duplication will be resolved in favor of nb_conda_kernels. This is assumed to " 61 | "be true if kernelspec_path is supplied as well.") 62 | env_filter = Unicode(None, config=True, allow_none=True, 63 | help="Exclude kernels from environments that match this regex.") 64 | kernelspec_path = Unicode(None, config=True, allow_none=True, 65 | help="""Path to install conda kernel specs to. 
66 | 67 | The acceptable values are: 68 | - ``""`` (empty string): Install for all users 69 | - ``--user``: Install for the current user instead of system-wide 70 | - ``--sys-prefix``: Install to Python's sys.prefix 71 | - ``PREFIX``: Specify an install prefix for the kernelspec. The kernel specs will be 72 | written in ``PREFIX/share/jupyter/kernels``. Be careful that the PREFIX 73 | may not be discoverable by Jupyter; set JUPYTER_DATA_DIR to force it or run 74 | ``jupyter --paths`` to get the list of data directories. 75 | 76 | If None, the conda kernel specs will only be available dynamically on notebook editors. 77 | """) 78 | enable_debugger = Bool(None, config=True, allow_none=True, 79 | help="Optional: Override debugger setting in kernelspec metadata. " 80 | "If this parameter is unset it will default to the source kernel metadata.") 81 | 82 | @validate("kernelspec_path") 83 | def _validate_kernelspec_path(self, proposal): 84 | new_value = proposal["value"] 85 | if new_value is not None: 86 | if new_value not in ("", "--user", "--sys-prefix"): 87 | if not os.path.isdir(self.kernelspec_path): 88 | raise TraitError("CondaKernelSpecManager.kernelspec_path is not a directory.") 89 | self.log.debug("nb_conda_kernels | Force conda_only=True as kernelspec_path is not None.") 90 | self.conda_only = True 91 | 92 | return new_value 93 | 94 | name_format = Unicode( 95 | '{language} [conda env:{environment}]', 96 | config=True, 97 | help="""String name format; available field names within the string: 98 | '{0}' = Language 99 | '{1}' = Environment name 100 | '{conda_kernel}' = Dynamically built kernel name for conda environment 101 | '{display_name}' = Kernel displayed name (as defined in the kernel spec) 102 | '{environment}' = Environment name (identical to '{1}') 103 | '{kernel}' = Original kernel name (name of the folder containing the kernel spec) 104 | '{language}' = Language (identical to '{0}') 105 | """ 106 | ) 107 | 108 | def __init__(self, **kwargs): 109 | 
super(CondaKernelSpecManager, self).__init__(**kwargs) 110 | 111 | self._conda_info_cache = None 112 | self._conda_info_cache_expiry = None 113 | self._conda_info_cache_thread = None 114 | 115 | self._conda_kernels_cache = None 116 | self._conda_kernels_cache_expiry = None 117 | 118 | if self.env_filter is not None: 119 | self._env_filter_regex = re.compile(self.env_filter) 120 | 121 | self._kernel_user = self.kernelspec_path == "--user" 122 | self._kernel_prefix = None 123 | if not self._kernel_user: 124 | self._kernel_prefix = sys.prefix if self.kernelspec_path == "--sys-prefix" else self.kernelspec_path 125 | 126 | self.log.info( 127 | "nb_conda_kernels | enabled, %s kernels found.", len(self._conda_kspecs) 128 | ) 129 | 130 | @staticmethod 131 | def clean_kernel_name(kname): 132 | """ Replaces invalid characters in the Jupyter kernelname, with 133 | a bit of effort to preserve readability. 134 | """ 135 | try: 136 | kname.encode('ascii') 137 | except UnicodeEncodeError: 138 | # Replace accented characters with unaccented equivalents 139 | import unicodedata 140 | nfkd_form = unicodedata.normalize('NFKD', kname) 141 | kname = u"".join([c for c in nfkd_form if not unicodedata.combining(c)]) 142 | # Replace anything else, including spaces, with underscores 143 | kname = re.sub(r'[^a-zA-Z0-9._\-]', '_', kname) 144 | return kname 145 | 146 | @property 147 | def _conda_info(self): 148 | """ Get and parse the whole conda information output 149 | 150 | Caches the information for CACHE_TIMEOUT seconds, as this is 151 | relatively expensive. 152 | """ 153 | 154 | def get_conda_info_data(): 155 | # This is to make sure that subprocess can find 'conda' even if 156 | # it is a Windows batch file---which is the case in non-root 157 | # conda environments. 
158 | shell = CONDA_EXE == 'conda' and sys.platform.startswith('win') 159 | try: 160 | # Let json do the decoding for non-ASCII characters 161 | out = subprocess.check_output([CONDA_EXE, "info", "--json"], shell=shell) 162 | conda_info = json.loads(out) 163 | return conda_info, None 164 | except Exception as err: 165 | return None, err 166 | finally: 167 | self.wait_for_child_processes_cleanup() 168 | 169 | class CondaInfoThread(threading.Thread): 170 | def run(self): 171 | self.out, self.err = get_conda_info_data() 172 | 173 | expiry = self._conda_info_cache_expiry 174 | t = self._conda_info_cache_thread 175 | 176 | # cache is empty 177 | if expiry is None: 178 | self.log.debug("nb_conda_kernels | refreshing conda info (blocking call)") 179 | conda_info, err = get_conda_info_data() 180 | if conda_info is None: 181 | self.log.error("nb_conda_kernels | couldn't call conda:\n%s", err) 182 | self._conda_info_cache = conda_info 183 | self._conda_info_cache_expiry = time.time() + CACHE_TIMEOUT 184 | 185 | # subprocess just finished 186 | elif t and not t.is_alive(): 187 | t.join() 188 | conda_info = t.out 189 | if conda_info is None: 190 | self.log.error("nb_conda_kernels | couldn't call conda:\n%s", t.err) 191 | else: 192 | self.log.debug("nb_conda_kernels | collected conda info (async call)") 193 | self._conda_info_cache = conda_info 194 | self._conda_info_cache_expiry = time.time() + CACHE_TIMEOUT 195 | self._conda_info_cache_thread = None 196 | 197 | # cache expired 198 | elif not t and expiry < time.time(): 199 | self.log.debug("nb_conda_kernels | refreshing conda info (async call)") 200 | t = CondaInfoThread() 201 | t.start() 202 | self._conda_info_cache_thread = t 203 | 204 | # else, just return cache 205 | 206 | return self._conda_info_cache 207 | 208 | def _all_envs(self): 209 | """ Find all of the environments we should be checking. We skip 210 | environments in the conda-bld directory. 
Returns a dict with 211 | canonical environment names as keys, and full paths as values. 212 | """ 213 | conda_info = self._conda_info 214 | envs = list(map(_canonicalize, conda_info['envs'])) 215 | base_prefix = _canonicalize(conda_info['conda_prefix']) 216 | envs_prefix = join(base_prefix, 'envs') 217 | build_prefix = join(base_prefix, 'conda-bld', '') 218 | # Older versions of conda do not seem to include the base prefix 219 | # in the environment list, but we do want to scan that 220 | if base_prefix not in envs: 221 | envs.insert(0, base_prefix) 222 | envs_dirs = conda_info['envs_dirs'] 223 | if not envs_dirs: 224 | envs_dirs = [join(base_prefix, 'envs')] 225 | all_envs = {} 226 | for env_path in envs: 227 | if self.env_filter and self._env_filter_regex.search(env_path): 228 | continue 229 | elif env_path == base_prefix: 230 | env_name = self.base_name 231 | elif env_path.startswith(build_prefix): 232 | # Skip the conda-bld directory entirely 233 | continue 234 | else: 235 | env_base, env_name = split(env_path) 236 | # Add a prefix to environments not found in the default 237 | # environment location. The assumed convention is that a 238 | # directory named 'envs' is a collection of environments 239 | # as created by, say, conda or anaconda-project. The name 240 | # of the parent directory, then, provides useful context. 241 | if basename(env_base) == 'envs' and (env_base != envs_prefix or env_name in all_envs): 242 | env_name = u'{}-{}'.format(basename(dirname(env_base)), env_name) 243 | # Further disambiguate, if necessary, with a counter. 244 | if env_name in all_envs: 245 | base_name = env_name 246 | for count in range(len(all_envs)): 247 | env_name = u'{}-{}'.format(base_name, count + 2) 248 | if env_name not in all_envs: 249 | break 250 | all_envs[env_name] = env_path 251 | return all_envs 252 | 253 | def _all_specs(self): 254 | """ Find the all kernel specs in all environments. 
255 | 256 | Returns a dict with unique env names as keys, and the kernel.json 257 | content as values, modified so that they can be run properly in 258 | their native environments. 259 | 260 | Caches the information for CACHE_TIMEOUT seconds, as this is 261 | relatively expensive. 262 | """ 263 | 264 | all_specs = {} 265 | # We need to be able to find conda-run in the base conda environment 266 | # even if this package is not running there 267 | conda_prefix = self._conda_info['conda_prefix'] 268 | all_envs = self._all_envs() 269 | for env_name, env_path in all_envs.items(): 270 | kspec_base = join(env_path, 'share', 'jupyter', 'kernels') 271 | kspec_glob = glob.glob(join(kspec_base, '*', 'kernel.json')) 272 | for spec_path in kspec_glob: 273 | try: 274 | with open(spec_path, 'rb') as fp: 275 | data = fp.read() 276 | spec = json.loads(data.decode('utf-8')) 277 | except Exception as err: 278 | self.log.error("nb_conda_kernels | error loading %s:\n%s", 279 | spec_path, err) 280 | continue 281 | kernel_dir = dirname(spec_path) 282 | kernel_name = raw_kernel_name = basename(kernel_dir) 283 | if self.kernelspec_path is not None and kernel_name.startswith("conda-"): 284 | self.log.debug("nb_conda_kernels | Skipping kernel spec %s", spec_path) 285 | continue # Ensure to skip dynamically added kernel spec within the environment prefix 286 | # We're doing a few of these adjustments here to ensure that 287 | # the naming convention is as close as possible to the previous 288 | # versions of this package; particularly so that the tests 289 | # pass without change. 
290 | if kernel_name in ('python2', 'python3'): 291 | kernel_name = 'py' 292 | elif kernel_name == 'ir': 293 | kernel_name = 'r' 294 | is_base = env_name == self.base_name 295 | kernel_prefix = '' if is_base else 'env-' 296 | kernel_name = u'conda-{}{}-{}'.format(kernel_prefix, env_name, kernel_name) 297 | # Replace invalid characters with dashes 298 | kernel_name = self.clean_kernel_name(kernel_name) 299 | 300 | display_prefix = spec['display_name'] 301 | if display_prefix.startswith('Python'): 302 | display_prefix = 'Python' 303 | display_name = self.name_format.format( 304 | display_prefix, 305 | env_name, 306 | conda_kernel=kernel_name, 307 | display_name=spec['display_name'], 308 | environment=env_name, 309 | kernel=raw_kernel_name, 310 | language=display_prefix, 311 | ) 312 | is_current = env_path == sys.prefix 313 | if is_current: 314 | display_name += ' *' 315 | spec['display_name'] = display_name 316 | if env_path != sys.prefix: 317 | spec['argv'] = RUNNER_COMMAND + [conda_prefix, env_path] + spec['argv'] 318 | metadata = spec.get('metadata', {}) 319 | metadata.update({ 320 | 'conda_env_name': env_name, 321 | 'conda_env_path': env_path, 322 | 'conda_language': display_prefix, 323 | 'conda_raw_kernel_name': raw_kernel_name, 324 | 'conda_is_base_environment': is_base, 325 | 'conda_is_currently_running': is_current 326 | }) 327 | if self.enable_debugger is not None: 328 | metadata.update({"debugger": self.enable_debugger}) 329 | spec['metadata'] = metadata 330 | 331 | if self.kernelspec_path is not None: 332 | # Install the kernel spec 333 | try: 334 | destination = self.install_kernel_spec( 335 | kernel_dir, 336 | kernel_name=kernel_name, 337 | user=self._kernel_user, 338 | prefix=self._kernel_prefix 339 | ) 340 | # Update the kernel spec 341 | kernel_spec = join(destination, "kernel.json") 342 | tmp_spec = spec.copy() 343 | if env_path == sys.prefix: # Add the conda runner to the installed kernel spec 344 | tmp_spec['argv'] = RUNNER_COMMAND + [conda_prefix, 
env_path] + spec['argv'] 345 | with open(kernel_spec, "w") as f: 346 | json.dump(tmp_spec, f) 347 | except OSError as error: 348 | self.log.warning( 349 | u"nb_conda_kernels | Fail to install kernel '{}'.".format(kernel_dir), 350 | exc_info=error 351 | ) 352 | 353 | # resource_dir is not part of the spec file, so it is added at the latest time 354 | spec['resource_dir'] = abspath(kernel_dir) 355 | 356 | all_specs[kernel_name] = spec 357 | 358 | # Remove non-existing conda environments 359 | if self.kernelspec_path is not None: 360 | kernels_destination = self._get_destination_dir( 361 | "", 362 | user=self._kernel_user, 363 | prefix=self._kernel_prefix 364 | ) 365 | for folder in glob.glob(join(kernels_destination, "*", "kernel.json")): 366 | kernel_dir = dirname(folder) 367 | kernel_name = basename(kernel_dir) 368 | if kernel_name.startswith("conda-") and kernel_name not in all_specs: 369 | self.log.info("Removing %s", kernel_dir) 370 | if os.path.islink(kernel_dir): 371 | os.remove(kernel_dir) 372 | else: 373 | shutil.rmtree(kernel_dir) 374 | 375 | return all_specs 376 | 377 | @property 378 | def _conda_kspecs(self): 379 | """ Get (or refresh) the cache of conda kernels 380 | """ 381 | if self._conda_info is None: 382 | return {} 383 | 384 | expiry = self._conda_kernels_cache_expiry 385 | if expiry is not None and expiry >= time.time(): 386 | return self._conda_kernels_cache 387 | 388 | kspecs = {} 389 | for name, info in self._all_specs().items(): 390 | kspecs[name] = KernelSpec(**info) 391 | 392 | self._conda_kernels_cache_expiry = time.time() + CACHE_TIMEOUT 393 | self._conda_kernels_cache = kspecs 394 | 395 | return kspecs 396 | 397 | def find_kernel_specs(self): 398 | """ Returns a dict mapping kernel names to resource directories. 399 | 400 | The update process also adds the resource dir for the conda 401 | environments. 
402 | """ 403 | if self.conda_only: 404 | kspecs = {} 405 | else: 406 | kspecs = super(CondaKernelSpecManager, self).find_kernel_specs() 407 | kspecs = {k: _canonicalize(v) for k, v in kspecs.items()} 408 | spec_rev = {v: k for k, v in kspecs.items()} 409 | for name, spec in self._conda_kspecs.items(): 410 | kspecs[name] = spec.resource_dir 411 | dup = spec_rev.get(kspecs[name]) 412 | if dup: 413 | del kspecs[dup] 414 | allow = getattr(self, 'allowed_kernelspecs', None) or getattr(self, 'whitelist', None) 415 | if allow: 416 | kspecs = {k: v for k, v in kspecs.items() if k in allow} 417 | return kspecs 418 | 419 | def get_kernel_spec(self, kernel_name): 420 | """ Returns a :class:`KernelSpec` instance for the given kernel_name. 421 | 422 | Additionally, conda kernelspecs are generated on the fly 423 | accordingly with the detected environments. 424 | """ 425 | 426 | res = self._conda_kspecs.get(kernel_name) 427 | if res is None and not self.conda_only: 428 | res = super(CondaKernelSpecManager, self).get_kernel_spec(kernel_name) 429 | return res 430 | 431 | def get_all_specs(self): 432 | """ Returns a dict mapping kernel names to dictionaries with two 433 | entries: "resource_dir" and "spec". This was added to fill out 434 | the full public interface to KernelManagerSpec. 435 | """ 436 | res = {} 437 | for name, resource_dir in self.find_kernel_specs().items(): 438 | try: 439 | spec = self.get_kernel_spec(name) 440 | res[name] = {'resource_dir': resource_dir, 441 | 'spec': spec.to_dict()} 442 | except NoSuchKernel: 443 | self.log.warning("Error loading kernelspec %r", name, exc_info=True) 444 | return res 445 | 446 | def remove_kernel_spec(self, name): 447 | """Remove a kernel spec directory by name. 448 | 449 | Returns the path that was deleted. 450 | """ 451 | save_native = self.ensure_native_kernel 452 | try: 453 | self.ensure_native_kernel = False 454 | # Conda environment kernelspec are only virtual, so remove can only be applied 455 | # on non-virtual kernels. 
456 | specs = super(CondaKernelSpecManager, self).find_kernel_specs() 457 | finally: 458 | self.ensure_native_kernel = save_native 459 | spec_dir = specs[name] 460 | self.log.debug("Removing %s", spec_dir) 461 | if os.path.islink(spec_dir): 462 | os.remove(spec_dir) 463 | else: 464 | shutil.rmtree(spec_dir) 465 | return spec_dir 466 | 467 | def __del__(self): 468 | t = getattr(self, '_conda_info_cache_thread', None) 469 | # if there is a thread, wait for it to finish 470 | if t: 471 | t.join() 472 | 473 | def wait_for_child_processes_cleanup(self): 474 | p = psutil.Process() 475 | for c in p.children(): 476 | try: 477 | c.wait(timeout=0) 478 | except psutil.TimeoutExpired: 479 | pass 480 | -------------------------------------------------------------------------------- /nb_conda_kernels/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. 9 | # Generated by versioneer-0.29 10 | # https://github.com/python-versioneer/python-versioneer 11 | 12 | """Git implementation of _version.py.""" 13 | 14 | import errno 15 | import os 16 | import re 17 | import subprocess 18 | import sys 19 | from typing import Any, Callable, Dict, List, Optional, Tuple 20 | import functools 21 | 22 | 23 | def get_keywords() -> Dict[str, str]: 24 | """Get the keywords needed to look up the version information.""" 25 | # these strings will be replaced by git during git-archive. 26 | # setup.py/versioneer.py will grep for the variable names, so they must 27 | # each be defined on a line of their own. 
# each be defined on a line of its own. _version.py will just call
# get_keywords().


def get_keywords() -> Dict[str, str]:
    """Return the git-archive keyword values recorded in this file."""
    # These strings were substituted by git during `git archive` (via the
    # export-subst attribute); see .gitattributes.
    git_refnames = " (HEAD -> master)"
    git_full = "3202804cf05d0eaf38897f6f44d95d0a73ab1a74"
    git_date = "2025-02-12 10:27:52 -0600"
    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
    return keywords


class VersioneerConfig:
    """Container for Versioneer configuration parameters."""

    VCS: str
    style: str
    tag_prefix: str
    parentdir_prefix: str
    versionfile_source: str
    verbose: bool


def get_config() -> VersioneerConfig:
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    cfg = VersioneerConfig()
    cfg.VCS = "git"
    cfg.style = "pep440"
    cfg.tag_prefix = ""
    cfg.parentdir_prefix = "nb_conda_kernels-"
    cfg.versionfile_source = "nb_conda_kernels/_version.py"
    cfg.verbose = False
    return cfg


class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""


LONG_VERSION_PY: Dict[str, str] = {}
HANDLERS: Dict[str, Dict[str, Callable]] = {}


def register_vcs_handler(vcs: str, method: str) -> Callable:  # decorator
    """Create decorator to mark a method as the handler of a VCS."""
    def decorate(f: Callable) -> Callable:
        """Store f in HANDLERS[vcs][method]."""
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = f
        return f
    return decorate


def run_command(
    commands: List[str],
    args: List[str],
    cwd: Optional[str] = None,
    verbose: bool = False,
    hide_stderr: bool = False,
    env: Optional[Dict[str, str]] = None,
) -> Tuple[Optional[str], Optional[int]]:
    """Call the given command(s), trying each name in `commands` in turn.

    Returns (stdout, returncode); (None, None) when no command could be
    launched, (None, returncode) when the command ran but failed.
    """
    assert isinstance(commands, list)
    process = None

    popen_kwargs: Dict[str, Any] = {}
    if sys.platform == "win32":
        # This hides the console window if pythonw.exe is used
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        popen_kwargs["startupinfo"] = startupinfo

    for command in commands:
        dispcmd = str([command] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen([command] + args, cwd=cwd, env=env,
                                       stdout=subprocess.PIPE,
                                       stderr=(subprocess.PIPE if hide_stderr
                                               else None), **popen_kwargs)
            break
        except OSError as e:
            if e.errno == errno.ENOENT:
                # command not found under this name; try the next candidate
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode


def versions_from_parentdir(
    parentdir_prefix: str,
    root: str,
    verbose: bool,
) -> Dict[str, Any]:
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    rootdirs = []

    for _ in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {"version": dirname[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        rootdirs.append(root)
        root = os.path.dirname(root)  # up a level

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(rootdirs), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")


@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
    """Extract version information from the given file."""
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords: Dict[str, str] = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
                if line.strip().startswith("git_date ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["date"] = mo.group(1)
    except OSError:
        pass
    return keywords


@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(
    keywords: Dict[str, str],
    tag_prefix: str,
    verbose: bool,
) -> Dict[str, Any]:
    """Get version information from git keywords."""
    if "refnames" not in keywords:
        raise NotThisMethod("Short version file found")
    date = keywords.get("date")
    if date is not None:
        # Use only the last line.  Previous lines may contain GPG signature
        # information.
        date = date.splitlines()[-1]

        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {r.strip() for r in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {r for r in refs if re.search(r'\d', r)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            # Filter out refs that exactly match prefix or that don't start
            # with a number once the prefix is stripped (mostly a concern
            # when prefix is '')
            if not re.match(r'\d', r):
                continue
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}


@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(
    tag_prefix: str,
    root: str,
    verbose: bool,
    runner: Callable = run_command
) -> Dict[str, Any]:
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]

    # GIT_DIR can interfere with correct operation of Versioneer.
    # It may be intended to be passed to the Versioneer-versioned project,
    # but that should not change where we get our version from.
    env = os.environ.copy()
    env.pop("GIT_DIR", None)
    runner = functools.partial(runner, env=env)

    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
                   hide_stderr=not verbose)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = runner(GITS, [
        "describe", "--tags", "--dirty", "--always", "--long",
        "--match", f"{tag_prefix}[[:digit:]]*"
    ], cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces: Dict[str, Any] = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
                             cwd=root)
    # --abbrev-ref was added in git-1.6.3
    if rc != 0 or branch_name is None:
        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
    branch_name = branch_name.strip()

    if branch_name == "HEAD":
        # If we aren't exactly on a branch, pick a branch which represents
        # the current commit. If all else fails, we are on a branchless
        # commit.
        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
        # --contains was added in git-1.5.4
        if rc != 0 or branches is None:
            raise NotThisMethod("'git branch --contains' returned error")
        branches = branches.split("\n")

        # Remove the first line if we're running detached
        if "(" in branches[0]:
            branches.pop(0)

        # Strip off the leading "* " from the list of branches.
        branches = [branch[2:] for branch in branches]
        if "master" in branches:
            branch_name = "master"
        elif not branches:
            branch_name = None
        else:
            # Pick the first branch that is returned. Good or bad.
            branch_name = branches[0]

    pieces["branch"] = branch_name

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparsable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
        # BUGFIX: run_command returns (None, ...) on failure; the original
        # called out.split() unconditionally and crashed with AttributeError
        # instead of falling through to the next version-discovery method.
        if out is None:
            raise NotThisMethod("'git rev-list' failed")
        pieces["distance"] = len(out.split())  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date, _ = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)
    # BUGFIX: same None-on-failure guard as above (original did
    # runner(...)[0].strip() and crashed when git failed).
    if date is None:
        raise NotThisMethod("'git show' failed")
    # Use only the last line.  Previous lines may contain GPG signature
    # information.
    date = date.strip().splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces


def plus_or_dot(pieces: Dict[str, Any]) -> str:
    """Return a + if we don't already have one, else return a ."""
    if "+" in pieces.get("closest-tag", ""):
        return "."
    return "+"


def render_pep440(pieces: Dict[str, Any]) -> str:
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def render_pep440_branch(pieces: Dict[str, Any]) -> str:
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch. Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            if pieces["branch"] != "master":
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0"
        if pieces["branch"] != "master":
            rendered += ".dev0"
        rendered += "+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
    """Split pep440 version string at the post-release segment.

    Returns the release segments before the post-release and the
    post-release version number (or -1 if no post-release segment is present).
    """
    vc = str.split(ver, ".post")
    return vc[0], int(vc[1] or 0) if len(vc) == 2 else None


def render_pep440_pre(pieces: Dict[str, Any]) -> str:
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    if pieces["closest-tag"]:
        if pieces["distance"]:
            # update the post release segment
            tag_version, post_version = pep440_split_post(pieces["closest-tag"])
            rendered = tag_version
            if post_version is not None:
                rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
            else:
                rendered += ".post0.dev%d" % (pieces["distance"])
        else:
            # no commits, use the tag as the version
            rendered = pieces["closest-tag"]
    else:
        # exception #1
        rendered = "0.post0.dev%d" % pieces["distance"]
    return rendered


def render_pep440_post(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered


def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["branch"] != "master":
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["branch"] != "master":
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def render_pep440_old(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces: Dict[str, Any]) -> str:
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces: Dict[str, Any]) -> str:
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always --long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-branch":
        rendered = render_pep440_branch(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-post-branch":
        rendered = render_pep440_post_branch(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}


def get_versions() -> Dict[str, Any]:
    """Get version information or return default if unable to do so."""
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE.
If we have 643 | # __file__, we can work backwards from there to the root. Some 644 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 645 | # case we can only use expanded keywords. 646 | 647 | cfg = get_config() 648 | verbose = cfg.verbose 649 | 650 | try: 651 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 652 | verbose) 653 | except NotThisMethod: 654 | pass 655 | 656 | try: 657 | root = os.path.realpath(__file__) 658 | # versionfile_source is the relative path from the top of the source 659 | # tree (where the .git directory might live) to this file. Invert 660 | # this to find the root from __file__. 661 | for _ in cfg.versionfile_source.split('/'): 662 | root = os.path.dirname(root) 663 | except NameError: 664 | return {"version": "0+unknown", "full-revisionid": None, 665 | "dirty": None, 666 | "error": "unable to find root of source tree", 667 | "date": None} 668 | 669 | try: 670 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 671 | return render(pieces, cfg.style) 672 | except NotThisMethod: 673 | pass 674 | 675 | try: 676 | if cfg.parentdir_prefix: 677 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 678 | except NotThisMethod: 679 | pass 680 | 681 | return {"version": "0+unknown", "full-revisionid": None, 682 | "dirty": None, 683 | "error": "unable to compute version", "date": None} 684 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | 2 | # Version: 0.29 3 | 4 | """The Versioneer - like a rocketeer, but for versions. 5 | 6 | The Versioneer 7 | ============== 8 | 9 | * like a rocketeer, but for versions! 
10 | * https://github.com/python-versioneer/python-versioneer 11 | * Brian Warner 12 | * License: Public Domain (Unlicense) 13 | * Compatible with: Python 3.7, 3.8, 3.9, 3.10, 3.11 and pypy3 14 | * [![Latest Version][pypi-image]][pypi-url] 15 | * [![Build Status][travis-image]][travis-url] 16 | 17 | This is a tool for managing a recorded version number in setuptools-based 18 | python projects. The goal is to remove the tedious and error-prone "update 19 | the embedded version string" step from your release process. Making a new 20 | release should be as easy as recording a new tag in your version-control 21 | system, and maybe making new tarballs. 22 | 23 | 24 | ## Quick Install 25 | 26 | Versioneer provides two installation modes. The "classic" vendored mode installs 27 | a copy of versioneer into your repository. The experimental build-time dependency mode 28 | is intended to allow you to skip this step and simplify the process of upgrading. 29 | 30 | ### Vendored mode 31 | 32 | * `pip install versioneer` to somewhere in your $PATH 33 | * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is 34 | available, so you can also use `conda install -c conda-forge versioneer` 35 | * add a `[tool.versioneer]` section to your `pyproject.toml` or a 36 | `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) 37 | * Note that you will need to add `tomli; python_version < "3.11"` to your 38 | build-time dependencies if you use `pyproject.toml` 39 | * run `versioneer install --vendor` in your source tree, commit the results 40 | * verify version information with `python setup.py version` 41 | 42 | ### Build-time dependency mode 43 | 44 | * `pip install versioneer` to somewhere in your $PATH 45 | * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is 46 | available, so you can also use `conda install -c conda-forge versioneer` 47 | * add a `[tool.versioneer]` section to your `pyproject.toml` or a 48 | 
`[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) 49 | * add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`) 50 | to the `requires` key of the `build-system` table in `pyproject.toml`: 51 | ```toml 52 | [build-system] 53 | requires = ["setuptools", "versioneer[toml]"] 54 | build-backend = "setuptools.build_meta" 55 | ``` 56 | * run `versioneer install --no-vendor` in your source tree, commit the results 57 | * verify version information with `python setup.py version` 58 | 59 | ## Version Identifiers 60 | 61 | Source trees come from a variety of places: 62 | 63 | * a version-control system checkout (mostly used by developers) 64 | * a nightly tarball, produced by build automation 65 | * a snapshot tarball, produced by a web-based VCS browser, like github's 66 | "tarball from tag" feature 67 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 68 | 69 | Within each source tree, the version identifier (either a string or a number, 70 | this tool is format-agnostic) can come from a variety of places: 71 | 72 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows 73 | about recent "tags" and an absolute revision-id 74 | * the name of the directory into which the tarball was unpacked 75 | * an expanded VCS keyword ($Id$, etc) 76 | * a `_version.py` created by some earlier build step 77 | 78 | For released software, the version identifier is closely related to a VCS 79 | tag. Some projects use tag names that include more than just the version 80 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 81 | needs to strip the tag prefix to extract the version identifier. For 82 | unreleased software (between tags), the version identifier should provide 83 | enough information to help developers recreate the same tree, while also 84 | giving them an idea of roughly how old the tree is (after version 1.2, before 85 | version 1.3). 
Many VCS systems can report a description that captures this, 86 | for example `git describe --tags --dirty --always` reports things like 87 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 88 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 89 | uncommitted changes). 90 | 91 | The version identifier is used for multiple purposes: 92 | 93 | * to allow the module to self-identify its version: `myproject.__version__` 94 | * to choose a name and prefix for a 'setup.py sdist' tarball 95 | 96 | ## Theory of Operation 97 | 98 | Versioneer works by adding a special `_version.py` file into your source 99 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 100 | dynamically ask the VCS tool for version information at import time. 101 | 102 | `_version.py` also contains `$Revision$` markers, and the installation 103 | process marks `_version.py` to have this marker rewritten with a tag name 104 | during the `git archive` command. As a result, generated tarballs will 105 | contain enough information to get the proper version. 106 | 107 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 108 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 109 | that configures it. This overrides several distutils/setuptools commands to 110 | compute the version when invoked, and changes `setup.py build` and `setup.py 111 | sdist` to replace `_version.py` with a small static file that contains just 112 | the generated version data. 113 | 114 | ## Installation 115 | 116 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 117 | 118 | ## Version-String Flavors 119 | 120 | Code which uses Versioneer can learn about its version string at runtime by 121 | importing `_version` from your main `__init__.py` file and running the 122 | `get_versions()` function. From the "outside" (e.g. 
in `setup.py`), you can 123 | import the top-level `versioneer.py` and run `get_versions()`. 124 | 125 | Both functions return a dictionary with different flavors of version 126 | information: 127 | 128 | * `['version']`: A condensed version string, rendered using the selected 129 | style. This is the most commonly used value for the project's version 130 | string. The default "pep440" style yields strings like `0.11`, 131 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 132 | below for alternative styles. 133 | 134 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 135 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 136 | 137 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the 138 | commit date in ISO 8601 format. This will be None if the date is not 139 | available. 140 | 141 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that 142 | this is only accurate if run in a VCS checkout, otherwise it is likely to 143 | be False or None 144 | 145 | * `['error']`: if the version string could not be computed, this will be set 146 | to a string describing the problem, otherwise it will be None. It may be 147 | useful to throw an exception in setup.py if this is set, to avoid e.g. 148 | creating tarballs with a version string of "unknown". 149 | 150 | Some variants are more useful than others. Including `full-revisionid` in a 151 | bug report should allow developers to reconstruct the exact code being tested 152 | (or indicate the presence of local changes that should be shared with the 153 | developers). `version` is suitable for display in an "about" box or a CLI 154 | `--version` output: it can be easily compared against release notes and lists 155 | of bugs fixed in various releases. 
156 | 157 | The installer adds the following text to your `__init__.py` to place a basic 158 | version in `YOURPROJECT.__version__`: 159 | 160 | from ._version import get_versions 161 | __version__ = get_versions()['version'] 162 | del get_versions 163 | 164 | ## Styles 165 | 166 | The setup.cfg `style=` configuration controls how the VCS information is 167 | rendered into a version string. 168 | 169 | The default style, "pep440", produces a PEP440-compliant string, equal to the 170 | un-prefixed tag name for actual releases, and containing an additional "local 171 | version" section with more detail for in-between builds. For Git, this is 172 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 173 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 174 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 175 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 176 | software (exactly equal to a known tag), the identifier will only contain the 177 | stripped tag, e.g. "0.11". 178 | 179 | Other styles are available. See [details.md](details.md) in the Versioneer 180 | source tree for descriptions. 181 | 182 | ## Debugging 183 | 184 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 185 | to return a version of "0+unknown". To investigate the problem, run `setup.py 186 | version`, which will run the version-lookup code in a verbose mode, and will 187 | display the full contents of `get_versions()` (including the `error` string, 188 | which may help identify what went wrong). 189 | 190 | ## Known Limitations 191 | 192 | Some situations are known to cause problems for Versioneer. This details the 193 | most significant ones. More can be found on Github 194 | [issues page](https://github.com/python-versioneer/python-versioneer/issues). 
195 | 196 | ### Subprojects 197 | 198 | Versioneer has limited support for source trees in which `setup.py` is not in 199 | the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are 200 | two common reasons why `setup.py` might not be in the root: 201 | 202 | * Source trees which contain multiple subprojects, such as 203 | [Buildbot](https://github.com/buildbot/buildbot), which contains both 204 | "master" and "slave" subprojects, each with their own `setup.py`, 205 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI 206 | distributions (and upload multiple independently-installable tarballs). 207 | * Source trees whose main purpose is to contain a C library, but which also 208 | provide bindings to Python (and perhaps other languages) in subdirectories. 209 | 210 | Versioneer will look for `.git` in parent directories, and most operations 211 | should get the right version string. However `pip` and `setuptools` have bugs 212 | and implementation details which frequently cause `pip install .` from a 213 | subproject directory to fail to find a correct version string (so it usually 214 | defaults to `0+unknown`). 215 | 216 | `pip install --editable .` should work correctly. `setup.py install` might 217 | work too. 218 | 219 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in 220 | some later version. 221 | 222 | [Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking 223 | this issue. The discussion in 224 | [PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the 225 | issue from the Versioneer side in more detail. 226 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and 227 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve 228 | pip to let Versioneer work correctly. 
229 | 230 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the 231 | `setup.cfg`, so subprojects were completely unsupported with those releases. 232 | 233 | ### Editable installs with setuptools <= 18.5 234 | 235 | `setup.py develop` and `pip install --editable .` allow you to install a 236 | project into a virtualenv once, then continue editing the source code (and 237 | test) without re-installing after every change. 238 | 239 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a 240 | convenient way to specify executable scripts that should be installed along 241 | with the python package. 242 | 243 | These both work as expected when using modern setuptools. When using 244 | setuptools-18.5 or earlier, however, certain operations will cause 245 | `pkg_resources.DistributionNotFound` errors when running the entrypoint 246 | script, which must be resolved by re-installing the package. This happens 247 | when the install happens with one version, then the egg_info data is 248 | regenerated while a different version is checked out. Many setup.py commands 249 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into 250 | a different virtualenv), so this can be surprising. 251 | 252 | [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes 253 | this one, but upgrading to a newer version of setuptools should probably 254 | resolve it. 255 | 256 | 257 | ## Updating Versioneer 258 | 259 | To upgrade your project to a new release of Versioneer, do the following: 260 | 261 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 262 | * edit `setup.cfg` and `pyproject.toml`, if necessary, 263 | to include any new configuration settings indicated by the release notes. 264 | See [UPGRADING](./UPGRADING.md) for details. 
265 | * re-run `versioneer install --[no-]vendor` in your source tree, to replace 266 | `SRC/_version.py` 267 | * commit any changed files 268 | 269 | ## Future Directions 270 | 271 | This tool is designed to make it easily extended to other version-control 272 | systems: all VCS-specific components are in separate directories like 273 | src/git/ . The top-level `versioneer.py` script is assembled from these 274 | components by running make-versioneer.py . In the future, make-versioneer.py 275 | will take a VCS name as an argument, and will construct a version of 276 | `versioneer.py` that is specific to the given VCS. It might also take the 277 | configuration arguments that are currently provided manually during 278 | installation by editing setup.py . Alternatively, it might go the other 279 | direction and include code from all supported VCS systems, reducing the 280 | number of intermediate scripts. 281 | 282 | ## Similar projects 283 | 284 | * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time 285 | dependency 286 | * [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of 287 | versioneer 288 | * [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools 289 | plugin 290 | 291 | ## License 292 | 293 | To make Versioneer easier to embed, all its code is dedicated to the public 294 | domain. The `_version.py` that it creates is also in the public domain. 295 | Specifically, both are released under the "Unlicense", as described in 296 | https://unlicense.org/. 
297 | 298 | [pypi-image]: https://img.shields.io/pypi/v/versioneer.svg 299 | [pypi-url]: https://pypi.python.org/pypi/versioneer/ 300 | [travis-image]: 301 | https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg 302 | [travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer 303 | 304 | """ 305 | # pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring 306 | # pylint:disable=missing-class-docstring,too-many-branches,too-many-statements 307 | # pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error 308 | # pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with 309 | # pylint:disable=attribute-defined-outside-init,too-many-arguments 310 | 311 | import configparser 312 | import errno 313 | import json 314 | import os 315 | import re 316 | import subprocess 317 | import sys 318 | from pathlib import Path 319 | from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union 320 | from typing import NoReturn 321 | import functools 322 | 323 | have_tomllib = True 324 | if sys.version_info >= (3, 11): 325 | import tomllib 326 | else: 327 | try: 328 | import tomli as tomllib 329 | except ImportError: 330 | have_tomllib = False 331 | 332 | 333 | class VersioneerConfig: 334 | """Container for Versioneer configuration parameters.""" 335 | 336 | VCS: str 337 | style: str 338 | tag_prefix: str 339 | versionfile_source: str 340 | versionfile_build: Optional[str] 341 | parentdir_prefix: Optional[str] 342 | verbose: Optional[bool] 343 | 344 | 345 | def get_root() -> str: 346 | """Get the project root directory. 347 | 348 | We require that all commands are run from the project root, i.e. the 349 | directory that contains setup.py, setup.cfg, and versioneer.py . 
350 | """ 351 | root = os.path.realpath(os.path.abspath(os.getcwd())) 352 | setup_py = os.path.join(root, "setup.py") 353 | pyproject_toml = os.path.join(root, "pyproject.toml") 354 | versioneer_py = os.path.join(root, "versioneer.py") 355 | if not ( 356 | os.path.exists(setup_py) 357 | or os.path.exists(pyproject_toml) 358 | or os.path.exists(versioneer_py) 359 | ): 360 | # allow 'python path/to/setup.py COMMAND' 361 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) 362 | setup_py = os.path.join(root, "setup.py") 363 | pyproject_toml = os.path.join(root, "pyproject.toml") 364 | versioneer_py = os.path.join(root, "versioneer.py") 365 | if not ( 366 | os.path.exists(setup_py) 367 | or os.path.exists(pyproject_toml) 368 | or os.path.exists(versioneer_py) 369 | ): 370 | err = ("Versioneer was unable to run the project root directory. " 371 | "Versioneer requires setup.py to be executed from " 372 | "its immediate directory (like 'python setup.py COMMAND'), " 373 | "or in a way that lets it use sys.argv[0] to find the root " 374 | "(like 'python path/to/setup.py COMMAND').") 375 | raise VersioneerBadRootError(err) 376 | try: 377 | # Certain runtime workflows (setup.py install/develop in a setuptools 378 | # tree) execute all dependencies in a single python process, so 379 | # "versioneer" may be imported multiple times, and python's shared 380 | # module-import table will cache the first one. So we can't use 381 | # os.path.dirname(__file__), as that will find whichever 382 | # versioneer.py was first imported, even in later projects. 
383 | my_path = os.path.realpath(os.path.abspath(__file__)) 384 | me_dir = os.path.normcase(os.path.splitext(my_path)[0]) 385 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) 386 | if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals(): 387 | print("Warning: build in %s is using versioneer.py from %s" 388 | % (os.path.dirname(my_path), versioneer_py)) 389 | except NameError: 390 | pass 391 | return root 392 | 393 | 394 | def get_config_from_root(root: str) -> VersioneerConfig: 395 | """Read the project setup.cfg file to determine Versioneer config.""" 396 | # This might raise OSError (if setup.cfg is missing), or 397 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 398 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 399 | # the top of versioneer.py for instructions on writing your setup.cfg . 400 | root_pth = Path(root) 401 | pyproject_toml = root_pth / "pyproject.toml" 402 | setup_cfg = root_pth / "setup.cfg" 403 | section: Union[Dict[str, Any], configparser.SectionProxy, None] = None 404 | if pyproject_toml.exists() and have_tomllib: 405 | try: 406 | with open(pyproject_toml, 'rb') as fobj: 407 | pp = tomllib.load(fobj) 408 | section = pp['tool']['versioneer'] 409 | except (tomllib.TOMLDecodeError, KeyError) as e: 410 | print(f"Failed to load config from {pyproject_toml}: {e}") 411 | print("Try to load it from setup.cfg") 412 | if not section: 413 | parser = configparser.ConfigParser() 414 | with open(setup_cfg) as cfg_file: 415 | parser.read_file(cfg_file) 416 | parser.get("versioneer", "VCS") # raise error if missing 417 | 418 | section = parser["versioneer"] 419 | 420 | # `cast`` really shouldn't be used, but its simplest for the 421 | # common VersioneerConfig users at the moment. 
We verify against 422 | # `None` values elsewhere where it matters 423 | 424 | cfg = VersioneerConfig() 425 | cfg.VCS = section['VCS'] 426 | cfg.style = section.get("style", "") 427 | cfg.versionfile_source = cast(str, section.get("versionfile_source")) 428 | cfg.versionfile_build = section.get("versionfile_build") 429 | cfg.tag_prefix = cast(str, section.get("tag_prefix")) 430 | if cfg.tag_prefix in ("''", '""', None): 431 | cfg.tag_prefix = "" 432 | cfg.parentdir_prefix = section.get("parentdir_prefix") 433 | if isinstance(section, configparser.SectionProxy): 434 | # Make sure configparser translates to bool 435 | cfg.verbose = section.getboolean("verbose") 436 | else: 437 | cfg.verbose = section.get("verbose") 438 | 439 | return cfg 440 | 441 | 442 | class NotThisMethod(Exception): 443 | """Exception raised if a method is not valid for the current scenario.""" 444 | 445 | 446 | # these dictionaries contain VCS-specific tools 447 | LONG_VERSION_PY: Dict[str, str] = {} 448 | HANDLERS: Dict[str, Dict[str, Callable]] = {} 449 | 450 | 451 | def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator 452 | """Create decorator to mark a method as the handler of a VCS.""" 453 | def decorate(f: Callable) -> Callable: 454 | """Store f in HANDLERS[vcs][method].""" 455 | HANDLERS.setdefault(vcs, {})[method] = f 456 | return f 457 | return decorate 458 | 459 | 460 | def run_command( 461 | commands: List[str], 462 | args: List[str], 463 | cwd: Optional[str] = None, 464 | verbose: bool = False, 465 | hide_stderr: bool = False, 466 | env: Optional[Dict[str, str]] = None, 467 | ) -> Tuple[Optional[str], Optional[int]]: 468 | """Call the given command(s).""" 469 | assert isinstance(commands, list) 470 | process = None 471 | 472 | popen_kwargs: Dict[str, Any] = {} 473 | if sys.platform == "win32": 474 | # This hides the console window if pythonw.exe is used 475 | startupinfo = subprocess.STARTUPINFO() 476 | startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW 477 | 
popen_kwargs["startupinfo"] = startupinfo 478 | 479 | for command in commands: 480 | try: 481 | dispcmd = str([command] + args) 482 | # remember shell=False, so use git.cmd on windows, not just git 483 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, 484 | stdout=subprocess.PIPE, 485 | stderr=(subprocess.PIPE if hide_stderr 486 | else None), **popen_kwargs) 487 | break 488 | except OSError as e: 489 | if e.errno == errno.ENOENT: 490 | continue 491 | if verbose: 492 | print("unable to run %s" % dispcmd) 493 | print(e) 494 | return None, None 495 | else: 496 | if verbose: 497 | print("unable to find command, tried %s" % (commands,)) 498 | return None, None 499 | stdout = process.communicate()[0].strip().decode() 500 | if process.returncode != 0: 501 | if verbose: 502 | print("unable to run %s (error)" % dispcmd) 503 | print("stdout was %s" % stdout) 504 | return None, process.returncode 505 | return stdout, process.returncode 506 | 507 | 508 | LONG_VERSION_PY['git'] = r''' 509 | # This file helps to compute a version number in source trees obtained from 510 | # git-archive tarball (such as those provided by githubs download-from-tag 511 | # feature). Distribution tarballs (built by setup.py sdist) and build 512 | # directories (produced by setup.py build) will contain a much shorter file 513 | # that just contains the computed version number. 514 | 515 | # This file is released into the public domain. 516 | # Generated by versioneer-0.29 517 | # https://github.com/python-versioneer/python-versioneer 518 | 519 | """Git implementation of _version.py.""" 520 | 521 | import errno 522 | import os 523 | import re 524 | import subprocess 525 | import sys 526 | from typing import Any, Callable, Dict, List, Optional, Tuple 527 | import functools 528 | 529 | 530 | def get_keywords() -> Dict[str, str]: 531 | """Get the keywords needed to look up the version information.""" 532 | # these strings will be replaced by git during git-archive. 
533 | # setup.py/versioneer.py will grep for the variable names, so they must 534 | # each be defined on a line of their own. _version.py will just call 535 | # get_keywords(). 536 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 537 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 538 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" 539 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 540 | return keywords 541 | 542 | 543 | class VersioneerConfig: 544 | """Container for Versioneer configuration parameters.""" 545 | 546 | VCS: str 547 | style: str 548 | tag_prefix: str 549 | parentdir_prefix: str 550 | versionfile_source: str 551 | verbose: bool 552 | 553 | 554 | def get_config() -> VersioneerConfig: 555 | """Create, populate and return the VersioneerConfig() object.""" 556 | # these strings are filled in when 'setup.py versioneer' creates 557 | # _version.py 558 | cfg = VersioneerConfig() 559 | cfg.VCS = "git" 560 | cfg.style = "%(STYLE)s" 561 | cfg.tag_prefix = "%(TAG_PREFIX)s" 562 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 563 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 564 | cfg.verbose = False 565 | return cfg 566 | 567 | 568 | class NotThisMethod(Exception): 569 | """Exception raised if a method is not valid for the current scenario.""" 570 | 571 | 572 | LONG_VERSION_PY: Dict[str, str] = {} 573 | HANDLERS: Dict[str, Dict[str, Callable]] = {} 574 | 575 | 576 | def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator 577 | """Create decorator to mark a method as the handler of a VCS.""" 578 | def decorate(f: Callable) -> Callable: 579 | """Store f in HANDLERS[vcs][method].""" 580 | if vcs not in HANDLERS: 581 | HANDLERS[vcs] = {} 582 | HANDLERS[vcs][method] = f 583 | return f 584 | return decorate 585 | 586 | 587 | def run_command( 588 | commands: List[str], 589 | args: List[str], 590 | cwd: Optional[str] = None, 591 | verbose: bool = False, 592 | hide_stderr: bool = False, 593 | env: Optional[Dict[str, str]] 
= None, 594 | ) -> Tuple[Optional[str], Optional[int]]: 595 | """Call the given command(s).""" 596 | assert isinstance(commands, list) 597 | process = None 598 | 599 | popen_kwargs: Dict[str, Any] = {} 600 | if sys.platform == "win32": 601 | # This hides the console window if pythonw.exe is used 602 | startupinfo = subprocess.STARTUPINFO() 603 | startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW 604 | popen_kwargs["startupinfo"] = startupinfo 605 | 606 | for command in commands: 607 | try: 608 | dispcmd = str([command] + args) 609 | # remember shell=False, so use git.cmd on windows, not just git 610 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, 611 | stdout=subprocess.PIPE, 612 | stderr=(subprocess.PIPE if hide_stderr 613 | else None), **popen_kwargs) 614 | break 615 | except OSError as e: 616 | if e.errno == errno.ENOENT: 617 | continue 618 | if verbose: 619 | print("unable to run %%s" %% dispcmd) 620 | print(e) 621 | return None, None 622 | else: 623 | if verbose: 624 | print("unable to find command, tried %%s" %% (commands,)) 625 | return None, None 626 | stdout = process.communicate()[0].strip().decode() 627 | if process.returncode != 0: 628 | if verbose: 629 | print("unable to run %%s (error)" %% dispcmd) 630 | print("stdout was %%s" %% stdout) 631 | return None, process.returncode 632 | return stdout, process.returncode 633 | 634 | 635 | def versions_from_parentdir( 636 | parentdir_prefix: str, 637 | root: str, 638 | verbose: bool, 639 | ) -> Dict[str, Any]: 640 | """Try to determine the version from the parent directory name. 641 | 642 | Source tarballs conventionally unpack into a directory that includes both 643 | the project name and a version string. 
We will also support searching up 644 | two directory levels for an appropriately named parent directory 645 | """ 646 | rootdirs = [] 647 | 648 | for _ in range(3): 649 | dirname = os.path.basename(root) 650 | if dirname.startswith(parentdir_prefix): 651 | return {"version": dirname[len(parentdir_prefix):], 652 | "full-revisionid": None, 653 | "dirty": False, "error": None, "date": None} 654 | rootdirs.append(root) 655 | root = os.path.dirname(root) # up a level 656 | 657 | if verbose: 658 | print("Tried directories %%s but none started with prefix %%s" %% 659 | (str(rootdirs), parentdir_prefix)) 660 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 661 | 662 | 663 | @register_vcs_handler("git", "get_keywords") 664 | def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: 665 | """Extract version information from the given file.""" 666 | # the code embedded in _version.py can just fetch the value of these 667 | # keywords. When used from setup.py, we don't want to import _version.py, 668 | # so we do it with a regexp instead. This function is not used from 669 | # _version.py. 
670 | keywords: Dict[str, str] = {} 671 | try: 672 | with open(versionfile_abs, "r") as fobj: 673 | for line in fobj: 674 | if line.strip().startswith("git_refnames ="): 675 | mo = re.search(r'=\s*"(.*)"', line) 676 | if mo: 677 | keywords["refnames"] = mo.group(1) 678 | if line.strip().startswith("git_full ="): 679 | mo = re.search(r'=\s*"(.*)"', line) 680 | if mo: 681 | keywords["full"] = mo.group(1) 682 | if line.strip().startswith("git_date ="): 683 | mo = re.search(r'=\s*"(.*)"', line) 684 | if mo: 685 | keywords["date"] = mo.group(1) 686 | except OSError: 687 | pass 688 | return keywords 689 | 690 | 691 | @register_vcs_handler("git", "keywords") 692 | def git_versions_from_keywords( 693 | keywords: Dict[str, str], 694 | tag_prefix: str, 695 | verbose: bool, 696 | ) -> Dict[str, Any]: 697 | """Get version information from git keywords.""" 698 | if "refnames" not in keywords: 699 | raise NotThisMethod("Short version file found") 700 | date = keywords.get("date") 701 | if date is not None: 702 | # Use only the last line. Previous lines may contain GPG signature 703 | # information. 704 | date = date.splitlines()[-1] 705 | 706 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant 707 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 708 | # -like" string, which we must then edit to make compliant), because 709 | # it's been around since git-1.5.3, and it's too difficult to 710 | # discover which version we're using, or to work around using an 711 | # older one. 712 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 713 | refnames = keywords["refnames"].strip() 714 | if refnames.startswith("$Format"): 715 | if verbose: 716 | print("keywords are unexpanded, not using") 717 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 718 | refs = {r.strip() for r in refnames.strip("()").split(",")} 719 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 720 | # just "foo-1.0". 
If we see a "tag: " prefix, prefer those. 721 | TAG = "tag: " 722 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} 723 | if not tags: 724 | # Either we're using git < 1.8.3, or there really are no tags. We use 725 | # a heuristic: assume all version tags have a digit. The old git %%d 726 | # expansion behaves like git log --decorate=short and strips out the 727 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 728 | # between branches and tags. By ignoring refnames without digits, we 729 | # filter out many common branch names like "release" and 730 | # "stabilization", as well as "HEAD" and "master". 731 | tags = {r for r in refs if re.search(r'\d', r)} 732 | if verbose: 733 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) 734 | if verbose: 735 | print("likely tags: %%s" %% ",".join(sorted(tags))) 736 | for ref in sorted(tags): 737 | # sorting will prefer e.g. "2.0" over "2.0rc1" 738 | if ref.startswith(tag_prefix): 739 | r = ref[len(tag_prefix):] 740 | # Filter out refs that exactly match prefix or that don't start 741 | # with a number once the prefix is stripped (mostly a concern 742 | # when prefix is '') 743 | if not re.match(r'\d', r): 744 | continue 745 | if verbose: 746 | print("picking %%s" %% r) 747 | return {"version": r, 748 | "full-revisionid": keywords["full"].strip(), 749 | "dirty": False, "error": None, 750 | "date": date} 751 | # no suitable tags, so version is "0+unknown", but full hex is still there 752 | if verbose: 753 | print("no suitable tags, using unknown + full revision id") 754 | return {"version": "0+unknown", 755 | "full-revisionid": keywords["full"].strip(), 756 | "dirty": False, "error": "no suitable tags", "date": None} 757 | 758 | 759 | @register_vcs_handler("git", "pieces_from_vcs") 760 | def git_pieces_from_vcs( 761 | tag_prefix: str, 762 | root: str, 763 | verbose: bool, 764 | runner: Callable = run_command 765 | ) -> Dict[str, Any]: 766 | """Get version from 'git describe' in the 
root of the source tree. 767 | 768 | This only gets called if the git-archive 'subst' keywords were *not* 769 | expanded, and _version.py hasn't already been rewritten with a short 770 | version string, meaning we're inside a checked out source tree. 771 | """ 772 | GITS = ["git"] 773 | if sys.platform == "win32": 774 | GITS = ["git.cmd", "git.exe"] 775 | 776 | # GIT_DIR can interfere with correct operation of Versioneer. 777 | # It may be intended to be passed to the Versioneer-versioned project, 778 | # but that should not change where we get our version from. 779 | env = os.environ.copy() 780 | env.pop("GIT_DIR", None) 781 | runner = functools.partial(runner, env=env) 782 | 783 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, 784 | hide_stderr=not verbose) 785 | if rc != 0: 786 | if verbose: 787 | print("Directory %%s not under git control" %% root) 788 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 789 | 790 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 791 | # if there isn't one, this yields HEX[-dirty] (no NUM) 792 | describe_out, rc = runner(GITS, [ 793 | "describe", "--tags", "--dirty", "--always", "--long", 794 | "--match", f"{tag_prefix}[[:digit:]]*" 795 | ], cwd=root) 796 | # --long was added in git-1.5.5 797 | if describe_out is None: 798 | raise NotThisMethod("'git describe' failed") 799 | describe_out = describe_out.strip() 800 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 801 | if full_out is None: 802 | raise NotThisMethod("'git rev-parse' failed") 803 | full_out = full_out.strip() 804 | 805 | pieces: Dict[str, Any] = {} 806 | pieces["long"] = full_out 807 | pieces["short"] = full_out[:7] # maybe improved later 808 | pieces["error"] = None 809 | 810 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 811 | cwd=root) 812 | # --abbrev-ref was added in git-1.6.3 813 | if rc != 0 or branch_name is None: 814 | raise NotThisMethod("'git rev-parse 
--abbrev-ref' returned error") 815 | branch_name = branch_name.strip() 816 | 817 | if branch_name == "HEAD": 818 | # If we aren't exactly on a branch, pick a branch which represents 819 | # the current commit. If all else fails, we are on a branchless 820 | # commit. 821 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 822 | # --contains was added in git-1.5.4 823 | if rc != 0 or branches is None: 824 | raise NotThisMethod("'git branch --contains' returned error") 825 | branches = branches.split("\n") 826 | 827 | # Remove the first line if we're running detached 828 | if "(" in branches[0]: 829 | branches.pop(0) 830 | 831 | # Strip off the leading "* " from the list of branches. 832 | branches = [branch[2:] for branch in branches] 833 | if "master" in branches: 834 | branch_name = "master" 835 | elif not branches: 836 | branch_name = None 837 | else: 838 | # Pick the first branch that is returned. Good or bad. 839 | branch_name = branches[0] 840 | 841 | pieces["branch"] = branch_name 842 | 843 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 844 | # TAG might have hyphens. 845 | git_describe = describe_out 846 | 847 | # look for -dirty suffix 848 | dirty = git_describe.endswith("-dirty") 849 | pieces["dirty"] = dirty 850 | if dirty: 851 | git_describe = git_describe[:git_describe.rindex("-dirty")] 852 | 853 | # now we have TAG-NUM-gHEX or HEX 854 | 855 | if "-" in git_describe: 856 | # TAG-NUM-gHEX 857 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 858 | if not mo: 859 | # unparsable. Maybe git-describe is misbehaving? 
860 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 861 | %% describe_out) 862 | return pieces 863 | 864 | # tag 865 | full_tag = mo.group(1) 866 | if not full_tag.startswith(tag_prefix): 867 | if verbose: 868 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 869 | print(fmt %% (full_tag, tag_prefix)) 870 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 871 | %% (full_tag, tag_prefix)) 872 | return pieces 873 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 874 | 875 | # distance: number of commits since tag 876 | pieces["distance"] = int(mo.group(2)) 877 | 878 | # commit: short hex revision ID 879 | pieces["short"] = mo.group(3) 880 | 881 | else: 882 | # HEX: no tags 883 | pieces["closest-tag"] = None 884 | out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) 885 | pieces["distance"] = len(out.split()) # total number of commits 886 | 887 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 888 | date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() 889 | # Use only the last line. Previous lines may contain GPG signature 890 | # information. 891 | date = date.splitlines()[-1] 892 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 893 | 894 | return pieces 895 | 896 | 897 | def plus_or_dot(pieces: Dict[str, Any]) -> str: 898 | """Return a + if we don't already have one, else return a .""" 899 | if "+" in pieces.get("closest-tag", ""): 900 | return "." 901 | return "+" 902 | 903 | 904 | def render_pep440(pieces: Dict[str, Any]) -> str: 905 | """Build up version string, with post-release "local version identifier". 906 | 907 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 908 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 909 | 910 | Exceptions: 911 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 912 | """ 913 | if pieces["closest-tag"]: 914 | rendered = pieces["closest-tag"] 915 | if pieces["distance"] or pieces["dirty"]: 916 | rendered += plus_or_dot(pieces) 917 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 918 | if pieces["dirty"]: 919 | rendered += ".dirty" 920 | else: 921 | # exception #1 922 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 923 | pieces["short"]) 924 | if pieces["dirty"]: 925 | rendered += ".dirty" 926 | return rendered 927 | 928 | 929 | def render_pep440_branch(pieces: Dict[str, Any]) -> str: 930 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 931 | 932 | The ".dev0" means not master branch. Note that .dev0 sorts backwards 933 | (a feature branch will appear "older" than the master branch). 934 | 935 | Exceptions: 936 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] 937 | """ 938 | if pieces["closest-tag"]: 939 | rendered = pieces["closest-tag"] 940 | if pieces["distance"] or pieces["dirty"]: 941 | if pieces["branch"] != "master": 942 | rendered += ".dev0" 943 | rendered += plus_or_dot(pieces) 944 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 945 | if pieces["dirty"]: 946 | rendered += ".dirty" 947 | else: 948 | # exception #1 949 | rendered = "0" 950 | if pieces["branch"] != "master": 951 | rendered += ".dev0" 952 | rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], 953 | pieces["short"]) 954 | if pieces["dirty"]: 955 | rendered += ".dirty" 956 | return rendered 957 | 958 | 959 | def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: 960 | """Split pep440 version string at the post-release segment. 961 | 962 | Returns the release segments before the post-release and the 963 | post-release version number (or -1 if no post-release segment is present). 
964 | """ 965 | vc = str.split(ver, ".post") 966 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None 967 | 968 | 969 | def render_pep440_pre(pieces: Dict[str, Any]) -> str: 970 | """TAG[.postN.devDISTANCE] -- No -dirty. 971 | 972 | Exceptions: 973 | 1: no tags. 0.post0.devDISTANCE 974 | """ 975 | if pieces["closest-tag"]: 976 | if pieces["distance"]: 977 | # update the post release segment 978 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) 979 | rendered = tag_version 980 | if post_version is not None: 981 | rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"]) 982 | else: 983 | rendered += ".post0.dev%%d" %% (pieces["distance"]) 984 | else: 985 | # no commits, use the tag as the version 986 | rendered = pieces["closest-tag"] 987 | else: 988 | # exception #1 989 | rendered = "0.post0.dev%%d" %% pieces["distance"] 990 | return rendered 991 | 992 | 993 | def render_pep440_post(pieces: Dict[str, Any]) -> str: 994 | """TAG[.postDISTANCE[.dev0]+gHEX] . 995 | 996 | The ".dev0" means dirty. Note that .dev0 sorts backwards 997 | (a dirty tree will appear "older" than the corresponding clean one), 998 | but you shouldn't be releasing software with -dirty anyways. 999 | 1000 | Exceptions: 1001 | 1: no tags. 0.postDISTANCE[.dev0] 1002 | """ 1003 | if pieces["closest-tag"]: 1004 | rendered = pieces["closest-tag"] 1005 | if pieces["distance"] or pieces["dirty"]: 1006 | rendered += ".post%%d" %% pieces["distance"] 1007 | if pieces["dirty"]: 1008 | rendered += ".dev0" 1009 | rendered += plus_or_dot(pieces) 1010 | rendered += "g%%s" %% pieces["short"] 1011 | else: 1012 | # exception #1 1013 | rendered = "0.post%%d" %% pieces["distance"] 1014 | if pieces["dirty"]: 1015 | rendered += ".dev0" 1016 | rendered += "+g%%s" %% pieces["short"] 1017 | return rendered 1018 | 1019 | 1020 | def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: 1021 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 
1022 | 1023 | The ".dev0" means not master branch. 1024 | 1025 | Exceptions: 1026 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] 1027 | """ 1028 | if pieces["closest-tag"]: 1029 | rendered = pieces["closest-tag"] 1030 | if pieces["distance"] or pieces["dirty"]: 1031 | rendered += ".post%%d" %% pieces["distance"] 1032 | if pieces["branch"] != "master": 1033 | rendered += ".dev0" 1034 | rendered += plus_or_dot(pieces) 1035 | rendered += "g%%s" %% pieces["short"] 1036 | if pieces["dirty"]: 1037 | rendered += ".dirty" 1038 | else: 1039 | # exception #1 1040 | rendered = "0.post%%d" %% pieces["distance"] 1041 | if pieces["branch"] != "master": 1042 | rendered += ".dev0" 1043 | rendered += "+g%%s" %% pieces["short"] 1044 | if pieces["dirty"]: 1045 | rendered += ".dirty" 1046 | return rendered 1047 | 1048 | 1049 | def render_pep440_old(pieces: Dict[str, Any]) -> str: 1050 | """TAG[.postDISTANCE[.dev0]] . 1051 | 1052 | The ".dev0" means dirty. 1053 | 1054 | Exceptions: 1055 | 1: no tags. 0.postDISTANCE[.dev0] 1056 | """ 1057 | if pieces["closest-tag"]: 1058 | rendered = pieces["closest-tag"] 1059 | if pieces["distance"] or pieces["dirty"]: 1060 | rendered += ".post%%d" %% pieces["distance"] 1061 | if pieces["dirty"]: 1062 | rendered += ".dev0" 1063 | else: 1064 | # exception #1 1065 | rendered = "0.post%%d" %% pieces["distance"] 1066 | if pieces["dirty"]: 1067 | rendered += ".dev0" 1068 | return rendered 1069 | 1070 | 1071 | def render_git_describe(pieces: Dict[str, Any]) -> str: 1072 | """TAG[-DISTANCE-gHEX][-dirty]. 1073 | 1074 | Like 'git describe --tags --dirty --always'. 1075 | 1076 | Exceptions: 1077 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 1078 | """ 1079 | if pieces["closest-tag"]: 1080 | rendered = pieces["closest-tag"] 1081 | if pieces["distance"]: 1082 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 1083 | else: 1084 | # exception #1 1085 | rendered = pieces["short"] 1086 | if pieces["dirty"]: 1087 | rendered += "-dirty" 1088 | return rendered 1089 | 1090 | 1091 | def render_git_describe_long(pieces: Dict[str, Any]) -> str: 1092 | """TAG-DISTANCE-gHEX[-dirty]. 1093 | 1094 | Like 'git describe --tags --dirty --always -long'. 1095 | The distance/hash is unconditional. 1096 | 1097 | Exceptions: 1098 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1099 | """ 1100 | if pieces["closest-tag"]: 1101 | rendered = pieces["closest-tag"] 1102 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 1103 | else: 1104 | # exception #1 1105 | rendered = pieces["short"] 1106 | if pieces["dirty"]: 1107 | rendered += "-dirty" 1108 | return rendered 1109 | 1110 | 1111 | def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: 1112 | """Render the given version pieces into the requested style.""" 1113 | if pieces["error"]: 1114 | return {"version": "unknown", 1115 | "full-revisionid": pieces.get("long"), 1116 | "dirty": None, 1117 | "error": pieces["error"], 1118 | "date": None} 1119 | 1120 | if not style or style == "default": 1121 | style = "pep440" # the default 1122 | 1123 | if style == "pep440": 1124 | rendered = render_pep440(pieces) 1125 | elif style == "pep440-branch": 1126 | rendered = render_pep440_branch(pieces) 1127 | elif style == "pep440-pre": 1128 | rendered = render_pep440_pre(pieces) 1129 | elif style == "pep440-post": 1130 | rendered = render_pep440_post(pieces) 1131 | elif style == "pep440-post-branch": 1132 | rendered = render_pep440_post_branch(pieces) 1133 | elif style == "pep440-old": 1134 | rendered = render_pep440_old(pieces) 1135 | elif style == "git-describe": 1136 | rendered = render_git_describe(pieces) 1137 | elif 
style == "git-describe-long": 1138 | rendered = render_git_describe_long(pieces) 1139 | else: 1140 | raise ValueError("unknown style '%%s'" %% style) 1141 | 1142 | return {"version": rendered, "full-revisionid": pieces["long"], 1143 | "dirty": pieces["dirty"], "error": None, 1144 | "date": pieces.get("date")} 1145 | 1146 | 1147 | def get_versions() -> Dict[str, Any]: 1148 | """Get version information or return default if unable to do so.""" 1149 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 1150 | # __file__, we can work backwards from there to the root. Some 1151 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 1152 | # case we can only use expanded keywords. 1153 | 1154 | cfg = get_config() 1155 | verbose = cfg.verbose 1156 | 1157 | try: 1158 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 1159 | verbose) 1160 | except NotThisMethod: 1161 | pass 1162 | 1163 | try: 1164 | root = os.path.realpath(__file__) 1165 | # versionfile_source is the relative path from the top of the source 1166 | # tree (where the .git directory might live) to this file. Invert 1167 | # this to find the root from __file__. 
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
    """Extract version information from the given file."""
    # The code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    # Each recognized assignment prefix maps to the key under which its
    # quoted value is reported.
    targets = (("git_refnames =", "refnames"),
               ("git_full =", "full"),
               ("git_date =", "date"))
    keywords: Dict[str, str] = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                for marker, key in targets:
                    if stripped.startswith(marker):
                        mo = re.search(r'=\s*"(.*)"', line)
                        if mo:
                            keywords[key] = mo.group(1)
    except OSError:
        # An unreadable or missing file simply yields an empty mapping.
        pass
    return keywords
1243 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 1244 | refnames = keywords["refnames"].strip() 1245 | if refnames.startswith("$Format"): 1246 | if verbose: 1247 | print("keywords are unexpanded, not using") 1248 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 1249 | refs = {r.strip() for r in refnames.strip("()").split(",")} 1250 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 1251 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 1252 | TAG = "tag: " 1253 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} 1254 | if not tags: 1255 | # Either we're using git < 1.8.3, or there really are no tags. We use 1256 | # a heuristic: assume all version tags have a digit. The old git %d 1257 | # expansion behaves like git log --decorate=short and strips out the 1258 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 1259 | # between branches and tags. By ignoring refnames without digits, we 1260 | # filter out many common branch names like "release" and 1261 | # "stabilization", as well as "HEAD" and "master". 1262 | tags = {r for r in refs if re.search(r'\d', r)} 1263 | if verbose: 1264 | print("discarding '%s', no digits" % ",".join(refs - tags)) 1265 | if verbose: 1266 | print("likely tags: %s" % ",".join(sorted(tags))) 1267 | for ref in sorted(tags): 1268 | # sorting will prefer e.g. 
"2.0" over "2.0rc1" 1269 | if ref.startswith(tag_prefix): 1270 | r = ref[len(tag_prefix):] 1271 | # Filter out refs that exactly match prefix or that don't start 1272 | # with a number once the prefix is stripped (mostly a concern 1273 | # when prefix is '') 1274 | if not re.match(r'\d', r): 1275 | continue 1276 | if verbose: 1277 | print("picking %s" % r) 1278 | return {"version": r, 1279 | "full-revisionid": keywords["full"].strip(), 1280 | "dirty": False, "error": None, 1281 | "date": date} 1282 | # no suitable tags, so version is "0+unknown", but full hex is still there 1283 | if verbose: 1284 | print("no suitable tags, using unknown + full revision id") 1285 | return {"version": "0+unknown", 1286 | "full-revisionid": keywords["full"].strip(), 1287 | "dirty": False, "error": "no suitable tags", "date": None} 1288 | 1289 | 1290 | @register_vcs_handler("git", "pieces_from_vcs") 1291 | def git_pieces_from_vcs( 1292 | tag_prefix: str, 1293 | root: str, 1294 | verbose: bool, 1295 | runner: Callable = run_command 1296 | ) -> Dict[str, Any]: 1297 | """Get version from 'git describe' in the root of the source tree. 1298 | 1299 | This only gets called if the git-archive 'subst' keywords were *not* 1300 | expanded, and _version.py hasn't already been rewritten with a short 1301 | version string, meaning we're inside a checked out source tree. 1302 | """ 1303 | GITS = ["git"] 1304 | if sys.platform == "win32": 1305 | GITS = ["git.cmd", "git.exe"] 1306 | 1307 | # GIT_DIR can interfere with correct operation of Versioneer. 1308 | # It may be intended to be passed to the Versioneer-versioned project, 1309 | # but that should not change where we get our version from. 
1310 | env = os.environ.copy() 1311 | env.pop("GIT_DIR", None) 1312 | runner = functools.partial(runner, env=env) 1313 | 1314 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, 1315 | hide_stderr=not verbose) 1316 | if rc != 0: 1317 | if verbose: 1318 | print("Directory %s not under git control" % root) 1319 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 1320 | 1321 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 1322 | # if there isn't one, this yields HEX[-dirty] (no NUM) 1323 | describe_out, rc = runner(GITS, [ 1324 | "describe", "--tags", "--dirty", "--always", "--long", 1325 | "--match", f"{tag_prefix}[[:digit:]]*" 1326 | ], cwd=root) 1327 | # --long was added in git-1.5.5 1328 | if describe_out is None: 1329 | raise NotThisMethod("'git describe' failed") 1330 | describe_out = describe_out.strip() 1331 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 1332 | if full_out is None: 1333 | raise NotThisMethod("'git rev-parse' failed") 1334 | full_out = full_out.strip() 1335 | 1336 | pieces: Dict[str, Any] = {} 1337 | pieces["long"] = full_out 1338 | pieces["short"] = full_out[:7] # maybe improved later 1339 | pieces["error"] = None 1340 | 1341 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 1342 | cwd=root) 1343 | # --abbrev-ref was added in git-1.6.3 1344 | if rc != 0 or branch_name is None: 1345 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 1346 | branch_name = branch_name.strip() 1347 | 1348 | if branch_name == "HEAD": 1349 | # If we aren't exactly on a branch, pick a branch which represents 1350 | # the current commit. If all else fails, we are on a branchless 1351 | # commit. 
1352 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 1353 | # --contains was added in git-1.5.4 1354 | if rc != 0 or branches is None: 1355 | raise NotThisMethod("'git branch --contains' returned error") 1356 | branches = branches.split("\n") 1357 | 1358 | # Remove the first line if we're running detached 1359 | if "(" in branches[0]: 1360 | branches.pop(0) 1361 | 1362 | # Strip off the leading "* " from the list of branches. 1363 | branches = [branch[2:] for branch in branches] 1364 | if "master" in branches: 1365 | branch_name = "master" 1366 | elif not branches: 1367 | branch_name = None 1368 | else: 1369 | # Pick the first branch that is returned. Good or bad. 1370 | branch_name = branches[0] 1371 | 1372 | pieces["branch"] = branch_name 1373 | 1374 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 1375 | # TAG might have hyphens. 1376 | git_describe = describe_out 1377 | 1378 | # look for -dirty suffix 1379 | dirty = git_describe.endswith("-dirty") 1380 | pieces["dirty"] = dirty 1381 | if dirty: 1382 | git_describe = git_describe[:git_describe.rindex("-dirty")] 1383 | 1384 | # now we have TAG-NUM-gHEX or HEX 1385 | 1386 | if "-" in git_describe: 1387 | # TAG-NUM-gHEX 1388 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 1389 | if not mo: 1390 | # unparsable. Maybe git-describe is misbehaving? 
1391 | pieces["error"] = ("unable to parse git-describe output: '%s'" 1392 | % describe_out) 1393 | return pieces 1394 | 1395 | # tag 1396 | full_tag = mo.group(1) 1397 | if not full_tag.startswith(tag_prefix): 1398 | if verbose: 1399 | fmt = "tag '%s' doesn't start with prefix '%s'" 1400 | print(fmt % (full_tag, tag_prefix)) 1401 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 1402 | % (full_tag, tag_prefix)) 1403 | return pieces 1404 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 1405 | 1406 | # distance: number of commits since tag 1407 | pieces["distance"] = int(mo.group(2)) 1408 | 1409 | # commit: short hex revision ID 1410 | pieces["short"] = mo.group(3) 1411 | 1412 | else: 1413 | # HEX: no tags 1414 | pieces["closest-tag"] = None 1415 | out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) 1416 | pieces["distance"] = len(out.split()) # total number of commits 1417 | 1418 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 1419 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() 1420 | # Use only the last line. Previous lines may contain GPG signature 1421 | # information. 1422 | date = date.splitlines()[-1] 1423 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 1424 | 1425 | return pieces 1426 | 1427 | 1428 | def do_vcs_install(versionfile_source: str, ipy: Optional[str]) -> None: 1429 | """Git-specific installation logic for Versioneer. 1430 | 1431 | For Git, this means creating/changing .gitattributes to mark _version.py 1432 | for export-subst keyword substitution. 
def versions_from_parentdir(
    parentdir_prefix: str,
    root: str,
    verbose: bool,
) -> Dict[str, Any]:
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory named after the
    project plus a version string, so stripping ``parentdir_prefix`` from
    the directory name yields the version.  The given directory and up to
    two parent levels are examined.
    """
    tried = []
    candidate = root
    for _level in range(3):
        basename = os.path.basename(candidate)
        if basename.startswith(parentdir_prefix):
            return {"version": basename[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(candidate)
        # climb one directory level and retry
        candidate = os.path.dirname(candidate)

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
def plus_or_dot(pieces: Dict[str, Any]) -> str:
    """Return a + if we don't already have one, else return a ."""
    return "." if "+" in pieces.get("closest-tag", "") else "+"


def render_pep440(pieces: Dict[str, Any]) -> str:
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag anywhere in history
        version = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
        return version

    version = tag
    if pieces["distance"] or pieces["dirty"]:
        # attach the local-version segment carrying distance and hash
        version += plus_or_dot(pieces)
        version += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
    return version
def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
    """Split pep440 version string at the post-release segment.

    Returns a ``(release, post)`` tuple: the release segments before the
    post-release marker, and the post-release version number as an int
    (a bare ``.post`` counts as ``0``), or ``None`` when no post-release
    segment is present.  Callers distinguish the missing case with
    ``is not None``.
    """
    # Fixed: the docstring previously claimed -1 was returned when no
    # post-release segment is present, but the implementation returns None.
    # Also use the idiomatic method call rather than str.split(ver, ...).
    vc = ver.split(".post")
    # vc[1] is "" for a bare ".post" suffix; `or 0` maps that to post0.
    return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
def render_pep440_post(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tags at all
        parts = ["0.post%d" % pieces["distance"]]
        if pieces["dirty"]:
            parts.append(".dev0")
        parts.append("+g%s" % pieces["short"])
        return "".join(parts)

    if not (pieces["distance"] or pieces["dirty"]):
        # clean build sitting exactly on a tag
        return tag

    parts = [tag, ".post%d" % pieces["distance"]]
    if pieces["dirty"]:
        parts.append(".dev0")
    parts.append(plus_or_dot(pieces))
    parts.append("g%s" % pieces["short"])
    return "".join(parts)
def render_pep440_old(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag and not (pieces["distance"] or pieces["dirty"]):
        # exactly on a clean tag: the tag alone is the version
        return tag
    suffix = ".post%d" % pieces["distance"]
    if pieces["dirty"]:
        suffix += ".dev0"
    # fall back to "0" when no tag exists in history
    return (tag or "0") + suffix
def render_git_describe_long(pieces: Dict[str, Any]) -> str:
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always --long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: nothing to describe against, fall back to raw hash
        rendered = pieces["short"]
    return rendered + ("-dirty" if pieces["dirty"] else "")
== "git-describe-long": 1780 | rendered = render_git_describe_long(pieces) 1781 | else: 1782 | raise ValueError("unknown style '%s'" % style) 1783 | 1784 | return {"version": rendered, "full-revisionid": pieces["long"], 1785 | "dirty": pieces["dirty"], "error": None, 1786 | "date": pieces.get("date")} 1787 | 1788 | 1789 | class VersioneerBadRootError(Exception): 1790 | """The project root directory is unknown or missing key files.""" 1791 | 1792 | 1793 | def get_versions(verbose: bool = False) -> Dict[str, Any]: 1794 | """Get the project version from whatever source is available. 1795 | 1796 | Returns dict with two keys: 'version' and 'full'. 1797 | """ 1798 | if "versioneer" in sys.modules: 1799 | # see the discussion in cmdclass.py:get_cmdclass() 1800 | del sys.modules["versioneer"] 1801 | 1802 | root = get_root() 1803 | cfg = get_config_from_root(root) 1804 | 1805 | assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" 1806 | handlers = HANDLERS.get(cfg.VCS) 1807 | assert handlers, "unrecognized VCS '%s'" % cfg.VCS 1808 | verbose = verbose or bool(cfg.verbose) # `bool()` used to avoid `None` 1809 | assert cfg.versionfile_source is not None, \ 1810 | "please set versioneer.versionfile_source" 1811 | assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" 1812 | 1813 | versionfile_abs = os.path.join(root, cfg.versionfile_source) 1814 | 1815 | # extract version from first of: _version.py, VCS command (e.g. 'git 1816 | # describe'), parentdir. This is meant to work for developers using a 1817 | # source checkout, for users of a tarball created by 'setup.py sdist', 1818 | # and for users of a tarball/zipball created by 'git archive' or github's 1819 | # download-from-tag feature or the equivalent in other VCSes. 
1820 | 1821 | get_keywords_f = handlers.get("get_keywords") 1822 | from_keywords_f = handlers.get("keywords") 1823 | if get_keywords_f and from_keywords_f: 1824 | try: 1825 | keywords = get_keywords_f(versionfile_abs) 1826 | ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) 1827 | if verbose: 1828 | print("got version from expanded keyword %s" % ver) 1829 | return ver 1830 | except NotThisMethod: 1831 | pass 1832 | 1833 | try: 1834 | ver = versions_from_file(versionfile_abs) 1835 | if verbose: 1836 | print("got version from file %s %s" % (versionfile_abs, ver)) 1837 | return ver 1838 | except NotThisMethod: 1839 | pass 1840 | 1841 | from_vcs_f = handlers.get("pieces_from_vcs") 1842 | if from_vcs_f: 1843 | try: 1844 | pieces = from_vcs_f(cfg.tag_prefix, root, verbose) 1845 | ver = render(pieces, cfg.style) 1846 | if verbose: 1847 | print("got version from VCS %s" % ver) 1848 | return ver 1849 | except NotThisMethod: 1850 | pass 1851 | 1852 | try: 1853 | if cfg.parentdir_prefix: 1854 | ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 1855 | if verbose: 1856 | print("got version from parentdir %s" % ver) 1857 | return ver 1858 | except NotThisMethod: 1859 | pass 1860 | 1861 | if verbose: 1862 | print("unable to compute version") 1863 | 1864 | return {"version": "0+unknown", "full-revisionid": None, 1865 | "dirty": None, "error": "unable to compute version", 1866 | "date": None} 1867 | 1868 | 1869 | def get_version() -> str: 1870 | """Get the short version string for this project.""" 1871 | return get_versions()["version"] 1872 | 1873 | 1874 | def get_cmdclass(cmdclass: Optional[Dict[str, Any]] = None): 1875 | """Get the custom setuptools subclasses used by Versioneer. 1876 | 1877 | If the package uses a different cmdclass (e.g. one from numpy), it 1878 | should be provide as an argument. 
1879 | """ 1880 | if "versioneer" in sys.modules: 1881 | del sys.modules["versioneer"] 1882 | # this fixes the "python setup.py develop" case (also 'install' and 1883 | # 'easy_install .'), in which subdependencies of the main project are 1884 | # built (using setup.py bdist_egg) in the same python process. Assume 1885 | # a main project A and a dependency B, which use different versions 1886 | # of Versioneer. A's setup.py imports A's Versioneer, leaving it in 1887 | # sys.modules by the time B's setup.py is executed, causing B to run 1888 | # with the wrong versioneer. Setuptools wraps the sub-dep builds in a 1889 | # sandbox that restores sys.modules to it's pre-build state, so the 1890 | # parent is protected against the child's "import versioneer". By 1891 | # removing ourselves from sys.modules here, before the child build 1892 | # happens, we protect the child from the parent's versioneer too. 1893 | # Also see https://github.com/python-versioneer/python-versioneer/issues/52 1894 | 1895 | cmds = {} if cmdclass is None else cmdclass.copy() 1896 | 1897 | # we add "version" to setuptools 1898 | from setuptools import Command 1899 | 1900 | class cmd_version(Command): 1901 | description = "report generated version string" 1902 | user_options: List[Tuple[str, str, str]] = [] 1903 | boolean_options: List[str] = [] 1904 | 1905 | def initialize_options(self) -> None: 1906 | pass 1907 | 1908 | def finalize_options(self) -> None: 1909 | pass 1910 | 1911 | def run(self) -> None: 1912 | vers = get_versions(verbose=True) 1913 | print("Version: %s" % vers["version"]) 1914 | print(" full-revisionid: %s" % vers.get("full-revisionid")) 1915 | print(" dirty: %s" % vers.get("dirty")) 1916 | print(" date: %s" % vers.get("date")) 1917 | if vers["error"]: 1918 | print(" error: %s" % vers["error"]) 1919 | cmds["version"] = cmd_version 1920 | 1921 | # we override "build_py" in setuptools 1922 | # 1923 | # most invocation pathways end up running build_py: 1924 | # distutils/build -> 
build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?
    #  pip install:
    #   copies source tree to a tempdir before running egg_info/etc
    #   if .git isn't copied too, 'git describe' will fail
    #   then does setup.py bdist_wheel, or sometimes setup.py install
    #  setup.py egg_info -> ?

    # pip install -e . and setuptool/editable_wheel will invoke build_py
    # but the build_py command is not expected to copy any files.

    # we override different "build_py" commands for both environments
    if 'build_py' in cmds:
        _build_py: Any = cmds['build_py']
    else:
        from setuptools.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        # build_py that, after the normal copy step, overwrites the
        # _version.py placed into build_lib with the concrete version
        # computed by get_versions().
        def run(self) -> None:
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            if getattr(self, "editable_mode", False):
                # During editable installs `.py` and data files are
                # not copied to build_lib
                return
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py

    if 'build_ext' in cmds:
        _build_ext: Any = cmds['build_ext']
    else:
        from setuptools.command.build_ext import build_ext as _build_ext

    class cmd_build_ext(_build_ext):
        # build_ext counterpart of cmd_build_py: refreshes _version.py in
        # build_lib after extensions are built (skipped for --inplace, and
        # when build_py has not populated build_lib yet).
        def run(self) -> None:
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_ext.run(self)
            if self.inplace:
                # build_ext --inplace will only build extensions in
                # build/lib<..> dir with no _version.py to write to.
                # As in place builds will already have a _version.py
                # in the module dir, we do not need to write one.
                return
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if not cfg.versionfile_build:
                return
            target_versionfile = os.path.join(self.build_lib,
                                              cfg.versionfile_build)
            if not os.path.exists(target_versionfile):
                print(f"Warning: {target_versionfile} does not exist, skipping "
                      "version update. This can happen if you are running build_ext "
                      "without first running build_py.")
                return
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile, versions)
    cmds["build_ext"] = cmd_build_ext

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe  # type: ignore
        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        #   "version": versioneer.get_version().split("+", 1)[0],  # FILEVERSION
        #   "product_version": versioneer.get_version(),
        #   ...

        class cmd_build_exe(_build_exe):
            # Expands _version.py in the source tree before freezing, then
            # restores the unexpanded template afterwards so the source tree
            # is left as it was found.
            def run(self) -> None:
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _build_exe.run(self)
                # put the unexpanded template back in place of the frozen
                # version file written above
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        # build_exe writes the source-tree version file itself (see run()
        # above), so the build_py override is dropped for cx_Freeze builds
        del cmds["build_py"]

    if 'py2exe' in sys.modules:  # py2exe enabled?
        try:
            from py2exe.setuptools_buildexe import py2exe as _py2exe  # type: ignore
        except ImportError:
            from py2exe.distutils_buildexe import py2exe as _py2exe  # type: ignore

        class cmd_py2exe(_py2exe):
            # Same strategy as cmd_build_exe: expand _version.py, build,
            # then restore the unexpanded template.
            def run(self) -> None:
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _py2exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["py2exe"] = cmd_py2exe

    # sdist farms its file list building out to egg_info
    if 'egg_info' in cmds:
        _egg_info: Any = cmds['egg_info']
    else:
        from setuptools.command.egg_info import egg_info as _egg_info

    class cmd_egg_info(_egg_info):
        # Ensures versioneer.py and the version file are always listed in
        # the manifest, then rewrites SOURCES.txt with normalized paths.
        def find_sources(self) -> None:
            # egg_info.find_sources builds the manifest list and writes it
            # in one shot
            super().find_sources()

            # Modify the filelist and normalize it
            root = get_root()
            cfg = get_config_from_root(root)
            self.filelist.append('versioneer.py')
            if cfg.versionfile_source:
                # There are rare cases where versionfile_source might not be
                # included by default, so we must be explicit
                self.filelist.append(cfg.versionfile_source)
            self.filelist.sort()
            self.filelist.remove_duplicates()

            # The write method is hidden in the manifest_maker instance that
            # generated the filelist and was thrown away
            # We will instead replicate their final normalization (to unicode,
            # and POSIX-style paths)
            from setuptools import unicode_utils
            normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/')
                          for f in self.filelist.files]

            manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt')
            with open(manifest_filename, 'w') as fobj:
                fobj.write('\n'.join(normalized))

    cmds['egg_info'] = cmd_egg_info

    # we override different "sdist" commands for both environments
    if 'sdist' in cmds:
        _sdist: Any = cmds['sdist']
    else:
        from setuptools.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        # sdist that computes the version once up front and rewrites the
        # _version.py copied into the release tree with that exact value.
        def run(self) -> None:
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir: str, files: List[str]) -> None:
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist

    return cmds


# Printed (to stderr) when setup.cfg lacks a usable [versioneer] section.
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix =
 parentdir_prefix = myproject-

You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
       cmdclass=versioneer.get_cmdclass(), ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

# Commented-out skeleton appended to setup.cfg by do_setup() when no
# [versioneer] section exists yet.
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""

# Legacy __init__.py boilerplate from older Versioneer releases; do_setup()
# replaces this with INIT_PY_SNIPPET when found.
OLD_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
"""

# Current __init__.py boilerplate; {0} is the version module name
# (the basename of versionfile_source without extension).
INIT_PY_SNIPPET = """
from . import {0}
__version__ = {0}.get_versions()['version']
"""


def do_setup() -> int:
    """Do main VCS-independent setup function for installing Versioneer.

    Writes the _version.py template, adds/updates the version-reporting
    boilerplate in the package __init__.py, and applies VCS-specific
    installation steps. Returns 0 on success, 1 on missing/bad config.
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (OSError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        # config is missing or incomplete: append a commented sample
        # [versioneer] section (only when the section itself is absent,
        # not for a single bad option) and bail out with an error
        if isinstance(e, (OSError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    # write (or overwrite) the version-file template for the configured VCS
    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })

    # add (or upgrade) the __version__ boilerplate in the package's
    # __init__.py, next to the version file
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    maybe_ipy: Optional[str] = ipy
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except OSError:
            old = ""
        module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0]
        snippet = INIT_PY_SNIPPET.format(module)
        if OLD_SNIPPET in old:
            print(" replacing boilerplate in %s" % ipy)
            with open(ipy, "w") as f:
                f.write(old.replace(OLD_SNIPPET, snippet))
        elif snippet not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(snippet)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        maybe_ipy = None

    # Make VCS-specific changes.  For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(cfg.versionfile_source, maybe_ipy)
    return 0


def scan_setup_py() -> int:
    """Validate the contents of setup.py against Versioneer's expectations.

    Returns the number of problem categories found (0 means setup.py
    looks OK).
    """
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as f:
        for line in f.readlines():
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            # pre-setup.cfg style configured versioneer via module-level
            # assignments in setup.py; flag those as obsolete
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print("        cmdclass=versioneer.get_cmdclass(),  ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors


def setup_command() -> NoReturn:
    """Set up Versioneer and exit with appropriate error code."""
    errors = do_setup()
    errors += scan_setup_py()
    sys.exit(1 if errors else 0)


if __name__ == "__main__":
    # CLI entry point: 'python versioneer.py setup' installs Versioneer
    # into the current project
    cmd = sys.argv[1]
    if cmd == "setup":
        setup_command()