├── conda_pack
├── tests
│ ├── __init__.py
│ ├── conftest.py
│ ├── test_cli.py
│ ├── test_formats.py
│ └── test_core.py
├── __init__.py
├── scripts
│ ├── posix
│ │ ├── parcel
│ │ ├── deactivate
│ │ └── activate
│ └── windows
│ │ ├── deactivate.bat
│ │ └── activate.bat
├── compat.py
├── _progress.py
├── prefixes.py
├── cli.py
├── formats.py
└── _version.py
├── .gitattributes
├── docs
├── source
│ ├── _static
│ │ └── custom.css
│ ├── cli.rst
│ ├── _templates
│ │ └── help.html
│ ├── api.rst
│ ├── conf.py
│ ├── parcel.rst
│ ├── unix-binary.rst
│ ├── squashfs.rst
│ ├── index.rst
│ └── spark.rst
├── Makefile
└── make.bat
├── testing
├── test_packages
│ ├── conda_pack_test_lib1
│ │ ├── conda_pack_test_lib1
│ │ │ ├── __init__.py
│ │ │ └── cli.py
│ │ ├── setup.py
│ │ └── conda.recipe
│ │ │ └── meta.yaml
│ └── conda_pack_test_lib2
│ │ ├── conda_pack_test_lib2
│ │ │ ├── __init__.py
│ │ │ └── cli.py
│ │ ├── setup.py
│ │ └── conda.recipe
│ │ │ └── meta.yaml
├── extra_scripts
│ ├── conda_pack_test_activate.bat
│ ├── conda_pack_test_activate.sh
│ ├── conda_pack_test_deactivate.sh
│ └── conda_pack_test_deactivate.bat
├── env_yamls
│ ├── has_conda.yml
│ ├── nopython.yml
│ ├── activate_scripts.yml
│ ├── py310.yml
│ ├── py37_broken.yml
│ └── py37.yml
└── setup_envs.sh
├── .github
├── CODEOWNERS
├── workflows
│ ├── project.yml
│ ├── publish.yml
│ ├── cla.yml
│ ├── issues.yml
│ ├── labels.yml
│ ├── lock.yml
│ ├── stale.yml
│ └── main.yml
├── ISSUE_TEMPLATE
│ ├── 2_documentation.yml
│ ├── epic.yml
│ ├── 1_feature.yml
│ └── 0_bug.yml
└── PULL_REQUEST_TEMPLATE.md
├── .coveragerc
├── .gitignore
├── MANIFEST.in
├── news
├── TEMPLATE
├── support-pixi-environments.md
├── 306-fix-hard-link-failure
└── 295-create-output-dir-if-missing-and-force
├── setup.cfg
├── conda-recipe
└── meta.yaml
├── .pre-commit-config.yaml
├── CODE_OF_CONDUCT.md
├── Changelog.md
├── setup.py
├── LICENSE.txt
├── README.md
└── HOW_WE_USE_GITHUB.md
/conda_pack/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | conda_pack/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/docs/source/_static/custom.css:
--------------------------------------------------------------------------------
1 | pre {
2 | padding: 7px 10px;
3 | }
4 |
--------------------------------------------------------------------------------
/testing/test_packages/conda_pack_test_lib1/conda_pack_test_lib1/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/testing/test_packages/conda_pack_test_lib2/conda_pack_test_lib2/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/testing/extra_scripts/conda_pack_test_activate.bat:
--------------------------------------------------------------------------------
1 | @SET CONDAPACK_ACTIVATED=1
2 |
--------------------------------------------------------------------------------
/testing/extra_scripts/conda_pack_test_activate.sh:
--------------------------------------------------------------------------------
1 | export CONDAPACK_ACTIVATED=1
2 |
--------------------------------------------------------------------------------
/testing/extra_scripts/conda_pack_test_deactivate.sh:
--------------------------------------------------------------------------------
1 | unset CONDAPACK_ACTIVATED
2 |
--------------------------------------------------------------------------------
/testing/extra_scripts/conda_pack_test_deactivate.bat:
--------------------------------------------------------------------------------
1 | @SET "CONDAPACK_ACTIVATED="
2 |
--------------------------------------------------------------------------------
/testing/test_packages/conda_pack_test_lib1/conda_pack_test_lib1/cli.py:
--------------------------------------------------------------------------------
1 | def main():
2 | pass
3 |
--------------------------------------------------------------------------------
/testing/test_packages/conda_pack_test_lib2/conda_pack_test_lib2/cli.py:
--------------------------------------------------------------------------------
1 | def main():
2 | pass
3 |
--------------------------------------------------------------------------------
/testing/env_yamls/has_conda.yml:
--------------------------------------------------------------------------------
1 | name: has_conda
2 |
3 | dependencies:
4 | - conda
5 | - toolz
6 |
--------------------------------------------------------------------------------
/docs/source/cli.rst:
--------------------------------------------------------------------------------
1 | CLI Docs
2 | --------
3 |
4 | .. autoprogram:: conda_pack.cli:PARSER
5 | :prog: conda-pack
6 |
--------------------------------------------------------------------------------
/testing/env_yamls/nopython.yml:
--------------------------------------------------------------------------------
1 | name: nopython
2 |
3 | channels:
4 | - conda-forge
5 |
6 | dependencies:
7 | - snappy
8 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # Syntax for this file at https://help.github.com/articles/about-codeowners/
2 |
3 | * @conda/constructor @xhochy
4 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test_*.py
4 | conda_pack/compat.py
5 | conda_pack/prefixes.py
6 | source =
7 | conda_pack
8 |
--------------------------------------------------------------------------------
/testing/env_yamls/activate_scripts.yml:
--------------------------------------------------------------------------------
1 | name: activate_scripts
2 |
3 | channels:
4 | - conda-forge
5 |
6 | dependencies:
7 | - python=3.7
8 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | testing/conda/*
2 | build
3 | dist
4 | *.egg-info
5 | .gitignore.swp
6 | .cache
7 | .coverage
8 | htmlcov/
9 | .pytest_cache/
10 | __pycache__/
11 |
--------------------------------------------------------------------------------
/conda_pack/__init__.py:
--------------------------------------------------------------------------------
1 | from . import _version
2 | from .core import CondaEnv, CondaPackException, File, pack
3 |
4 | __version__ = _version.get_versions()['version']
5 |
--------------------------------------------------------------------------------
/docs/source/_templates/help.html:
--------------------------------------------------------------------------------
1 | <h3>Need help?</h3>
2 |
3 | <p>
4 | Open an issue in the <a href="https://github.com/conda/conda-pack/issues">issue tracker</a>.
5 | </p>
6 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include conda_pack/*.py
2 | recursive-include conda_pack/scripts *
3 | include setup.py
4 | include README.md
5 | include LICENSE.txt
6 | include MANIFEST.in
7 | include versioneer.py
8 | include conda_pack/_version.py
9 |
--------------------------------------------------------------------------------
/testing/env_yamls/py310.yml:
--------------------------------------------------------------------------------
1 | name: py310
2 |
3 | channels:
4 | - ctools
5 | - conda-forge
6 |
7 | dependencies:
8 | - python=3.10
9 | - conda_pack_test_lib1
10 | - conda_pack_test_lib2
11 | - jinja2
12 |
--------------------------------------------------------------------------------
/testing/env_yamls/py37_broken.yml:
--------------------------------------------------------------------------------
1 | name: py37_broken
2 |
3 | channels:
4 | - conda-forge
5 |
6 | dependencies:
7 | - python=3.7
8 | - cytoolz
9 | - toolz
10 | - pip
11 | - pip:
12 | - toolz==0.7.0
13 |
--------------------------------------------------------------------------------
/news/TEMPLATE:
--------------------------------------------------------------------------------
1 | ### Enhancements
2 |
3 | *
4 |
5 | ### Bug fixes
6 |
7 | *
8 |
9 | ### Deprecations
10 |
11 | *
12 |
13 | ### Docs
14 |
15 | *
16 |
17 | ### Other
18 |
19 | *
20 |
--------------------------------------------------------------------------------
/testing/env_yamls/py37.yml:
--------------------------------------------------------------------------------
1 | name: py37
2 |
3 | channels:
4 | - ctools
5 | - conda-forge
6 |
7 | dependencies:
8 | - python=3.7
9 | - conda_pack_test_lib1
10 | - conda_pack_test_lib2
11 | - toolz
12 | - pip
13 | - pip:
14 | - pytest
15 |
--------------------------------------------------------------------------------
/docs/source/api.rst:
--------------------------------------------------------------------------------
1 | API Docs
2 | ========
3 |
4 | .. currentmodule:: conda_pack
5 |
6 |
7 | CondaEnv
8 | --------
9 |
10 | .. autoclass:: CondaEnv
11 | :member-order: bysource
12 | :members:
13 |
14 | File
15 | ----
16 |
17 | .. autoclass:: File
18 | :members:
19 |
20 | pack
21 | ----
22 |
23 | .. autofunction:: pack
24 |
--------------------------------------------------------------------------------
/news/support-pixi-environments.md:
--------------------------------------------------------------------------------
1 | ### Enhancements
2 |
3 | *
4 |
5 | ### Bug fixes
6 |
7 | * Fix a crash when trying to use conda-pack environments created with [pixi](https://pixi.sh).
8 |
9 | ### Deprecations
10 |
11 | *
12 |
13 | ### Docs
14 |
15 | *
16 |
17 | ### Other
18 |
19 | *
20 |
--------------------------------------------------------------------------------
/news/306-fix-hard-link-failure:
--------------------------------------------------------------------------------
1 | ### Enhancements
2 |
3 | *
4 |
5 | ### Bug fixes
6 |
7 | * In `SquashFSArchive._add` use copy instead of hard-link when source and
8 | destination do not share file ownership (#248).
9 |
10 | ### Deprecations
11 |
12 | *
13 |
14 | ### Docs
15 |
16 | *
17 |
18 | ### Other
19 |
20 | *
21 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude = __init__.py
3 | max-line-length = 100
4 |
5 | [isort]
6 | profile=black
7 | line_length=100
8 |
9 | [versioneer]
10 | VCS = git
11 | style = pep440
12 | versionfile_source = conda_pack/_version.py
13 | versionfile_build = conda_pack/_version.py
14 | tag_prefix =
15 | parentdir_prefix = conda-pack-
16 |
17 | [bdist_wheel]
18 | universal=1
19 |
--------------------------------------------------------------------------------
/testing/test_packages/conda_pack_test_lib1/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup(name='conda_pack_test_lib1',
4 | version='0.0.1',
5 | description='Dummy package for testing conda-pack',
6 | packages=['conda_pack_test_lib1'],
7 | entry_points='''
8 | [console_scripts]
9 | conda-pack-test-lib1=conda_pack_test_lib1.cli:main
10 | ''',
11 | zip_safe=False)
12 |
--------------------------------------------------------------------------------
/testing/test_packages/conda_pack_test_lib2/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup(name='conda_pack_test_lib2',
4 | version='0.0.1',
5 | description='Dummy package for testing conda-pack',
6 | packages=['conda_pack_test_lib2'],
7 | entry_points='''
8 | [console_scripts]
9 | conda-pack-test-lib2=conda_pack_test_lib2.cli:main
10 | ''',
11 | zip_safe=False)
12 |
--------------------------------------------------------------------------------
/news/295-create-output-dir-if-missing-and-force:
--------------------------------------------------------------------------------
1 | ### Enhancements
2 |
3 | * Creates the output directory if it does not already exist and `force` is used (#295)
4 |
5 | * Raise error before packing if the output directory is missing and `force` is not used (#295)
6 |
7 | ### Bug fixes
8 |
9 | *
10 |
11 | ### Deprecations
12 |
13 | *
14 |
15 | ### Docs
16 |
17 | *
18 |
19 | ### Other
20 |
21 | *
22 |
--------------------------------------------------------------------------------
/conda_pack/scripts/posix/parcel:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This file runs at file scope (it is sourced by the parcel activation
4 | # machinery, not called as a function), so `local` is not valid here --
5 | # bash aborts with "local: can only be used in a function". Plain
6 | # assignments are used instead, with explicit unset of the temporaries.
7 | CDH_PREFIX="${PARCELS_ROOT}/${PARCEL_DIRNAME}"
8 |
9 | # Default CDH_PYTHON to this parcel's bundled interpreter unless the
10 | # caller has already pointed it elsewhere.
11 | if [ -z "${CDH_PYTHON}" ]; then
12 |     export CDH_PYTHON="${CDH_PREFIX}/bin/python"
13 | fi
14 |
15 | # Run any activate scripts shipped in the environment.
16 | _script_dir="${CDH_PREFIX}/etc/conda/activate.d"
17 | if [ -d "$_script_dir" ] && [ -n "$(ls -A "$_script_dir")" ]; then
18 |     for _path in "$_script_dir"/*.sh; do
19 |         . "$_path"
20 |     done
21 |     unset _path
22 | fi
23 | unset _script_dir
24 |
--------------------------------------------------------------------------------
/testing/test_packages/conda_pack_test_lib2/conda.recipe/meta.yaml:
--------------------------------------------------------------------------------
1 | package:
2 | name: conda_pack_test_lib2
3 | version: 0.0.1
4 |
5 | source:
6 | path: ../
7 |
8 | build:
9 | number: 1
10 | script: python setup.py install --single-version-externally-managed --record=record.txt
11 |
12 | requirements:
13 | build:
14 | - setuptools
15 | - python
16 | run:
17 | - python
18 |
19 | test:
20 | imports:
21 | - conda_pack_test_lib2
22 |
23 | about:
24 | home: https://github.com/conda/conda-pack
25 | summary: Dummy package for testing conda-pack
26 |
--------------------------------------------------------------------------------
/testing/test_packages/conda_pack_test_lib1/conda.recipe/meta.yaml:
--------------------------------------------------------------------------------
1 | package:
2 | name: conda_pack_test_lib1
3 | version: 0.0.1
4 |
5 | source:
6 | path: ../
7 |
8 | build:
9 | number: 1
10 | script: python setup.py install --single-version-externally-managed --record=record.txt
11 | noarch: python
12 |
13 | requirements:
14 | build:
15 | - setuptools
16 | - python >=3.7
17 | run:
18 | - python >=3.7
19 |
20 | test:
21 | imports:
22 | - conda_pack_test_lib1
23 |
24 | about:
25 | home: https://github.com/conda/conda-pack
26 | summary: Dummy package for testing conda-pack
27 |
--------------------------------------------------------------------------------
/.github/workflows/project.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Add to Project
3 |
4 | on:
5 | issues:
6 | types:
7 | - opened
8 | pull_request_target:
9 | types:
10 | - opened
11 |
12 | jobs:
13 | add_to_project:
14 | if: '!github.event.repository.fork'
15 | runs-on: ubuntu-latest
16 | steps:
17 | - uses: actions/add-to-project@v0.6.0
18 | with:
19 | # issues are added to the Planning project
20 | # PRs are added to the Review project
21 | project-url: https://github.com/orgs/conda/projects/${{ github.event_name == 'issues' && 2 || 16 }}
22 | github-token: ${{ secrets.PROJECT_TOKEN }}
23 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = conda-pack
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/conda_pack/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | test_dir = os.path.dirname(os.path.abspath(__file__))
4 | croot = os.environ.get('CONDA_ROOT')
5 | if croot is None:
6 | croot = os.path.join(test_dir, '..', '..', 'testing', 'conda')
7 | env_dir = os.path.join(os.path.abspath(croot), 'envs')
8 |
9 | py37_path = os.path.join(env_dir, 'py37')
10 | py37_editable_path = os.path.join(env_dir, 'py37_editable')
11 | py37_broken_path = os.path.join(env_dir, 'py37_broken')
12 | py37_missing_files_path = os.path.join(env_dir, 'py37_missing_files')
13 | py310_path = os.path.join(env_dir, 'py310')
14 | nopython_path = os.path.join(env_dir, 'nopython')
15 | has_conda_path = os.path.join(env_dir, 'has_conda')
16 | activate_scripts_path = os.path.join(env_dir, 'activate_scripts')
17 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish
2 | on:
3 | push:
4 | tags:
5 | - '*'
6 | jobs:
7 | build:
8 | runs-on: ubuntu-latest
9 | steps:
10 | - uses: actions/checkout@v3
11 | - uses: actions/setup-python@v3
12 | with:
13 | python-version: 3.11
14 | - run: |
15 | pip install build
16 | python -m build
17 | - uses: actions/upload-artifact@v3
18 | with:
19 | path: ./dist
20 |
21 | pypi-publish:
22 | needs: ['build']
23 | environment: 'publish'
24 |
25 | name: upload release to PyPI
26 | runs-on: ubuntu-latest
27 | permissions:
28 | id-token: write
29 | steps:
30 | - uses: actions/download-artifact@v3
31 | - name: Publish package distributions to PyPI
32 | uses: pypa/gh-action-pypi-publish@release/v1
33 | with:
34 | packages_dir: artifact/
35 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 | set SPHINXPROJ=conda-pack
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | echo.installed, then set the SPHINXBUILD environment variable to point
21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | echo.may add the Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/conda_pack/scripts/posix/deactivate:
--------------------------------------------------------------------------------
1 | _conda_pack_deactivate () {
2 | # If there's an active environment
3 | if [ -n "$CONDA_PREFIX" ]; then
4 | # First run the deactivate scripts
5 | local _script_dir="${CONDA_PREFIX}/etc/conda/deactivate.d"
6 | if [ -d "$_script_dir" ] && [ -n "$(ls -A "$_script_dir")" ]; then
7 | local _path
8 | for _path in "$_script_dir"/*.sh; do
9 | . "$_path"
10 | done
11 | fi
12 |
13 | # Remove env/bin from path
14 | local IFS=':'
15 | local _target="$CONDA_PREFIX/bin"
16 | local _newpath
17 | local _path
18 |
19 | for _path in $PATH; do
20 | if [ "$_path" != "$_target" ] ; then
21 | _newpath=${_newpath:+$_newpath:}$_path
22 | fi
23 | done
24 |
25 | PATH="$_newpath"
26 | unset CONDA_PREFIX
27 | PS1="$_CONDA_PACK_OLD_PS1"
28 | unset _CONDA_PACK_OLD_PS1
29 | fi
30 | }
31 |
32 | _conda_pack_deactivate
33 |
--------------------------------------------------------------------------------
/.github/workflows/cla.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: CLA
3 |
4 | on:
5 | issue_comment:
6 | types:
7 | - created
8 | pull_request_target:
9 |
10 | jobs:
11 | check:
12 | if: >-
13 | !github.event.repository.fork
14 | && (
15 | github.event.issue.pull_request
16 | && github.event.comment.body == '@conda-bot check'
17 | || github.event_name == 'pull_request_target'
18 | )
19 | runs-on: ubuntu-latest
20 | steps:
21 | - name: Check CLA
22 | uses: conda/actions/check-cla@v24.2.0
23 | with:
24 | # [required]
25 | # A token with ability to comment, label, and modify the commit status
26 | # (`pull_request: write` and `statuses: write` for fine-grained PAT; `repo` for classic PAT)
27 | # (default: secrets.GITHUB_TOKEN)
28 | token: ${{ secrets.CLA_ACTION_TOKEN }}
29 | # [required]
30 | # Label to apply to contributor's PR once CLA is signed
31 | label: cla-signed
32 |
33 | # [required]
34 | # Token for opening signee PR in the provided `cla_repo`
35 | # (`pull_request: write` for fine-grained PAT; `repo` and `workflow` for classic PAT)
36 | cla_token: ${{ secrets.CLA_FORK_TOKEN }}
37 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | import alabaster
2 |
3 | import conda_pack
4 |
5 | # Project settings
6 | project = 'conda-pack'
7 | copyright = '2017, Jim Crist'
8 | author = 'Jim Crist'
9 | release = version = conda_pack.__version__
10 |
11 | source_suffix = '.rst'
12 | master_doc = 'index'
13 | language = None
14 | pygments_style = 'sphinx'
15 | exclude_patterns = []
16 |
17 | # Sphinx Extensions
18 | extensions = ['sphinx.ext.autodoc',
19 | 'numpydoc',
20 | 'sphinxcontrib.autoprogram']
21 |
22 | numpydoc_show_class_members = False
23 |
24 | # Sphinx Theme
25 | html_theme = 'alabaster'
26 | html_theme_path = [alabaster.get_path()]
27 | templates_path = ['_templates']
28 | html_static_path = ['_static']
29 | html_theme_options = {
30 | 'description': 'A tool for packaging and distributing conda environments.',
31 | 'github_button': True,
32 | 'github_count': False,
33 | 'github_user': 'conda',
34 | 'github_repo': 'conda-pack',
35 | 'travis_button': True,
36 | 'show_powered_by': False,
37 | 'page_width': '960px',
38 | 'sidebar_width': '200px',
39 | 'code_font_size': '0.8em'
40 | }
41 | html_sidebars = {
42 | '**': ['about.html',
43 | 'navigation.html',
44 | 'help.html',
45 | 'searchbox.html']
46 | }
47 |
--------------------------------------------------------------------------------
/conda-recipe/meta.yaml:
--------------------------------------------------------------------------------
1 | {% set data = load_setup_py_data() %}
2 |
3 | package:
4 | name: conda-pack
5 | version: {{ data.get('version').lstrip('v') }}
6 |
7 | source:
8 | path: ../
9 |
10 | build:
11 | number: 0
12 | noarch: python
13 | script:
14 | - {{ PYTHON }} -m pip install . --no-deps --ignore-installed -vv
15 | entry_points:
16 | - conda-pack = conda_pack.cli:main
17 |
18 | requirements:
19 | host:
20 | - python >=3.7
21 | - pip
22 | run:
23 | - python >=3.7
24 | - setuptools
25 |
26 | test:
27 | source_files:
28 | - testing
29 | - conda_pack/tests
30 | requires:
31 | - python
32 | - pytest
33 | - squashfs-tools
34 | - squashfuse
35 | commands:
36 | - bash testing/setup_envs.sh
37 | - pytest -s -vv conda_pack/tests
38 |
39 | about:
40 | home: https://conda.github.io/conda-pack/
41 | license: BSD-3-Clause
42 | license_family: BSD
43 | license_file: LICENSE.txt
44 | summary: 'Package conda environments for redistribution'
45 | description: |
46 | ``conda-pack`` is a command line tool for creating relocatable conda
47 | environments. This is useful for deploying code in a consistent environment,
48 | potentially in a location where python/conda isn't already installed.
49 | doc_url: https://conda.github.io/conda-pack/
50 | dev_url: https://github.com/conda/conda-pack
51 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # disable autofixing PRs, commenting "pre-commit.ci autofix" on a pull request triggers a autofix
2 | ci:
3 | autofix_prs: false
4 | exclude: ^(versioneer.py|conda_pack/_version.py)
5 | repos:
6 | - repo: https://github.com/pre-commit/pre-commit-hooks
7 | rev: v4.5.0
8 | hooks:
9 | - id: check-added-large-files
10 | - id: check-ast
11 | - id: fix-byte-order-marker
12 | - id: check-case-conflict
13 | - id: check-executables-have-shebangs
14 | - id: check-merge-conflict
15 | - id: check-shebang-scripts-are-executable
16 | - id: debug-statements
17 | - id: detect-private-key
18 | - id: mixed-line-ending
19 | - id: end-of-file-fixer
20 | - id: trailing-whitespace
21 | - id: check-yaml
22 | exclude: conda-recipe/meta.yaml
23 | - id: check-merge-conflict
24 | - repo: https://github.com/asottile/pyupgrade
25 | rev: v3.15.1
26 | hooks:
27 | - id: pyupgrade
28 | args: ["--py37-plus"]
29 | - repo: https://github.com/PyCQA/isort
30 | rev: 5.13.2
31 | hooks:
32 | - id: isort
33 | - repo: https://github.com/akaihola/darker
34 | rev: 1.7.2
35 | hooks:
36 | - id: darker
37 | additional_dependencies: [black]
38 | - repo: https://github.com/PyCQA/flake8
39 | rev: 7.0.0
40 | hooks:
41 | - id: flake8
42 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Conda Organization Code of Conduct
2 |
3 | > [!NOTE]
4 | > Below is the short version of our CoC, see the long version [here](https://github.com/conda-incubator/governance/blob/main/CODE_OF_CONDUCT.md).
5 |
6 | # The Short Version
7 |
8 | Be kind to others. Do not insult or put down others. Behave professionally. Remember that harassment and sexist, racist, or exclusionary jokes are not appropriate for the conda Organization.
9 |
10 | All communication should be appropriate for a professional audience including people of many different backgrounds. Sexual language and imagery is not appropriate.
11 |
12 | The conda Organization is dedicated to providing a harassment-free community for everyone, regardless of gender, sexual orientation, gender identity and expression, disability, physical appearance, body size, race, or religion. We do not tolerate harassment of community members in any form.
13 |
14 | Thank you for helping make this a welcoming, friendly community for all.
15 |
16 | ## Report an Incident
17 |
18 | * Report a code of conduct incident [using a form](https://form.jotform.com/221527028480048).
19 | * Report a code of conduct incident via email: [conduct@conda.org](mailto:conduct@conda.org).
20 | * Contact [an individual committee member](#committee-membership) or [CoC event representative](#coc-representatives) to report an incident in confidence.
21 |
--------------------------------------------------------------------------------
/.github/workflows/issues.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Automate Issues
3 |
4 | on:
5 | # NOTE: github.event is issue_comment payload:
6 | # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment
7 | issue_comment:
8 | types: [created]
9 |
10 | env:
11 | FEEDBACK_LBL: pending::feedback
12 | SUPPORT_LBL: pending::support
13 |
14 | jobs:
15 | # NOTE: will update label if anyone responds, not just the author/reporter
16 | # TODO: create conda-issue-sorting team and modify this to toggle label based on
17 | # whether a non-issue-sorting engineer commented
18 | pending_support:
19 | # if [pending::feedback] and anyone responds
20 | if: >-
21 | !github.event.repository.fork
22 | && !github.event.issue.pull_request
23 | && contains(github.event.issue.labels.*.name, 'pending::feedback')
24 | runs-on: ubuntu-latest
25 | steps:
26 | # remove [pending::feedback]
27 | - uses: actions-ecosystem/action-remove-labels@v1.3.0
28 | with:
29 | labels: ${{ env.FEEDBACK_LBL }}
30 | github_token: ${{ secrets.PROJECT_TOKEN }}
31 | # add [pending::support], if still open
32 | - uses: actions-ecosystem/action-add-labels@v1.1.3
33 | if: github.event.issue.state == 'open'
34 | with:
35 | labels: ${{ env.SUPPORT_LBL }}
36 | github_token: ${{ secrets.PROJECT_TOKEN }}
37 |
--------------------------------------------------------------------------------
/Changelog.md:
--------------------------------------------------------------------------------
1 | # 0.7.1 (2023-08-07)
2 |
3 | ## What's Changed
4 | * Updates tests to Python 3.7/3.10, as 2.7/3.6 being EOL by @dbast in https://github.com/conda/conda-pack/pull/207
5 | * Renovations and require Python >=3.7 by @dbast in https://github.com/conda/conda-pack/pull/206
6 | * document packaging of environments as self-extracting executables by @saraedum in https://github.com/conda/conda-pack/pull/159
7 | * Add fix for core.py missing space by @Rtanti in https://github.com/conda/conda-pack/pull/218
8 | * Re-enable coverage reporting by @dbast in https://github.com/conda/conda-pack/pull/251
9 | * Allow the compression level to be set in zip files by @jjhelmus in https://github.com/conda/conda-pack/pull/252
10 | * Fix path separator on Windows distributions by @andysham in https://github.com/conda/conda-pack/pull/275
11 | * codesign binaries on osx-arm64 by @xhochy in https://github.com/conda/conda-pack/pull/257
12 |
13 | ## New Contributors
14 | * @saraedum made their first contribution in https://github.com/conda/conda-pack/pull/159
15 | * @Rtanti made their first contribution in https://github.com/conda/conda-pack/pull/218
16 | * @kenodegard made their first contribution in https://github.com/conda/conda-pack/pull/227
17 | * @jezdez made their first contribution in https://github.com/conda/conda-pack/pull/233
18 | * @jjhelmus made their first contribution in https://github.com/conda/conda-pack/pull/252
19 | * @andysham made their first contribution in https://github.com/conda/conda-pack/pull/275
20 |
--------------------------------------------------------------------------------
/conda_pack/compat.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | default_encoding = sys.getdefaultencoding()
5 | on_win = sys.platform == 'win32'
6 | on_mac = sys.platform == 'darwin'
7 | on_linux = sys.platform == 'linux'
8 | is_32bit = sys.maxsize < 2**32 or os.environ.get('CONDA_FORCE_32BIT', '0') == '1'
9 |
10 | PY2 = sys.version_info.major == 2
11 |
12 |
13 | if PY2:
14 | from imp import load_source
15 |
16 | from Queue import Queue
17 |
18 | def source_from_cache(path):
19 | if path.endswith('.pyc') or path.endswith('.pyo'):
20 | return path[:-1]
21 | raise ValueError("Path %s is not a python bytecode file" % path)
22 | else:
23 | import importlib
24 | from importlib.util import source_from_cache
25 | from queue import Queue # noqa
26 |
27 | def load_source(name, path):
28 | loader = importlib.machinery.SourceFileLoader(name, path)
29 | spec = importlib.util.spec_from_loader(loader.name, loader)
30 | mod = importlib.util.module_from_spec(spec)
31 | spec.loader.exec_module(mod)
32 | return mod
33 |
34 |
def find_py_source(path, ignore=True):
    """Return the source ``.py`` path for the bytecode file at *path*.

    Parameters
    ----------
    path : str
        Path to a Python bytecode (``.pyc``) file.
    ignore : bool, optional
        When True (the default), lookup failures are swallowed and None
        is returned; when False, the underlying exception propagates.
    """
    if ignore:
        try:
            return source_from_cache(path)
        except (NotImplementedError, ValueError):
            # Not a recognizable bytecode path (or no cache tag available).
            return None
    return source_from_cache(path)
46 |
--------------------------------------------------------------------------------
/.github/workflows/labels.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Sync Labels
3 |
4 | on:
5 | # NOTE: github.event is workflow_dispatch payload:
6 | # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch
7 | workflow_dispatch:
8 | inputs:
9 | dryrun:
10 | description: 'dryrun: Preview changes to labels without editing them (true|false)'
11 | required: true
12 | type: boolean
13 | default: true
14 |
15 | jobs:
16 | sync:
17 | if: '!github.event.repository.fork'
18 | runs-on: ubuntu-latest
19 | env:
20 | GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml
21 | LOCAL: .github/labels.yml
22 | steps:
23 | - uses: actions/checkout@v4
24 | - id: has_local
25 | uses: andstor/file-existence-action@v3.0.0
26 | with:
27 | files: ${{ env.LOCAL }}
28 | - name: Global Only
29 | uses: EndBug/label-sync@v2.3.3
30 | if: steps.has_local.outputs.files_exists == 'false'
31 | with:
32 | config-file: ${{ env.GLOBAL }}
33 | delete-other-labels: true
34 | dry-run: ${{ github.event.inputs.dryrun }}
35 | - name: Global & Local
36 | uses: EndBug/label-sync@v2.3.3
37 | if: steps.has_local.outputs.files_exists == 'true'
38 | with:
39 | config-file: |
40 | ${{ env.GLOBAL }}
41 | ${{ env.LOCAL }}
42 | delete-other-labels: true
43 | dry-run: ${{ github.event.inputs.dryrun }}
44 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup

import versioneer

# Read the long description up front inside a context manager so the file
# handle is closed promptly, and decode explicitly as UTF-8 rather than
# relying on the platform default encoding (which can break on Windows).
with open('README.md', encoding='utf-8') as f:
    long_description = f.read()

setup(name='conda-pack',
      version=versioneer.get_version(),
      cmdclass=versioneer.get_cmdclass(),
      url='https://conda.github.io/conda-pack/',
      project_urls={"Source Code": "https://github.com/conda/conda-pack"},
      maintainer='Jim Crist',
      maintainer_email='jiminy.crist@gmail.com',
      keywords='conda packaging',
      classifiers=["Development Status :: 4 - Beta",
                   "License :: OSI Approved :: BSD License",
                   "Programming Language :: Python :: 3.7",
                   "Programming Language :: Python :: 3.8",
                   "Programming Language :: Python :: 3.9",
                   "Programming Language :: Python :: 3.10",
                   "Topic :: System :: Archiving :: Packaging",
                   "Topic :: System :: Software Distribution",
                   "Topic :: Software Development :: Build Tools"],
      license='BSD',
      description='Package conda environments for redistribution',
      long_description=long_description,
      long_description_content_type="text/markdown",
      packages=['conda_pack'],
      package_data={'conda_pack': ['scripts/windows/*', 'scripts/posix/*']},
      entry_points='''
      [console_scripts]
      conda-pack=conda_pack.cli:main
      ''',
      install_requires=['setuptools'],
      python_requires='>=3.7',
      zip_safe=False)
35 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2017, Jim Crist and contributors
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | 1. Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | 2. Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | 3. Neither the name of the copyright holder nor the names of its contributors
15 | may be used to endorse or promote products derived from this software
16 | without specific prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/2_documentation.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Documentation
3 | description: Create a documentation related issue.
4 | labels:
5 | - type::documentation
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | > [!NOTE]
11 | > Documentation requests that are incomplete or missing information may be closed as inactionable.
12 |
13 | Since there are already a lot of open issues, please also take a moment to search existing ones to see if your bug has already been reported. If you find something related, please upvote that issue and provide additional details as necessary.
14 |
15 | 💐 Thank you for helping to make conda better. We would be unable to improve conda without our community!
16 | - type: checkboxes
17 | id: checks
18 | attributes:
19 | label: Checklist
20 | description: Please confirm and check all of the following options.
21 | options:
22 | - label: I added a descriptive title
23 | required: true
24 | - label: I searched open reports and couldn't find a duplicate
25 | required: true
26 | - type: textarea
27 | id: what
28 | attributes:
29 | label: What happened?
30 | description: Mention here any typos, broken links, or missing, incomplete, or outdated information, etc. that you have noticed in the conda docs or CLI help.
31 | validations:
32 | required: true
33 | - type: textarea
34 | id: context
35 | attributes:
36 | label: Additional Context
37 | description: Include any additional information (or screenshots) that you think would be valuable.
38 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
3 |
4 | ### Description
5 |
6 |
10 |
11 | ### Checklist - did you ...
12 |
13 |
15 |
16 | - [ ] Add a file to the `news` directory ([using the template](../blob/main/news/TEMPLATE)) for the next release's release notes?
17 |
23 | - [ ] Add / update necessary tests?
24 | - [ ] Add / update outdated documentation?
25 |
26 |
36 |
--------------------------------------------------------------------------------
/conda_pack/scripts/windows/deactivate.bat:
--------------------------------------------------------------------------------
@REM @ symbols in this file indicate that output should not be printed.
@REM Setting it this way allows us to not touch the user's echo setting.
@REM For debugging, remove the @ on the section you need to study.

@REM If there's no active environment, there's nothing to do
@IF "%CONDA_PREFIX%" == "" @GOTO skipdeactivate

@REM Run any deactivate scripts shipped with the environment.
@REM Do this before running setlocal so that variables are cleared properly
@IF EXIST "%CONDA_PREFIX%\etc\conda\deactivate.d" (
    @PUSHD "%CONDA_PREFIX%\etc\conda\deactivate.d"
    @FOR %%g in (*.bat) DO @CALL "%%g"
    @POPD
)

@setlocal enabledelayedexpansion

@REM Remove path entries for this environment
@REM TARGETS lists every PATH component added by activate.bat; the leading
@REM ";" yields an empty first entry so splitting on ";" stays uniform.
@SET "TARGETS=;%CONDA_PREFIX%;%CONDA_PREFIX%\Library\mingw-w64\bin;%CONDA_PREFIX%\Library\usr\bin;%CONDA_PREFIX%\Library\bin;%CONDA_PREFIX%\Scripts"
@SET "NEW_PATH="
@REM Walk PATH one component at a time, keeping only non-environment entries.
@FOR %%i IN ("%PATH:;=";"%") DO @(CALL :filterPath "%%~i")

@REM Restore the command prompt
@SET "PROMPT=%_CONDA_PACK_OLD_PS1%"
@SET "CONDA_PS1_BACKUP="

@REM This persists env variables, which are otherwise local to this script right now.
@endlocal & (
    @REM Used for deactivate, to make sure we restore original state after deactivation
    @SET "_CONDA_PACK_OLD_PS1=%CONDA_PS1_BACKUP%"
    @SET "PROMPT=%PROMPT%"
    @SET "PATH=%NEW_PATH:~1%"
    @SET "CONDA_PREFIX="

)

:skipdeactivate
@EXIT /b

@REM :filterPath <entry> -- append <entry> to NEW_PATH unless it is empty or
@REM matches one of the TARGETS components (case-insensitive comparison).
:filterPath
@IF "%~1" == "" @GOTO :filterOut
@FOR %%j IN ("%TARGETS:;=";"%") DO @IF /i "%~1" == "%%~j" @GOTO :filterOut
@SET "NEW_PATH=%NEW_PATH%;%~1"
:filterOut
@EXIT /b
46 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Conda-Pack
2 |
3 | `conda-pack` is a command line tool for creating relocatable conda
4 | environments. This is useful for deploying code in a consistent environment,
5 | potentially in locations where python or conda isn't already installed.
6 |
7 | See the documentation
8 | for more information.
9 |
10 | Conda-pack is offered under a New BSD license; see the
11 | license file.
12 |
13 | ## Build status
14 |
15 | | [](https://github.com/conda/conda-pack/actions/workflows/main.yml) [](https://codecov.io/gh/conda/conda-pack) [](https://results.pre-commit.ci/latest/github/conda/conda-pack/main) | [](https://anaconda.org/ctools/conda-pack) |
16 | | --- | :-: |
17 | | [`conda install ctools/label/dev::conda-pack`](https://anaconda.org/ctools/conda-pack) | [](https://anaconda.org/ctools/conda-pack) |
18 | | [`conda install defaults::conda-pack`](https://anaconda.org/anaconda/conda-pack) | [](https://anaconda.org/anaconda/conda-pack) |
19 | | [`conda install conda-forge::conda-pack`](https://anaconda.org/conda-forge/conda-pack) | [](https://anaconda.org/conda-forge/conda-pack) |
20 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/epic.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Epic
3 | description: A collection of related tickets.
4 | labels:
5 | - epic
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | This form is intended for grouping and collecting together related tickets to better gauge the scope of a problem/feature.
11 |
12 | If you are attempting to report a bug, propose a new feature, or some other code change please use one of the other forms available.
13 |
14 | > [!NOTE]
15 | > Epics that are incomplete or missing information may be closed as inactionable.
16 |
17 | Since there are already a lot of open issues, please also take a moment to search existing ones to see if a similar epic has already been opened. If you find something related, please upvote that issue and provide additional details as necessary.
18 |
19 | 💐 Thank you for helping to make Conda better. We would be unable to improve Conda without our community!
20 | - type: checkboxes
21 | id: checks
22 | attributes:
23 | label: Checklist
24 | description: Please confirm and check all of the following options.
25 | options:
26 | - label: I added a descriptive title
27 | required: true
28 | - label: I searched open reports and couldn't find a duplicate
29 | required: true
30 | - type: textarea
31 | id: summary
32 | attributes:
33 | label: Summary
34 | description: >-
35 | Define the highlevel objectives to be accomplished in this epic. Include the
36 | bigger picture of what is changing and/or the user story for why the
37 | changes are desired/necessary.
38 | validations:
39 | required: true
40 | - type: textarea
41 | attributes:
42 | label: Linked Issues & PRs
43 | description: List all issues related to this epic.
44 | value: |
45 | - [ ] #
46 | validations:
47 | required: true
48 |
--------------------------------------------------------------------------------
/conda_pack/scripts/windows/activate.bat:
--------------------------------------------------------------------------------
@REM @ symbols in this file indicate that output should not be printed.
@REM Setting it this way allows us to not touch the user's echo setting.
@REM For debugging, remove the @ on the section you need to study.
@setlocal enabledelayedexpansion

@REM Resolve the absolute path of this script's directory, then of the
@REM environment root (its parent directory).
@CALL :NORMALIZEPATH "%~dp0"
@SET "SCRIPT_DIR=%RETVAL%"
@CALL :NORMALIZEPATH "%SCRIPT_DIR%\.."
@set "NEW_PREFIX=%RETVAL%"

@REM No environment active: nothing to deactivate. Already in this
@REM environment: nothing to do at all.
@if "%CONDA_PREFIX%" == "" @goto skipdeactivate
@if "%CONDA_PREFIX%" == "%NEW_PREFIX%" @exit /b

@REM If the current environment is a conda-pack environment, or a root environment
@REM
@if exist "%CONDA_PREFIX%\Scripts\deactivate.bat" @call "%CONDA_PREFIX%\Scripts\deactivate.bat"
@REM Newer versions of conda only have the deactivate script in the root environment
@if exist "%CONDA_PREFIX%\..\..\Scripts\deactivate.bat" @call "%CONDA_PREFIX%\..\..\Scripts\deactivate.bat"
:skipdeactivate

@REM Use the environment directory's basename as the display name.
@for /F "delims=" %%i in ("%NEW_PREFIX%") do @SET "ENV_NAME=%%~ni"

@REM take a snapshot of pristine state for later
@SET "CONDA_PS1_BACKUP=%PROMPT%"
@SET "PROMPT=(%ENV_NAME%) %PROMPT%"

@REM Prepend the environment's executable directories to PATH.
@SET "CONDA_PREFIX=%NEW_PREFIX%"
@SET "PATH=%NEW_PREFIX%;%NEW_PREFIX%\Library\mingw-w64\bin;%NEW_PREFIX%\Library\usr\bin;%NEW_PREFIX%\Library\bin;%NEW_PREFIX%\Scripts;%PATH%"

@REM This persists env variables, which are otherwise local to this script right now.
@endlocal & (
    @REM Used for deactivate, to make sure we restore original state after deactivation
    @SET "_CONDA_PACK_OLD_PS1=%CONDA_PS1_BACKUP%"
    @SET "PROMPT=%PROMPT%"
    @SET "PATH=%PATH%"
    @SET "CONDA_PREFIX=%CONDA_PREFIX%"

    @REM Run any activate scripts
    @IF EXIST "%CONDA_PREFIX%\etc\conda\activate.d" (
        @PUSHD "%CONDA_PREFIX%\etc\conda\activate.d"
        @FOR %%g in (*.bat) DO @CALL "%%g"
        @POPD
    )
)

:: ========== FUNCTIONS ==========
@EXIT /B

:: :NORMALIZEPATH <path> -- set RETVAL to the fully-qualified form of <path>.
:NORMALIZEPATH
@SET RETVAL=%~dpfn1
@EXIT /B
52 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/1_feature.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature Request
3 | description: Create a feature request.
4 | labels:
5 | - type::feature
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Because processing new feature requests is time-consuming, we would like to ask you to fill out the following form to the best of your ability and as completely as possible.
11 |
12 | > [!NOTE]
13 | > Feature requests that are incomplete or missing information may be closed as inactionable.
14 |
15 | Since there are already a lot of open issues, please also take a moment to search existing ones to see if your feature request has already been submitted. If you find something related, please upvote that issue and provide additional details as necessary.
16 |
17 | 💐 Thank you for helping to make Conda better. We would be unable to improve Conda without our community!
18 | - type: checkboxes
19 | id: checks
20 | attributes:
21 | label: Checklist
22 | description: Please confirm and check all of the following options.
23 | options:
24 | - label: I added a descriptive title
25 | required: true
26 | - label: I searched open requests and couldn't find a duplicate
27 | required: true
28 | - type: textarea
29 | id: idea
30 | attributes:
31 | label: What is the idea?
32 | description: Describe what the feature is and the desired state.
33 | validations:
34 | required: true
35 | - type: textarea
36 | id: why
37 | attributes:
38 | label: Why is this needed?
39 | description: Who would benefit from this feature? Why would this add value to them? What problem does this solve?
40 | - type: textarea
41 | id: what
42 | attributes:
43 | label: What should happen?
44 | description: What should be the user experience with the feature? Describe from a user perspective what they would do and see.
45 | - type: textarea
46 | id: context
47 | attributes:
48 | label: Additional Context
49 | description: Include any additional information that you think would be valuable.
50 |
--------------------------------------------------------------------------------
/.github/workflows/lock.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Lock
3 |
4 | on:
5 | # NOTE: github.event is workflow_dispatch payload:
6 | # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch
7 | workflow_dispatch:
8 |
9 | schedule:
10 | - cron: 0 6 * * *
11 |
12 | permissions:
13 | issues: write
14 | pull-requests: write
15 |
16 | jobs:
17 | lock:
18 | if: '!github.event.repository.fork'
19 | runs-on: ubuntu-latest
20 | steps:
21 | - uses: dessant/lock-threads@v5
22 | with:
23 | # Number of days of inactivity before a closed issue is locked
24 | issue-inactive-days: 365
25 | # Do not lock issues created before a given timestamp, value must follow ISO 8601
26 | exclude-issue-created-before: ''
27 | # Do not lock issues with these labels, value must be a comma separated list of labels or ''
28 | exclude-any-issue-labels: ''
29 | # Labels to add before locking an issue, value must be a comma separated list of labels or ''
30 | add-issue-labels: locked
31 | # Reason for locking an issue, value must be one of resolved, off-topic, too heated, spam or ''
32 | issue-lock-reason: resolved
33 |
34 | # Number of days of inactivity before a closed pull request is locked
35 | pr-inactive-days: 365
36 | # Do not lock pull requests created before a given timestamp, value must follow ISO 8601
37 | exclude-pr-created-before: ''
38 | # Do not lock pull requests with these labels, value must be a comma separated list of labels or ''
39 | exclude-any-pr-labels: ''
40 | # Labels to add before locking a pull request, value must be a comma separated list of labels or ''
41 | add-pr-labels: locked
42 | # Reason for locking a pull request, value must be one of resolved, off-topic, too heated, spam or ''
43 | pr-lock-reason: resolved
44 |
45 | # Limit locking to issues, pull requests or discussions, value must be a comma separated list of issues, prs, discussions or ''
46 | process-only: issues, prs
47 |
--------------------------------------------------------------------------------
/conda_pack/scripts/posix/activate:
--------------------------------------------------------------------------------
# Activate the conda-pack environment containing this script. Must be
# sourced (not executed) so the environment changes persist in the caller.
_conda_pack_activate() {
    # Work out which shell is sourcing us; the method for locating the
    # sourced script's path differs per shell.
    local _CONDA_SHELL_FLAVOR
    if [ -n "${BASH_VERSION:+x}" ]; then
        _CONDA_SHELL_FLAVOR=bash
    elif [ -n "${ZSH_VERSION:+x}" ]; then
        _CONDA_SHELL_FLAVOR=zsh
    elif [ -n "${KSH_VERSION:+x}" ]; then
        _CONDA_SHELL_FLAVOR=ksh
    elif [ -n "${POSH_VERSION:+x}" ]; then
        _CONDA_SHELL_FLAVOR=posh
    else
        # https://unix.stackexchange.com/a/120138/92065
        local _q="$(ps -p$$ -o cmd="",comm="",fname="" 2>/dev/null | sed 's/^-//' | grep -oE '\w+' | head -n1)"
        if [ "$_q" = dash ]; then
            _CONDA_SHELL_FLAVOR=dash
        else
            (>&2 echo "Unrecognized shell.")
            return 1
        fi
    fi

    # https://unix.stackexchange.com/questions/4650/determining-path-to-sourced-shell-script/
    local script_dir
    case "$_CONDA_SHELL_FLAVOR" in
        bash) script_dir="$(dirname "${BASH_SOURCE[0]}")";;
        zsh) script_dir="$(dirname "${(%):-%x}")";; # http://stackoverflow.com/a/28336473/2127762
        dash) x=$(lsof -p $$ -Fn0 | tail -1); script_dir="$(dirname "${x#*n}")";;
        *) script_dir="$(cd "$(dirname "$_")" && echo "$PWD")";;
    esac

    # The environment root is the parent of the directory holding this script.
    local full_path_script_dir="$(cd "${script_dir}" > /dev/null && pwd)"
    local full_path_env="$(dirname "$full_path_script_dir")"
    local env_name="$(basename "$full_path_env")"

    # If there's already a source env
    if [ -n "$CONDA_PREFIX" ]; then
        # If the source env differs from this env
        if [ "$CONDA_PREFIX" != "$full_path_env" ]; then
            # Check whether deactivate is a function or executable
            type deactivate >/dev/null 2>/dev/null
            if [ $? -eq 0 ]; then
                . deactivate >/dev/null 2>/dev/null
            fi
        else
            return 0 # nothing to do
        fi
    fi
    # Point CONDA_PREFIX at this environment, save the old prompt, and put
    # the environment's bin directory first on PATH.
    export CONDA_PREFIX="$full_path_env"
    export _CONDA_PACK_OLD_PS1="$PS1"
    PATH="$full_path_env/bin:$PATH"
    PS1="($env_name) $PS1"

    # Clear the shell's command lookup cache so the new PATH takes effect.
    case "$_CONDA_SHELL_FLAVOR" in
        zsh) rehash;;
        posh) ;;
        *) hash -r;;
    esac

    # Run the activate scripts
    local _script_dir="${full_path_env}/etc/conda/activate.d"
    if [ -d "$_script_dir" ] && [ -n "$(ls -A "$_script_dir")" ]; then
        local _path
        for _path in "$_script_dir"/*.sh; do
            . "$_path"
        done
    fi
}

_conda_pack_activate
70 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/0_bug.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug Report
3 | description: Create a bug report.
4 | labels:
5 | - type::bug
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Because processing new bug reports is time-consuming, we would like to ask you to fill out the following form to the best of your ability and as completely as possible.
11 |
12 | > [!NOTE]
13 | > Bug reports that are incomplete or missing information may be closed as inactionable.
14 |
15 | Since there are already a lot of open issues, please also take a moment to search existing ones to see if your bug has already been reported. If you find something related, please upvote that issue and provide additional details as necessary.
16 |
17 | 💐 Thank you for helping to make Conda better. We would be unable to improve Conda without our community!
18 | - type: checkboxes
19 | id: checks
20 | attributes:
21 | label: Checklist
22 | description: Please confirm and check all of the following options.
23 | options:
24 | - label: I added a descriptive title
25 | required: true
26 | - label: I searched open reports and couldn't find a duplicate
27 | required: true
28 | - type: textarea
29 | id: what
30 | attributes:
31 | label: What happened?
32 | description: What should have happened instead? Please provide as many details as possible. The more information provided, the more likely we are able to replicate your problem and offer a solution.
33 | validations:
34 | required: true
35 | - type: textarea
36 | id: info
37 | attributes:
38 | label: Conda Info
39 | description: |
40 | Let's collect some basic information about your conda install.
41 |
42 | Please run the following command from your command line and paste the output below.
43 |
44 | ```bash
45 | conda info
46 | ```
47 | render: shell
48 | - type: textarea
49 | id: config
50 | attributes:
51 | label: Conda Config
52 | description: |
53 | Let's collect any customizations you may have for your conda install.
54 |
55 | Please run the following command from your command line and paste the output below.
56 |
57 | ```bash
58 | conda config --show-sources
59 | ```
60 | render: shell
61 | - type: textarea
62 | id: list
63 | attributes:
64 | label: Conda list
65 | description: |
66 | The packages installed into your environment can offer clues as to the problem you are facing.
67 |
68 | Please activate the environment within which you are encountering this bug, run the following command from your command line, and paste the output below.
69 |
70 | ```bash
71 | conda list --show-channel-urls
72 | ```
73 | render: shell
74 | - type: textarea
75 | id: context
76 | attributes:
77 | label: Additional Context
78 | description: Include any additional information (or screenshots) that you think would be valuable.
79 |
--------------------------------------------------------------------------------
/conda_pack/_progress.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import threading
3 | import time
4 | from timeit import default_timer
5 |
6 |
def format_time(t):
    """Format seconds into a human readable form.

    >>> format_time(10.4)
    '10.4s'
    >>> format_time(1000.4)
    '16min 40.4s'
    """
    # Break the total down into hours, minutes, and remaining seconds.
    minutes, seconds = divmod(t, 60)
    hours, minutes = divmod(minutes, 60)
    if hours:
        return f"{hours:2.0f}hr {minutes:2.0f}min {seconds:4.1f}s"
    if minutes:
        return f"{minutes:2.0f}min {seconds:4.1f}s"
    return f"{seconds:4.1f}s"
23 |
24 |
class progressbar:
    """A simple text progress bar for sized iterables.

    Wraps an iterable and, while used as a context manager, renders a
    bar tracking how far iteration has advanced.

    Parameters
    ----------
    iterable : iterable
        The object to iterate over; must support ``len``.
    width : int, optional
        Width of the bar in characters.
    enabled : bool, optional
        Whether to log progress. Useful for turning off progress reports
        without changing your code. Default is True.
    file : file, optional
        Where to log progress. Default is ``sys.stdout``.

    Example
    -------
    >>> with progressbar(iterable) as itbl: # doctest: +SKIP
    ...     for i in itbl:
    ...         do_stuff(i)
    [########################################] | 100% Completed | 5.2 s
    """

    def __init__(self, iterable, width=40, enabled=True, file=None):
        self._iterable = iterable
        self._completed = 0
        # Reserve one extra step so the bar only hits 100% once __exit__
        # confirms the whole loop finished cleanly.
        self._total = len(iterable) + 1
        self._width = width
        self._enabled = enabled
        self._file = file if file is not None else sys.stdout

    def __enter__(self):
        # When disabled, entering is a no-op; no thread is spawned.
        if self._enabled:
            self._start_time = default_timer()
            # Daemon thread repaints the bar periodically in the background.
            self._active = True
            self._thread = threading.Thread(target=self._tick)
            self._thread.daemon = True
            self._thread.start()
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        if not self._enabled:
            return
        # Stop the repaint thread before the final draw.
        self._active = False
        self._thread.join()
        if exc_type is None:  # Count the final step only on clean exit
            self._completed += 1
        self._draw()
        self._file.write('\n')
        self._file.flush()

    def __iter__(self):
        # Pass items through, counting each one for the display.
        for item in self._iterable:
            self._completed += 1
            yield item

    def _tick(self):
        # Background loop: redraw roughly ten times per second.
        while self._active:
            self._draw()
            time.sleep(0.1)

    def _draw(self):
        # Render the bar in place using a carriage return.
        elapsed = format_time(default_timer() - self._start_time)
        frac = self._completed / self._total if self._total else 1
        filled = '#' * int(self._width * frac)
        msg = '\r[{0:<{1}}] | {2}% Completed | {3}'.format(
            filled, self._width, int(100 * frac), elapsed)
        try:
            # The stream may already be closed during interpreter teardown.
            self._file.write(msg)
            self._file.flush()
        except ValueError:
            pass
100 |
--------------------------------------------------------------------------------
/docs/source/parcel.rst:
--------------------------------------------------------------------------------
1 | Parcels
2 | =======
3 |
4 | Conda-pack has recently been enhanced with the ability to generate
5 | `parcels `_
6 | for use on Cloudera Hadoop clusters. The file formats for parcels and
7 | conda-packs are nearly identical. In fact, it was possible even with
8 | older versions of conda-pack to build parcels as follows:
9 |
10 | 1. Add two unmanaged files to the environment:
11 |
12 | - ``meta/parcel.json``:, a file of parcel-specific metadata
13 | - ``meta/conda_env.sh``: a simple activation script. The filename
14 | is flexible, and is recorded in the metadata file.
15 |
16 | 2. Pack the environment with specifically chosen values of
17 | ``--arcroot``, ``--dest-prefix``, and ``--output``.
18 |
19 | The latest version of conda-pack has been enhanced to eliminate the manual
20 | aspects of this work. The key is the introduction of a ``--format parcel``
21 | option and four parcel-specific options:
22 |
23 | - ``--parcel-name``: the base name of the parcel
24 |
25 | By default, this value will be taken from the basename of the selected environment
26 | directory. Parcel names may not have dashes ``-``, however, so if the name of the
27 | environment contains a dash, use this option to provide a compliant alternative.
28 | - ``--parcel-version``: the version of the parcel
29 |
30 | This is generally expected to follow a standard `semver `_
31 | format. If not supplied, conda-pack will autogenerate one from today's date in
32 | ``YYYY.MM.DD`` format.
33 | - ``--parcel-distribution``: the target distribution for the parcel.
34 |
35 | This is an abbreviation describing the specific operating system on which
  your Cloudera cluster runs. Its default value is ``el7``, corresponding
37 | to RHEL7/CentOS7. Other common values include ``el6``, ``sles12``, ``bionic``,
38 | and ``xenial``.
39 |
40 | - ``--parcel-root``: the location where parcels are unpacked on the cluster
41 |
42 | The default value of this location is ``/opt/cloudera/parcels``. Unless your
43 | cluster manager has modified this default, there should be no need to change
44 | this, but it is essential that this matches your configuration.
45 |
46 | In many cases, it will not be necessary to override any of these options,
47 | because conda-pack provides sensible defaults. Given these values, conda-pack
48 | generates values for the following internal options:
49 |
50 | - ``arcroot``: ``{parcel_name}-{parcel_version}``
51 | - ``dest_prefix``: ``{parcel_root}/{parcel_name}-{parcel_version}``
52 | - ``output`` (filename): ``{parcel_name}-{parcel_version}-{parcel_distro}.parcel``
53 |
54 | Conda-pack will exit with an error if you attempt to override the ``dest-prefix``
55 | or ``arcroot`` options. We recommend against overriding the ``output`` option,
56 | but conda-pack does not prevent this.
57 |
58 | Example
59 | -------
60 |
61 | Create an environment:
62 |
63 | .. code-block:: bash
64 |
65 | $ conda create -y -n example python=3.5 numpy pandas scikit-learn
66 |
67 |
68 | Package the environment into a parcel:
69 |
70 | .. code-block:: bash
71 |
72 | $ conda pack -n example --format parcel --parcel-name=sklearn
73 | Collecting packages...
74 | Packing environment at '/Users/mgrant/miniconda3/envs/example' to 'sklearn-2020.09.15-el7.parcel'
75 | [########################################] | 100% Completed | 9.8s
76 |
--------------------------------------------------------------------------------
/testing/setup_envs.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Build the suite of conda environments used by conda-pack's test suite.
#
# Usage: setup_envs.sh [purge-packages]
#   purge-packages  Additionally run `conda clean -apfy` midway through; only
#                   pass this when you accept the side effects on your system.
#
# The script is idempotent: if the final environment (activate_scripts)
# already exists, it prints diagnostics and exits early.

set -Eeo pipefail

echo Setting up environments for testing

CONDA_CLEAN_P=$1

# GitHub action specific items. These are no-ops locally
[ "$RUNNER_OS" == "Windows" ] && CONDA_EXE="$CONDA/Scripts/conda.exe"
[ "$RUNNER_OS" == "macOS" ] && export CONDA_PKGS_DIRS=~/.pkgs

# Resolve paths relative to this script's own directory, not the CWD.
cwd=$(cd $(dirname ${BASH_SOURCE[0]}) && pwd)
ymls=$cwd/env_yamls
# Honor an externally supplied CONDA_ROOT (e.g. from CI); default to ./conda.
if [[ "$CONDA_ROOT" != "" ]]; then
    mkdir -p $CONDA_ROOT
    croot=$(cd $CONDA_ROOT && pwd)
else
    croot=$cwd/conda
fi
envs=$croot/envs

# Bootstrap a dedicated conda installation if one is not already present
# (conda-meta marks a valid conda prefix).
if [ ! -d $croot/conda-meta ]; then
    ${CONDA_EXE:-conda} create -y -p $croot conda python=3.7
fi

source $croot/etc/profile.d/conda.sh
# Keep the package cache local to this testbed.
export CONDA_PKGS_DIRS=$croot/pkgs

# Fast exit: activate_scripts is the last env created below, so its presence
# means the whole testbed was already built (e.g. restored from a CI cache).
if [ -d $croot/envs/activate_scripts/conda-meta ]; then
    conda info
    ls -l $croot/envs
    exit 0
fi

mkdir -p $envs
# Make sure the local package cache is used
rm -rf $croot/pkgs

echo Creating py37 environment
env=$envs/py37
conda env create -f $ymls/py37.yml -p $env
# Create unmanaged conda-related files for conda-pack to remove
# (python.exe presence distinguishes a Windows-layout prefix).
if [ -f $env/python.exe ]; then
    touch $env/Scripts/activate
    touch $env/Scripts/activate.bat
    touch $env/Scripts/deactivate
    touch $env/Scripts/deactivate.bat
    touch $env/Scripts/conda
    touch $env/Scripts/conda.bat
else
    touch $env/bin/activate
    touch $env/bin/deactivate
    touch $env/bin/conda
fi

# Environment with deliberately deleted files, for missing-file detection tests.
echo Creating py37_missing_files environment
env=$envs/py37_missing_files
conda env create -f $ymls/py37.yml -p $env
if [ -f $env/python.exe ]; then
    rm $env/lib/site-packages/toolz/*.py
else
    rm $env/lib/python3.7/site-packages/toolz/*.py
fi

# Only do this when the developer has agreed to it, this might otherwise break things in their system.
if [[ "$CONDA_CLEAN_P" == "purge-packages" ]]; then
    conda clean -apfy
fi

echo Creating py310 environment
env=$envs/py310
conda env create -f $ymls/py310.yml -p $env
# Remove this package from the cache for testing
rm -rf $croot/pkgs/conda_pack_test_lib2*py310*

# Environment containing a pip "editable" (develop-mode) install.
echo Creating py37_editable environment
env=$envs/py37_editable
conda env create -f $ymls/py37.yml -p $env
pushd $cwd/test_packages/conda_pack_test_lib1
if [ -f $env/python.exe ]; then
    $env/python.exe setup.py develop
else
    $env/bin/python setup.py develop
fi
popd

echo Creating py37_broken environment
env=$envs/py37_broken
conda env create -f $ymls/py37_broken.yml -p $env

echo Creating nopython environment
env=$envs/nopython
conda env create -f $ymls/nopython.yml -p $env

echo Creating conda environment
env=$envs/has_conda
conda env create -f $ymls/has_conda.yml -p $env

# Environment with custom (de)activation hooks; copy the platform-appropriate
# scripts into the conda hook directories.
echo Creating activate_scripts environment
env=$envs/activate_scripts
conda env create -f $ymls/activate_scripts.yml -p $env
mkdir -p $env/etc/conda/activate.d $env/etc/conda/deactivate.d
if [ -f $env/python.exe ]; then
    cp $cwd/extra_scripts/conda_pack_test_activate.bat $env/etc/conda/activate.d
    cp $cwd/extra_scripts/conda_pack_test_deactivate.bat $env/etc/conda/deactivate.d
else
    cp $cwd/extra_scripts/conda_pack_test_activate.sh $env/etc/conda/activate.d
    cp $cwd/extra_scripts/conda_pack_test_deactivate.sh $env/etc/conda/deactivate.d
fi

# Drop the downloaded package archives; the extracted cache entries remain.
rm -f $croot/pkgs/{*.tar.bz2,*.conda}
conda info
ls -l $croot/envs
--------------------------------------------------------------------------------
/docs/source/unix-binary.rst:
--------------------------------------------------------------------------------
1 | Bundle as Single Executable
2 | ===========================
3 |
4 | ``conda-pack`` can be used to distribute conda environments as executable shell
5 | scripts for Linux and macOS.
6 |
7 |
8 | Packaging a Simple Binary
9 | -------------------------
10 |
11 | We will package the `normaliz `_ binary in
12 | this example. It provides a command line tool which is compiled from C++ code.
13 |
14 | Create an environment and `conda pack` it:
15 |
16 | .. code-block:: bash
17 |
18 | $ conda create -y -n normaliz normaliz=3.8.5
19 | $ conda pack -n normaliz
20 |
21 | Add an entrypoint that activates the environment and starts normaliz:
22 |
23 | .. code-block:: bash
24 |
25 | $ mkdir pack
26 | $ tar -zxf normaliz.tar.gz -C pack
27 | $ cat > pack/entrypoint.sh <<- 'EOF'
28 | #!/bin/sh
29 | source bin/activate
30 | conda-unpack
31 | exec bin/normaliz $@
32 | EOF
33 | $ chmod +x pack/entrypoint.sh
34 |
35 | Optional: reduce the size by removing files that are not needed here:
36 |
37 | .. code-block:: bash
38 |
39 | $ rm -rf pack/lib/*.a pack/usr/share pack/usr/include
40 | $ find pack/lib -name '*.dylib' -type f -exec strip -S \{\} \; # macOS
41 | $ find pack/lib -name '*.so' -type f -exec strip --strip-unneeded \{\} \; # Linux
42 |
43 | Pack everything into a single shell script with `makeself `_:
44 |
45 | .. code-block:: bash
46 |
47 | $ conda install -y conda-forge::makeself
48 | $ makeself pack/ normaliz.run Normaliz ./entrypoint.sh
49 |
50 | The shell script `normaliz.run` now works on other machines with the same platform. Note that arguments to `bin/normaliz`
51 | need to be given after an initial `--` since earlier arguments are consumed by makeself:
52 |
53 | .. code-block:: bash
54 |
55 | $ ./normaliz.run -- --version
56 | Normaliz 3.8.5
57 |
58 |
59 | Packaging a Complex Environment
60 | -------------------------------
61 |
62 | Complex environments can be packaged in the same way. Here we package
63 | the computer algebra system `SageMath `_ which comes with a
64 | Jupyter notebook interface:
65 |
66 | .. code-block:: bash
67 |
68 | $ conda create -y -n sagemath sage=9.2
69 | $ conda pack -n sagemath
70 | $ mkdir pack
71 | $ tar -zxf sagemath.tar.gz -C pack
72 | $ cat > pack/entrypoint.sh <<- 'EOF'
73 | #!/bin/sh
74 | source bin/activate
75 | conda-unpack
76 | exec bin/sage --notebook=jupyter $@
77 | EOF
78 | $ chmod +x pack/entrypoint.sh
79 | $ makeself pack/ sagemath.run SageMath ./entrypoint.sh
80 | $ ./sagemath.run # opens a browser with Jupyter running SageMath
81 |
82 | The above creates a huge bundle that takes a long time to pack and unpack (and
83 | might exceed the available space in your `/tmp`). This can be sped up by
84 | reducing the level of compression and by uncompressing things permanently:
85 |
86 | .. code-block:: bash
87 |
88 | $ cat > pack/unpack.sh <<- 'EOF'
89 | #!/bin/sh
90 | source bin/activate
91 | conda-unpack
92 | EOF
93 | $ chmod +x pack/unpack.sh
94 | $ cat > pack/sagemath.run <<- 'EOF'
95 | #!/bin/sh
96 | dir=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)
97 | cd "$dir"
98 | ./bin/sage --notebook=jupyter $@
99 | EOF
100 | $ chmod +x pack/sagemath.run
101 | $ mkdir tmp
102 | $ TMPDIR=tmp/ makeself --complevel 6 --target ./sagemath-9.2 pack/ sagemath.install SageMath ./unpack.sh
103 |
104 | The resulting shell script unpacks the environment into `./sagemath-9.2`.
105 | This location can be overridden with the `--target` parameter:
106 |
107 | .. code-block:: bash
108 |
109 | $ ./sagemath.install
110 | $ ./sagemath-9.2/sagemath.run # opens a browser with Jupyter running SageMath
111 |
--------------------------------------------------------------------------------
/.github/workflows/stale.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Stale
3 |
4 | on:
5 | # NOTE: github.event is workflow_dispatch payload:
6 | # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch
7 | workflow_dispatch:
8 | inputs:
9 | dryrun:
10 | description: 'dryrun: Preview stale issues/prs without marking them (true|false)'
11 | required: true
12 | type: boolean
13 | default: true
14 |
15 | schedule:
16 | - cron: 0 4 * * *
17 |
18 | permissions:
19 | issues: write
20 | pull-requests: write
21 |
22 | jobs:
23 | stale:
24 | if: '!github.event.repository.fork'
25 | runs-on: ubuntu-latest
26 | strategy:
27 | matrix:
28 | include:
29 | - only-issue-labels: ''
30 | days-before-issue-stale: 365
31 | days-before-issue-close: 30
32 | # [type::support] issues have a more aggressive stale/close timeline
33 | - only-issue-labels: type::support
34 | days-before-issue-stale: 90
35 | days-before-issue-close: 21
36 | steps:
37 | - uses: conda/actions/read-yaml@v24.2.0
38 | id: read_yaml
39 | with:
40 | path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml
41 |
42 | - uses: actions/stale@v9
43 | id: stale
44 | with:
45 | # Only issues with these labels are checked whether they are stale
46 | only-issue-labels: ${{ matrix.only-issue-labels }}
47 |
48 | # Idle number of days before marking issues stale
49 | days-before-issue-stale: ${{ matrix.days-before-issue-stale }}
50 | # Idle number of days before closing stale issues/PRs
51 | days-before-issue-close: ${{ matrix.days-before-issue-close }}
52 | # Idle number of days before marking PRs stale
53 | days-before-pr-stale: 365
54 | # Idle number of days before closing stale PRs
55 | days-before-pr-close: 30
56 |
57 | # Comment on the staled issues
58 | stale-issue-message: ${{ fromJSON(steps.read_yaml.outputs.value)['stale-issue'] }}
59 | # Label to apply on staled issues
60 | stale-issue-label: stale
61 | # Label to apply on closed issues
62 | close-issue-label: stale::closed
63 | # Reason to use when closing issues
64 | close-issue-reason: not_planned
65 |
66 | # Comment on the staled PRs
67 | stale-pr-message: ${{ fromJSON(steps.read_yaml.outputs.value)['stale-pr'] }}
68 | # Label to apply on staled PRs
69 | stale-pr-label: stale
70 | # Label to apply on closed PRs
71 | close-pr-label: stale::closed
72 |
73 | # Remove stale label from issues/PRs on updates/comments
74 | remove-stale-when-updated: true
75 | # Add specified labels to issues/PRs when they become unstale
76 | labels-to-add-when-unstale: stale::recovered
77 | # Remove specified labels to issues/PRs when they become unstale
78 | labels-to-remove-when-unstale: stale,stale::closed
79 |
80 | # Max number of operations per run
81 | operations-per-run: ${{ secrets.STALE_OPERATIONS_PER_RUN || 100 }}
82 | # Dry-run
83 | debug-only: ${{ github.event.inputs.dryrun || false }}
84 | # Order to get issues/PRs
85 | ascending: true
86 | # Delete branch after closing a stale PR
87 | delete-branch: false
88 |
89 | # Issues with these labels will never be considered stale
90 | exempt-issue-labels: stale::recovered,epic
92 | # PRs with these labels will never be considered stale
92 | exempt-pr-labels: stale::recovered,epic
93 | # Exempt all issues/PRs with milestones from stale
94 | exempt-all-milestones: true
95 | # Assignees on issues/PRs exempted from stale
96 | exempt-assignees: mingwandroid
97 |
98 | - name: Print outputs
99 | run: echo ${{ join(steps.stale.outputs.*, ',') }}
100 |
--------------------------------------------------------------------------------
/conda_pack/tests/test_cli.py:
--------------------------------------------------------------------------------
1 | import os
2 | import signal
3 | import tarfile
4 | import time
5 | from threading import Thread
6 |
7 | import pytest
8 |
9 | import conda_pack
10 | from conda_pack.cli import main
11 | from conda_pack.compat import on_win
12 |
13 | from .conftest import py37_path, py310_path
14 |
15 |
def test_help(capsys):
    """The ``-h`` flag prints a usage message to stdout and exits cleanly."""
    with pytest.raises(SystemExit) as exc_info:
        main(["-h"])

    assert exc_info.value.code == 0

    captured = capsys.readouterr()
    assert not captured.err
    assert 'usage: conda-pack' in captured.out
25 |
26 |
def test_version(capsys):
    """``--version`` exits 0 and reports the installed conda-pack version."""
    with pytest.raises(SystemExit) as exc_info:
        main(["--version"])

    assert exc_info.value.code == 0

    captured = capsys.readouterr()
    assert not captured.err
    assert conda_pack.__version__ in captured.out
36 |
37 |
def test_parse_include_exclude():
    """Repeated --include/--exclude flags are collected, in order, as filters."""
    captured = {}

    def record(**kwargs):
        # Stand-in for conda_pack.pack; just remember what the CLI passed.
        captured.update(kwargs)

    with pytest.raises(SystemExit) as exc_info:
        main(["--exclude", "foo/*",
              "--include", "*.py",
              "--include", "*.pyx",
              "--exclude", "foo/bar/*.pyx"],
             pack=record)

    assert exc_info.value.code == 0

    expected = [("exclude", "foo/*"),
                ("include", "*.py"),
                ("include", "*.pyx"),
                ("exclude", "foo/bar/*.pyx")]
    assert captured['filters'] == expected
57 |
58 |
def test_cli_roundtrip(capsys, tmpdir):
    """Packing an env via the CLI writes a valid tarball and a full progress bar."""
    out_path = os.path.join(str(tmpdir), 'py37.tar')

    with pytest.raises(SystemExit) as exc:
        main(["-p", py37_path, "-o", out_path])

    assert exc.value.code == 0

    assert os.path.exists(out_path)
    assert tarfile.is_tarfile(out_path)

    out, err = capsys.readouterr()
    assert not err

    # Inspect the last carriage-return-delimited progress line, which has the
    # shape "[####...####] | 100% Completed | 9.8s".
    # NOTE: the elapsed-time field was previously bound to a local named
    # `time`, shadowing the `time` module imported at the top of this file.
    bar, percent, elapsed = (i.strip() for i in out.split("\r")[-1].split("|"))
    assert bar == "[" + "#" * 40 + "]"
    assert percent == "100% Completed"
    assert elapsed
77 |
78 |
def test_quiet(capsys, tmpdir):
    """With ``-q`` the CLI packs successfully while printing nothing at all."""
    target = os.path.join(str(tmpdir), 'py37.tar')

    with pytest.raises(SystemExit) as exc_info:
        main(["-p", py37_path, "-o", target, "-q"])

    assert exc_info.value.code == 0
    assert os.path.exists(target)
    assert tarfile.is_tarfile(target)

    captured = capsys.readouterr()
    assert not captured.err
    assert not captured.out
93 |
94 |
def test_cli_exceptions(capsys):
    """CLI failures exit nonzero and report the problem on stderr."""
    # A nonexistent environment path surfaces as a CondaPackError message.
    with pytest.raises(SystemExit) as exc_info:
        main(["-p", "not_a_real_path"])

    assert exc_info.value.code == 1

    _, err = capsys.readouterr()
    assert "CondaPackError: Environment path" in err

    # Unrecognized flags trigger argparse's usage message on stderr.
    with pytest.raises(SystemExit) as exc_info:
        main(["-foo", "-bar"])

    assert exc_info.value.code != 0

    out, err = capsys.readouterr()
    assert not out
    assert "usage: conda-pack" in err
112 |
113 |
def test_cli_warnings(capsys, tmpdir):
    """Packing an env with conda-managed leftovers warns on stderr but succeeds."""
    target = os.path.join(str(tmpdir), 'py310.tar')

    with pytest.raises(SystemExit) as exc_info:
        main(["-p", py310_path, "-o", target])

    assert exc_info.value.code == 0
    assert os.path.exists(target)
    assert tarfile.is_tarfile(target)

    _, err = capsys.readouterr()
    assert "Conda-managed packages were found" in err
    assert "UserWarning" not in err  # printed, not from python warning
128 |
129 |
@pytest.mark.skipif(on_win, reason='SIGINT terminates the tests on Windows')
def test_keyboard_interrupt(capsys, tmpdir):
    """Ctrl-C during packing exits with code 1 and removes the partial output."""
    def interrupt():
        # Let main() get started before delivering SIGINT to this process;
        # the signal raises KeyboardInterrupt in the main thread mid-pack.
        time.sleep(0.2)
        os.kill(os.getpid(), signal.SIGINT)

    interrupter = Thread(target=interrupt)

    out_path = os.path.join(str(tmpdir), 'py37.tar')
    try:
        with pytest.raises(SystemExit) as exc:
            interrupter.start()
            main(["-p", py37_path, "-o", out_path])
    except KeyboardInterrupt:
        # The CLI is expected to convert KeyboardInterrupt into SystemExit;
        # if the interrupt escapes to here, that handling is broken.
        assert False, "Should have been caught by the CLI"

    assert exc.value.code == 1
    out, err = capsys.readouterr()
    assert err == 'Interrupted\n'
    # The partially written archive must be cleaned up after the interrupt.
    assert not os.path.exists(out_path)
150 |
--------------------------------------------------------------------------------
/docs/source/squashfs.rst:
--------------------------------------------------------------------------------
1 | SquashFS
2 | ========
3 |
4 | ``conda-pack`` can package environments into
5 | `SquashFS `_, a compressed, read-only Linux filesystem.
6 | These filesystems can then be mounted directly, without decompressing them first.
7 | This allows using packed environments immediately and without consuming disk space.
8 |
9 | Packing
10 | -------
11 | Packing environments into SquashFS works on MacOS and Linux.
12 | You will need to install `squashfs-tools `_, more specifically
13 | the ``mksquashfs`` command.
14 | On Ubuntu run ``apt-get install squashfs-tools``,
15 | on MacOS ``brew install squashfs`` or alternatively (Linux+MacOS) install from conda-forge through
16 | ``conda install -c conda-forge squashfs-tools``.
17 |
18 | Mounting
19 | --------
20 | Mounting SquashFS environments is only possible on MacOS and Linux.
21 |
22 | On Linux there are two ways:
23 |
24 | - Mounting directly: Since SquashFS is part of the Linux kernel, it can be mounted using
25 | ``mount -t squashfs ``. This will require root or ``CAP_SYS_ADMIN``.
26 | - Mounting as `Filesystem in Userspace (FUSE) `_:
27 | This can be done by installing `squashfuse `_, for example through
28 | ``apt-get install squashfuse`` (Ubuntu), ``conda install -c conda-forge squashfuse`` or from source.
29 | Contrary to the Kernel-version of SquashFS, ``squashfuse`` doesn't require root permissions to run. ``squashfuse_ll``
30 | comes packaged with ``squashfuse`` and is often faster.
31 |
32 | On Mac only the FUSE option is available:
33 |
34 | - First install `macFUSE `_, eg via ``brew install --cask macfuse``.
35 | - Then install ``squashfuse``, ideally from `source `_.
36 |
37 | Python Example
38 | --------------
39 |
40 | Create an environment:
41 |
42 | .. code-block:: bash
43 |
44 | $ conda create -y -n example python=3.9 numpy pandas scikit-learn
45 |
46 | Pack the environment into SquashFS:
47 |
48 | .. code-block:: bash
49 |
50 | $ conda pack -n example --format squashfs --n-threads 4
51 |
52 | Create a directory to mount to:
53 |
54 | .. code-block:: bash
55 |
56 | $ mkdir env_mountpoint
57 |
58 |
59 | Option 1 (Linux + MacOS): Mount the environment using squashfuse:
60 |
61 | .. code-block:: bash
62 |
63 | $ squashfuse example.squashfs env_mountpoint
64 |
65 | Option 2 (Linux): Mount the environment using ``mount``:
66 |
67 | .. code-block:: bash
68 |
69 | $ sudo mount -t squashfs example.squashfs env_mountpoint
70 |
71 | Compression options
72 | -------------------
73 |
74 | Compression can be specified through ``--compress-level``.
75 | Default is level 4, which will use ``zstd`` compression.
76 |
77 | When selecting a compression option for packing, keep in mind the kernel or ``squashfuse`` version on the target system.
78 | For older systems, make sure that SquashFS or ``squashfuse`` support ``zstd`` compression
79 | (Linux kernel version ``>=4.14`` or ``squashfuse >= 0.1.101``).
80 | If ``zstd`` isn't supported on the target, you can always compress with ``xz`` instead.
81 |
82 |
83 | - 0: no compression
84 | - 1-8: ``zstd`` with increasing compression level
85 | - 9: ``xz``
86 |
87 | Making the unpacked environment writeable
88 | -----------------------------------------
89 |
90 | SquashFS is a read-only filesystem.
91 | Sometimes the unpacked environment needs to be writeable on the target machine, for example to install
92 | more packages.
93 | A good way to do this is to use `Union mounting `_ to
94 | add a writeable layer on top of the read-only SquashFS.
95 | On Linux the most used option is `OverlayFS `_.
96 |
97 | To set this up, we create three layers:
98 |
99 | 1. The SquashFS-packed conda env as a read-only lower layer
100 | 2. A writeable working directory, necessary for OverlayFS
101 | 3. A writeable upper directory, where all new and changed files will go
102 |
103 | .. code-block:: bash
104 |
105 | $ # 1. Create read-only lower layer, consisting of squashFS-packed conda env
106 | $ mkdir squashFS_mountpoint
107 | $ sudo mount -t squashfs example.squashfs squashFS_mountpoint
108 | $ # 2. Create workdir & 3. Create upperdir
109 | $ mkdir workdir upperdir
110 |
111 | Now we combine them into a single directory ``writeable_env``, which will contain our environment but
112 | which will be writeable.
113 |
114 | .. code-block:: bash
115 |
116 | $ mkdir writeable_env
117 | $ sudo mount -t overlay overlay \
118 | -o lowerdir=squashFS_mountpoint,upperdir=upperdir,workdir=workdir writeable_env
119 |
120 | Any files created in the ``writeable_env`` directory will also show up in ``upperdir``.
121 | After unmounting, delete ``upperdir`` and ``workdir`` and all changes made to the environment will be gone.
122 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Build
2 | on:
3 | push:
4 | branches:
5 | - main
6 | tags:
7 | - '*'
8 | pull_request:
9 | branches:
10 | - main
11 | jobs:
12 | package:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - name: Retrieve the source code
16 | uses: actions/checkout@v3
17 | with:
18 | fetch-depth: 0
19 | - name: Install build dependencies
20 | run: |
21 | source $CONDA/bin/activate
22 | conda config --append channels conda-forge
23 | conda install -y conda-build conda-verify sphinx numpydoc sphinxcontrib-autoprogram make
24 | python -m pip install -e .
25 | - name: Build the documentation as a test
26 | run: |
27 | source $CONDA/bin/activate
28 | cd docs
29 | make html
30 | - name: Build the package
31 | run: conda build conda-recipe --no-test
32 | - name: Reduce the size of the build artifact
33 | run: rm -rf /usr/share/miniconda/conda-bld/{git_cache,work,conda-pack*,*/.cache}
34 | - name: Upload the build artifact
35 | uses: actions/upload-artifact@v3
36 | with:
37 | name: package-${{ github.sha }}
38 | path: /usr/share/miniconda/conda-bld
39 | retention-days: 5
40 | testbed:
41 | defaults:
42 | run:
43 | shell: bash
44 | runs-on: ${{ matrix.os }}
45 | strategy:
46 | matrix:
47 | os: [ubuntu-latest,macos-latest,windows-latest]
48 | steps:
49 | - name: Retrieve the source code
50 | uses: actions/checkout@v3
51 | - id: conda-root
52 | name: Set CONDA_ROOT
53 | run: |
54 | CONDA_ROOT=$(dirname $GITHUB_WORKSPACE)/conda
55 | echo "value=$(dirname $GITHUB_WORKSPACE)/conda" >> $GITHUB_OUTPUT
56 | echo "CONDA_ROOT=$CONDA_ROOT" >> $GITHUB_ENV
57 | echo "CONDA_ROOT: $CONDA_ROOT"
58 | # Use a smaller cache entry to enable a quicker exit if we
59 | # have already built the testbed. Any small file will do
60 | - id: cache-key
61 | name: Retrieve cache key
62 | uses: actions/cache@v3
63 | with:
64 | path: LICENSE.txt
65 | key: key-${{ matrix.os }}-${{ hashFiles('testing') }}-5
66 | - name: Retrieve or create the testbed cache
67 | if: steps.cache-key.outputs.cache-hit != 'true'
68 | uses: actions/cache@v3
69 | with:
70 | path: ${{ steps.conda-root.outputs.value }}
71 | key: testbed-${{ matrix.os }}-${{ hashFiles('testing') }}-5
72 | - name: Verify or create the testbed
73 | if: steps.cache-key.outputs.cache-hit != 'true'
74 | run: testing/setup_envs.sh purge-packages
75 | tests:
76 | defaults:
77 | run:
78 | shell: bash
79 | runs-on: ${{ matrix.os }}
80 | needs: [package,testbed]
81 | strategy:
82 | fail-fast: false
83 | matrix:
84 | os: [macos-latest,ubuntu-latest,windows-latest]
85 | pyver: ["3.7","3.8","3.9","3.10"]
86 | include:
87 | # include the appropriate dependencies for testing SquashFS on each OS
88 | - os: macos-latest
89 | squashfs_deps: "conda-forge::squashfs-tools"
90 | - os: ubuntu-latest
91 | squashfs_deps: "conda-forge::squashfs-tools conda-forge::squashfuse"
92 | - os: windows-latest
93 | squashfs_deps: ""
94 | steps:
95 | - name: Retrieve the source code
96 | uses: actions/checkout@v3
97 | - id: conda-root
98 | name: Set CONDA_ROOT
99 | run: |
100 | CONDA_ROOT=$(dirname $GITHUB_WORKSPACE)/conda
101 | echo "value=$(dirname $GITHUB_WORKSPACE)/conda" >> $GITHUB_OUTPUT
102 | echo "CONDA_ROOT=$CONDA_ROOT" >> $GITHUB_ENV
103 | echo "CONDA_ROOT: $CONDA_ROOT"
104 | - name: Retrieve the testbed cache
105 | uses: actions/cache@v3
106 | with:
107 | path: ${{ steps.conda-root.outputs.value }}
108 | key: testbed-${{ matrix.os }}-${{ hashFiles('testing') }}-5
109 | - name: Download the build artifact
110 | uses: actions/download-artifact@v3
111 | with:
112 | name: package-${{ github.sha }}
113 | path: conda-bld
114 | - name: Verify or create the testbed
115 | run: testing/setup_envs.sh
116 | - name: Create the test environment and run the tests
117 | run: |
118 | source $CONDA_ROOT/etc/profile.d/conda.sh
119 | conda info -a
120 | mv conda-bld $CONDA_ROOT/conda-bld
121 | conda create -n cptest local::conda-pack conda-forge::pytest conda-forge::pytest-cov python=${{ matrix.pyver }} ${{ matrix.squashfs_deps }}
122 | conda activate cptest
123 | pytest -v -ss --cov=conda_pack --cov-branch --cov-report=xml conda_pack/tests
124 | - uses: codecov/codecov-action@v3
125 | upload:
126 | needs: tests
127 | runs-on: ubuntu-latest
128 | if: github.event_name == 'push'
129 | steps:
130 | - name: Retrieve the source code
131 | uses: actions/checkout@v3
132 | with:
133 | fetch-depth: 0
134 | - name: Download the build artifacts
135 | uses: actions/download-artifact@v3
136 | with:
137 | name: package-${{ github.sha }}
138 | path: conda-bld
139 | - name: Install deployment dependencies and build the docs
140 | run: |
141 | source $CONDA/bin/activate
142 | conda config --append channels conda-forge
143 | conda install -y sphinx numpydoc sphinxcontrib-autoprogram make anaconda-client
144 | python -m pip install -e .
145 | cd docs
146 | make html
147 | - name: Deploy the documentation
148 | if: startsWith(github.ref, 'refs/tags/')
149 | uses: peaceiris/actions-gh-pages@v3
150 | with:
151 | github_token: ${{ secrets.GITHUB_TOKEN }}
152 | publish_dir: ./docs/build/html
153 | - name: Upload to anaconda.org
154 | env:
155 | ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
156 | GITHUB_REF: ${{ github.ref }}
157 | run: |
158 | source $CONDA/bin/activate
159 | [[ "$GITHUB_REF" =~ ^refs/tags/ ]] || export LABEL="--label dev"
160 | anaconda --verbose --token $ANACONDA_TOKEN upload --user ctools $LABEL conda-bld/*/*.tar.bz2 --force
161 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | Conda-Pack
2 | ==========
3 |
4 | ``conda-pack`` is a command line tool for creating archives of `conda
5 | environments `_ that can be installed on other
6 | systems and locations. This is useful for deploying code in a consistent
7 | environment—potentially where python and/or conda isn't already installed.
8 |
9 | A tool like ``conda-pack`` is necessary because conda environments *are
10 | not relocatable*. Simply moving an environment to a different directory
11 | can render it partially or completely inoperable. ``conda-pack`` addresses
12 | this challenge by building archives from original conda package sources
13 | and reproducing conda's own relocation logic.
14 |
15 | .. raw:: html
16 |
17 |
18 |
19 |
20 |
21 |
22 | Use Cases
23 | ---------
24 |
25 | - Bundling an application with its environment for deployment
26 |
27 | - Packaging a conda environment for use with Apache Spark when deploying on
28 | YARN (:doc:`see here ` for more information).
29 |
30 | - Packaging a conda environment for deployment on Apache YARN. One way to do
31 | this is to use `Skein `_.
32 |
33 | - Archiving an environment in a functioning state. Note that a more sustainable
34 | way to do this is to specify your environment as a `environment.yml
35 | `_,
36 | and recreate the environment when needed.
37 |
38 | - Packaging an environment as single executable with entrypoint to run on execution (see the
39 | instructions for :doc:`Linux and macOS `).
40 |
41 | - *BETA*: Packaging a conda environment as a standard Cloudera parcel. This is
42 | a newly added capability. It has been tested on a live cluster, but different
43 | cluster configurations may produce different results. We welcome users to
44 | `file an issue `_ if necessary.
45 | :doc:`See here ` for more information).
46 |
47 | Installation
48 | ------------
49 |
50 | It's recommended to install in your root conda environment - the ``conda pack``
51 | command will then be available in all sub-environments as well.
52 |
53 | **Install with conda:**
54 |
55 | ``conda-pack`` is available from `Anaconda `_
56 | as well as from `conda-forge `_:
57 |
58 | .. code::
59 |
60 | conda install conda-pack
61 | conda install -c conda-forge conda-pack
62 |
63 |
64 | **Install from PyPI:**
65 |
66 | While ``conda-pack`` requires an existing ``conda`` install, it can also be
67 | installed from PyPI:
68 |
69 | .. code::
70 |
71 | pip install conda-pack
72 |
73 |
74 | **Install from source:**
75 |
76 | ``conda-pack`` is `available on github `_
77 | and can always be installed from source.
78 |
79 | .. code::
80 |
81 | pip install git+https://github.com/conda/conda-pack.git
82 |
83 |
84 | Commandline Usage
85 | -----------------
86 |
87 | ``conda-pack`` is primarily a commandline tool. Full CLI docs can be found
88 | :doc:`here `.
89 |
90 | One common use case is packing an environment on one machine to distribute to
91 | other machines that may not have conda/python installed.
92 |
93 | On the source machine
94 |
95 | .. code-block:: bash
96 |
97 | # Pack environment my_env into my_env.tar.gz
98 | $ conda pack -n my_env
99 |
100 | # Pack environment my_env into out_name.tar.gz
101 | $ conda pack -n my_env -o out_name.tar.gz
102 |
103 | # Pack environment located at an explicit path into my_env.tar.gz
104 | $ conda pack -p /explicit/path/to/my_env
105 |
106 | On the target machine
107 |
108 | .. code-block:: bash
109 |
110 | # Unpack environment into directory `my_env`
111 | $ mkdir -p my_env
112 | $ tar -xzf my_env.tar.gz -C my_env
113 |
114 | # Use python without activating or fixing the prefixes. Most python
115 | # libraries will work fine, but things that require prefix cleanups
116 | # will fail.
117 | $ ./my_env/bin/python
118 |
119 | # Activate the environment. This adds `my_env/bin` to your path
120 | $ source my_env/bin/activate
121 |
122 | # Run python from in the environment
123 | (my_env) $ python
124 |
125 | # Cleanup prefixes from in the active environment.
126 | # Note that this command can also be run without activating the environment
127 | # as long as some version of python is already installed on the machine.
128 | (my_env) $ conda-unpack
129 |
130 | # At this point the environment is exactly as if you installed it here
131 | # using conda directly. All scripts should work fine.
132 | (my_env) $ ipython --version
133 |
134 | # Deactivate the environment to remove it from your path
135 | (my_env) $ source my_env/bin/deactivate
136 |
137 |
138 | API Usage
139 | ---------
140 |
141 | ``conda-pack`` also provides a Python API, the full documentation of which can
142 | be found :doc:`here `. The API mostly mirrors that of the ``conda pack``
143 | commandline. Repeating the examples from above:
144 |
145 | .. code-block:: python
146 |
147 | import conda_pack
148 |
149 | # Pack environment my_env into my_env.tar.gz
150 | conda_pack.pack(name="my_env")
151 |
152 | # Pack environment my_env into out_name.tar.gz
153 | conda_pack.pack(name="my_env", output="out_name.tar.gz")
154 |
155 | # Pack environment located at an explicit path into my_env.tar.gz
156 | conda_pack.pack(prefix="/explicit/path/to/my_env")
157 |
158 |
159 | Caveats
160 | -------
161 |
162 | This tool has a few caveats.
163 |
164 | - Conda must be installed and be on your path.
165 |
166 | - The OS where the environment was built must match the OS of the target.
167 | This means that environments built on Windows can't be relocated to Linux.
168 |
169 | - Once an environment is unpacked and ``conda-unpack`` has been executed,
170 | it *cannot* be relocated. Re-applying ``conda-pack`` is unlikely to work.
171 |
172 | - ``conda-pack`` is not well-suited for archiving old environments, because it
173 | requires that conda's package cache have all of the environment's packages
174 | present. It is intended for building archives from actively maintained
175 | conda environments.
176 |
177 | .. toctree::
178 | :hidden:
179 |
180 | cli.rst
181 | api.rst
182 | spark.rst
183 | parcel.rst
184 | squashfs.rst
185 | unix-binary.rst
186 |
--------------------------------------------------------------------------------
/docs/source/spark.rst:
--------------------------------------------------------------------------------
1 | Usage with Apache Spark on YARN
2 | ===============================
3 |
4 | ``conda-pack`` can be used to distribute conda environments to be used with
5 | `Apache Spark `_ jobs when `deploying on Apache YARN
6 | `_. By bundling your
7 | environment for use with Spark, you can make use of all the libraries provided
8 | by ``conda``, and ensure that they're consistently provided on every node. This
9 | makes use of `YARN's
10 | `_
11 | resource localization by distributing environments as archives, which are then
12 | automatically unarchived on every node. In this case either the ``tar.gz`` or
13 | ``zip`` formats must be used.
14 |
15 |
16 | Python Example
17 | --------------
18 |
19 | Create an environment:
20 |
21 | .. code-block:: bash
22 |
23 | $ conda create -y -n example python=3.5 numpy pandas scikit-learn
24 |
25 |
26 | Activate the environment:
27 |
28 | .. code-block:: bash
29 |
30 | $ conda activate example # Older conda versions use `source activate` instead
31 |
32 |
33 | Package the environment into a ``tar.gz`` archive:
34 |
35 | .. code-block:: bash
36 |
37 | $ conda pack -o environment.tar.gz
38 | Collecting packages...
39 | Packing environment at '/Users/jcrist/anaconda/envs/example' to 'environment.tar.gz'
40 | [########################################] | 100% Completed | 23.2s
41 |
42 |
43 | Write a PySpark script, for example:
44 |
45 | .. code-block:: python
46 |
47 | # script.py
48 | from pyspark import SparkConf
49 | from pyspark import SparkContext
50 |
51 | conf = SparkConf()
52 | conf.setAppName('spark-yarn')
53 | sc = SparkContext(conf=conf)
54 |
55 | def some_function(x):
56 | # Packages are imported and available from your bundled environment.
57 | import sklearn
58 | import pandas
59 | import numpy as np
60 |
61 | # Use the libraries to do work
62 | return np.sin(x)**2 + 2
63 |
64 | rdd = (sc.parallelize(range(1000))
65 | .map(some_function)
66 | .take(10))
67 |
68 | print(rdd)
69 |
70 |
71 | Submit the job to Spark using ``spark-submit``. In YARN cluster mode:
72 |
73 | .. code-block:: bash
74 |
75 | $ PYSPARK_PYTHON=./environment/bin/python \
76 | spark-submit \
77 | --conf spark.yarn.appMasterEnv.PYSPARK_PYTHON=./environment/bin/python \
78 | --master yarn \
79 | --deploy-mode cluster \
80 | --archives environment.tar.gz#environment \
81 | script.py
82 |
83 |
84 | Or in YARN client mode:
85 |
86 | .. code-block:: bash
87 |
88 | $ PYSPARK_DRIVER_PYTHON=`which python` \
89 | PYSPARK_PYTHON=./environment/bin/python \
90 | spark-submit \
91 | --conf spark.yarn.appMasterEnv.PYSPARK_PYTHON=./environment/bin/python \
92 | --master yarn \
93 | --deploy-mode client \
94 | --archives environment.tar.gz#environment \
95 | script.py
96 |
97 |
98 | You can also start a PySpark interactive session using the following:
99 |
100 | .. code-block:: bash
101 |
102 | $ PYSPARK_DRIVER_PYTHON=`which python` \
103 | PYSPARK_PYTHON=./environment/bin/python \
104 | pyspark \
105 | --conf spark.yarn.appMasterEnv.PYSPARK_PYTHON=./environment/bin/python \
106 | --master yarn \
107 | --deploy-mode client \
108 | --archives environment.tar.gz#environment
109 |
110 |
111 | R Example
112 | ---------
113 |
114 | Conda also supports R environments. Here we'll demonstrate creating and
115 | packaging an environment for use with `Sparklyr <https://spark.rstudio.com/>`__.
116 | Note that similar techniques also work with `SparkR
117 | <https://spark.apache.org/docs/latest/sparkr.html>`__.
118 |
119 | First, create an environment:
120 |
121 | .. code-block:: bash
122 |
123 | $ conda create -y -n example r-sparklyr
124 |
125 |
126 | Activate the environment:
127 |
128 | .. code-block:: bash
129 |
130 | $ conda activate example # Older conda versions use `source activate` instead
131 |
132 |
133 | Package the environment into a ``tar.gz`` archive. Note the addition of the
134 | ``-d ./environment`` flag. This tells ``conda-pack`` to rewrite any
135 | prefixes to the path ``./environment`` (the relative path to the environment
136 | from the working directory on the YARN workers) before packaging. This is
137 | required for R, as the R executables have absolute paths hardcoded in them
138 | (whereas Python does not).
139 |
140 | .. code-block:: bash
141 |
142 | $ conda pack -o environment.tar.gz -d ./environment
143 | Collecting packages...
144 | Packing environment at '/Users/jcrist/anaconda/envs/example' to 'environment.tar.gz'
145 | [########################################] | 100% Completed | 21.8s
146 |
147 |
148 | Write an R script, for example:
149 |
150 | .. code-block:: r
151 |
152 | library(sparklyr)
153 |
154 | # Create a spark configuration
155 | config <- spark_config()
156 |
157 | # Specify that the packaged environment should be distributed
158 | # and unpacked to the directory "environment"
159 | config$spark.yarn.dist.archives <- "environment.tar.gz#environment"
160 |
161 | # Specify the R command to use, as well as various R locations on the workers
162 | config$spark.r.command <- "./environment/bin/Rscript"
163 | config$sparklyr.apply.env.R_HOME <- "./environment/lib/R"
164 | config$sparklyr.apply.env.RHOME <- "./environment"
165 | config$sparklyr.apply.env.R_SHARE_DIR <- "./environment/lib/R/share"
166 | config$sparklyr.apply.env.R_INCLUDE_DIR <- "./environment/lib/R/include"
167 |
168 | # Create a spark context.
169 | # You can also specify `master = "yarn-cluster"` for cluster mode.
170 | sc <- spark_connect(master = "yarn-client", config = config)
171 |
172 | # Use a user defined function, which requires a working R environment on
173 | # every worker node. Since all R packages already exist on every node, we
174 | # pass in ``packages = FALSE`` to avoid redistributing them.
175 | sdf_copy_to(sc, iris) %>%
176 | spark_apply(function(e) broom::tidy(lm(Petal_Length ~ Petal_Width, e)),
177 | packages = FALSE)
178 |
179 |
180 | Run the script.
181 |
182 | .. code-block:: bash
183 |
184 | $ Rscript script.R
185 | # Source: table [?? x 5]
186 | # Database: spark_connection
187 |   term         estimate std.error statistic  p.value
188 |   <chr>           <dbl>     <dbl>     <dbl>    <dbl>
189 | 1 (Intercept)      1.08    0.0730      14.8 4.04e-31
190 | 2 Petal_Width      2.23    0.0514      43.4 4.68e-86
191 |
--------------------------------------------------------------------------------
/conda_pack/prefixes.py:
--------------------------------------------------------------------------------
1 | # Much of this file borrowed from conda/core/portability.py:
2 | #
3 | # https://github.com/conda/conda/blob/master/conda/core/portability.py
4 | #
5 | # The license of which has been provided below:
6 | #
7 | # -----------------------------------------------------------------------------
8 | #
9 | # BSD 3-Clause License
10 | #
11 | # Copyright (c) 2012, Continuum Analytics, Inc.
12 | # All rights reserved.
13 | #
14 | # Redistribution and use in source and binary forms, with or without
15 | # modification, are permitted provided that the following conditions are met:
16 | #
17 | # * Redistributions of source code must retain the above copyright
18 | # notice, this list of conditions and the following disclaimer.
19 | # * Redistributions in binary form must reproduce the above copyright
20 | # notice, this list of conditions and the following disclaimer in the
21 | # documentation and/or other materials provided with the distribution.
22 | # * Neither the name of the copyright holder nor the names of its
23 | # contributors may be used to endorse or promote products
24 | # derived from this software without specific prior written permission.
25 | #
26 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
27 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
28 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
29 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
30 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
31 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
32 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
33 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
34 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
35 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
36 | # POSSIBILITY OF SUCH DAMAGE.
37 |
38 | import platform
39 | import re
40 | import struct
41 | import subprocess
42 | import sys
43 |
on_win = sys.platform == 'win32'  # True when running on Windows

# three capture groups: whole_shebang, executable, options
# (compiled against bytes, so the pattern fragments are bytes literals)
SHEBANG_REGEX = (
    # pretty much the whole match string
    br'^(#!'
    # allow spaces between #! and beginning of the executable path
    br'(?:[ ]*)'
    # the executable is the next text block without an escaped
    # space or non-space whitespace character
    br'(/(?:\\ |[^ \n\r\t])*)'
    # the rest of the line can contain option flags
    br'(.*)'
    # end whole_shebang group
    br')$')
59 |
60 |
def update_prefix(path, new_prefix, placeholder, mode='text'):
    """Rewrite occurrences of ``placeholder`` in the file at ``path``.

    Parameters
    ----------
    path : str
        File to rewrite in place.
    new_prefix : str
        Replacement prefix.
    placeholder : str
        Prefix string recorded at build time.
    mode : {'text', 'binary'}
        Replacement strategy, forwarded to ``replace_prefix``.
    """
    if on_win and mode == 'text':
        # force all prefix replacements to forward slashes to simplify need to
        # escape backslashes replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    changed = False
    with open(path, 'rb+') as fh:
        before = fh.read()
        after = replace_prefix(before, mode, placeholder, new_prefix)
        # Only rewrite the file when the contents actually differ
        if after != before:
            fh.seek(0)
            fh.write(after)
            fh.truncate()
            changed = True

    # On Apple Silicon, modifying a signed binary invalidates its signature;
    # ad-hoc re-sign it. Errors are deliberately ignored (no check=True) --
    # this is best-effort and harmless for non-binary files.
    if changed and platform.system() == "Darwin" and platform.machine() == "arm64":
        subprocess.run(
            ["/usr/bin/codesign", "-s", "-", "-f", path], capture_output=True
        )
84 |
85 |
def replace_prefix(data, mode, placeholder, new_prefix):
    """Return ``data`` with ``placeholder`` replaced by ``new_prefix``.

    Parameters
    ----------
    data : bytes
        Original file contents.
    mode : {'text', 'binary'}
        'text' does a plain byte substitution; 'binary' does a
        NUL-padded substitution that preserves the file length.
    placeholder, new_prefix : str
        Old and new prefix strings.

    Raises
    ------
    ValueError
        If ``mode`` is unrecognized, or (on non-Windows) a binary
        replacement changed the file length.
    """
    if mode == 'text':
        data2 = text_replace(data, placeholder, new_prefix)
    elif mode == 'binary':
        data2 = binary_replace(data,
                               placeholder.encode('utf-8'),
                               new_prefix.encode('utf-8'))
        if not on_win and len(data2) != len(data):
            # BUG FIX: the previous format string used '{len_orig!d}', but
            # '!d' is not a valid str.format() conversion (only !s/!r/!a),
            # so building this message raised the wrong ValueError.
            message = ("Found mismatched data length in binary file:\n"
                       "original data length: {len_orig}\n"
                       "new data length: {len_new}\n"
                       ).format(len_orig=len(data),
                                len_new=len(data2))
            raise ValueError(message)
    else:
        raise ValueError("Invalid mode: %r" % mode)
    return data2
103 |
104 |
def text_replace(data, placeholder, new_prefix):
    """Replace every occurrence of ``placeholder`` in ``data`` (bytes).

    Both string arguments are encoded as UTF-8 before the byte-level
    substitution.
    """
    old = placeholder.encode('utf-8')
    new = new_prefix.encode('utf-8')
    return data.replace(old, new)
107 |
108 |
if on_win:
    def binary_replace(data, placeholder, new_prefix):
        # On Windows the only binary files that need prefix rewriting are
        # distlib entry-point executables, so this delegates straight to the
        # pyzzer shebang rewriter.  The replacement prefix is lowercased and
        # both the original and lowercased placeholder spellings are tried
        # (presumably because Windows paths are case-insensitive -- TODO
        # confirm against conda upstream).
        new_prefix = new_prefix.lower()
        if placeholder in data:
            return replace_pyzzer_entry_point_shebang(data, placeholder, new_prefix)
        elif placeholder.lower() in data:
            return replace_pyzzer_entry_point_shebang(data, placeholder.lower(), new_prefix)
        return data

else:
    def binary_replace(data, placeholder, new_prefix):
        """Perform a binary replacement of `data`, where ``placeholder`` is
        replaced with ``new_prefix`` and the remaining string is padded with null
        characters. All input arguments are expected to be bytes objects."""

        def replace(match):
            # Pad with NULs so the matched NUL-terminated string (and hence
            # the whole file) keeps its original length.
            occurances = match.group().count(placeholder)
            padding = (len(placeholder) - len(new_prefix)) * occurances
            if padding < 0:
                # new_prefix longer than placeholder: cannot pad in place
                raise ValueError("negative padding")
            return match.group().replace(placeholder, new_prefix) + b'\0' * padding

        # Match from each placeholder occurrence up to the terminating NUL.
        pat = re.compile(re.escape(placeholder) + b'([^\0]*?)\0')
        return pat.sub(replace, data)
133 |
134 |
def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
    """Code adapted from pyzzer. This is meant to deal with entry point exe's
    created by distlib, which consist of a launcher, then a shebang, then a zip
    archive of the entry point code to run. We need to change the shebang.

    If no zip archive / shebang / launcher structure is found, ``all_data``
    is returned unchanged.
    """
    # Copyright (c) 2013 Vinay Sajip.
    #
    # Permission is hereby granted, free of charge, to any person obtaining a copy
    # of this software and associated documentation files (the "Software"), to deal
    # in the Software without restriction, including without limitation the rights
    # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    # copies of the Software, and to permit persons to whom the Software is
    # furnished to do so, subject to the following conditions:
    #
    # The above copyright notice and this permission notice shall be included in
    # all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    # THE SOFTWARE.
    launcher = shebang = None
    # Locate the zip End Of Central Directory record signature.
    pos = all_data.rfind(b'PK\x05\x06')
    if pos >= 0:
        # BUG FIX (restored from upstream conda): this unpack line had been
        # garbled; the EOCD record stores the central directory size and
        # offset as two little-endian uint32s at bytes 12..20.
        end_cdr = all_data[pos + 12:pos + 20]
        cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
        # Start of the embedded zip archive within the file.
        arc_pos = pos - cdr_size - cdr_offset
        data = all_data[arc_pos:]
        if arc_pos > 0:
            # The shebang is the last '#!' line before the archive; anything
            # before it is the native launcher stub.
            pos = all_data.rfind(b'#!', 0, arc_pos)
            if pos >= 0:
                shebang = all_data[pos:arc_pos]
                if pos > 0:
                    launcher = all_data[:pos]

        if data and shebang and launcher:
            if hasattr(placeholder, 'encode'):
                placeholder = placeholder.encode('utf-8')
            if hasattr(new_prefix, 'encode'):
                new_prefix = new_prefix.encode('utf-8')
            shebang = shebang.replace(placeholder, new_prefix)
            all_data = b"".join([launcher, shebang, data])
    return all_data
181 |
--------------------------------------------------------------------------------
/conda_pack/cli.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import sys
3 | import traceback
4 |
5 | from . import __version__
6 | from .core import CondaPackException, context, pack
7 |
8 |
class MultiAppendAction(argparse.Action):
    """Argparse action collecting ``(option-name, value)`` pairs in order.

    Used so that ``--exclude``/``--include`` occurrences sharing one
    ``dest`` preserve their relative command-line ordering.  The option
    name is stored with its leading dashes stripped.
    """

    def __init__(self, option_strings, dest, nargs=None, **kwargs):
        # Each occurrence takes exactly one value; multi-value use is an error.
        if nargs is not None:
            raise ValueError("nargs not allowed")
        super().__init__(option_strings, dest, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        collected = getattr(namespace, self.dest)
        if collected is None:
            collected = []
            setattr(namespace, self.dest, collected)
        collected.append((option_string.strip('-'), values))
19 |
20 |
def build_parser():
    """Construct and return the ``conda-pack`` command-line argument parser.

    ``add_help=False`` is used so ``--help`` can be registered manually
    (listed last, with the project's own help text); ``allow_abbrev=False``
    prevents unique-prefix matching of long options.
    """
    description = "Package an existing conda environment into an archive file."
    kwargs = dict(prog="conda-pack", description=description, add_help=False)
    kwargs["allow_abbrev"] = False
    parser = argparse.ArgumentParser(**kwargs)
    # Environment selection
    parser.add_argument("--name", "-n",
                        metavar="ENV",
                        help="The name of the environment to pack. "
                             "If neither --name nor --prefix are supplied, "
                             "the current activated environment is packed.")
    parser.add_argument("--prefix", "-p",
                        metavar="PATH",
                        help="The path to the environment to pack. "
                             "Only one of --name/--prefix should be supplied.")
    # Output location and archive layout
    parser.add_argument("--output", "-o",
                        metavar="PATH",
                        help=("The path of the output file. Defaults to the "
                              "environment name with a ``.tar.gz`` suffix "
                              "(e.g. ``my_env.tar.gz``)."))
    parser.add_argument("--arcroot",
                        metavar="PATH", default='',
                        help=("The relative path in the archive to the conda "
                              "environment. Defaults to ''."))
    parser.add_argument("--dest-prefix", "-d",
                        metavar="PATH",
                        help=("If present, prefixes will be rewritten to this "
                              "path before packaging. In this case the "
                              "`conda-unpack` script will not be generated. "
                              "This option should not be used with parcels, which "
                              "instead generate their destination prefix from the "
                              "--parcel-root, --parcel-name, and "
                              "--parcel-version options."))
    # Cloudera-parcel-specific options
    parser.add_argument("--parcel-root", default=None,
                        help="(Parcels only) The location where all parcels are unpacked "
                        "on the target Hadoop cluster (default: '/opt/cloudera/parcels').")
    parser.add_argument("--parcel-name", default=None,
                        help="(Parcels only) The name of the parcel, without a version "
                        "suffix. The default value is the local environment name. The parcel "
                        "name may not have any hyphens.")
    parser.add_argument("--parcel-version", default=None,
                        help="(Parcels only) The version number for the parcel. The default "
                        "value is the current date, using the format YYYY.MM.DD.")
    parser.add_argument("--parcel-distro", default=None,
                        help="(Parcels only) The distribution type for the parcel. The "
                        "default value is 'el7'. This value cannot have any hyphens.")
    # Archive format and compression tuning
    parser.add_argument("--format",
                        choices=['infer', 'zip', 'tar.gz', 'tgz', 'tar.bz2',
                                 'tbz2', 'tar.xz', 'txz', 'tar', 'parcel', 'squashfs'],
                        default='infer',
                        help=("The archival format to use. By default this is "
                              "inferred by the output file extension."))
    parser.add_argument("--compress-level",
                        metavar="LEVEL",
                        type=int,
                        default=4,
                        help=("The compression level to use, from 0 to 9. "
                              "Higher numbers decrease output file size at "
                              "the expense of compression time. Default is 4."))
    parser.add_argument("--n-threads", "-j",
                        metavar="N",
                        type=int,
                        default=1,
                        help=("The number of threads to use. Set to -1 to use "
                              "the number of cpus on this machine. If a file "
                              "format doesn't support threaded packaging, this "
                              "option will be ignored. Default is 1."))
    parser.add_argument("--zip-symlinks",
                        action="store_true",
                        help=("Symbolic links aren't supported by the Zip "
                              "standard, but are supported by *many* common "
                              "Zip implementations. If set, store symbolic "
                              "links in the archive, instead of the file "
                              "referred to by the link. This can avoid storing "
                              "multiple copies of the same files. *Note that "
                              "the resulting archive may silently fail on "
                              "decompression if the ``unzip`` implementation "
                              "doesn't support symlinks*. Ignored if format "
                              "isn't ``zip``."))
    parser.add_argument("--no-zip-64",
                        action="store_true",
                        help="Disable ZIP64 extensions.")
    # Validation toggles
    parser.add_argument("--ignore-editable-packages",
                        action="store_true",
                        help="Skips checks for editable packages.")
    parser.add_argument("--ignore-missing-files",
                        action="store_true",
                        help="Skip checks for missing package files.")
    # --exclude/--include share one dest so their ordering is preserved
    parser.add_argument("--exclude",
                        action=MultiAppendAction,
                        metavar="PATTERN",
                        dest="filters",
                        help="Exclude files matching this pattern")
    parser.add_argument("--include",
                        action=MultiAppendAction,
                        metavar="PATTERN",
                        dest="filters",
                        help="Re-add excluded files matching this pattern")
    parser.add_argument("--force", "-f",
                        action="store_true",
                        help=("Overwrite any existing archive at the output path, "
                              "or create the output directory structure if it's missing."))
    parser.add_argument("--quiet", "-q",
                        action="store_true",
                        help="Do not report progress")
    parser.add_argument("--help", "-h", action='help',
                        help="Show this help message then exit")
    parser.add_argument("--version",
                        action='store_true',
                        help="Show version then exit")
    return parser
131 |
132 |
# Parser at top level to allow sphinxcontrib.autoprogram to work
# (built once at import time; ``main`` reuses this instance).
PARSER = build_parser()
135 |
136 |
def fail(msg):
    """Report *msg* on stderr and terminate with exit status 1."""
    sys.stderr.write('%s\n' % (msg,))
    sys.exit(1)
140 |
141 |
def main(args=None, pack=pack):
    """Command-line entry point for ``conda-pack``.

    Parameters
    ----------
    args : list of str, optional
        Argument vector; defaults to ``sys.argv[1:]`` via argparse.
    pack : callable, optional
        The packing function; injectable for testing.

    Exits with status 0 on success, 1 on any handled failure.
    """
    args = PARSER.parse_args(args=args)

    # Manually handle version printing to output to stdout in python < 3.4
    if args.version:
        print('conda-pack %s' % __version__)
        sys.exit(0)

    try:
        # context.set_cli() flags that we're running from the CLI
        # (affects behavior inside pack -- see conda_pack.core).
        with context.set_cli():
            pack(name=args.name,
                 prefix=args.prefix,
                 output=args.output,
                 format=args.format,
                 force=args.force,
                 compress_level=args.compress_level,
                 n_threads=args.n_threads,
                 zip_symlinks=args.zip_symlinks,
                 zip_64=not args.no_zip_64,
                 arcroot=args.arcroot,
                 dest_prefix=args.dest_prefix,
                 parcel_root=args.parcel_root,
                 parcel_name=args.parcel_name,
                 parcel_version=args.parcel_version,
                 parcel_distro=args.parcel_distro,
                 verbose=not args.quiet,
                 filters=args.filters,
                 ignore_editable_packages=args.ignore_editable_packages,
                 ignore_missing_files=args.ignore_missing_files)
    except CondaPackException as e:
        # Expected, user-facing errors: short message, no traceback
        fail("CondaPackError: %s" % e)
    except KeyboardInterrupt:
        fail("Interrupted")
    except Exception:
        # Unexpected errors: show the full traceback
        fail(traceback.format_exc())
    sys.exit(0)


if __name__ == '__main__':
    main()
182 |
--------------------------------------------------------------------------------
/conda_pack/tests/test_formats.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import subprocess
4 | import sys
5 | import tarfile
6 | import threading
7 | import time
8 | import zipfile
9 | from multiprocessing import cpu_count
10 | from os.path import exists, isdir, isfile, islink, join
11 | from subprocess import STDOUT, check_output
12 |
13 | import pytest
14 |
15 | from conda_pack.compat import PY2, on_linux, on_mac, on_win
16 | from conda_pack.core import CondaPackException
17 | from conda_pack.formats import _parse_n_threads, archive
18 |
19 |
@pytest.fixture(scope="module")
def root_and_paths(tmpdir_factory):
    """Build a sample directory tree (dirs, files, symlinks) to archive.

    Returns ``(root, paths)``: the tree's root directory and the list of
    archive-relative entries the tests should add.  On Windows, symlinks
    are simulated with copies.
    """
    root = str(tmpdir_factory.mktemp('example_dir'))

    def mkfil(*paths):
        # Create a file of random content under root
        with open(join(root, *paths), mode='wb') as fil:
            # Write 512 KiB to file
            fil.write(os.urandom(512 * 2 ** 10))

    def mkdir(path):
        os.mkdir(join(root, path))

    def symlink(path, target):
        # Create a relative symlink from path -> target under root
        target = join(root, target)
        path = join(root, path)
        if on_win:
            # Copy the files instead of symlinking
            if isdir(target):
                shutil.copytree(target, path)
            else:
                shutil.copyfile(target, path)
        else:
            target = os.path.relpath(target, os.path.dirname(path))
            os.symlink(target, path)

    # Build test directory structure
    mkdir("empty_dir")
    symlink("link_to_empty_dir", "empty_dir")

    mkdir("dir")
    mkfil("dir", "one")
    mkfil("dir", "two")
    symlink("link_to_dir", "dir")

    mkfil("file")
    symlink("link_to_file", "file")

    paths = ["empty_dir",
             "link_to_empty_dir",
             join("dir", "one"),
             join("dir", "two"),
             "file",
             "link_to_file",
             "link_to_dir"]

    if on_win:
        # Since we have no symlinks, these are actual
        # files that need to be added to the archive
        paths.extend([join("link_to_dir", "one"),
                      join("link_to_dir", "two")])

    # make sure the input matches the test
    check(root, links=not on_win)

    return root, paths
75 |
76 |
def check(out_dir, root=None, links=False):
    """Assert that *out_dir* contains the expected unpacked tree.

    Parameters
    ----------
    out_dir : str
        Directory produced by extracting/mounting an archive.
    root : str, optional
        When given, file contents under ``out_dir`` are compared
        byte-for-byte against the same paths under ``root``.
    links : bool, optional
        When True, the ``link_to_*`` entries must be real symlinks whose
        targets resolve to the expected paths; otherwise they must be
        regular copies with the same directory contents.
    """
    assert exists(join(out_dir, "empty_dir"))
    assert isdir(join(out_dir, "empty_dir"))
    assert isdir(join(out_dir, "link_to_empty_dir"))
    assert isdir(join(out_dir, "dir"))
    assert isfile(join(out_dir, "dir", "one"))
    assert isfile(join(out_dir, "dir", "two"))
    assert isdir(join(out_dir, "link_to_dir"))
    assert isfile(join(out_dir, "link_to_dir", "one"))
    assert isfile(join(out_dir, "link_to_dir", "two"))
    assert isfile(join(out_dir, "file"))
    assert isfile(join(out_dir, "link_to_file"))

    if root is not None:
        def check_equal_contents(*paths):
            with open(join(out_dir, *paths), 'rb') as path1:
                packaged = path1.read()

            with open(join(root, *paths), 'rb') as path2:
                source = path2.read()

            assert packaged == source

        check_equal_contents("dir", "one")
        check_equal_contents("dir", "two")
        check_equal_contents("file")

    if links:
        def checklink(path, sol):
            # BUG FIX: this helper previously ignored its ``path`` argument
            # (it always inspected "link_to_dir") and *returned* the target
            # comparison instead of asserting it, so symlink targets were
            # never actually verified.
            path = join(out_dir, path)
            sol = join(out_dir, sol)
            assert islink(path)
            assert join(out_dir, os.readlink(path)) == sol

        checklink("link_to_dir", "dir")
        checklink("link_to_file", "file")
        checklink("link_to_empty_dir", "empty_dir")
    else:
        # Check that contents of directories are same
        files = set(os.listdir(join(out_dir, "link_to_dir")))
        # Remove the dynamically written file, if running in a test
        files.discard('from_bytes')
        assert files == {'one', 'two'}
120 |
121 |
def has_infozip_cli():
    """Return True when an Info-ZIP ``unzip`` executable is on PATH."""
    try:
        banner = check_output(['unzip', '-h'], stderr=STDOUT)
    except Exception:
        return False
    else:
        return "Info-ZIP" in banner.decode()
128 |
129 |
def has_tar_cli():
    """Return True when a ``tar`` executable can be invoked."""
    try:
        check_output(['tar', '-h'], stderr=STDOUT)
    except Exception:
        return False
    return True
136 |
137 |
@pytest.mark.parametrize('format, zip_symlinks', [
    ('zip', True), ('zip', False),
    ('tar.gz', False), ('tar.bz2', False), ('tar.xz', False), ('tar', False),
    ('squashfs', False)
])
def test_format(tmpdir, format, zip_symlinks, root_and_paths):
    """Round-trip test: archive the fixture tree, extract (or mount) it,
    and verify the result matches the source tree."""
    if format == 'zip':
        # Symlink-preserving zips require a symlink-aware unzip (Info-ZIP)
        if zip_symlinks and (on_win or not has_infozip_cli()):
            pytest.skip("Cannot test zipfile symlink support on this platform")
        test_symlinks = zip_symlinks
    else:
        test_symlinks = not on_win
    if format == "squashfs":
        if on_win:
            # mksquashfs can work on win, but we don't support moving envs
            # between OSs anyway, so we don't test it either
            pytest.skip("Cannot mount squashfs on windows")
        elif on_mac and sys.version_info < (3, 9):
            # We have some spurious hardlinking issues with older Pythons.
            # xfail them until we can remove support for them.
            pytest.xfail("Sometimes hardlinking inside the test environment fails.")

    root, paths = root_and_paths

    packed_env_path = join(str(tmpdir), 'test.' + format)
    spill_dir = join(str(tmpdir), 'test')
    os.mkdir(spill_dir)

    # Write the archive, including two dynamically generated files so the
    # add_bytes code path (including nested dirs with spaces) is covered.
    with open(packed_env_path, mode='wb') as fil:
        with archive(fil, packed_env_path, '', format, zip_symlinks=zip_symlinks) as arc:
            for rel in paths:
                arc.add(join(root, rel), rel)
            arc.add_bytes(join(root, "file"),
                          b"foo bar",
                          join("dir", "from_bytes"))
            arc.add_bytes(join(root, "file"),
                          b"foo bar",
                          join("somedir/nested dir", "from_bytes"))
            if format == "squashfs":
                arc.mksquashfs_from_staging()

    # Extract (zip/tar), unsquash (macOS), or FUSE-mount (Linux) the archive.
    if format == 'zip':
        if test_symlinks:
            check_output(['unzip', packed_env_path, '-d', spill_dir])
        else:
            with zipfile.ZipFile(packed_env_path) as out:
                out.extractall(spill_dir)
    elif format == "squashfs":
        if on_mac:
            # There is no simple way to install MacFUSE + squashfuse on the macOS CI runners.
            # So instead of mounting we extract the archive and check the contents that way.

            # unsquashfs creates its own directories
            os.rmdir(spill_dir)
            cmd = ["unsquashfs", "-dest", spill_dir, packed_env_path]
            subprocess.check_output(cmd)
        else:
            cmd = ["squashfuse", packed_env_path, spill_dir]
            subprocess.check_output(cmd)
    else:
        with tarfile.open(packed_env_path) as out:
            out.extractall(spill_dir)

    check(spill_dir, links=test_symlinks, root=root)
    for dir in ["dir", "somedir/nested dir"]:
        assert isfile(join(spill_dir, dir, "from_bytes"))
        with open(join(spill_dir, dir, "from_bytes"), 'rb') as fil:
            assert fil.read() == b"foo bar"

    # On Linux the squashfs image was mounted rather than extracted: unmount.
    if format == "squashfs" and on_linux:
        cmd = ["fusermount", "-u", spill_dir]
        subprocess.check_output(cmd)
210 |
211 |
def test_n_threads():
    """Validate thread-count parsing: -1 means all cores, <1 is rejected."""
    assert _parse_n_threads(-1) == cpu_count()
    assert _parse_n_threads(40) == 40

    for bad in (-10, 0):
        with pytest.raises(CondaPackException):
            _parse_n_threads(bad)
219 |
220 |
@pytest.mark.parametrize('format', ['tar.gz', 'tar.bz2', 'tar.xz'])
def test_format_parallel(tmpdir, format, root_and_paths):
    """Round-trip the compressed tar formats with n_threads=2 and verify
    the worker threads shut down afterwards."""
    # Python 2's bzip doesn't support reading multipart files :(
    if format == 'tar.bz2' and PY2:
        if on_win or not has_tar_cli():
            pytest.skip("Unable to test parallel bz2 support on this platform")
        use_cli_to_extract = True
    else:
        use_cli_to_extract = False

    root, paths = root_and_paths

    out_path = join(str(tmpdir), 'test.' + format)
    out_dir = join(str(tmpdir), 'test')
    os.mkdir(out_dir)

    # Record the thread count before archiving so we can verify cleanup
    baseline = threading.active_count()
    with open(out_path, mode='wb') as fil:
        with archive(fil, out_path, '', format, n_threads=2) as arc:
            for rel in paths:
                arc.add(join(root, rel), rel)
    # Poll until the compression threads exit (up to ~5 seconds)
    timeout = 5
    while threading.active_count() > baseline:
        time.sleep(0.1)
        timeout -= 0.1
        assert timeout > 0, "Threads failed to shutdown in sufficient time"

    if use_cli_to_extract:
        check_output(['tar', '-xf', out_path, '-C', out_dir])
    else:
        with tarfile.open(out_path) as out:
            out.extractall(out_dir)

    check(out_dir, links=(not on_win), root=root)
255 |
--------------------------------------------------------------------------------
/conda_pack/formats.py:
--------------------------------------------------------------------------------
1 | import errno
2 | import os
3 | import shutil
4 | import stat
5 | import struct
6 | import subprocess
7 | import tarfile
8 | import tempfile
9 | import threading
10 | import time
11 | import zipfile
12 | import zlib
13 | from contextlib import closing
14 | from functools import partial
15 | from io import BytesIO
16 | from multiprocessing.pool import ThreadPool
17 |
18 | from .compat import Queue, on_win
19 | from .core import CondaPackException
20 |
21 |
22 | def _parse_n_threads(n_threads=1):
23 | if n_threads == -1:
24 | from multiprocessing import cpu_count
25 | return cpu_count()
26 | if n_threads < 1:
27 | raise CondaPackException("n-threads must be >= 1, or -1 for all cores")
28 | return n_threads
29 |
30 |
def archive(fileobj, path, arcroot, format, compress_level=4, zip_symlinks=False,
            zip_64=True, n_threads=1, verbose=False):
    """Create the archive writer appropriate for ``format``.

    ``fileobj`` is the open output file; ``path`` its filesystem path;
    ``arcroot`` the path prefix entries get inside the archive.  Returns a
    context-manager archive object (ZipArchive, SquashFSArchive, or
    TarArchive, the latter optionally wrapping ``fileobj`` in a parallel
    compressor when ``n_threads > 1``).
    """
    n_threads = _parse_n_threads(n_threads)

    if format == 'zip':
        return ZipArchive(fileobj, arcroot, compresslevel=compress_level,
                          zip_symlinks=zip_symlinks, zip_64=zip_64)

    if format == "squashfs":
        return SquashFSArchive(fileobj, path, arcroot, n_threads, verbose=verbose,
                               compress_level=compress_level)

    # Remaining formats are tar-based.  Map each compressed variant to its
    # serial tarfile mode and the matching parallel writer class; anything
    # else (i.e. 'tar') is an uncompressed tar.
    tar_variants = {
        'tar.gz': ('w:gz', ParallelGzipFileWriter),
        'tgz': ('w:gz', ParallelGzipFileWriter),
        'parcel': ('w:gz', ParallelGzipFileWriter),
        'tar.bz2': ('w:bz2', ParallelBZ2FileWriter),
        'tbz2': ('w:bz2', ParallelBZ2FileWriter),
        'tar.xz': ('w:xz', ParallelXZFileWriter),
        'txz': ('w:xz', ParallelXZFileWriter),
    }
    serial_mode, writer_cls = tar_variants.get(format, ('w', None))

    if writer_cls is not None and n_threads > 1:
        # Compress on a thread pool; the wrapper owns the compressed stream
        # and must be closed by the TarArchive when done.
        mode = 'w'
        close_file = True
        fileobj = writer_cls(fileobj, compresslevel=compress_level,
                             n_threads=n_threads)
    else:
        # Single-threaded (or plain tar): let tarfile handle compression.
        mode = serial_mode
        close_file = False

    return TarArchive(fileobj, arcroot, close_file=close_file,
                      mode=mode, compresslevel=compress_level)
76 |
77 |
class ParallelFileWriter:
    """Base class for file-like writers that compress blocks on a thread pool.

    Subclasses supply the format-specific hooks used here but not defined
    here: ``_block_size``, ``_init_state``, ``_write_header``,
    ``_new_compressor``, ``_flush_compressor``, ``_write_footer`` and
    ``_per_buffer_op``.

    Incoming ``write`` calls are buffered; once ``_block_size`` bytes
    accumulate, the buffers are queued, compressed in parallel by the pool
    (each block with its own compressor), and written to ``fileobj`` in
    submission order by a single consumer thread.
    """
    def __init__(self, fileobj, compresslevel=9, n_threads=1):
        self.fileobj = fileobj
        self.compresslevel = compresslevel
        self.n_threads = n_threads

        # Initialize file state
        self.size = 0
        self._init_state()
        self._write_header()

        # Parallel initialization
        self.buffers = []
        self.buffer_length = 0

        self.pool = ThreadPool(n_threads)
        # Bounded queue applies back-pressure on fast producers
        self.compress_queue = Queue(maxsize=n_threads)

        self._consumer_thread = threading.Thread(target=self._consumer)
        self._consumer_thread.daemon = True
        self._consumer_thread.start()

    def tell(self):
        # Number of *uncompressed* bytes accepted so far
        return self.size

    def write(self, data):
        if not isinstance(data, bytes):
            data = memoryview(data)
        n = len(data)
        if n > 0:
            # Format-specific running state (e.g. a checksum) per buffer
            self._per_buffer_op(data)
            self.size += n
            self.buffer_length += n
            self.buffers.append(data)
            # Hand a full block to the compression pipeline
            if self.buffer_length > self._block_size:
                self.compress_queue.put(self.buffers)
                self.buffers = []
                self.buffer_length = 0
        return n

    def _consumer(self):
        # imap preserves submission order, so compressed blocks are written
        # out in the same order the data arrived; None sentinel ends it.
        with closing(self.pool):
            for buffers in self.pool.imap(
                    self._compress, iter(self.compress_queue.get, None)):
                for buf in buffers:
                    if len(buf):
                        self.fileobj.write(buf)

    def _compress(self, in_bufs):
        # Runs on a pool worker: compress one block with a fresh compressor
        out_bufs = []
        compressor = self._new_compressor()
        for data in in_bufs:
            out_bufs.append(compressor.compress(data))
        out_bufs.append(self._flush_compressor(compressor))
        return out_bufs

    def close(self):
        # Idempotent: subsequent calls are no-ops
        if self.fileobj is None:
            return

        # Flush any waiting buffers
        if self.buffers:
            self.compress_queue.put(self.buffers)

        # Wait for all work to finish
        self.compress_queue.put(None)
        self._consumer_thread.join()

        # Write the closing bytes
        self._write_footer()

        # Flush fileobj
        self.fileobj.flush()

        # Cache shutdown state
        self.compress_queue = None
        self.pool = None
        self.fileobj = None
156 |
157 |
158 | class ParallelGzipFileWriter(ParallelFileWriter):
159 | # Since it's hard for us to keep a running dictionary (a serial operation)
160 | # with parallel compression of blocks, we use a blocksize > a few factors
161 | # bigger than the max dict size (32 KiB). In practice this is fine - we
162 | # only lose out by a small factor of unneeded redundancy, and real files
163 | # often lack enough redundant byte sequences to make this significant. Pigz
164 | # uses 128 KiB, but does more work to keep a running dict.
165 | _block_size = 256 * 2**10
166 |
167 | def _init_state(self):
168 | self.crc = zlib.crc32(b"") & 0xffffffff
169 |
170 | def _new_compressor(self):
171 | return zlib.compressobj(self.compresslevel, zlib.DEFLATED,
172 | -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0)
173 |
174 | def _per_buffer_op(self, buffer):
175 | self.crc = zlib.crc32(buffer, self.crc) & 0xffffffff
176 |
177 | def _write32u(self, value):
178 | self.fileobj.write(struct.pack("=4.14 or squashfuse>=0.1.101 (compiled with zstd) " \
423 | "for mounting.\nTo support older systems, compress with " \
424 | "`xz` (--compress-level 9) instead."
425 | print(s)
426 | else:
427 | cmd.append("-no-progress")
428 | subprocess.check_call(cmd)
429 |
430 | def _absolute_path(self, path):
431 | return os.path.normpath(os.path.join(self._staging_dir, path))
432 |
433 | def _ensure_parent(self, path):
434 | dir_path = os.path.dirname(path)
435 | os.makedirs(dir_path, exist_ok=True)
436 |
437 | def _add(self, source, target):
438 | target_abspath = self._absolute_path(target)
439 | self._ensure_parent(target_abspath)
440 |
441 | # hardlink instead of copy is faster, but it doesn't work across devices
442 | source_stat = os.lstat(source)
443 | target_stat = os.lstat(os.path.dirname(target_abspath))
444 | same_device = source_stat.st_dev == target_stat.st_dev
445 | same_user = source_stat.st_uid == target_stat.st_uid
446 |
447 | if same_device and same_user:
448 | copy_func = partial(os.link, follow_symlinks=False)
449 | else:
450 | copy_func = partial(shutil.copy2, follow_symlinks=False)
451 |
452 | # we overwrite if the same `target` is added twice
453 | # to be consistent with the tar-archive implementation
454 | if os.path.lexists(target_abspath):
455 | os.remove(target_abspath)
456 |
457 | if os.path.isdir(source) and not os.path.islink(source):
458 | # directories we add through copying the tree
459 | shutil.copytree(source,
460 | target_abspath,
461 | symlinks=True,
462 | copy_function=copy_func)
463 | else:
464 | # files & links to directories we copy directly
465 | copy_func(source, target_abspath)
466 |
467 | def _add_bytes(self, source, sourcebytes, target):
468 | target_abspath = self._absolute_path(target)
469 | self._ensure_parent(target_abspath)
470 | with open(target_abspath, "wb") as f:
471 | shutil.copystat(source, target_abspath)
472 | f.write(sourcebytes)
473 |
--------------------------------------------------------------------------------
/HOW_WE_USE_GITHUB.md:
--------------------------------------------------------------------------------
1 |
2 | [conda-org]: https://github.com/conda
3 | [sub-team]: https://github.com/conda-incubator/governance#sub-teams
4 |
5 | [project-planning]: https://github.com/orgs/conda/projects/2/views/11
6 | [project-sorting]: https://github.com/orgs/conda/projects/2/views/11
7 | [project-support]: https://github.com/orgs/conda/projects/2/views/12
8 | [project-backlog]: https://github.com/orgs/conda/projects/2/views/13
9 | [project-in-progress]: https://github.com/orgs/conda/projects/2/views/14
10 |
11 | [docs-toc]: https://github.blog/changelog/2021-04-13-table-of-contents-support-in-markdown-files/
12 | [docs-actions]: https://docs.github.com/en/actions
13 | [docs-saved-reply]: https://docs.github.com/en/get-started/writing-on-github/working-with-saved-replies/creating-a-saved-reply
14 | [docs-commit-signing]: https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits
15 |
16 | [infrastructure]: https://github.com/conda/infrastructure
17 | [workflow-sync]: https://github.com/conda/infrastructure/blob/main/.github/workflows/sync.yml
18 | [labels-global]: https://github.com/conda/infrastructure/blob/main/.github/global.yml
19 |
20 |
21 | [workflow-cla]: /.github/workflows/cla.yml
22 | [workflow-issues]: /.github/workflows/issues.yml
23 | [workflow-labels]: /.github/workflows/labels.yml
24 | [workflow-lock]: /.github/workflows/lock.yml
25 | [workflow-project]: /.github/workflows/project.yml
26 | [workflow-stale]: /.github/workflows/stale.yml
27 | [labels-local]: /.github/labels.yml
28 | [labels-page]: ../../labels
29 |
30 | # How We Use GitHub
31 |
32 | This document seeks to outline how we as a community use GitHub Issues to track bugs and feature requests while still catering to development practices & project management (_e.g._, release cycles, feature planning, priority sorting, etc.).
33 |
34 |
35 | **Topics:**
36 |
37 | - [What is "Issue Sorting"?](#what-is-issue-sorting)
38 | - [Issue Sorting Procedures](#issue-sorting-procedures)
39 | - [Commit Signing](#commit-signing)
40 | - [Types of Issues](#types-of-issues)
41 | - [Standard Issue](#standard-issue)
42 | - [Epics](#epics)
43 | - [Spikes](#spikes)
44 | - [Working on Issues](#working-on-issues)
45 |
46 | > [!NOTE]
47 | > This document is written in the style of an FAQ. For easier navigation, use [GitHub's table of contents feature][docs-toc].
48 |
49 | ## What is "Issue Sorting"?
50 |
51 | > [!NOTE]
52 | > "Issue sorting" is similar to that of "triaging", but we've chosen to use different terminology because "triaging" is a word related to very weighty topics (_e.g._, injuries and war) and we would like to be sensitive to those connotations. Additionally, we are taking a more "fuzzy" approach to sorting (_e.g._, severities may not be assigned, etc.).
53 |
54 | "Issue Sorting" refers to the process of assessing the priority of incoming issues. Below is a high-level diagram of the flow of issues:
55 |
56 | ```mermaid
57 | flowchart LR
58 | subgraph flow_sorting [Issue Sorting]
59 | board_sorting{{Sorting}}
60 | board_support{{Support}}
61 |
62 | board_sorting<-->board_support
63 | end
64 |
65 | subgraph flow_refinement [Refinement]
66 | board_backlog{{Backlog}}
67 |
68 | board_backlog-- refine -->board_backlog
69 | end
70 |
71 | subgraph flow_progress [In Progress]
72 | board_progress{{In Progress}}
73 | end
74 |
75 | state_new(New Issues)
76 | state_closed(Closed)
77 |
78 | state_new-->board_sorting
79 | board_sorting-- investigated -->board_backlog
80 | board_sorting-- duplicates, off-topic -->state_closed
81 | board_support-- resolved, unresponsive -->state_closed
82 | board_backlog-- pending work -->board_progress
83 | board_backlog-- resolved, irrelevant -->state_closed
84 | board_progress-- resolved -->state_closed
85 | ```
86 |
87 | ### Why sort issues?
88 |
89 | At the most basic "bird's eye view" level, sorted issues will fall into the category of four main priority levels:
90 |
91 | - Do now
92 | - Do sometime
93 | - Provide user support
94 | - Never do (_i.e._, close)
95 |
96 | At its core, sorting enables new issues to be placed into these four categories, which helps to ensure that they will be processed at a velocity similar to or exceeding the rate at which new issues are coming in. One of the benefits of actively sorting issues is to avoid engineer burnout and to make necessary work sustainable; this is done by eliminating a never-ending backlog that has not been reviewed by any maintainers.
97 |
98 | There will always be broad-scope design and architecture implementations that the maintainers will be interested in pursuing; by actively organizing issues, the sorting engineers will be able to more easily track and tackle both specific and big-picture goals.
99 |
100 | ### Who does the sorting?
101 |
102 | Sorting engineers are a conda governance [sub-team][sub-team]; they are a group of community members who are responsible for making decisions regarding closing issues and setting feature work priorities, among other sorting-related tasks.
103 |
104 | ### How do items show up for sorting?
105 |
106 | New issues that are opened in any of the repositories in the [conda GitHub organization][conda-org] will show up in the "Sorting" tab of the [Planning project][project-planning]. There are two [GitHub Actions][docs-actions] workflows utilized for this purpose; [`.github/workflows/issues.yml`][workflow-issues] and [`.github/workflows/project.yml`][workflow-project].
107 |
108 | The GitHub Actions in the [`conda/infrastructure`][infrastructure] repository are viewed as canonical; the [`.github/workflows/sync.yml` workflow][workflow-sync] sends out any modifications to other `conda` repositories from there.
109 |
110 | ### What is done about the issues in the "Sorting" tab?
111 |
112 | Issues in the ["Sorting" tab of the project board][project-sorting] are considered ready for the following procedures:
113 |
114 | - Mitigation via short-term workarounds and fixes
115 | - Redirection to the correct project
116 | - Determining if support can be provided for errors and questions
117 | - Closing out of any duplicate/off-topic issues
118 |
119 | The sorting engineers on rotation are not seeking to _resolve_ issues that arise. Instead, the goal is to understand the issue and to determine whether it is legitimate, and then to collect as much relevant information as possible so that the maintainers can make an informed decision about the appropriate resolution schedule.
120 |
121 | Issues will remain in the ["Sorting" tab][project-sorting] as long as the issue is in an investigatory phase (_e.g._, querying the user for more details, asking the user to attempt other workarounds, other debugging efforts, etc.) and are likely to remain in this state the longest, but should still be progressing over the course of 1-2 weeks.
122 |
123 | For more information on the sorting process, see [Issue Sorting Procedures](#issue-sorting-procedures).
124 |
125 | ### When do items move out of the "Sorting" tab?
126 |
127 | Items move out of the ["Sorting" tab][project-sorting] once the investigatory phase described in [What is done about the issues in the "Sorting" tab?](#what-is-done-about-the-issues-in-the-sorting-tab) has concluded and the sorting engineer has enough information to make a decision about the appropriate resolution schedule for the issue. The additional tabs in the project board that the issues can be moved to include the following:
128 |
129 | - **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. Add the [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) label to move an issue to this tab.
130 | - **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. Add the [`backlog`](https://github.com/conda/infrastructure/labels/backlog) label to move an issue to this tab.
131 | - **"Closed"** - The issue was closed due to being a duplicate, being redirected to a different project, was a user error, a question that has been resolved, etc.
132 |
133 | ### Where do work issues go after being sorted?
134 |
135 | Once issues are deemed ready to be worked on, they will be moved to the ["Backlog" tab of the Planning board][project-backlog]. Once actively in progress, the issues will be moved to the ["In Progress" tab of the Planning board][project-in-progress] and then closed out once the work is complete.
136 |
137 | ### What is the purpose of having a "Backlog"?
138 |
139 | Issues are "backlogged" when they have been sorted but not yet earmarked for an upcoming release.
140 |
141 | ### What automation procedures are currently in place?
142 |
143 | Global automation procedures synced out from the [`conda/infrastructure`][infrastructure] repo include:
144 |
145 | - [Marking of issues and pull requests as stale][workflow-stale], resulting in:
146 | - issues marked as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 21 days of inactivity and being closed after 7 further days of inactivity (that is, closed after 30 inactive days total)
147 |   - all other inactive issues (not labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport)) being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity)
148 | - all inactive pull requests being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity)
149 | - [Locking of closed issues and pull requests with no further activity][workflow-lock] after 365 days
150 | - [Adding new issues and pull requests to the respective project boards][workflow-project]
151 | - [Indicating an issue is ready for the sorting engineer's attention][workflow-issues] by toggling [`pending::feedback`](https://github.com/conda/infrastructure/labels/pending%3A%3Afeedback) with [`pending::support`](https://github.com/conda/infrastructure/labels/pending%3A%3Asupport) after a contributor leaves a comment
152 | - [Verifying that contributors have signed the CLA][workflow-cla] before allowing pull requests to be merged; if the contributor hasn't signed the CLA previously, merging is blocked until a manual review can be done
153 | - [Syncing out templates, labels, workflows, and documentation][workflow-sync] from [`conda/infrastructure`][infrastructure] to the other repositories
154 |
155 | ## Issue Sorting Procedures
156 |
157 | ### How are issues sorted?
158 |
159 | Issues in the ["Sorting" tab of the Planning board][project-sorting] are reviewed by issue sorting engineers, who take rotational sorting shifts. In the process of sorting issues, engineers label the issues and move them to the other tabs of the project board for further action.
160 |
161 | Issues that require input from multiple members of the sorting team will be brought up during refinement meetings in order to understand how those particular issues fit into the short- and long-term roadmap. These meetings enable the sorting engineers to get together to collectively prioritize issues, earmark feature requests for specific future releases (versus a more open-ended backlog), tag issues as ideal for first-time contributors, as well as whether or not to close/reject specific feature requests.
162 |
163 | ### How does labeling work?
164 |
165 | Labeling is a very important means for sorting engineers to keep track of the current state of an issue with regards to the asynchronous nature of communicating with users. Utilizing the proper labels helps to identify the severity of the issue as well as to quickly understand the current state of a discussion.
166 |
167 | Each label has an associated description that clarifies how the label should be used. Hover on the label to see its description. Label colors are used to distinguish labels by category.
168 |
169 | Generally speaking, labels with the same category are considered mutually exclusive, but in some cases labels sharing the same category can occur concurrently, as they indicate qualifiers as opposed to types. For example, we may have the following types, [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%3Abug), [`type::feature`](https://github.com/conda/infrastructure/labels/type%3A%3Afeature), and [`type::documentation`](https://github.com/conda/infrastructure/labels/type%3A%3Adocumentation), where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue should not be a bug _and_ a feature request at the same time). Alternatively, with issues involving specific operating systems (_i.e._, [`os::linux`](https://github.com/conda/infrastructure/labels/os%3A%3Alinux), [`os::macos`](https://github.com/conda/infrastructure/labels/os%3A%3Amacos), and [`os::windows`](https://github.com/conda/infrastructure/labels/os%3A%3Awindows)), an issue could be labeled with one or more, depending on the system(s) the issue occurs on.
170 |
171 | Please note that there are also automation policies in place that are affected by labeling. For example, if an issue is labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport), that issue will be marked [`stale`](https://github.com/conda/infrastructure/labels/stale) after 21 days of inactivity and auto-closed after seven more days without activity (30 inactive days total), which is earlier than issues without this label. See [What automation procedures are currently in place?](#what-automation-procedures-are-currently-in-place) for more details.
172 |
173 | ### What labels are required for each issue?
174 |
175 | At minimum, both `type` and `source` labels should be specified on each issue before moving it from the "Sorting" tab to the "Backlog" tab. All issues that are bugs should also be tagged with a `severity` label.
176 |
177 | The `type` labels are exclusive of each other: each sorted issue should have exactly one `type` label. These labels give high-level information on the issue's classification (_e.g._, bug, feature, tech debt, etc.)
178 |
179 | The `source` labels are exclusive of each other: each sorted issue should have exactly one `source` label. These labels give information on the sub-group to which the issue's author belongs (_e.g._, a partner, a frequent contributor, the wider community, etc.). Through these labels, maintainers gain insight into how well we're meeting the needs of various groups.
180 |
181 | The `severity` labels are exclusive of each other and, while required for the [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%3Abug) label, they can also be applied to other types to indicate demand or need. These labels help us to prioritize our work. Severity is not the only factor for work prioritization, but it is an important consideration.
182 |
183 | Please review the descriptions of the `type`, `source`, and `severity` labels on the [labels page][labels-page] prior to use.
184 |
185 | ### How are new labels defined?
186 |
187 | Labels are defined using a scoped syntax with an optional high-level category (_e.g._, `source`, `tag`, `type`, etc.) and a specific topic, much like the following:
188 |
189 | - `[topic]`
190 | - `[category::topic]`
191 | - `[category::topic-phrase]`
192 |
193 | This syntax helps with issue sorting enforcement, as it helps to ensure that sorted issues are, at minimum, categorized by type and source.
194 |
195 | There are a number of labels that have been defined for the different repositories. In order to create a streamlined sorting process, label terminologies are standardized using similar (if not the same) labels.
196 |
197 | ### How are new labels added?
198 |
199 | New **global** labels (_i.e._, labels that apply equally to all repositories within the conda GitHub organization) are added to [`conda/infrastructure`][infrastructure]'s [`.github/global.yml` file][labels-global]; new **local** labels (_i.e._, labels specific to particular repositories) are added to each repository's [`.github/labels.yml` file][labels-local]. All new labels should follow the labeling syntax described in ["How are new labels defined?"](#how-are-new-labels-defined). Global labels are combined with any local labels and these aggregated labels are used by the [`.github/workflows/labels.yml` workflow][workflow-labels] to synchronize the labels available for the repository.
200 |
201 | ### Are there any templates to use as responses for commonly-seen issues?
202 |
203 | Some of the same types of issues appear regularly (_e.g._, issues that are duplicates of others, issues that should be filed in the Anaconda issue tracker, errors that are due to a user's specific setup/environment, etc.).
204 |
205 | Below are some boilerplate responses for the most commonly-seen issues to be sorted:
206 |
207 |
208 | Duplicate Issue
209 |
210 |
211 |
212 |
213 | This is a duplicate of [link to primary issue]; please feel free to continue the discussion there.
214 |
215 |
216 | > [!WARNING]
217 | > Apply the https://github.com/conda/infrastructure/labels/duplicate label to the issue being closed and https://github.com/conda/infrastructure/labels/duplicate%3A%3Aprimary to the original issue.
218 |
219 |
220 |
221 |
222 | Requesting an Uninstall/Reinstall of conda
223 |
224 |
225 |
226 |
227 | Please uninstall your current version of `conda` and reinstall the latest version.
228 | Feel free to use either the [miniconda](https://docs.conda.io/en/latest/miniconda.html)
229 | or [anaconda](https://www.anaconda.com/products/individual) installer,
230 | whichever is more appropriate for your needs.
231 |
232 |
233 |
234 |
235 |
236 | Redirect to Anaconda Issue Tracker
237 |
238 |
239 |
240 |
241 | Thank you for filing this issue! Unfortunately, this is off-topic for this repo.
242 | If you are still encountering this issue please reopen in the
243 | [Anaconda issue tracker](https://github.com/ContinuumIO/anaconda-issues/issues)
244 | where `conda` installer/package issues are addressed.
245 |
246 |
247 | > [!WARNING]
248 | > Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out.
249 |
250 |
251 |
252 |
253 | Redirecting to Nucleus Forums
254 |
255 |
256 |
257 |
258 | Unfortunately, this issue is outside the scope of support we offer via GitHub;
259 | if you continue to experience the problems described here,
260 | please post details to the [Nucleus forums](https://community.anaconda.cloud/).
261 |
262 |
263 | > [!WARNING]
264 | > Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out.
265 |
266 |
267 |
268 |
269 | Slow solving of conda environment
270 |
271 |
272 |
273 | Hi [@username],
274 |
275 | Thanks for voicing your concern about the performance of the classic dependency solver. To fix this, our official recommendation is using the new default "conda-libmamba-solver" instead of the classic solver (more information about the "conda-libmamba-solver" can be found here: https://conda.github.io/conda-libmamba-solver/getting-started/).
276 |
277 | In most cases "conda-libmamba-solver" should be significantly faster than the "classic" solver. We hope it provides you with a much better experience going forward.
278 |
279 |
280 |
281 |
282 |
283 | In order to not have to manually type or copy/paste the above repeatedly, note that it's possible to add text for the most commonly-used responses via [GitHub's "Add Saved Reply" option][docs-saved-reply].
284 |
285 | ## Commit Signing
286 |
287 | For all conda maintainers, we require commit signing and strongly recommend it for all others wishing to contribute to conda
288 | related projects. More information about how to set this up within GitHub can be found here:
289 |
290 | - [GitHub's signing commits docs][docs-commit-signing]
291 |
292 | ## Types of Issues
293 |
294 | ### Standard Issue
295 |
296 | TODO
297 |
298 | ### Epics
299 |
300 | TODO
301 |
302 | ### Spikes
303 |
304 | #### What is a spike?
305 |
306 | "Spike" is a term that is borrowed from extreme programming and agile development. They are used when the **outcome of an issue is unknown or even optional**. For example, when first coming across a problem that has not been solved before, a project may choose to either research the problem or create a prototype in order to better understand it.
307 |
308 | Additionally, spikes represent work that **may or may not actually be completed or implemented**. An example of this are prototypes created to explore possible solutions. Not all prototypes are implemented and the purpose of creating a prototype is often to explore the problem space more. For research-oriented tasks, the end result of this research may be that a feature request simply is not viable at the moment and would result in putting a stop to that work.
309 |
310 | Finally, spikes are usually **timeboxed**. However, given the open source/volunteer nature of our contributions, we do not enforce this for our contributors. When a timebox is set, this means that we are limiting how long we want someone to work on said spike. We do this to prevent contributors from falling into a rabbit hole they may never return from. Instead, we set a time limit to perform work on the spike and then have the assignee report back. If the tasks defined in the spike have not yet been completed, a decision is made on whether it makes sense to perform further work on the spike.
311 |
312 | #### When do I create a spike?
313 |
314 | A spike should be created when we do not have enough information to move forward with solving a problem. That simply means that, whenever we are dealing with unknowns or processes the project team has never encountered before, it may be useful for us to create a spike.
315 |
316 | In day-to-day work, this kind of situation may appear when new bug reports or feature requests come in that deal with problems or technologies that the project team is unfamiliar with. All issues that the project team has sufficient knowledge of should instead proceed as regular issues.
317 |
318 | #### When do I not create a spike?
319 |
320 | Below are some common scenarios where creating a spike is not appropriate:
321 |
322 | - Writing a technical specification for a feature we know how to implement
323 | - Design work that would go into drafting how an API is going to look and function
324 | - Any work that must be completed or is not optional
325 |
326 | ## Working on Issues
327 |
328 | ### How do I assign myself to an issue I am actively reviewing?
329 |
330 | If you do **not** have permissions, please indicate that you are working on an issue by leaving a comment. Someone who has permissions will assign you to the issue. If two weeks have passed without a pull request or an additional comment requesting information, you may be removed from the issue and the issue reassigned.
331 |
332 | If you are assigned to an issue but will not be able to continue work on it, please comment to indicate that you will no longer be working on it and press `unassign me` next to your username in the `Assignees` section of the issue page (top right).
333 |
334 | If you **do** have permissions, please assign yourself to the issue by pressing `assign myself` under the `Assignees` section of the issue page (top right).
335 |
--------------------------------------------------------------------------------
/conda_pack/tests/test_core.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import re
4 | import subprocess
5 | import tarfile
6 | from glob import glob
7 |
8 | import pytest
9 |
10 | from conda_pack import CondaEnv, CondaPackException, pack
11 | from conda_pack.compat import load_source, on_win
12 | from conda_pack.core import BIN_DIR, File, name_to_prefix
13 |
14 | from .conftest import (
15 | activate_scripts_path,
16 | env_dir,
17 | has_conda_path,
18 | nopython_path,
19 | py37_broken_path,
20 | py37_editable_path,
21 | py37_missing_files_path,
22 | py37_path,
23 | py310_path,
24 | )
25 |
# Lower-cased bin directory name, for case-insensitive path comparisons.
BIN_DIR_L = BIN_DIR.lower()
# Platform-specific site-packages subdirectory of the py37 test environment.
SP_37 = "Lib\\site-packages" if on_win else "lib/python3.7/site-packages"
# Normalized (lower-case, forward-slash) variant of SP_37 for comparisons.
SP_37_L = SP_37.lower().replace("\\", "/")
29 |
30 |
def normpath(f):
    """Normalize a path for comparison.

    On Windows this lower-cases the path and converts backslashes to
    forward slashes; elsewhere the path is returned unchanged.
    """
    if not on_win:
        return f
    return os.path.normcase(f).replace('\\', '/')
37 |
38 |
39 | @pytest.fixture(scope="module")
40 | def py37_env():
41 | return CondaEnv.from_prefix(py37_path)
42 |
43 |
@pytest.fixture
def bad_conda_exe(tmpdir_factory, monkeypatch):
    """Put a fake, always-failing ``conda`` executable first on PATH and
    clear ``CONDA_EXE`` so name lookups must use the broken binary."""
    bin_dir = str(tmpdir_factory.mktemp('bin'))
    exe_name = 'conda.bat' if on_win else 'conda'
    script = 'ECHO Failed\r\nEXIT /B 1' if on_win else 'echo "Failed"\nexit 1'
    fake_conda = os.path.join(bin_dir, exe_name)
    with open(fake_conda, 'w') as f:
        f.write(script)
    # Mark the stub executable for POSIX shells.
    os.chmod(fake_conda, os.stat(fake_conda).st_mode | 0o111)

    monkeypatch.setenv('PATH', bin_dir, prepend=os.pathsep)
    monkeypatch.delenv('CONDA_EXE', raising=False)
54 |
55 |
def test_name_to_prefix():
    """The default env name resolves; a bogus name raises."""
    name_to_prefix()  # smoketest on the default name

    with pytest.raises(CondaPackException):
        name_to_prefix("this_is_probably_not_a_real_env_name")
62 |
63 |
def test_from_prefix():
    """from_prefix accepts relative paths and validates the prefix."""
    relative_prefix = os.path.relpath(py37_path, os.getcwd())
    env = CondaEnv.from_prefix(relative_prefix)
    assert len(env)
    # The relative path is normalized back to the absolute prefix.
    assert os.path.normcase(env.prefix) == os.path.normcase(py37_path)

    # A nonexistent path is rejected.
    with pytest.raises(CondaPackException):
        CondaEnv.from_prefix(os.path.join(env_dir, "this_path_doesnt_exist"))

    # An existing directory that isn't a conda environment is rejected.
    with pytest.raises(CondaPackException):
        CondaEnv.from_prefix(os.path.join(env_dir))
78 |
79 |
def test_missing_package_cache():
    """A missing package cache warns by default and raises when asked to."""
    with pytest.warns(UserWarning) as warnings_seen:
        env = CondaEnv.from_prefix(py310_path)
    assert len(env)

    assert len(warnings_seen) == 1
    warning_text = str(warnings_seen[0].message)
    assert 'conda_pack_test_lib2' in warning_text

    with pytest.raises(CondaPackException):
        CondaEnv.from_prefix(py310_path, on_missing_cache="raise")
92 |
93 |
def test_errors_editable_packages():
    """Editable (pip -e) installs cannot be packed and raise by default."""
    with pytest.raises(CondaPackException) as exc_info:
        CondaEnv.from_prefix(py37_editable_path)

    assert "Editable packages found" in str(exc_info.value)
99 |
100 |
def test_ignore_errors_editable_packages():
    # The editable-package check can be explicitly disabled; this must not raise.
    CondaEnv.from_prefix(py37_editable_path, ignore_editable_packages=True)
103 |
104 |
def test_errors_when_target_directory_not_exists_and_not_force(tmpdir, py37_env):
    """Without force=True, packing into a missing directory is an error."""
    missing_dir = os.path.join(tmpdir, "not_a_real_directory/")
    assert not os.path.exists(missing_dir)

    output = os.path.join(missing_dir, "env.tar.gz")
    with pytest.raises(CondaPackException) as exc_info:
        py37_env.pack(output=output, force=False)

    assert "not_a_real_directory" in str(exc_info.value)
116 |
117 |
def test_creates_directories_if_missing_and_force(tmpdir, py37_env):
    """With force=True, missing output directories are created."""
    missing_dir = os.path.join(tmpdir, "not_a_real_directory/")
    assert not os.path.exists(missing_dir)

    py37_env.pack(output=os.path.join(missing_dir, "env.tar.gz"), force=True)
    assert os.path.exists(missing_dir)
128 |
129 |
def test_errors_pip_overwrites():
    """Pip packages overwriting conda-managed files produce a helpful error."""
    with pytest.raises(CondaPackException) as exc_info:
        CondaEnv.from_prefix(py37_broken_path)

    message = str(exc_info.value)
    assert "pip" in message
    assert "toolz" in message
137 |
138 |
def test_missing_files():
    """Files recorded in package metadata but absent on disk are reported."""
    with pytest.raises(CondaPackException) as excinfo:
        CondaEnv.from_prefix(py37_missing_files_path)

    message = str(excinfo.value)
    for missing in ("__init__.py", "_signatures.py"):
        assert f"{os.sep}toolz{os.sep}{missing}" in message, message
146 |
147 |
def test_missing_files_ignored(tmpdir):
    """ignore_missing_files=True lets a damaged environment pack anyway."""
    env = CondaEnv.from_prefix(py37_missing_files_path, ignore_missing_files=True)
    env.pack(os.path.join(str(tmpdir), "py37_missing.tar"))
153 |
154 |
def test_errors_conda_missing(bad_conda_exe):
    """A usable conda executable is required to resolve environment names."""
    with pytest.raises(CondaPackException) as excinfo:
        CondaEnv.from_name('probably_fake_env')
    assert 'Failed to determine path to environment' in str(excinfo.value)
160 |
161 |
def test_env_properties(py37_env):
    """Basic CondaEnv surface: name, prefix, len, iteration, repr."""
    assert py37_env.name == "py37"
    assert py37_env.prefix == py37_path

    # len() and iteration both reflect the collected file list
    n_files = len(py37_env.files)
    assert len(py37_env) == n_files
    assert len(list(py37_env)) == n_files

    # repr shouldn't blow up and should identify the class
    assert "CondaEnv<" in repr(py37_env)
174 |
175 |
def test_load_environment_ignores(py37_env):
    """conda and (de)activate scripts from the env itself are excluded."""
    by_target = {normpath(f.target): f for f in py37_env}

    # The conda entry points must never be included
    for name in ("conda", "conda.bat"):
        assert f"{BIN_DIR_L}/{name}" not in by_target

    # (De)activate scripts are only allowed if sourced from outside the env
    for name in ("activate", "activate.bat", "deactivate", "deactivate.bat"):
        key = f"{BIN_DIR_L}/{name}"
        if key in by_target:
            assert not by_target[key].source.startswith(py37_path)
183 |
184 |
def test_file():
    """File objects have a usable repr (smoketest only)."""
    repr(File('/root/path/to/foo/bar', 'foo/bar'))
189 |
190 |
def test_loaded_file_properties(py37_env):
    """Spot-check File metadata for pip, noarch, conda, and plain files."""
    by_target = {normpath(f.target): f for f in py37_env}
    exe = ".exe" if on_win else ""

    # Entry point installed by pip: not conda-managed, mode undetermined
    f = by_target[f"{BIN_DIR_L}/pytest{exe}"]
    assert not f.is_conda
    assert f.file_mode == 'unknown'
    assert f.prefix_placeholder is None

    # Entry point from a conda noarch package: text file with a placeholder
    f = by_target[f"{BIN_DIR_L}/conda-pack-test-lib1"]
    assert f.is_conda
    assert f.file_mode == 'text'
    assert f.prefix_placeholder != py37_env.prefix

    # Entry point from a regular conda package
    script = "-script.py" if on_win else ""
    f = by_target[f"{BIN_DIR_L}/conda-pack-test-lib2{script}"]
    assert f.is_conda
    assert f.file_mode == 'text'
    assert f.prefix_placeholder != py37_env.prefix

    # Ordinary conda-installed file: no prefix handling needed
    f = by_target[f"{SP_37_L}/conda_pack_test_lib1/cli.py"]
    assert f.is_conda
    assert f.file_mode is None
    assert f.prefix_placeholder is None
219 |
220 |
def test_works_with_no_python():
    """Environments without python can still be collected (non-empty)."""
    assert len(CondaEnv.from_prefix(nopython_path))
226 |
227 |
def test_include_exclude(py37_env):
    """exclude/include return new filtered envs without mutating the source."""
    n_original = len(py37_env)

    no_pyc = py37_env.exclude("*.pyc")
    # The source env is untouched; a distinct, smaller env comes back
    assert len(py37_env) == n_original
    assert no_pyc is not py37_env
    assert len(no_pyc) < n_original

    # Re-including the excluded pattern restores the full file set
    assert len(no_pyc.include("*.pyc")) == n_original

    # include after exclude re-adds only the matching files
    lib_glob = os.path.join(SP_37, "conda_pack_test_lib1", "*")
    without_lib = no_pyc.exclude(lib_glob)
    with_cli = without_lib.include(
        os.path.join(SP_37, "conda_pack_test_lib1", "cli.py"))
    assert len(without_lib) + 1 == len(with_cli)
242 |
243 |
def test_output_and_format(py37_env):
    """_output_and_format infers one of output/format from the other."""
    # Defaults: named after the env, tar.gz format
    default_out, default_fmt = py37_env._output_and_format()
    assert default_out == "py37.tar.gz"
    assert default_fmt == "tar.gz"

    for fmt in ("tar.gz", "tar.bz2", "tar.xz", "tar", "zip", "parcel"):
        expected = os.extsep.join([py37_env.name, fmt])

        # Explicit format: output inferred, except parcels name themselves
        out, inferred = py37_env._output_and_format(format=fmt)
        assert inferred == fmt
        assert out == (None if fmt == "parcel" else expected)

        # Explicit output: format inferred from the extension
        out, inferred = py37_env._output_and_format(output=expected)
        assert out == expected
        assert inferred == fmt

        # Both given: format wins, output passed through unchanged
        out, inferred = py37_env._output_and_format(output="foo.zip", format=fmt)
        assert inferred == fmt
        assert out == 'foo.zip'

    # Invalid format name
    with pytest.raises(CondaPackException):
        py37_env._output_and_format(format="foo")

    # Unrecognized output extension
    with pytest.raises(CondaPackException):
        py37_env._output_and_format(output="foo.bar")

    # Parcel output with a conflicting explicit format
    with pytest.raises(CondaPackException):
        py37_env._output_and_format(output="foo.parcel", format="zip")
272 |
273 |
def test_roundtrip(tmpdir, py37_env):
    """Pack an environment, extract it, and exercise the result end to end.

    Verifies archive hygiene (relative member paths), shebang rewriting,
    the bundled conda-unpack script, and that the activate/conda-unpack/
    deactivate cycle runs cleanly on the current platform.
    """
    out_path = os.path.join(str(tmpdir), "py37.tar")
    py37_env.pack(out_path)
    assert os.path.exists(out_path)
    assert tarfile.is_tarfile(out_path)

    with tarfile.open(out_path) as fil:
        # Check all files are relative paths
        for member in fil.getnames():
            assert not member.startswith(os.path.sep)

        extract_path = str(tmpdir.join('env'))
        fil.extractall(extract_path)

    # Shebang rewriting happens before prefixes are fixed
    textfile = os.path.join(extract_path, BIN_DIR, 'conda-pack-test-lib1')
    with open(textfile) as fil:
        shebang = fil.readline().strip()
        assert shebang == '#!/usr/bin/env python'

    # Check conda-unpack --help and --version
    # On Windows the entry point is a .exe launcher plus a -script.py body
    if on_win:
        binary_name = 'conda-unpack.exe'
        script_name = 'conda-unpack-script.py'
    else:
        binary_name = script_name = 'conda-unpack'
    conda_unpack = os.path.join(extract_path, BIN_DIR, binary_name)
    conda_unpack_script = os.path.join(extract_path, BIN_DIR, script_name)
    out = subprocess.check_output([conda_unpack, '--help'],
                                  stderr=subprocess.STDOUT).decode()
    assert out.startswith('usage: conda-unpack')

    out = subprocess.check_output([conda_unpack, '--version'],
                                  stderr=subprocess.STDOUT).decode()
    assert out.startswith('conda-unpack')

    # Check no prefix generated for python executable: import the generated
    # script as a module and inspect its _prefix_records table directly
    python_pattern = re.compile(r'bin/python\d.\d')
    conda_unpack_mod = load_source('conda_unpack', conda_unpack_script)
    pythons = [r for r in conda_unpack_mod._prefix_records
               if python_pattern.match(r[0])]
    assert not pythons

    if on_win:
        command = (r"@call {path}\Scripts\activate.bat && "
                   "conda-unpack.exe && "
                   r"call {path}\Scripts\deactivate.bat && "
                   "echo Done").format(path=extract_path)
        unpack = tmpdir.join('unpack.bat')
        unpack.write(command)
        out = subprocess.check_output(['cmd.exe', '/c', str(unpack)],
                                      stderr=subprocess.STDOUT).decode()
        assert out == 'Done\r\n'

    else:
        # Check bash scripts all don't error
        command = (". {path}/bin/activate && "
                   "conda-unpack && "
                   ". {path}/bin/deactivate && "
                   "echo 'Done'").format(path=extract_path)
        out = subprocess.check_output(['/usr/bin/env', 'bash', '-c', command],
                                      stderr=subprocess.STDOUT).decode()
        assert out == 'Done\n'
337 |
338 |
@pytest.mark.parametrize('fix_dest', (True, False))
def test_pack_with_conda(tmpdir, fix_dest):
    """Pack an environment that bundles conda itself and verify it works.

    Runs both with a fixed dest_prefix (paths rewritten at pack time) and
    without (activate scripts patched to relocate at activation time),
    then activates the unpacked environment and checks conda reports the
    extracted path as its prefix.  Also checks conda-meta has been
    anonymized.

    Fix: the Windows branch cleared ``CONDA_SHVL`` (a typo — the variable
    conda actually uses, and the one the Unix branch unsets, is
    ``CONDA_SHLVL``).
    """
    env = CondaEnv.from_prefix(has_conda_path)
    out_path = os.path.join(str(tmpdir), 'has_conda.tar')
    extract_path = os.path.join(str(tmpdir), 'output')
    env.pack(out_path, dest_prefix=extract_path if fix_dest else None)

    os.mkdir(extract_path)

    assert os.path.exists(out_path)
    assert tarfile.is_tarfile(out_path)
    # Extract tarfile
    with tarfile.open(out_path, ignore_zeros=True) as fil:
        fil.extractall(extract_path)

    if on_win:
        fnames = ['conda.exe', 'activate.bat']
        # New conda drops deactivate.bat files
        if not fix_dest:
            fnames.append("deactivate.bat")
    else:
        fnames = ['conda', 'activate', 'deactivate']
    # Check conda/activate/deactivate all present
    for fname in fnames:
        fpath = os.path.join(extract_path, BIN_DIR, fname)
        assert os.path.exists(fpath)
        # Make sure we have replaced the activate/deactivate scripts
        # if the dest_prefix was not fixed; make sure we haven't
        # done so if it is.
        if 'activate' in fname:
            with open(fpath) as fp:
                data = fp.read()
            if fix_dest:
                assert 'CONDA_PACK' not in data
            else:
                assert 'CONDA_PACK' in data

    # Check the packaged conda works and recognizes its environment.
    # We need to unset CONDA_PREFIX to simulate unpacking into an environment
    # where conda is not already present.
    if on_win:
        if fix_dest:
            # XXX: Conda windows activatation scripts now seem to assume a base
            # conda install, rather than relative paths. Given that this tool
            # is mostly for deploying code, and usually on servers (not
            # windows), this failure isn't critical but isn't 100% correct.
            # Ideally this test shouldn't need to special case `fix_dest`, and
            # use the same batch commands in both cases.
            commands = (
                rf"@call {extract_path}\condabin\conda activate",
                r"@conda info --json",
                r"@conda deactivate",
            )
        else:
            commands = (
                r"@set CONDA_PREFIX=",
                r"@set CONDA_SHLVL=",
                rf"@call {extract_path}\Scripts\activate",
                r"@conda info --json",
                r"@deactivate",
            )
        script_file = tmpdir.join("unpack.bat")
        cmd = ["cmd", "/c", str(script_file)]

    else:
        commands = (
            "unset CONDA_PREFIX",
            "unset CONDA_SHLVL",
            f". {extract_path}/bin/activate",
            "conda info --json",
            ". deactivate >/dev/null 2>/dev/null",
        )
        script_file = tmpdir.join("unpack.sh")
        cmd = ["/usr/bin/env", "bash", str(script_file)]

    script_file.write('\n'.join(commands))
    out = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode()
    conda_info = json.loads(out)
    extract_path_n = normpath(extract_path)
    # The activated conda must consider the extracted tree its own install
    for var in ('conda_prefix', 'sys.prefix', 'default_prefix', 'root_prefix'):
        assert normpath(conda_info[var]) == extract_path_n
    assert extract_path_n in list(map(normpath, conda_info['envs']))

    # Check the conda-meta directory has been anonymized
    for path in glob(os.path.join(extract_path, 'conda-meta', '*.json')):
        with open(path) as fil:
            data = json.load(fil)

        for field in ["extracted_package_dir", "package_tarball_full_path"]:
            if field in data:
                assert data[field] == ""

        if "link" in data and "source" in data["link"]:
            assert data["link"]["source"] == ""
433 |
434 |
def test_pack_exceptions(py37_env):
    """pack() rejects conflicting or malformed arguments."""
    # prefix and name are mutually exclusive
    with pytest.raises(CondaPackException):
        pack(prefix=py37_path, name="py37")

    # filter actions must be 'include' or 'exclude'
    with pytest.raises(CondaPackException):
        pack(prefix=py37_path, filters=[("exclude", "*.py"), ("foo", "*.pyc")])
443 |
444 |
def test_zip64(tmpdir):
    """Archives over the classic zip member limit need ZIP64 extensions."""
    # Build more than 2**16 members all pointing at one tiny file, so the
    # archive requires ZIP64 without using much disk or RAM.
    source = os.path.join(str(tmpdir), 'source.txt')
    with open(source, 'wb') as f:
        f.write(b'0')

    many_files = [File(source, target='foo%d' % i) for i in range(1 << 16)]
    env = CondaEnv('large', files=many_files)
    out_path = os.path.join(str(tmpdir), 'large.zip')

    # With ZIP64 disabled the pack fails and leaves no partial output
    with pytest.raises(CondaPackException) as excinfo:
        env.pack(output=out_path, zip_64=False)
    assert 'ZIP64' in str(excinfo.value)
    assert not os.path.exists(out_path)

    # Default (ZIP64 enabled) succeeds
    env.pack(output=out_path)
    assert os.path.exists(out_path)
466 |
467 |
def test_force(tmpdir, py37_env):
    """An existing output file is only overwritten when force=True."""
    target = os.path.join(str(tmpdir), "py37.tar")
    # Create an empty placeholder at the target path
    open(target, "wb").close()

    # Refuses to clobber by default
    with pytest.raises(CondaPackException):
        py37_env.pack(output=target)

    # force=True replaces the placeholder with a real archive
    py37_env.pack(output=target, force=True)
    assert tarfile.is_tarfile(target)
479 |
480 |
def test_pack(tmpdir, py37_env):
    """pack() applies filters in order and adds only the unpack machinery."""
    out_path = os.path.join(str(tmpdir), "py37.tar")
    include = os.path.join(SP_37, "conda_pack_test_lib1", "*")
    filters = [("exclude", "*.py"), ("exclude", "*.pyc"), ("include", include)]

    result = pack(prefix=py37_path, output=out_path, filters=filters)

    assert result == out_path
    assert os.path.exists(out_path)
    assert tarfile.is_tarfile(out_path)

    with tarfile.open(out_path) as tar:
        archived = tar.getnames()

    # Applying the same filters through the CondaEnv API gives the
    # expected file set
    expected_env = py37_env.exclude("*.py").exclude("*.pyc").include(include)
    expected = {os.path.normcase(f.target) for f in expected_env.files}
    extras = {os.path.normcase(p) for p in archived}.difference(expected)

    # The only additions are the conda-unpack and (de)activate scripts
    if on_win:
        script_names = ('conda-unpack.exe', 'conda-unpack-script.py',
                        'activate.bat', 'deactivate.bat')
    else:
        script_names = ('conda-unpack', 'activate', 'deactivate')
    assert extras == {os.path.join(BIN_DIR_L, f) for f in script_names}
514 |
515 |
def _test_dest_prefix(src_prefix, dest_prefix, arcroot, out_path, format):
    """Assert every prefix embedded in the archive was rewritten.

    For a handful of known prefix-bearing scripts, checks that the source
    environment prefix no longer appears in the archived copy and the
    destination prefix does.  Also asserts no conda-unpack script was
    bundled, and that all members live under *arcroot* when one is given.

    NOTE(review): the *format* argument is accepted but never used;
    callers pass "r" / "r:gz" as if it were a tarfile mode — confirm
    before relying on it.
    """
    if on_win:
        test_files = ['Scripts/conda-pack-test-lib1',
                      'Scripts/pytest.exe']
    else:
        test_files = ['bin/conda-pack-test-lib1',
                      'bin/pytest',
                      'bin/clear']

    # On Windows, compare against both exact and lower-cased byte forms,
    # since path case may differ after rewriting
    orig_bytes = src_prefix.encode()
    orig_bytes_l = src_prefix.lower().encode() if on_win else orig_bytes
    new_bytes = dest_prefix.encode()
    new_bytes_l = dest_prefix.lower().encode() if on_win else new_bytes

    # all paths, including shebangs, are rewritten using the prefix
    with tarfile.open(out_path) as fil:
        for path in fil.getnames():
            # With a fixed dest_prefix no unpack step should be needed
            assert os.path.basename(path) != "conda-unpack", path
            if arcroot:
                assert path.startswith(arcroot), path
        for test_file in test_files:
            orig_path = os.path.join(src_prefix, test_file)
            dest_path = os.path.join(arcroot, test_file)
            with open(orig_path, 'rb') as fil2:
                orig_data = fil2.read()
            # Only files that embedded the source prefix need checking
            if orig_bytes in orig_data:
                data = fil.extractfile(dest_path).read()
                assert orig_bytes not in data and orig_bytes_l not in data, test_file
                assert new_bytes in data or new_bytes_l in data, test_file
545 |
546 |
def test_dest_prefix(tmpdir, py37_env):
    """Packing with dest_prefix rewrites embedded prefixes to the target."""
    out_path = os.path.join(str(tmpdir), "py37.tar")
    destination = r"c:\foo\bar\baz\biz" if on_win else "/foo/bar/baz/biz"

    result = pack(prefix=py37_path, dest_prefix=destination, output=out_path)

    assert result == out_path
    assert os.path.exists(out_path)
    assert tarfile.is_tarfile(out_path)

    # Shared helper checks the byte-level prefix substitution
    _test_dest_prefix(py37_env.prefix, destination, "", out_path, "r")
557 |
558 |
def test_parcel(tmpdir, py37_env):
    """Pack a parcel and verify its layout, metadata, and prefix rewriting.

    Parcels are written into the current directory with a name derived
    from the env name and parcel_version, so the test chdirs into tmpdir
    for the pack call.
    """
    if on_win:
        pytest.skip("Not parcel tests on Windows")
    arcroot = "py37-1234.56"

    out_path = os.path.join(str(tmpdir), arcroot + "-el7.parcel")

    # pack() writes the parcel relative to the working directory; restore
    # the original cwd even if packing fails
    pdir = os.getcwd()
    try:
        os.chdir(str(tmpdir))
        res = pack(prefix=py37_path, format="parcel", parcel_version="1234.56")
    finally:
        os.chdir(pdir)

    assert os.path.join(str(tmpdir), res) == out_path
    assert os.path.exists(out_path)

    # Verify that only the parcel files were added
    with tarfile.open(out_path, "r:gz") as fil:
        paths = fil.getnames()
    sol = {os.path.join(arcroot, f.target) for f in py37_env.files}
    diff = set(paths).difference(sol)
    fnames = ("conda_env.sh", "parcel.json")
    assert diff == {os.path.join(arcroot, "meta", f) for f in fnames}

    # Verify correct metadata in parcel.json
    with tarfile.open(out_path) as fil:
        fpath = os.path.join(arcroot, "meta", "parcel.json")
        data = fil.extractfile(fpath).read()
    data = json.loads(data)
    assert data["name"] == "py37" and data["components"][0]["name"] == "py37"
    assert (
        data["version"] == "1234.56" and data["components"][0]["version"] == "1234.56"
    )

    # Verify the correct dest_prefix substitution: parcels are always
    # deployed under /opt/cloudera/parcels/<arcroot>
    dest = os.path.join("/opt/cloudera/parcels", arcroot)
    _test_dest_prefix(py37_env.prefix, dest, arcroot, out_path, "r:gz")
597 |
598 |
def test_activate(tmpdir):
    """Custom activation hooks run on activate and are undone on deactivate.

    Packs an environment whose packages ship extra activation scripts
    (presumably via activate.d/deactivate.d — they set and unset
    CONDAPACK_ACTIVATED) and checks the variable's lifecycle across an
    activate/deactivate round trip.
    """
    out_path = os.path.join(str(tmpdir), 'activate_scripts.tar')
    extract_path = str(tmpdir.join('env'))

    env = CondaEnv.from_prefix(activate_scripts_path)
    env.pack(out_path)

    with tarfile.open(out_path) as fil:
        fil.extractall(extract_path)

    # Check that activate environment variable is set
    if on_win:
        command = (r"@CALL {path}\Scripts\activate" "\r\n"
                   r"@ECHO CONDAPACK_ACTIVATED=%CONDAPACK_ACTIVATED%" "\r\n"
                   r"@CALL {path}\Scripts\deactivate" "\r\n"
                   r"@ECHO CONDAPACK_ACTIVATED=%CONDAPACK_ACTIVATED%" "\r\n"
                   r"@echo Done").format(path=extract_path)
        unpack = tmpdir.join('unpack.bat')
        unpack.write(command)

        out = subprocess.check_output(['cmd', '/c', str(unpack)],
                                      stderr=subprocess.STDOUT).decode()

        # Set to 1 while active, empty again after deactivation
        assert out == 'CONDAPACK_ACTIVATED=1\r\nCONDAPACK_ACTIVATED=\r\nDone\r\n'

    else:
        command = (". {path}/bin/activate && "
                   "test $CONDAPACK_ACTIVATED -eq 1 && "
                   ". {path}/bin/deactivate && "
                   "test ! $CONDAPACK_ACTIVATED && "
                   "echo 'Done'").format(path=extract_path)

        out = subprocess.check_output(['/usr/bin/env', 'bash', '-c', command],
                                      stderr=subprocess.STDOUT).decode()

        assert out == 'Done\n'
635 |
--------------------------------------------------------------------------------
/conda_pack/_version.py:
--------------------------------------------------------------------------------
1 |
2 | # This file helps to compute a version number in source trees obtained from
3 | # git-archive tarball (such as those provided by githubs download-from-tag
4 | # feature). Distribution tarballs (built by setup.py sdist) and build
5 | # directories (produced by setup.py build) will contain a much shorter file
6 | # that just contains the computed version number.
7 |
8 | # This file is released into the public domain. Generated by
9 | # versioneer-0.21 (https://github.com/python-versioneer/python-versioneer)
10 |
11 | """Git implementation of _version.py."""
12 |
13 | import errno
14 | import os
15 | import re
16 | import subprocess
17 | import sys
18 | from typing import Callable, Dict
19 |
20 |
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # These values are substituted by git during git-archive.
    # setup.py/versioneer.py grep for the variable names, so each
    # assignment must stay on a line of its own.
    git_refnames = " (HEAD -> main)"
    git_full = "b00e8faec05833a3eaa82aced1377755132930cf"
    git_date = "2024-03-04 15:08:38 +0100"
    return {
        "refnames": git_refnames,
        "full": git_full,
        "date": git_date,
    }
32 |
33 |
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    A plain attribute namespace; fields (VCS, style, tag_prefix, ...)
    are assigned dynamically by get_config().
    """
36 |
37 |
def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # These settings are filled in when 'setup.py versioneer' creates
    # _version.py.
    cfg = VersioneerConfig()
    for attr, value in (
        ("VCS", "git"),
        ("style", "pep440"),
        ("tag_prefix", ""),
        ("parentdir_prefix", "conda-pack-"),
        ("versionfile_source", "conda_pack/_version.py"),
        ("verbose", False),
    ):
        setattr(cfg, attr, value)
    return cfg
50 |
51 |
# Raised internally to signal "this discovery strategy doesn't apply;
# try the next one".
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""
54 |
55 |
# Template store for generated _version.py files (unused in this copy).
LONG_VERSION_PY: Dict[str, str] = {}
# Registry of VCS handler callables, populated via @register_vcs_handler.
HANDLERS: Dict[str, Dict[str, Callable]] = {}
58 |
59 |
def register_vcs_handler(vcs, method):  # decorator
    """Create decorator to mark a method as the handler of a VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
69 |
70 |
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s).

    Tries each name in *commands* until one can be spawned.  Returns a
    (stdout, returncode) pair; (None, None) when no command could be
    launched, and (None, returncode) on a non-zero exit.
    """
    assert isinstance(commands, list)
    process = None
    dispcmd = None
    for command in commands:
        dispcmd = str([command] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [command] + args, cwd=cwd, env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None))
            break
        except OSError as e:
            if e.errno == errno.ENOENT:
                # Command not found: fall through to the next candidate
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    if process is None:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
104 |
105 |
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.  Checks *root* and up to
    two parent levels for a directory named <parentdir_prefix><version>.
    """
    tried = []
    for _ in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {
                "version": dirname[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        tried.append(root)
        root = os.path.dirname(root)  # move up one level
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
128 |
129 |
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Parses the git_refnames/git_full/git_date assignments out of a
    _version.py using a regexp rather than importing it, so setup.py can
    call this without executing the module.  Returns whatever keywords
    were found (possibly an empty dict).
    """
    prefixes = {"git_refnames =": "refnames",
                "git_full =": "full",
                "git_date =": "date"}
    keywords = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                for prefix, key in prefixes.items():
                    if stripped.startswith(prefix):
                        mo = re.search(r'=\s*"(.*)"', line)
                        if mo:
                            keywords[key] = mo.group(1)
    except OSError:
        # Missing or unreadable file: return what little we collected
        pass
    return keywords
156 |
157 |
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if "refnames" not in keywords:
        raise NotThisMethod("Short version file found")

    date = keywords.get("date")
    if date is not None:
        # Use only the last line. Previous lines may contain GPG signature
        # information.
        date = date.splitlines()[-1]
        # git's %ci output is only "ISO-8601-like"; rewrite it into a
        # compliant form.  (%cI would be compliant, but only exists in
        # git >= 2.2.0, and detecting the git version isn't worth it.)
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")

    refs = {ref.strip() for ref in refnames.strip("()").split(",")}
    # git >= 1.8.3 lists tags as "tag: foo-1.0"; prefer those when present.
    TAG = "tag: "
    tags = {ref[len(TAG):] for ref in refs if ref.startswith(TAG)}
    if not tags:
        # Older git (or genuinely no tags).  Heuristic: keep only refnames
        # containing a digit, filtering out branch names like "release",
        # "stabilization", "HEAD" and "master" that the old %d expansion
        # mixes in.
        tags = {ref for ref in refs if re.search(r'\d', ref)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))

    for ref in sorted(tags):
        # sorted order prefers e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        candidate = ref[len(tag_prefix):]
        # Skip refs that exactly match the prefix or don't continue with
        # a digit (mostly a concern when tag_prefix is '')
        if not re.match(r'\d', candidate):
            continue
        if verbose:
            print("picking %s" % candidate)
        return {"version": candidate,
                "full-revisionid": keywords["full"].strip(),
                "dirty": False, "error": None,
                "date": date}

    # No suitable tag: report "0+unknown" but keep the full revision id
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
220 |
221 |
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys long, short, branch, closest-tag,
    distance, dirty, date, and error (None on success).  Raises
    NotThisMethod when *root* is not under git control or git cannot run.
    """
    GITS = ["git"]
    TAG_PREFIX_REGEX = "*"
    if sys.platform == "win32":
        # shell=False, so both the .cmd shim and the bare .exe are tried
        GITS = ["git.cmd", "git.exe"]
        TAG_PREFIX_REGEX = r"\*"

    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
                   hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty",
                                     "--always", "--long",
                                     "--match",
                                     "%s%s" % (tag_prefix, TAG_PREFIX_REGEX)],
                              cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
                             cwd=root)
    # --abbrev-ref was added in git-1.6.3
    if rc != 0 or branch_name is None:
        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
    branch_name = branch_name.strip()

    if branch_name == "HEAD":
        # If we aren't exactly on a branch, pick a branch which represents
        # the current commit. If all else fails, we are on a branchless
        # commit.
        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
        # --contains was added in git-1.5.4
        if rc != 0 or branches is None:
            raise NotThisMethod("'git branch --contains' returned error")
        branches = branches.split("\n")

        # Remove the first line if we're running detached
        if "(" in branches[0]:
            branches.pop(0)

        # Strip off the leading "* " from the list of branches.
        branches = [branch[2:] for branch in branches]
        if "master" in branches:
            branch_name = "master"
        elif not branches:
            branch_name = None
        else:
            # Pick the first branch that is returned. Good or bad.
            branch_name = branches[0]

    pieces["branch"] = branch_name

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparsable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    # NOTE(review): assumes this 'git show' call succeeds; a None result
    # would raise AttributeError here rather than NotThisMethod — confirm
    # this is acceptable upstream behavior.
    date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
    # Use only the last line. Previous lines may contain GPG signature
    # information.
    date = date.splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
349 |
350 |
def plus_or_dot(pieces):
    """Return a "+" if we don't already have one, else return a ".".

    Picks the separator for appending to the local version segment:
    "+" starts it, "." extends it.  Fixed to tolerate "closest-tag"
    being present but None (as set when no tag is found), which
    previously raised ``TypeError: argument of type 'NoneType'`` —
    dict.get's default only applies when the key is absent.
    """
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
356 |
357 |
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]].  Note that if you get a tagged
    build and then dirty it, you'll get TAG+0.gHEX.dirty.

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: never tagged
        rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered

    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        # "." if the tag already carries a local segment, "+" to start one
        rendered += "." if "+" in tag else "+"
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
381 |
382 |
def render_pep440_branch(pieces):
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch. Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            # Feature branches get a .dev0 marker before the local segment.
            if pieces["branch"] != "master":
                version += ".dev0"
            version += plus_or_dot(pieces)
            version += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                version += ".dirty"
        return version
    # exception #1: no tag anywhere in history
    version = "0"
    if pieces["branch"] != "master":
        version += ".dev0"
    version += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
    if pieces["dirty"]:
        version += ".dirty"
    return version
411 |
412 |
def pep440_split_post(ver):
    """Split a pep440 version string at its post-release segment.

    Returns a tuple ``(release, post)``: the release segments before the
    post-release, and the post-release number as an int (0 when ".post"
    has no trailing digits), or None when no post-release segment is
    present.
    """
    chunks = ver.split(".post")
    if len(chunks) != 2:
        # No (or an ambiguous, repeated) ".post" segment.
        return chunks[0], None
    return chunks[0], int(chunks[1] or 0)
421 |
422 |
def render_pep440_pre(pieces):
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag anywhere in history
        return "0.post0.dev%d" % pieces["distance"]
    if not pieces["distance"]:
        # Sitting exactly on the tag: the tag is the version.
        return tag
    # Bump an existing post-release segment (or start one) and append the
    # dev counter for the commits since the tag.
    base, post = pep440_split_post(tag)
    if post is None:
        return base + ".post0.dev%d" % (pieces["distance"])
    return base + ".post%d.dev%d" % (post + 1, pieces["distance"])
445 |
446 |
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        if not (pieces["distance"] or pieces["dirty"]):
            # Exactly on the tag, clean tree: the tag is the version.
            return tag
        version = tag + ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            version += ".dev0"
        version += plus_or_dot(pieces)
        return version + "g%s" % pieces["short"]
    # exception #1: no tag anywhere in history
    version = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        version += ".dev0"
    return version + "+g%s" % pieces["short"]
472 |
473 |
def render_pep440_post_branch(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            version += ".post%d" % pieces["distance"]
            # Feature branches get a .dev0 marker before the local segment.
            if pieces["branch"] != "master":
                version += ".dev0"
            version += plus_or_dot(pieces)
            version += "g%s" % pieces["short"]
            if pieces["dirty"]:
                version += ".dirty"
        return version
    # exception #1: no tag anywhere in history
    version = "0.post%d" % pieces["distance"]
    if pieces["branch"] != "master":
        version += ".dev0"
    version += "+g%s" % pieces["short"]
    if pieces["dirty"]:
        version += ".dirty"
    return version
501 |
502 |
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        if not (pieces["distance"] or pieces["dirty"]):
            # Exactly on the tag, clean tree: the tag is the version.
            return tag
        version = tag + ".post%d" % pieces["distance"]
    else:
        # exception #1: no tag anywhere in history
        version = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        version += ".dev0"
    return version
523 |
524 |
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        # Distance is only shown when we are past the tag.
        if pieces["distance"]:
            version += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1: bare short hash, no 'g' prefix
        version = pieces["short"]
    return version + ("-dirty" if pieces["dirty"] else "")
543 |
544 |
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        # Unlike render_git_describe, always include distance and hash.
        version = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: bare short hash, no 'g' prefix
        version = pieces["short"]
    return version + ("-dirty" if pieces["dirty"] else "")
563 |
564 |
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # Extraction failed upstream; surface the error, not a bogus version.
        return {
            "version": "unknown",
            "full-revisionid": pieces.get("long"),
            "dirty": None,
            "error": pieces["error"],
            "date": None,
        }

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        version = render_pep440(pieces)
    elif style == "pep440-branch":
        version = render_pep440_branch(pieces)
    elif style == "pep440-pre":
        version = render_pep440_pre(pieces)
    elif style == "pep440-post":
        version = render_pep440_post(pieces)
    elif style == "pep440-post-branch":
        version = render_pep440_post_branch(pieces)
    elif style == "pep440-old":
        version = render_pep440_old(pieces)
    elif style == "git-describe":
        version = render_git_describe(pieces)
    elif style == "git-describe-long":
        version = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {
        "version": version,
        "full-revisionid": pieces["long"],
        "dirty": pieces["dirty"],
        "error": None,
        "date": pieces.get("date"),
    }
599 |
600 |
def get_versions():
    """Get version information or return default if unable to do so."""
    # This file lives at ROOT/VERSIONFILE_SOURCE.  With __file__ available we
    # can walk back up to the source-tree root; some py2exe/bbfreeze/
    # non-CPython environments lack __file__, in which case only the
    # expanded-keyword strategy can work.
    cfg = get_config()
    verbose = cfg.verbose

    # Strategy 1: keywords substituted into this file by `git archive`.
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    # Locate the source-tree root by stripping one directory level per
    # path component of versionfile_source.
    try:
        root = os.path.realpath(__file__)
        for _ in cfg.versionfile_source.split("/"):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    # Strategy 2: ask git directly.
    try:
        return render(git_pieces_from_vcs(cfg.tag_prefix, root, verbose),
                      cfg.style)
    except NotThisMethod:
        pass

    # Strategy 3: parse the version out of the parent directory's name.
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    # Every strategy failed.
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
645 |
--------------------------------------------------------------------------------