├── .codecov.yml ├── .coveragerc ├── .dockerignore ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── feature_request.md │ └── support-question.md ├── dependabot.yaml └── workflows │ ├── docker.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── .gitpod.yml ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CHANGES.rst ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── RELEASE.md ├── ROADMAP.md ├── dev-requirements.txt ├── docker-compose.test.yml ├── docker ├── entrypoint └── git-credential-env ├── docs ├── Makefile ├── issue_template.md ├── make.bat ├── pull_request_template.md ├── requirements.txt └── source │ ├── _static │ ├── custom.css │ └── images │ │ ├── favicon.ico │ │ ├── logo.png │ │ ├── repo2docker.png │ │ ├── repo2docker.sketch │ │ └── repo2docker.svg │ ├── architecture.md │ ├── changelog.md │ ├── conf.py │ ├── config_files.rst │ ├── configuration │ └── index.rst │ ├── contributing │ ├── buildpack.md │ ├── contentprovider.rst │ ├── contributing.md │ ├── index.rst │ ├── roadmap.md │ └── tasks.md │ ├── design.md │ ├── faq.rst │ ├── getting-started │ └── index.rst │ ├── howto │ ├── base_image.md │ ├── breaking_changes.md │ ├── deploy.rst │ ├── export_environment.rst │ ├── index.rst │ ├── jupyterhub_images.rst │ ├── lab_workspaces.rst │ ├── languages.rst │ └── user_interface.rst │ ├── index.rst │ ├── install.rst │ ├── specification.rst │ └── usage.rst ├── playwright-requirements.txt ├── repo2docker ├── __init__.py ├── __main__.py ├── _version.py ├── app.py ├── buildpacks │ ├── __init__.py │ ├── _r_base.py │ ├── base.py │ ├── conda │ │ ├── __init__.py │ │ ├── activate-conda.sh │ │ ├── environment.py-2.7-linux-64.lock │ │ ├── environment.py-2.7.yml │ │ ├── environment.py-3.10-linux-64.lock │ │ ├── environment.py-3.10-linux-aarch64.lock │ │ ├── environment.py-3.10.yml │ │ ├── environment.py-3.11-linux-64.lock │ │ ├── environment.py-3.11-linux-aarch64.lock │ │ ├── environment.py-3.11.lock │ │ ├── environment.py-3.11.yml │ │ ├── environment.py-3.12-linux-64.lock │ │ ├── environment.py-3.12-linux-aarch64.lock │ │ ├── environment.py-3.12.yml │ │ ├── environment.py-3.5-linux-64.lock │ │ ├── environment.py-3.5.yml │ │ ├── environment.py-3.6-linux-64.lock │ │ ├── environment.py-3.6.yml │ │ ├── environment.py-3.7-linux-64.lock │ │ ├── environment.py-3.7-linux-aarch64.lock │ │ ├── environment.py-3.7.yml │ │ ├── environment.py-3.8-linux-64.lock │ │ ├── environment.py-3.8-linux-aarch64.lock │ │ ├── environment.py-3.8.yml │ │ ├── environment.py-3.9-linux-64.lock │ │ ├── environment.py-3.9-linux-aarch64.lock │ │ ├── environment.py-3.9.yml │ │ ├── environment.yml │ │ ├── freeze.py │ │ └── install-base-env.bash │ ├── docker.py │ ├── julia │ │ ├── __init__.py │ │ ├── julia_project.py │ │ └── julia_require.py │ ├── legacy │ │ └── __init__.py │ ├── nix │ │ ├── __init__.py │ │ ├── install-nix.bash │ │ └── nix-shell-wrapper │ ├── pipfile │ │ └── __init__.py │ ├── python │ │ └── __init__.py │ ├── python3-login │ ├── r.py │ └── repo2docker-entrypoint ├── contentproviders │ ├── __init__.py │ ├── base.py │ ├── ckan.py │ ├── dataverse.json │ ├── dataverse.py │ ├── doi.py │ ├── figshare.py │ ├── git.py │ ├── hydroshare.py │ ├── mercurial.py │ ├── swhid.py │ └── zenodo.py ├── docker.py ├── engine.py ├── semver.py └── utils.py ├── setup.cfg ├── setup.py ├── tests ├── base │ └── node │ │ ├── README.md │ │ └── verify ├── check-tmp ├── conda │ ├── README.md │ ├── downgrade │ │ ├── environment.yml │ │ └── verify │ ├── py-r-postbuild-file │ │ 
├── environment.yml │ │ ├── postBuild │ │ ├── verify │ │ └── verify.py │ ├── py2 │ │ ├── environment.yml │ │ └── verify │ ├── py310-requirements-file │ │ ├── environment.yml │ │ ├── requirements.txt │ │ └── verify │ ├── py312 │ │ ├── environment.yml │ │ └── verify │ ├── py35-binder-dir │ │ ├── .binder │ │ │ └── environment.yml │ │ ├── Dockerfile │ │ ├── environment.yml │ │ └── verify │ ├── py36-postBuild │ │ ├── environment.yml │ │ ├── postBuild │ │ ├── verify │ │ └── verify.py │ └── r3.6-target-repo-dir-flag │ │ ├── environment.yml │ │ ├── test-extra-args.yaml │ │ ├── verify │ │ └── verify.py ├── conftest.py ├── contentproviders │ └── test_dataverse.py ├── dockerfile │ ├── binder-dir │ │ ├── Dockerfile │ │ ├── binder │ │ │ └── Dockerfile │ │ ├── sayhi.sh │ │ └── verify │ ├── editable │ │ ├── Dockerfile │ │ ├── README.rst │ │ └── change.sh │ ├── jupyter-stack │ │ ├── Dockerfile │ │ ├── README.rst │ │ └── verify │ └── simple │ │ ├── Dockerfile │ │ ├── README.rst │ │ ├── sayhi.sh │ │ └── verify ├── external │ └── reproductions.repos.yaml ├── julia │ ├── README.md │ ├── project-binder-dir │ │ ├── .binder │ │ │ └── Project.toml │ │ ├── Project.toml │ │ └── verify │ └── project │ │ ├── Project.toml │ │ └── verify ├── nix │ ├── binder-dir │ │ ├── README.rst │ │ ├── binder │ │ │ └── default.nix │ │ └── verify │ ├── ignore-outside │ │ ├── README.rst │ │ ├── binder │ │ │ └── default.nix │ │ ├── default.nix │ │ ├── start │ │ └── verify │ ├── simple │ │ ├── README.rst │ │ ├── default.nix │ │ └── verify │ ├── start │ │ ├── README.rst │ │ ├── default.nix │ │ ├── start │ │ └── verify │ └── test-building │ │ ├── README.rst │ │ ├── default.nix │ │ └── verify ├── norun │ ├── .gitignore │ ├── Dockerfile │ ├── test_find.py │ └── test_registry.py ├── pipfile │ ├── binder-folder-lock │ │ ├── Pipfile │ │ ├── Pipfile.lock │ │ ├── README.rst │ │ ├── binder │ │ │ ├── Pipfile │ │ │ └── Pipfile.lock │ │ └── verify │ ├── binder-folder │ │ ├── Pipfile │ │ ├── README.rst │ │ ├── binder │ │ │ └── Pipfile │ │ └── verify │ ├── environment-yml │ │ ├── Pipfile │ │ ├── README.rst │ │ ├── environment.yml │ │ └── verify │ ├── pipfile-lock │ │ ├── Pipfile │ │ ├── Pipfile.lock │ │ ├── README.rst │ │ └── verify │ ├── py2-with-server-and-kernel-req │ │ ├── Pipfile │ │ ├── Pipfile.lock │ │ ├── README.rst │ │ ├── requirements3.txt │ │ └── verify │ ├── py36 │ │ ├── Pipfile │ │ ├── Pipfile.lock │ │ ├── README.rst │ │ ├── runtime.txt │ │ └── verify │ ├── requirements-txt │ │ ├── Pipfile │ │ ├── README.rst │ │ ├── requirements.txt │ │ └── verify │ ├── setup-py-explicit-in-binder-dir │ │ ├── .binder │ │ │ └── Pipfile │ │ ├── README.rst │ │ ├── dummy │ │ │ └── __init__.py │ │ ├── setup.py │ │ └── verify │ ├── setup-py-explicit │ │ ├── Pipfile │ │ ├── README.rst │ │ ├── dummy │ │ │ └── __init__.py │ │ ├── setup.py │ │ └── verify │ └── setup-py-implicit │ │ ├── Pipfile │ │ ├── README.rst │ │ ├── dummy │ │ └── __init__.py │ │ ├── setup.py │ │ └── verify ├── r │ ├── README.md │ ├── r-rspm-apt-file │ │ ├── apt.txt │ │ ├── install.R │ │ ├── runtime.txt │ │ ├── verify │ │ ├── verify.r │ │ └── verify.sh │ ├── r-rspm-description-file │ │ ├── DESCRIPTION │ │ ├── NAMESPACE │ │ ├── R │ │ │ └── print_something.R │ │ └── verify │ ├── r3.6-rspm │ │ ├── install.R │ │ ├── runtime.txt │ │ └── verify │ ├── r4.3.2-rspm │ │ ├── install.R │ │ ├── runtime.txt │ │ └── verify │ └── r4.4-rspm │ │ ├── install.R │ │ ├── runtime.txt │ │ └── verify ├── ui │ ├── README.md │ └── browser │ │ ├── environment.yml │ │ ├── external-verify │ │ ├── external-verify.py │ │ ├── 
test-extra-args.yaml │ │ └── verify ├── unit │ ├── contentproviders │ │ ├── test_ckan.py │ │ ├── test_doi.py │ │ ├── test_figshare.py │ │ ├── test_git.py │ │ ├── test_hydroshare.py │ │ ├── test_local.py │ │ ├── test_mercurial.py │ │ ├── test_swhid.py │ │ └── test_zenodo.py │ ├── test_app.py │ ├── test_args.py │ ├── test_argumentvalidation.py │ ├── test_binder_dir.py │ ├── test_buildpack.py │ ├── test_cache_from.py │ ├── test_clone_depth.py │ ├── test_connect_url.py │ ├── test_docker.py │ ├── test_editable.py │ ├── test_engine.py │ ├── test_env.py │ ├── test_env_yml.py │ ├── test_external_scripts.py │ ├── test_freeze.py │ ├── test_labels.py │ ├── test_memlimit.py │ ├── test_ports.py │ ├── test_preassemble.py │ ├── test_r.py │ ├── test_semver.py │ ├── test_subdir.py │ ├── test_users.py │ ├── test_utils.py │ └── test_volumes.py └── venv │ ├── apt-packages │ ├── README.rst │ ├── apt.txt │ └── verify │ ├── binder-dir │ ├── .containerignore │ ├── .dockerignore │ ├── README.rst │ ├── apt.txt │ ├── binder │ │ ├── apt.txt │ │ ├── postBuild │ │ └── requirements.txt │ ├── environment.yml │ ├── postBuild │ └── verify │ ├── default │ └── verify │ ├── numpy │ ├── README.rst │ ├── requirements.txt │ └── verify │ ├── postBuild │ ├── README.rst │ ├── postBuild │ ├── requirements.txt │ └── verify │ ├── py2-with-kernel-requirements │ ├── requirements.txt │ ├── requirements3.txt │ ├── runtime.txt │ └── verify │ ├── py2 │ ├── requirements.txt │ ├── runtime.txt │ └── verify │ ├── py3 │ ├── runtime.txt │ └── verify │ ├── py35 │ ├── runtime.txt │ └── verify │ ├── start │ ├── postBuild │ │ ├── README.rst │ │ ├── postBuild │ │ ├── requirements.txt │ │ ├── start │ │ └── verify │ └── start-script │ │ ├── README.rst │ │ ├── start │ │ └── verify │ └── usr-bin │ └── verify └── versioneer.py /.codecov.yml: -------------------------------------------------------------------------------- 1 | # show coverage in CI status, not as a comment. 2 | comment: off 3 | fixes: 4 | - "*/site-packages/::" 5 | coverage: 6 | status: 7 | project: 8 | default: 9 | target: "0%" 10 | patch: 11 | default: 12 | target: "0%" 13 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | # this file comes from versioneer and we don't test it 3 | omit = */_version.py 4 | 5 | [paths] 6 | # This tells coverage how to combine results together or said differently 7 | # which files at different paths are actually the same file 8 | # documented at https://coverage.readthedocs.io/en/latest/config.html#paths 9 | # Yes, we list repo2docker twice here. 
This allows you to install repo2docker 10 | # with `pip install -e .` for local development and from the wheel (as done on 11 | # CI) and get `repo2docker/foo.py` as paths in the coverage report 12 | source = 13 | repo2docker 14 | repo2docker 15 | ../repo2docker 16 | */site-packages/repo2docker 17 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .#* 2 | dist 3 | build 4 | *.tar.gz 5 | *.egg-info 6 | *.py[co] 7 | __pycache__ 8 | MANIFEST 9 | 10 | .DS_Store 11 | .cache 12 | 13 | repo2docker/s2i 14 | hooks 15 | htmlcov 16 | 17 | 18 | ^bin/ 19 | lib/ 20 | lib64/ 21 | share/ 22 | include/ 23 | 24 | # Docs 25 | generated/ 26 | test_file_text.txt 27 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | repo2docker/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us repair something that is currently broken 4 | title: "" 5 | labels: "" 6 | assignees: "" 7 | --- 8 | 9 | 10 | 11 | ### Bug description 12 | 13 | 14 | 15 | #### Expected behaviour 16 | 17 | 18 | 19 | #### Actual behaviour 20 | 21 | 22 | 23 | ### How to reproduce 24 | 25 | 26 | 27 | 1. Go to '...' 28 | 2. Click on '....' 29 | 3. Scroll down to '....' 30 | 4. See error 31 | 32 | ### Your personal set up 33 | 34 | 35 | 36 | - OS: [e.g. linux, OSX] 37 | - Docker version: `docker version` 38 | - repo2docker version: `repo2docker --version` 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest a new feature or a big change to repo2docker 4 | title: "" 5 | labels: "needs: discussion" 6 | assignees: "" 7 | --- 8 | 9 | 10 | 11 | ### Proposed change 12 | 13 | 14 | 15 | ### Alternative options 16 | 17 | 18 | 19 | ### Who would use this feature? 20 | 21 | 22 | 23 | ### How much effort will adding it take? 24 | 25 | 26 | 27 | ### Who can do this work? 28 | 29 | 30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/support-question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Support question 3 | about: Ask a question about using repo2docker 4 | title: "" 5 | labels: "" 6 | assignees: "" 7 | --- 8 | 9 | 🚨 Please do **not** open an issue for support questions. Instead please search for similar issues or post on http://discourse.jupyter.org/c/questions. 🚨 10 | 11 | More people read the forum than this issue tracker; it is indexed by search engines and easier for others to discover. 
12 | 13 | For more details: https://discourse.jupyter.org/t/a-proposal-for-jupyterhub-communications/505 14 | -------------------------------------------------------------------------------- /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | # dependabot.yaml reference: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file 2 | # 3 | # Notes: 4 | # - Status and logs from dependabot are provided at 5 | # https://github.com/jupyterhub/repo2docker/network/updates. 6 | # - YAML anchors are not supported here or in GitHub Workflows. 7 | # 8 | version: 2 9 | updates: 10 | # Maintain dependencies in our GitHub Workflows 11 | - package-ecosystem: github-actions 12 | directory: / 13 | labels: [ci] 14 | schedule: 15 | interval: monthly 16 | time: "05:00" 17 | timezone: Etc/UTC 18 | -------------------------------------------------------------------------------- /.github/workflows/docker.yml: -------------------------------------------------------------------------------- 1 | # This is a GitHub workflow defining a set of jobs with a set of steps. 2 | # ref: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions 3 | # 4 | name: Docker build 5 | 6 | on: 7 | pull_request: 8 | paths-ignore: 9 | - "docs/**" 10 | - "**.md" 11 | - "**.rst" 12 | - ".github/workflows/*" 13 | - "!.github/workflows/docker.yml" 14 | - ".pre-commit-config.yaml" 15 | push: 16 | paths-ignore: 17 | - "docs/**" 18 | - "**.md" 19 | - "**.rst" 20 | - ".github/workflows/*" 21 | - "!.github/workflows/docker.yml" 22 | - ".pre-commit-config.yaml" 23 | branches-ignore: 24 | - "dependabot/**" 25 | - "pre-commit-ci-update-config" 26 | tags: 27 | - "**" 28 | workflow_dispatch: 29 | 30 | jobs: 31 | docker: 32 | runs-on: ubuntu-24.04 33 | steps: 34 | - name: Checkout code 35 | uses: actions/checkout@v4 36 | with: 37 | fetch-depth: 0 38 | 39 | # https://github.com/docker/build-push-action 40 | - name: Build Docker image 41 | uses: docker/build-push-action@v6 42 | with: 43 | context: . 44 | tags: jupyter/repo2docker:pr 45 | push: false 46 | 47 | - name: Run repo2docker Docker image 48 | run: docker run jupyter/repo2docker:pr repo2docker --version 49 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | # This is a GitHub workflow defining a set of jobs with a set of steps. 
2 | # ref: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions 3 | # 4 | name: Test 5 | 6 | on: 7 | pull_request: 8 | paths-ignore: 9 | - "docs/**" 10 | - "**.md" 11 | - "**.rst" 12 | - ".github/workflows/*" 13 | - "!.github/workflows/test.yml" 14 | - ".pre-commit-config.yaml" 15 | push: 16 | paths-ignore: 17 | - "docs/**" 18 | - "**.md" 19 | - "**.rst" 20 | - ".github/workflows/*" 21 | - "!.github/workflows/test.yml" 22 | - ".pre-commit-config.yaml" 23 | branches-ignore: 24 | - "dependabot/**" 25 | - "pre-commit-ci-update-config" 26 | tags: 27 | - "**" 28 | schedule: 29 | # Run weekly test so we know if tests break for external reasons 30 | # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#scheduled-events 31 | # 32 | # At 10:36 on Sunday (https://crontab.guru) 33 | - cron: '36 10 * * 0' 34 | workflow_dispatch: 35 | 36 | # Global environment variables 37 | env: 38 | GIT_COMMITTER_EMAIL: ci-user@github.local 39 | GIT_COMMITTER_NAME: CI User 40 | GIT_AUTHOR_EMAIL: ci-user@github.local 41 | GIT_AUTHOR_NAME: CI User 42 | 43 | 44 | jobs: 45 | test: 46 | # Don't run scheduled tests on forks 47 | if: ${{ github.repository_owner == 'jupyterhub' || github.event_name != 'schedule' }} 48 | 49 | runs-on: ubuntu-${{ matrix.ubuntu_version }} 50 | 51 | strategy: 52 | fail-fast: false 53 | matrix: 54 | ubuntu_version: ["24.04"] 55 | python_version: ["3.13"] 56 | repo_type: 57 | - base 58 | - conda 59 | - dockerfile 60 | - external 61 | - julia 62 | - nix 63 | - pipfile 64 | - r 65 | - unit 66 | - venv 67 | - contentproviders 68 | - norun 69 | # Playwright test 70 | - ui 71 | include: 72 | # The earliest actions/setup-python versions depend on the runner. 73 | - ubuntu_version: "22.04" 74 | python_version: "3.9" 75 | repo_type: venv 76 | 77 | steps: 78 | - uses: actions/checkout@v4 79 | 80 | - uses: actions/setup-python@v5 81 | with: 82 | python-version: "${{ matrix.python_version }}" 83 | 84 | - name: Install dependencies 85 | run: | 86 | pip install -r dev-requirements.txt 87 | pip freeze 88 | 89 | - name: Install UI test dependencies 90 | if: matrix.repo_type == 'ui' 91 | run: | 92 | pip install -r playwright-requirements.txt 93 | playwright install firefox 94 | 95 | - name: Install repo2docker 96 | run: | 97 | python -m build --wheel . 98 | pip install dist/*.whl 99 | 100 | # add for mercurial tests 101 | pip install mercurial hg-evolve 102 | 103 | pip freeze 104 | 105 | - name: Run pytest 106 | run: | 107 | pytest --verbose --color=yes --durations=10 --cov=repo2docker tests/${{ matrix.repo_type }} 108 | 109 | - uses: codecov/codecov-action@v5 110 | -------------------------------------------------------------------------------- /.gitpod.yml: -------------------------------------------------------------------------------- 1 | tasks: 2 | - init: | 3 | pip3 install sphinx-autobuild 4 | pip3 install -r dev-requirements.txt 5 | pip3 install -r docs/requirements.txt 6 | pip3 install -e . 7 | command: | 8 | sphinx-autobuild docs/source/ docs/_build/html/ 9 | name: Sphinx preview 10 | 11 | ports: 12 | - port: 8000 13 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # pre-commit is a tool to perform a predefined set of tasks manually and/or 2 | # automatically before git commits are made. 
3 | # 4 | # Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level 5 | # 6 | # Common tasks 7 | # 8 | # - Run on all files: pre-commit run --all-files 9 | # - Register git hooks: pre-commit install --install-hooks 10 | # 11 | repos: 12 | # Autoformat: Python code, syntax patterns are modernized 13 | - repo: https://github.com/asottile/pyupgrade 14 | rev: v3.19.0 15 | hooks: 16 | - id: pyupgrade 17 | args: 18 | - --py38-plus 19 | # check-tmp is a Python based test script run in created environments 20 | # that can be Python 3.5, older than the Python version required to run 21 | # repo2docker. 22 | exclude: check-tmp 23 | 24 | # Autoformat: Python code 25 | - repo: https://github.com/psf/black 26 | rev: 24.10.0 27 | hooks: 28 | - id: black 29 | args: 30 | - --target-version=py39 31 | - --target-version=py310 32 | - --target-version=py311 33 | - --target-version=py312 34 | - --target-version=py313 35 | 36 | # Autoformat: Python code 37 | - repo: https://github.com/pycqa/isort 38 | rev: 5.13.2 39 | hooks: 40 | - id: isort 41 | args: 42 | - --profile=black 43 | 44 | # Autoformat: markdown 45 | - repo: https://github.com/pre-commit/mirrors-prettier 46 | rev: v4.0.0-alpha.8 47 | hooks: 48 | - id: prettier 49 | files: ".md" 50 | 51 | # pre-commit.ci config reference: https://pre-commit.ci/#configuration 52 | ci: 53 | autoupdate_schedule: monthly 54 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Configuration on how ReadTheDocs (RTD) builds our documentation 2 | # ref: https://readthedocs.org/projects/repo2docker-service/ 3 | # ref: https://docs.readthedocs.io/en/stable/config-file/v2.html 4 | # 5 | version: 2 6 | 7 | sphinx: 8 | configuration: docs/source/conf.py 9 | 10 | build: 11 | os: ubuntu-22.04 12 | tools: 13 | python: "3.10" 14 | 15 | python: 16 | install: 17 | - requirements: docs/requirements.txt 18 | - method: pip 19 | path: . 20 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | The ``repo2docker`` changelog has been moved. It can be viewed online at https://repo2docker.readthedocs.io/en/latest/changelog.html. In the ``repo2docker`` repository, the changelog is located at ``docs/source/changelog.md``. 2 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to repo2docker development 2 | 3 | :sparkles: Thank you for thinking about contributing to repo2docker! :sparkles: 4 | 5 | (And thank you particularly for coming to read the guidelines! :heart_eyes:) 6 | 7 | The repo2docker developer documentation is all rendered on our documentation website: [https://repo2docker.readthedocs.io](https://repo2docker.readthedocs.io). 8 | If you're here, you're probably looking for the [Contributing to repo2docker development](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html) page. 9 | 10 | Please make sure you've read the following sections before opening an issue/pull request: 11 | 12 | - [Process for making a contribution](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#process-for-making-a-contribution). 
13 | - These steps talk you through choosing the right issue template (bug report or feature request) and making a change. 14 | - [Guidelines to getting a Pull Request merged](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#guidelines-to-getting-a-pull-request-merged). 15 | - These are tips and tricks to help make your contribution as smooth as possible for you and for the repo2docker maintenance team. 16 | 17 | There are a few other pages to highlight: 18 | 19 | - [Our roadmap](https://repo2docker.readthedocs.io/en/latest/contributing/roadmap.html) 20 | - We use the roadmap to develop a shared understanding of the project's vision and direction amongst the community of users, contributors, and maintainers. 21 | This is a great place to get a feel for what the maintainers are thinking about for the short, medium, and long term future of the project. 22 | - [Design of repo2docker](https://repo2docker.readthedocs.io/en/latest/design.html) 23 | - This page explains some of the design principles behind repo2docker. 24 | It's a good place to understand _why_ the team have made the decisions that they have along the way! 25 | - We absolutely encourage discussion around refactoring, updating, or extending repo2docker, but please make sure that you've understood this page before opening an issue to discuss the change you'd like to propose. 26 | - [Common developer tasks and how-tos](https://repo2docker.readthedocs.io/en/latest/contributing/tasks.html) 27 | - Some notes on running tests, buildpack dependencies, creating a release, and keeping the pip files up to date. 28 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # syntax = docker/dockerfile:1.3 2 | ARG ALPINE_VERSION=3.21 3 | FROM alpine:${ALPINE_VERSION} 4 | 5 | RUN apk add --no-cache git python3 python3-dev py3-pip py3-setuptools build-base 6 | 7 | # build wheels in a build stage 8 | ARG VIRTUAL_ENV=/opt/venv 9 | ENV PATH=${VIRTUAL_ENV}/bin:${PATH} 10 | 11 | RUN python3 -m venv ${VIRTUAL_ENV} 12 | 13 | ADD . 
/tmp/src 14 | RUN cd /tmp/src && git clean -xfd && git status 15 | RUN mkdir /tmp/wheelhouse \ 16 | && cd /tmp/wheelhouse \ 17 | && pip install wheel \ 18 | && pip wheel --no-cache-dir /tmp/src \ 19 | && ls -l /tmp/wheelhouse 20 | 21 | FROM alpine:${ALPINE_VERSION} 22 | 23 | # install python, git, bash, mercurial 24 | RUN apk add --no-cache git git-lfs python3 py3-pip py3-setuptools bash docker mercurial 25 | 26 | ARG VIRTUAL_ENV=/opt/venv 27 | ENV PATH=${VIRTUAL_ENV}/bin:${PATH} 28 | 29 | RUN python3 -m venv ${VIRTUAL_ENV} 30 | 31 | # install hg-evolve (Mercurial extensions) 32 | RUN pip install hg-evolve --no-cache-dir 33 | 34 | # install repo2docker 35 | COPY --from=0 /tmp/wheelhouse /tmp/wheelhouse 36 | RUN pip install --no-cache-dir --ignore-installed --no-deps /tmp/wheelhouse/*.whl \ 37 | && pip list 38 | 39 | # add git-credential helper 40 | COPY ./docker/git-credential-env /usr/local/bin/git-credential-env 41 | RUN git config --system credential.helper env 42 | 43 | # add entrypoint 44 | COPY ./docker/entrypoint /usr/local/bin/entrypoint 45 | RUN chmod +x /usr/local/bin/entrypoint 46 | ENTRYPOINT ["/usr/local/bin/entrypoint"] 47 | 48 | # Used for testing purpose in ports.py 49 | EXPOSE 52000 50 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2017, Project Jupyter Contributors 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md 2 | include LICENSE 3 | include setup.cfg 4 | recursive-include repo2docker/buildpacks * 5 | include versioneer.py 6 | include repo2docker/_version.py 7 | include repo2docker/contentproviders/dataverse.json 8 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | VERSION=$(shell git rev-parse --short HEAD) 2 | IMAGE_PREFIX=jupyter/ 3 | 4 | build-image: 5 | docker build -t $(IMAGE_PREFIX)repo2docker:v$(VERSION) . 6 | 7 | push-image: 8 | docker push $(IMAGE_PREFIX)repo2docker:v$(VERSION) 9 | 10 | .all: 11 | build-image push-image 12 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | See https://repo2docker.readthedocs.io/en/latest/contributing/tasks.html#creating-a-release 2 | -------------------------------------------------------------------------------- /ROADMAP.md: -------------------------------------------------------------------------------- 1 | The repo2docker roadmap is hosted in the repo2docker documentation. 2 | You can access it at the following link: 3 | 4 | https://repo2docker.readthedocs.io/en/latest/contributing/roadmap.html 5 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | build 2 | conda-lock 3 | pre-commit 4 | pytest-cov 5 | pytest>=7 6 | pyyaml 7 | requests_mock 8 | bcrypt -------------------------------------------------------------------------------- /docker-compose.test.yml: -------------------------------------------------------------------------------- 1 | sut: 2 | build: . 3 | command: jupyter-repo2docker --version 4 | -------------------------------------------------------------------------------- /docker/entrypoint: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | function write_config() { 5 | # Checks for the environment variables *_PROXY. 6 | # If at least one variable is found, it writes all found variables 7 | # into a docker config file 8 | docker_cfg="$1" 9 | httpProxy="${HTTP_PROXY:-$http_proxy}" 10 | httpsProxy="${HTTPS_PROXY:-$https_proxy}" 11 | noProxy="${NO_PROXY:-$no_proxy}" 12 | # If no proxy vars are set, do nothing 13 | [ -z "$httpProxy" ] && [ -z "$httpsProxy" ] && [ -z "$noProxy" ] && return 14 | [ -f "$1" ] && echo "$1 already exists. Not touching it. You are responsible for setting your proxy vars there yourself" >&2 && return 15 | sep="" 16 | mkdir -p "$(dirname $docker_cfg)" 17 | cat <<EOF > "$docker_cfg" 18 | { 19 | "proxies": { 20 | "default": { 21 | EOF 22 | [ -n "$httpProxy" ] && echo -ne "$sep"' "httpProxy": "'"$httpProxy"'"' >> "$docker_cfg" && sep=",\n" 23 | [ -n "$httpsProxy" ] && echo -ne "$sep"' "httpsProxy": "'"$httpsProxy"'"' >> "$docker_cfg" && sep=",\n" 24 | [ -n "$noProxy" ] && echo -ne "$sep"' "noProxy": "'"$noProxy"'"' >> "$docker_cfg" && sep=",\n" 25 | cat <<EOF >> "$docker_cfg" 26 | 27 | } 28 | } 29 | } 30 | EOF 31 | } 32 | 33 | write_config /root/.docker/config.json 34 | 35 | exec "$@" 36 | -------------------------------------------------------------------------------- /docker/git-credential-env: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo -e "$GIT_CREDENTIAL_ENV" 3 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation generated by sphinx-quickstart 2 | # ---------------------------------------------------------------------------- 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) 21 | 22 | 23 | # Manually added commands 24 | # ---------------------------------------------------------------------------- 25 | 26 | # For local development: 27 | # - builds and rebuilds html on changes to source 28 | # - starts a livereload enabled webserver and opens up a browser 29 | devenv: 30 | sphinx-autobuild -b html --open-browser "$(SOURCEDIR)" "$(BUILDDIR)/html" $(SPHINXOPTS) 31 | 32 | # For local development and CI: 33 | # - verifies that links are valid 34 | linkcheck: 35 | $(SPHINXBUILD) -b linkcheck "$(SOURCEDIR)" "$(BUILDDIR)/linkcheck" $(SPHINXOPTS) 36 | @echo 37 | @echo "Link check complete; look for any errors in the above output " \ 38 | "or in $(BUILDDIR)/linkcheck/output.txt." 39 | -------------------------------------------------------------------------------- /docs/issue_template.md: -------------------------------------------------------------------------------- 1 | 14 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | if "%1" == "devenv" goto devenv 15 | if "%1" == "linkcheck" goto linkcheck 16 | goto default 17 | 18 | 19 | :default 20 | %SPHINXBUILD% >NUL 2>NUL 21 | if errorlevel 9009 ( 22 | echo. 23 | echo.The 'sphinx-build' command was not found. Open and read README.md! 
24 | exit /b 1 25 | ) 26 | %SPHINXBUILD% -M %1 "%SOURCEDIR%" "%BUILDDIR%" %SPHINXOPTS% 27 | goto end 28 | 29 | 30 | :help 31 | %SPHINXBUILD% -M help "%SOURCEDIR%" "%BUILDDIR%" %SPHINXOPTS% 32 | goto end 33 | 34 | 35 | :devenv 36 | sphinx-autobuild >NUL 2>NUL 37 | if errorlevel 9009 ( 38 | echo. 39 | echo.The 'sphinx-autobuild' command was not found. Open and read README.md! 40 | exit /b 1 41 | ) 42 | sphinx-autobuild -b html --open-browser "%SOURCEDIR%" "%BUILDDIR%/html" %SPHINXOPTS% 43 | goto end 44 | 45 | 46 | :linkcheck 47 | %SPHINXBUILD% -b linkcheck "%SOURCEDIR%" "%BUILDDIR%/linkcheck" %SPHINXOPTS% 48 | echo. 49 | echo.Link check complete; look for any errors in the above output 50 | echo.or in "%BUILDDIR%/linkcheck/output.txt". 51 | goto end 52 | 53 | 54 | :end 55 | popd 56 | -------------------------------------------------------------------------------- /docs/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 8 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | myst-parser>=0.18 2 | pydata-sphinx-theme>=0.11 3 | sphinx-autobuild 4 | sphinx-copybutton 5 | sphinxcontrib-autoprogram>=0.1.7 6 | sphinxext-opengraph 7 | sphinxext-rediraffe 8 | -------------------------------------------------------------------------------- /docs/source/_static/custom.css: -------------------------------------------------------------------------------- 1 | img.logo { 2 | max-width:100%; 3 | height: 45px; 4 | } 5 | 6 | div.body p.caption { 7 | font-size: 1.5em; 8 | } 9 | 10 | h2 a.toc-backref { 11 | text-decoration: none; 12 | } 13 | 14 | 15 | .navbar-brand{ 16 | display: flex; 17 | justify-content:center; 18 | align-items:center; 19 | overflow:hidden; 20 | } -------------------------------------------------------------------------------- /docs/source/_static/images/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterhub/repo2docker/e795060aec3555a6203ed79c3676765fb3f3b850/docs/source/_static/images/favicon.ico -------------------------------------------------------------------------------- /docs/source/_static/images/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterhub/repo2docker/e795060aec3555a6203ed79c3676765fb3f3b850/docs/source/_static/images/logo.png -------------------------------------------------------------------------------- /docs/source/_static/images/repo2docker.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterhub/repo2docker/e795060aec3555a6203ed79c3676765fb3f3b850/docs/source/_static/images/repo2docker.png -------------------------------------------------------------------------------- /docs/source/_static/images/repo2docker.sketch: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jupyterhub/repo2docker/e795060aec3555a6203ed79c3676765fb3f3b850/docs/source/_static/images/repo2docker.sketch -------------------------------------------------------------------------------- /docs/source/configuration/index.rst: -------------------------------------------------------------------------------- 1 | =========================== 2 | Configuring your repository 3 | =========================== 4 | 5 | 
Information about configuring your repository to work with repo2docker, 6 | and controlling elements of the built environment using configuration files. 7 | 8 | For information on where to put your configuration files see :ref:`usage-config-file-location`. 9 | 10 | .. toctree:: 11 | :maxdepth: 2 12 | :caption: Complete list of configuration files 13 | 14 | ../config_files 15 | ../specification 16 | -------------------------------------------------------------------------------- /docs/source/contributing/buildpack.md: -------------------------------------------------------------------------------- 1 | # Add a new buildpack 2 | 3 | A new buildpack is needed when a new language or a new package manager should be 4 | supported. [Existing buildpacks](https://github.com/jupyterhub/repo2docker/tree/HEAD/repo2docker/buildpacks) 5 | are a good model for how new buildpacks should be structured. 6 | See [the Buildpacks page](buildpacks) for more information about the 7 | structure of a buildpack. 8 | 9 | ## Criteria to balance and consider 10 | 11 | Criteria to balance are: 12 | 13 | 1. Maintenance burden on repo2docker. 14 | 2. How easy it is to use a given setup without support from repo2docker natively. 15 | There are two escape hatches here - `postBuild` and `Dockerfile`. 16 | 3. How widely used is this language / package manager? This is the primary tradeoff 17 | with point (1). We (the Binder / Jupyter team) want to introduce new formats 18 | as rarely as possible, so ideally we can just say "X repositories on binder already use 19 | this using one of the escape hatches in (2), so let us make it easy and add 20 | native support". 21 | 22 | ### Adding libraries or UI to existing buildpacks 23 | 24 | Note that this doesn't apply to adding additional libraries / UI to existing 25 | buildpacks. For example, if we had an R buildpack and it supported IRKernel, 26 | it is much easier to 27 | just support RStudio / Shiny with it, since those are library additions instead of entirely 28 | new buildpacks. 29 | -------------------------------------------------------------------------------- /docs/source/contributing/contentprovider.rst: -------------------------------------------------------------------------------- 1 | ========================== 2 | Add a new content provider 3 | ========================== 4 | 5 | Adding a new content provider allows repo2docker to grab repositories from new 6 | locations on the internet. To do so, you should take the following steps (a minimal sketch follows the list): 7 | 8 | #. Sub-class the `ContentProvider class `_. 9 | This will give you a skeleton class you can modify to support your new 10 | content provider. 11 | #. Implement a **detect()** method for the class. This takes an input 12 | string (e.g., a URL or path) and determines if it points to this particular 13 | content provider. It should return a dictionary (called 14 | ``spec``) that will be passed to the ``fetch()`` method. `For example, see the ZenodoProvider detect method `_. 15 | #. Implement a **fetch()** method for the class. This takes the dictionary ``spec`` as input, and 16 | ensures the repository exists on disk (e.g., by downloading it) and 17 | returns a path to it. 18 | `For example, see the ZenodoProvider fetch method `_. 
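To make these steps concrete, here is a minimal, hypothetical provider. This is a sketch only: ``myhub.example.org`` is a made-up service, and the ``detect()``/``fetch()`` signatures below mirror the existing providers in ``repo2docker/contentproviders`` at the time of writing, so check ``base.py`` for the current interface.

.. code-block:: python

   import subprocess

   from repo2docker.contentproviders.base import ContentProvider


   class MyHub(ContentProvider):
       """Hypothetical provider for repositories hosted on myhub.example.org."""

       def detect(self, source, ref=None, extra_args=None):
           # Return a spec dict if `source` points at our service; returning
           # None lets repo2docker fall through to the next provider.
           if source.startswith("https://myhub.example.org/"):
               return {"url": source}
           return None

       def fetch(self, spec, output_dir, yield_output=False):
           # Materialize the repository described by `spec` into `output_dir`.
           # fetch() is a generator: yielded strings are streamed to the
           # user as log output while the repository is being fetched.
           yield f"Fetching {spec['url']} into {output_dir}\n"
           subprocess.check_call(["git", "clone", spec["url"], output_dir])

Returning ``None`` from ``detect()`` is what allows repo2docker to try each registered provider in turn until one claims the source.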
19 | -------------------------------------------------------------------------------- /docs/source/contributing/index.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Contributing 3 | ============ 4 | 5 | The repo2docker community is welcoming of all kinds of help and 6 | participation from others. Below are a few ways that you can get involved, 7 | as well as resources for understanding the structure and design of the 8 | repo2docker package. 9 | 10 | .. toctree:: 11 | 12 | contributing 13 | roadmap 14 | ../architecture 15 | ../design 16 | tasks 17 | buildpack 18 | contentprovider 19 | -------------------------------------------------------------------------------- /docs/source/getting-started/index.rst: -------------------------------------------------------------------------------- 1 | =============== 2 | Getting Started 3 | =============== 4 | 5 | Instructions and information on how to get started with repo2docker 6 | on your own machine. Select from the pages listed below to begin. 7 | 8 | .. toctree:: 9 | :maxdepth: 2 10 | 11 | ../install 12 | ../usage 13 | ../faq -------------------------------------------------------------------------------- /docs/source/howto/base_image.md: -------------------------------------------------------------------------------- 1 | # Change the base image used by repo2docker 2 | 3 | You may change the base image used in the `Dockerfile` that repo2docker generates to build images. 4 | This is equivalent to changing the `FROM <base-image>` line in the Dockerfile. 5 | 6 | To do so, use the `base_image` traitlet when invoking `repo2docker` (ex: `repo2docker --Repo2Docker.base_image=image_name`; see the example at the end of this page). 7 | Note that this is not configurable by individual repositories; it is configured when you invoke the `repo2docker` command. 8 | 9 | ```{note} 10 | By default repo2docker builds on top of the `buildpack-deps:jammy` base image, an Ubuntu-based image. 11 | ``` 12 | 13 | ## Requirements for your base image 14 | 15 | `repo2docker` will only work if a specific set of packages exists in the base image. 16 | Only images that match the following criteria are supported: 17 | 18 | - Ubuntu-based distributions (minimum `18.04`) 19 | - Contains a set of base packages installed with [the `buildpack-deps` image family](https://hub.docker.com/_/buildpack-deps). 20 | 21 | Other images _may_ work, but are not officially supported. 22 | 23 | ## This will affect reproducibility 🚨 24 | 25 | Changing the base image may have an impact on the reproducibility of repositories that are built. 26 | There are **no guarantees that repositories will behave the same way as other repo2docker builds if you change the base image**. 27 | For example, these are two scenarios that would make your repositories non-reproducible: 28 | 29 | - **Your base image is different from `Ubuntu:jammy`.** 30 | If you change the base image in a way that is different from repo2docker's default (the Ubuntu `jammy` image), then repositories that **you** build with repo2docker may be significantly different from those that **other** instances of repo2docker build (e.g., those from [`mybinder.org`](https://mybinder.org)). 31 | - **Your base image changes over time.** 32 | If you choose a base image that changes its composition over time (e.g., an image provided by some other community), then it may cause repositories built with your base image to change in unpredictable ways. 33 | We recommend choosing a base image that you know to be stable and trustworthy. 
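As a concrete illustration of the `base_image` traitlet described above, the following sketch builds (but does not run) an image with an explicitly pinned base image. The repository URL is a placeholder example, the pinned tag is simply repo2docker's own default, and it assumes `repo2docker` and Docker are already installed.

```python
import subprocess

# Build an image without starting a container (--no-run), overriding the
# base image for this invocation only; repositories themselves cannot
# configure this setting.
subprocess.run(
    [
        "repo2docker",
        "--no-run",
        "--Repo2Docker.base_image=docker.io/library/buildpack-deps:jammy",
        "https://github.com/binder-examples/requirements",
    ],
    check=True,
)
```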
34 | -------------------------------------------------------------------------------- /docs/source/howto/breaking_changes.md: -------------------------------------------------------------------------------- 1 | # Deal with breaking changes in repo2docker 2 | 3 | Repo2docker occasionally has to make breaking changes in how repositories are built. 4 | 5 | ## Upgrade of base image from Ubuntu 18.04 to 22.04 6 | 7 | The base image used by repo2docker was [upgraded from Ubuntu 18.04 to Ubuntu 22.04](https://github.com/jupyterhub/repo2docker/pull/1287) in version 2023.10.0 due to Ubuntu 18.04 going out of support. 8 | 9 | This is unlikely to affect you unless you are using {ref}`apt.txt `. 10 | 11 | {ref}`apt.txt ` installs packages from the official Ubuntu package repositories, and is intrinsically tied to the Ubuntu version. 12 | Many packages will be available in both Ubuntu 18.04 and Ubuntu 22.04; however, some may be renamed (for example if multiple incompatible versions are available). 13 | 14 | Some packages may be removed, or may not be compatible with the previous version. 15 | In this case you should see if your packages can be installed using a {ref}`Conda environment.yml file ` using either the default [conda-forge channel](https://conda-forge.org/feedstock-outputs/) or one of the many [third-party channels](https://docs.conda.io/projects/conda/en/latest/user-guide/concepts/channels.html). 16 | 17 | Alternatively, you can try installing the packages from source, using a {ref}`postBuild ` script. 18 | 19 | As a last resort, you can install an older version of repo2docker locally, build your image, push it to a public container registry such as [Docker Hub](https://hub.docker.com/), [GitHub Container Registry](https://docs.github.com/en/packages/guides/about-github-container-registry) or [quay.io](https://quay.io/), and replace your repository's repo2docker configuration with a minimal {ref}`Dockerfile ` containing just: 20 | 21 | ```dockerfile 22 | FROM <registry>/<user>/<image>:<tag> 23 | ``` 24 | 25 | This image will contain a frozen version of your repository at the time the image was built. 26 | You will need to rebuild and push it every time your repository is modified. 27 | -------------------------------------------------------------------------------- /docs/source/howto/export_environment.rst: -------------------------------------------------------------------------------- 1 | .. _export-environment: 2 | 3 | ============================================================================== 4 | How to automatically create an ``environment.yml`` that works with repo2docker 5 | ============================================================================== 6 | 7 | This how-to explains how to create an ``environment.yml`` that specifies all 8 | installed packages and their precise versions from your environment. 9 | 10 | 11 | The challenge 12 | ============= 13 | 14 | ``conda env export -f environment.yml`` creates a strict export of all packages. 15 | This is the most robust for reproducibility, but it does bake in potential 16 | platform-specific packages, so you can only use an exported environment on the 17 | same platform. 18 | 19 | ``repo2docker`` uses a Linux-based image as the starting point for every Docker 20 | image it creates. However, a lot of people use OSX or Windows as their day-to-day 21 | operating system. This means that the ``environment.yml`` created by a strict 22 | export will not work, with error messages saying that certain packages cannot 23 | be resolved (``ResolvePackageNotFound``). 
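To make the contrast concrete before the solutions below, here is a sketch that runs both flavours of export described on this page; the output file names are arbitrary, and it assumes ``conda`` is on your ``PATH`` with the environment in question active.

.. code-block:: python

   import subprocess

   # Strict export: pins every package, including platform-specific builds.
   # Exported from OSX or Windows, this is what later fails to solve on
   # repo2docker's Linux image with ResolvePackageNotFound.
   subprocess.run(["conda", "env", "export", "-f", "environment.strict.yml"], check=True)

   # History-based export (recommended below): records only the packages
   # you explicitly installed, so it stays solvable across platforms.
   subprocess.run(
       ["conda", "env", "export", "--from-history", "-f", "environment.yml"],
       check=True,
   )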
24 | 25 | 26 | The solution 27 | ============ 28 | 29 | Export your explicit install commands 30 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 31 | 32 | To get a minimal ``environment.yml`` that only contains the packages you 33 | explicitly installed, run 34 | ``conda env export --from-history -f environment.yml``. We recommend that you 35 | use this option to create your ``environment.yml``. The resulting 36 | ``environment.yml`` then contains a loose pinning of the versions used, e.g. 37 | ``pandas=0.25`` if you explicitly requested this ``pandas`` version on 38 | installation. If you didn't list a version constraint during installation, it 39 | will also not be listed in your ``environment.yml``. 40 | 41 | While this approach doesn't lead to perfect reproducibility, it will contain 42 | just the same packages as if you recreated the environment with the same 43 | commands again today. 44 | 45 | Strict version export 46 | ~~~~~~~~~~~~~~~~~~~~~ 47 | 48 | Follow this procedure to create a strict export of your environment that will 49 | work with ``repo2docker`` and sites like `mybinder.org `_. 50 | 51 | We will launch a terminal inside a basic Docker image, install the packages 52 | you need, and then perform a strict export of the environment. 53 | 54 | #. install repo2docker on your computer by following :ref:`install` 55 | #. in a terminal, launch a basic repository inside repo2docker with 56 | ``repo2docker https://github.com/binder-examples/conda-freeze`` 57 | 58 | #. open the URL printed at the end in a browser; the URL should look like 59 | ``http://127.0.0.1:61037/?token=30e61ec80bda6dd0d14805ea76bb59e7b0cd78b5d6b436f0`` 60 | #. open a terminal by clicking "New -> Terminal" next to the "Upload" button on the 61 | right hand side of the webpage 62 | #. install the packages your project requires with ``conda install <package-name>`` 63 | #. use ``conda env export -n root`` to print the environment 64 | #. copy and paste the environment you just printed into an ``environment.yml`` in 65 | your project's repository 66 | #. close your browser tabs and exit the repo2docker session by pressing Ctrl-C. 67 | 68 | This will give you a strict export of your environment that precisely pins the 69 | versions of packages in your environment, based on a Linux environment. 70 | -------------------------------------------------------------------------------- /docs/source/howto/jupyterhub_images.rst: -------------------------------------------------------------------------------- 1 | .. _jupyterhub_images: 2 | 3 | ============================= 4 | Build JupyterHub-ready images 5 | ============================= 6 | 7 | JupyterHub_ allows multiple 8 | users to collaborate on a shared Jupyter server. ``repo2docker`` can build 9 | Docker images that can be shared within a JupyterHub deployment. 
For example, 10 | `mybinder.org `_ uses JupyterHub and ``repo2docker`` 11 | to allow anyone to build a Docker image of a git repository online and 12 | share an executable version of the repository with a URL to the built image. 13 | 14 | To build JupyterHub_-ready Docker images with ``repo2docker``, the 15 | version of your JupyterHub deployment must be included in the 16 | ``environment.yml`` or ``requirements.txt`` of the git repositories you 17 | build. 18 | 19 | If your instance of JupyterHub uses ``DockerSpawner``, you will need to set its 20 | command to run ``jupyterhub-singleuser`` by adding this line in your 21 | configuration file:: 22 | 23 | c.DockerSpawner.cmd = ['jupyterhub-singleuser'] 24 | 25 | .. _JupyterHub: https://github.com/jupyterhub/jupyterhub 26 | -------------------------------------------------------------------------------- /docs/source/howto/lab_workspaces.rst: -------------------------------------------------------------------------------- 1 | .. _howto/lab_workspaces: 2 | 3 | ============================================= 4 | Share JupyterLab Workspaces with a repository 5 | ============================================= 6 | 7 | JupyterLab uses `workspaces `_ 8 | to save the current state of windows, settings, and documents that 9 | are open in a JupyterLab session. It is a way to persist the general 10 | configuration over time. 11 | 12 | It is possible to export JupyterLab workspaces and load them into 13 | another JupyterLab installation in order to share a workspace with 14 | someone else. 15 | 16 | In order to package your workspace with a repository, we recommend 17 | following the steps in the documentation: 18 | 19 | https://jupyterlab.readthedocs.io/en/stable/user/binder.html#customize-the-layout 20 | -------------------------------------------------------------------------------- /docs/source/howto/user_interface.rst: -------------------------------------------------------------------------------- 1 | .. _user_interface: 2 | 3 | ============================ 4 | Configure the user interface 5 | ============================ 6 | 7 | You can build several user interfaces into the resulting Docker image. 8 | This is controlled with various :ref:`configuration files `. 9 | 10 | 11 | JupyterLab 12 | ========== 13 | 14 | JupyterLab is the default interface for repo2docker. 15 | 16 | The following Binder URL will open the 17 | `pyTudes repository `_ 18 | and begin a JupyterLab session in the ``ipynb`` folder: 19 | 20 | https://mybinder.org/v2/gh/norvig/pytudes/HEAD?urlpath=lab/tree/ipynb 21 | 22 | The ``/tree/ipynb`` above is how JupyterLab directs you to a specific file 23 | or folder. 24 | 25 | To learn more about URLs in JupyterLab and Jupyter Notebook, visit 26 | `starting JupyterLab `_. 27 | 28 | Classic Notebook Interface 29 | ========================== 30 | 31 | The classic notebook interface is also available without any 32 | extra configuration; no additional setup is needed to use it. 33 | 34 | You can launch the classic notebook interface from within a user 35 | session by opening JupyterLab and replacing ``/lab/`` with ``/tree/`` 36 | in the default JupyterLab URL, 37 | like so: 38 | 39 | .. code-block:: none 40 | 41 | http(s)://<server:port>/tree/ 42 | 43 | And you can switch back to JupyterLab by replacing ``/tree/`` with ``/lab/``: 44 | 45 | .. 
code-block:: none 46 | 47 | http(s)://<server:port>/lab/ 48 | 49 | RStudio 50 | ======= 51 | 52 | The RStudio user interface is automatically enabled if a configuration file for 53 | R is detected (i.e. an R version specified in ``runtime.txt``). If this is detected, 54 | RStudio will be accessible by appending ``/rstudio`` to the URL, like so: 55 | 56 | .. code-block:: none 57 | 58 | http(s)://<server:port>/rstudio 59 | 60 | For example, the following Binder link will open an RStudio session in 61 | the `R demo repository `_. 62 | 63 | http://mybinder.org/v2/gh/binder-examples/r/HEAD?urlpath=rstudio 64 | 65 | 66 | Shiny 67 | ===== 68 | 69 | `Shiny lets you create interactive visualizations with R `_. 70 | Shiny is automatically enabled if a configuration file for 71 | R is detected (i.e. an R version specified in ``runtime.txt``). If 72 | this is detected, Shiny will be accessible by appending 73 | ``/shiny/`` to the URL, like so: 74 | 75 | .. code-block:: none 76 | 77 | http(s)://<server:port>/shiny/bus-dashboard 78 | 79 | This assumes that a folder called ``bus-dashboard`` exists in the root 80 | of the repository, and that it contains all of the files needed to run 81 | a Shiny app. 82 | 83 | For example, the following Binder link will open a Shiny session in 84 | the `R demo repository `_. 85 | 86 | http://mybinder.org/v2/gh/binder-examples/r/HEAD?urlpath=shiny/bus-dashboard/ 87 | 88 | 89 | Stencila 90 | ======== 91 | 92 | .. note:: 93 | 94 | Stencila support has been removed due to changes in stencila making it incompatible. 95 | Please `get in touch `__ if you would like to help restore stencila support. 96 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | jupyter-repo2docker 2 | =================== 3 | 4 | ``jupyter-repo2docker`` is a tool to **build, run, and push Docker 5 | images from source code repositories**. 6 | 7 | ``repo2docker`` fetches a repository 8 | (from GitHub, GitLab, Zenodo, Figshare, Dataverse installations, a Git repository or a local directory) 9 | and builds a container image in which the code can be executed. 10 | The image build process is based on the configuration files found in the repository. 11 | 12 | ``repo2docker`` can be 13 | used to explore a repository locally by building and executing the 14 | constructed image of the repository, or as a means of building images that 15 | are pushed to a Docker registry. 16 | 17 | ``repo2docker`` is the tool used by `BinderHub `_ 18 | to build images on demand. 19 | 20 | Please report `Bugs `_, 21 | `ask questions `_ or 22 | `contribute to the project `_. 23 | 24 | .. toctree:: 25 | :maxdepth: 2 26 | :caption: Getting started with repo2docker 27 | 28 | getting-started/index 29 | howto/index 30 | configuration/index 31 | 32 | .. toctree:: 33 | :maxdepth: 2 34 | :caption: Contribute to repo2docker 35 | 36 | contributing/index 37 | 38 | .. toctree:: 39 | :maxdepth: 2 40 | :caption: Changelog 41 | 42 | changelog 43 | -------------------------------------------------------------------------------- /docs/source/install.rst: -------------------------------------------------------------------------------- 1 | .. _install: 2 | 3 | Installing ``repo2docker`` 4 | ========================== 5 | 6 | repo2docker requires Python 3.6 or above on Linux and macOS. See 7 | :ref:`below ` for more information about Windows support. 
17 | ``repo2docker`` is the tool used by `BinderHub `_ 18 | to build images on demand. 19 | 20 | Please report `Bugs <https://github.com/jupyterhub/repo2docker/issues>`_, 21 | `ask questions `_, or 22 | `contribute to the project `_. 23 | 24 | .. toctree:: 25 | :maxdepth: 2 26 | :caption: Getting started with repo2docker 27 | 28 | getting-started/index 29 | howto/index 30 | configuration/index 31 | 32 | .. toctree:: 33 | :maxdepth: 2 34 | :caption: Contribute to repo2docker 35 | 36 | contributing/index 37 | 38 | .. toctree:: 39 | :maxdepth: 2 40 | :caption: Changelog 41 | 42 | changelog 43 | -------------------------------------------------------------------------------- /docs/source/install.rst: -------------------------------------------------------------------------------- 1 | .. _install: 2 | 3 | Installing ``repo2docker`` 4 | ========================== 5 | 6 | repo2docker requires Python 3.9 or above on Linux and macOS. See 7 | :ref:`below <windows>` for more information about Windows support. 8 | 9 | Prerequisite: Docker 10 | -------------------- 11 | 12 | Install `Docker `_ as it is required 13 | to build Docker images. The 14 | `Community Edition `_ 15 | is available for free. 16 | 17 | Recent versions of Docker are recommended. 18 | The latest version of Docker, ``18.03``, successfully builds repositories from 19 | `binder-examples `_. 20 | The `BinderHub `_ helm chart uses version 21 | ``17.11.0-ce-dind``. See the 22 | `helm chart `_ 23 | for more details. 24 | 25 | Optional: Mercurial 26 | ------------------- 27 | 28 | For `Mercurial `_ repositories, Mercurial and 29 | `hg-evolve `_ need to be 30 | installed. For example, on Debian-based distributions, one can do:: 31 | 32 | sudo apt install mercurial 33 | $(hg debuginstall --template "{pythonexe}") -m pip install hg-evolve --user 34 | 35 | To install Mercurial on other systems, see `here 36 | `_. 37 | 38 | Note that for old Mercurial versions, you may need to specify a version for 39 | hg-evolve. For example, ``hg-evolve==9.2`` for hg 4.5 (which is installed with 40 | ``apt`` on Ubuntu 18.04). 41 | 42 | Installing with ``pip`` 43 | ----------------------- 44 | 45 | We recommend installing ``repo2docker`` with the ``pip`` tool:: 46 | 47 | python3 -m pip install jupyter-repo2docker 48 | 49 | for the latest release. To install the most recent code from the upstream repository, run:: 50 | 51 | python3 -m pip install https://github.com/jupyterhub/repo2docker/archive/main.zip 52 | 53 | For information on using ``repo2docker``, see :ref:`usage`. 54 | 55 | Installing from source code 56 | --------------------------- 57 | 58 | Alternatively, you can install repo2docker from a local source tree, 59 | e.g. in case you are contributing back to this project:: 60 | 61 | git clone https://github.com/jupyterhub/repo2docker.git 62 | cd repo2docker 63 | python3 -m pip install -e . 64 | 65 | That's it! For information on using ``repo2docker``, see 66 | :ref:`usage`. 67 | 68 | .. _windows: 69 | 70 | Windows support 71 | --------------- 72 | 73 | Windows support for ``repo2docker`` is still in the experimental stage. 74 | 75 | An article about `using Windows and the WSL`_ (Windows Subsystem for Linux, or 76 | Bash on Windows) provides additional information about Windows and Docker. 77 | 78 | 79 | .. _using Windows and the WSL: https://nickjanetakis.com/blog/setting-up-docker-for-windows-and-wsl-to-work-flawlessly 80 | -------------------------------------------------------------------------------- /docs/source/specification.rst: -------------------------------------------------------------------------------- 1 | .. _specification: 2 | 3 | ==================================================== 4 | The Reproducible Execution Environment Specification 5 | ==================================================== 6 | 7 | repo2docker scans a repository for particular :ref:`config-files`, such 8 | as ``requirements.txt`` or ``Project.toml``. The collection of files, their contents, 9 | and the resulting actions that repo2docker takes is known 10 | as the **Reproducible Execution Environment Specification** (or REES). 11 | 12 | The goal of the REES is to automate and encourage existing community best practices 13 | for reproducible computational environments. This includes installing packages using 14 | community-standard specification files and their corresponding tools, 15 | such as ``requirements.txt`` (with ``pip``), ``Project.toml`` (with Julia), or 16 | ``apt.txt`` (with ``apt``). While repo2docker automates the 17 | creation of the environment, a human should be able to look at a REES-compliant 18 | repository and reproduce the environment using common, clear steps without 19 | repo2docker software. 
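As a concrete illustration, for a repository that ships only a ``requirements.txt``, those "common, clear steps" reduce to something like the following sketch (assuming a Python interpreter is already installed):

.. code-block:: python

    # the human-reproducible equivalent of what a REES-compliant tool
    # does with a requirements.txt file
    import subprocess
    import sys

    subprocess.check_call(
        [sys.executable, "-m", "pip", "install", "-r", "requirements.txt"]
    )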
20 | 21 | Currently, the definition of the REES is the following: 22 | 23 | Any directory containing zero or more files from the :ref:`config-files` list is a 24 | valid reproducible execution environment as defined by the REES. The 25 | configuration files have to all be placed either in the root of the 26 | directory, in a ``binder/`` sub-directory, or in a ``.binder/`` sub-directory. 27 | 28 | For example, the REES recognises ``requirements.txt`` as a valid config file. 29 | The file format is as defined by the ``requirements.txt`` standard of the Python 30 | community. A REES-compliant tool will install a Python interpreter (of unspecified version) 31 | and perform the equivalent action of ``pip install -r requirements.txt`` so that the 32 | user can afterwards run Python and use the packages installed. 33 | -------------------------------------------------------------------------------- /playwright-requirements.txt: -------------------------------------------------------------------------------- 1 | -r dev-requirements.txt 2 | pytest-playwright 3 | -------------------------------------------------------------------------------- /repo2docker/__init__.py: -------------------------------------------------------------------------------- 1 | from . import _version 2 | from .app import Repo2Docker 3 | 4 | # resolve the package version once, via versioneer's generated module 5 | __version__ = _version.get_versions()["version"] 6 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/__init__.py: -------------------------------------------------------------------------------- 1 | from .base import BaseImage, BuildPack 2 | from .conda import CondaBuildPack 3 | from .docker import DockerBuildPack 4 | from .julia import JuliaProjectTomlBuildPack, JuliaRequireBuildPack 5 | from .legacy import LegacyBinderDockerBuildPack 6 | from .nix import NixBuildPack 7 | from .pipfile import PipfileBuildPack 8 | from .python import PythonBuildPack 9 | from .r import RBuildPack 10 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/activate-conda.sh: -------------------------------------------------------------------------------- 1 | set -e 2 | 3 | # Setup conda 4 | CONDA_PROFILE="${CONDA_DIR}/etc/profile.d/conda.sh" 5 | echo "Activating profile: ${CONDA_PROFILE}" 6 | test -f $CONDA_PROFILE && . $CONDA_PROFILE 7 | 8 | # Setup micromamba 9 | eval $(micromamba shell hook -s posix -r ${CONDA_DIR}) 10 | 11 | # Setup mamba 12 | export MAMBA_ROOT_PREFIX="${CONDA_DIR}" 13 | __mamba_setup="$("${CONDA_DIR}/bin/mamba" shell hook --shell posix 2> /dev/null)" 14 | if [ $? 
-eq 0 ]; then 15 | eval "$__mamba_setup" 16 | else 17 | alias mamba="${CONDA_DIR}/bin/mamba" # Fallback on help from mamba activate 18 | fi 19 | unset __mamba_setup 20 | 21 | # Activate the environment 22 | if [[ "${KERNEL_PYTHON_PREFIX}" != "${NB_PYTHON_PREFIX}" ]]; then 23 | # if the kernel is a separate env, stack them 24 | # so both are on PATH, notebook first 25 | mamba activate ${KERNEL_PYTHON_PREFIX} 26 | mamba activate --stack ${NB_PYTHON_PREFIX} 27 | 28 | # even though it's second on $PATH 29 | # make sure CONDA_DEFAULT_ENV is the *kernel* env 30 | # so that `!conda install PKG` installs in the kernel env 31 | # where user packages are installed, not the notebook env 32 | # which only contains UI when the two are different 33 | export CONDA_DEFAULT_ENV="${KERNEL_PYTHON_PREFIX}" 34 | else 35 | mamba activate ${NB_PYTHON_PREFIX} 36 | fi 37 | 38 | set +e 39 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.py-2.7.yml: -------------------------------------------------------------------------------- 1 | channels: 2 | - conda-forge 3 | dependencies: 4 | - python=2.7.* 5 | - pip 6 | - ipykernel==4.8.2 7 | - wcwidth==0.1.9 8 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.py-3.10.yml: -------------------------------------------------------------------------------- 1 | # AUTO GENERATED FROM environment.yml, DO NOT MANUALLY MODIFY 2 | # Generated on 2025-02-09 21:05:17 UTC 3 | channels: 4 | - conda-forge 5 | dependencies: 6 | - python=3.10.* 7 | - nodejs=20 8 | - pip 9 | - ipywidgets==8.* # https://github.com/jupyter-widgets/ipywidgets 10 | - jupyter-offlinenotebook==0.3.* # https://github.com/manics/jupyter-offlinenotebook 11 | - jupyter-resource-usage==1.* # https://github.com/jupyter-server/jupyter-resource-usage 12 | - jupyter_server==2.* # https://github.com/jupyter-server/jupyter_server 13 | - jupyterhub-singleuser==5.* # https://github.com/jupyterhub/jupyterhub 14 | - jupyterlab==4.* # https://github.com/jupyterlab/jupyterlab 15 | - notebook==7.* # https://github.com/jupyter/notebook 16 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.py-3.11.yml: -------------------------------------------------------------------------------- 1 | # AUTO GENERATED FROM environment.yml, DO NOT MANUALLY MODIFY 2 | # Generated on 2025-02-09 21:05:55 UTC 3 | channels: 4 | - conda-forge 5 | dependencies: 6 | - python=3.11.* 7 | - nodejs=20 8 | - pip 9 | - ipywidgets==8.* # https://github.com/jupyter-widgets/ipywidgets 10 | - jupyter-offlinenotebook==0.3.* # https://github.com/manics/jupyter-offlinenotebook 11 | - jupyter-resource-usage==1.* # https://github.com/jupyter-server/jupyter-resource-usage 12 | - jupyter_server==2.* # https://github.com/jupyter-server/jupyter_server 13 | - jupyterhub-singleuser==5.* # https://github.com/jupyterhub/jupyterhub 14 | - jupyterlab==4.* # https://github.com/jupyterlab/jupyterlab 15 | - notebook==7.* # https://github.com/jupyter/notebook 16 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.py-3.12.yml: -------------------------------------------------------------------------------- 1 | # AUTO GENERATED FROM environment.yml, DO NOT MANUALLY MODIFY 2 | # Generated on 2025-02-09 21:07:33 UTC 3 | channels: 4 | - conda-forge 5 | dependencies: 6 | - python=3.12.* 7 | - nodejs=20 
8 | - pip 9 | - ipywidgets==8.* # https://github.com/jupyter-widgets/ipywidgets 10 | - jupyter-offlinenotebook==0.3.* # https://github.com/manics/jupyter-offlinenotebook 11 | - jupyter-resource-usage==1.* # https://github.com/jupyter-server/jupyter-resource-usage 12 | - jupyter_server==2.* # https://github.com/jupyter-server/jupyter_server 13 | - jupyterhub-singleuser==5.* # https://github.com/jupyterhub/jupyterhub 14 | - jupyterlab==4.* # https://github.com/jupyterlab/jupyterlab 15 | - notebook==7.* # https://github.com/jupyter/notebook 16 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.py-3.5.yml: -------------------------------------------------------------------------------- 1 | # py3.5 is not being auto-generated from this file anymore 2 | # update environment.py-3.5.frozen.yml directly 3 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.py-3.6.yml: -------------------------------------------------------------------------------- 1 | # py3.6 is not being auto-generated from this file anymore 2 | # update environment.py-3.6.frozen.yml directly -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.py-3.7.yml: -------------------------------------------------------------------------------- 1 | # AUTO GENERATED FROM environment.yml, DO NOT MANUALLY MODIFY 2 | # Generated on 2023-10-14 22:04:48 UTC 3 | channels: 4 | - conda-forge 5 | dependencies: 6 | - python=3.7.* 7 | - nodejs=18 8 | - pip 9 | - ipywidgets==8.* # https://github.com/jupyter-widgets/ipywidgets 10 | - jupyter-offlinenotebook==0.2.* # https://github.com/manics/jupyter-offlinenotebook 11 | - jupyter-resource-usage==0.7.* # https://github.com/jupyter-server/jupyter-resource-usage 12 | - jupyter_server==1.* # https://github.com/jupyter-server/jupyter_server 13 | - jupyterhub-singleuser==3.* # https://github.com/jupyterhub/jupyterhub 14 | - jupyterlab==3.* # https://github.com/jupyterlab/jupyterlab 15 | - notebook==6.* # https://github.com/jupyter/notebook 16 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.py-3.8.yml: -------------------------------------------------------------------------------- 1 | # AUTO GENERATED FROM environment.yml, DO NOT MANUALLY MODIFY 2 | # Generated on 2025-02-09 21:03:25 UTC 3 | channels: 4 | - conda-forge 5 | dependencies: 6 | - python=3.8.* 7 | - nodejs=20 8 | - pip 9 | - ipywidgets==8.* # https://github.com/jupyter-widgets/ipywidgets 10 | - jupyter-offlinenotebook==0.3.* # https://github.com/manics/jupyter-offlinenotebook 11 | - jupyter-resource-usage==1.* # https://github.com/jupyter-server/jupyter-resource-usage 12 | - jupyter_server==2.* # https://github.com/jupyter-server/jupyter_server 13 | - jupyterhub-singleuser==5.* # https://github.com/jupyterhub/jupyterhub 14 | - jupyterlab==4.* # https://github.com/jupyterlab/jupyterlab 15 | - notebook==7.* # https://github.com/jupyter/notebook 16 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.py-3.9.yml: -------------------------------------------------------------------------------- 1 | # AUTO GENERATED FROM environment.yml, DO NOT MANUALLY MODIFY 2 | # Generated on 2025-02-09 21:04:41 UTC 3 | channels: 4 | - conda-forge 5 | dependencies: 6 | - python=3.9.* 7 | - nodejs=20 8 | - pip 
9 | - ipywidgets==8.* # https://github.com/jupyter-widgets/ipywidgets 10 | - jupyter-offlinenotebook==0.3.* # https://github.com/manics/jupyter-offlinenotebook 11 | - jupyter-resource-usage==1.* # https://github.com/jupyter-server/jupyter-resource-usage 12 | - jupyter_server==2.* # https://github.com/jupyter-server/jupyter_server 13 | - jupyterhub-singleuser==5.* # https://github.com/jupyterhub/jupyterhub 14 | - jupyterlab==4.* # https://github.com/jupyterlab/jupyterlab 15 | - notebook==7.* # https://github.com/jupyter/notebook 16 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/environment.yml: -------------------------------------------------------------------------------- 1 | channels: 2 | - conda-forge 3 | dependencies: 4 | - python=3.10 5 | - nodejs=20 6 | - pip 7 | - ipywidgets==8.* # https://github.com/jupyter-widgets/ipywidgets 8 | - jupyter-offlinenotebook==0.3.* # https://github.com/manics/jupyter-offlinenotebook 9 | - jupyter-resource-usage==1.* # https://github.com/jupyter-server/jupyter-resource-usage 10 | - jupyter_server==2.* # https://github.com/jupyter-server/jupyter_server 11 | - jupyterhub-singleuser==5.* # https://github.com/jupyterhub/jupyterhub 12 | - jupyterlab==4.* # https://github.com/jupyterlab/jupyterlab 13 | - notebook==7.* # https://github.com/jupyter/notebook 14 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/conda/install-base-env.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This downloads and installs a pinned version of micromamba 3 | # and sets up the base environment 4 | set -ex 5 | 6 | cd $(dirname $0) 7 | 8 | export MAMBA_VERSION="2.1.0" 9 | export CONDA_VERSION=24.11.0 10 | 11 | URL="https://anaconda.org/conda-forge/micromamba/${MAMBA_VERSION}/download/${CONDA_PLATFORM}/micromamba-${MAMBA_VERSION}-0.tar.bz2" 12 | 13 | # make sure we don't do anything funky with user's $HOME 14 | # since this is run as root 15 | unset HOME 16 | mkdir -p ${CONDA_DIR} 17 | 18 | export MICROMAMBA_EXE="/usr/local/bin/micromamba" 19 | 20 | time wget -qO- ${URL} | tar -xvj bin/micromamba 21 | mv bin/micromamba "$MICROMAMBA_EXE" 22 | chmod 0755 "$MICROMAMBA_EXE" 23 | 24 | eval "$(${MICROMAMBA_EXE} shell hook --root-prefix ${CONDA_DIR} -s posix)" 25 | 26 | micromamba activate 27 | 28 | export PATH="${PWD}/bin:$PATH" 29 | 30 | cat <<EOT >> ${CONDA_DIR}/.condarc 31 | channels: 32 | - conda-forge 33 | auto_update_conda: false 34 | show_channel_urls: true 35 | update_dependencies: false 36 | # channel_priority: flexible 37 | EOT 38 | 39 | micromamba install conda=${CONDA_VERSION} mamba=${MAMBA_VERSION} -y 40 | 41 | echo "installing notebook env:" 42 | cat "${NB_ENVIRONMENT_FILE}" 43 | 44 | 45 | time ${MAMBA_EXE} create -p ${NB_PYTHON_PREFIX} --file "${NB_ENVIRONMENT_FILE}" 46 | 47 | if [[ ! -z "${NB_REQUIREMENTS_FILE:-}" ]]; then 48 | echo "installing pip requirements" 49 | cat "${NB_REQUIREMENTS_FILE}" 50 | ${NB_PYTHON_PREFIX}/bin/python -mpip install --no-cache --no-deps -r "${NB_REQUIREMENTS_FILE}" 51 | fi 52 | # empty conda history file, 53 | # which seems to result in some effective pinning of packages in the initial env, 54 | # which we don't intend. 55 | # this file must not be *removed*, however 56 | echo '' > ${NB_PYTHON_PREFIX}/conda-meta/history 57 | 58 | if [[ ! 
-z "${KERNEL_ENVIRONMENT_FILE:-}" ]]; then 59 | # install kernel env and register kernelspec 60 | echo "installing kernel env:" 61 | cat "${KERNEL_ENVIRONMENT_FILE}" 62 | time ${MAMBA_EXE} create -p ${KERNEL_PYTHON_PREFIX} --file "${KERNEL_ENVIRONMENT_FILE}" 63 | 64 | if [[ ! -z "${KERNEL_REQUIREMENTS_FILE:-}" ]]; then 65 | echo "installing pip requirements for kernel" 66 | cat "${KERNEL_REQUIREMENTS_FILE}" 67 | ${KERNEL_PYTHON_PREFIX}/bin/python -mpip install --no-cache --no-deps -r "${KERNEL_REQUIREMENTS_FILE}" 68 | fi 69 | 70 | ${KERNEL_PYTHON_PREFIX}/bin/ipython kernel install --prefix "${NB_PYTHON_PREFIX}" 71 | echo '' > ${KERNEL_PYTHON_PREFIX}/conda-meta/history 72 | ${MAMBA_EXE} list -p ${KERNEL_PYTHON_PREFIX} 73 | fi 74 | 75 | # Clean things out! 76 | time ${MAMBA_EXE} clean --all -f -y 77 | 78 | # Remove the pip cache created as part of installing micromamba 79 | rm -rf /root/.cache 80 | 81 | chown -R $NB_USER:$NB_USER ${CONDA_DIR} 82 | 83 | ${MAMBA_EXE} list -p ${NB_PYTHON_PREFIX} 84 | 85 | # Set NPM config 86 | ${NB_PYTHON_PREFIX}/bin/npm config --global set prefix ${NPM_DIR} 87 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/docker.py: -------------------------------------------------------------------------------- 1 | """Generates a variety of Dockerfiles based on an input matrix 2 | """ 3 | 4 | import os 5 | 6 | from .base import BuildPack 7 | 8 | 9 | class DockerBuildPack(BuildPack): 10 | """Docker BuildPack""" 11 | 12 | dockerfile = "Dockerfile" 13 | 14 | def detect(self): 15 | """Check if current repo should be built with the Docker BuildPack""" 16 | return os.path.exists(self.binder_path("Dockerfile")) 17 | 18 | def render(self, build_args=None): 19 | """Render the Dockerfile using by reading it from the source repo""" 20 | Dockerfile = self.binder_path("Dockerfile") 21 | with open(Dockerfile) as f: 22 | return f.read() 23 | 24 | def build( 25 | self, 26 | client, 27 | image_spec, 28 | memory_limit, 29 | build_args, 30 | cache_from, 31 | extra_build_kwargs, 32 | platform=None, 33 | ): 34 | """Build a Docker image based on the Dockerfile in the source repo.""" 35 | # If you work on this bit of code check the corresponding code in 36 | # buildpacks/base.py where it is duplicated 37 | if not isinstance(memory_limit, int): 38 | raise ValueError( 39 | "The memory limit has to be specified as an " 40 | f"integer but is '{type(memory_limit)}'" 41 | ) 42 | limits = {} 43 | if memory_limit: 44 | # We want to always disable swap. Docker expects `memswap` to 45 | # be total allowable memory, *including* swap - while `memory` 46 | # points to non-swap memory. We set both values to the same so 47 | # we use no swap. 
48 | limits = {"memory": memory_limit, "memswap": memory_limit} 49 | 50 | build_kwargs = dict( 51 | path=os.getcwd(), 52 | dockerfile=self.binder_path(self.dockerfile), 53 | tag=image_spec, 54 | buildargs=build_args, 55 | container_limits=limits, 56 | cache_from=cache_from, 57 | labels=self.get_labels(), 58 | platform=platform, 59 | ) 60 | 61 | build_kwargs.update(extra_build_kwargs) 62 | 63 | yield from client.build(**build_kwargs) 64 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/julia/__init__.py: -------------------------------------------------------------------------------- 1 | from .julia_project import JuliaProjectTomlBuildPack 2 | from .julia_require import JuliaRequireBuildPack 3 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/julia/julia_require.py: -------------------------------------------------------------------------------- 1 | """ 2 | DEPRECATED - Dependencies of REQUIRE have been removed 3 | """ 4 | 5 | import os 6 | 7 | from ..python import PythonBuildPack 8 | 9 | 10 | class JuliaRequireBuildPack(PythonBuildPack): 11 | """ 12 | Julia build pack which uses conda and REQUIRE. 13 | 14 | Now just an informative error message. 15 | """ 16 | 17 | def build(self, *args, **kwargs): 18 | raise ValueError( 19 | "Julia REQUIRE no longer supported due to removed infrastructure. Use Project.toml." 20 | ) 21 | 22 | def detect(self): 23 | """ 24 | Check if current repo exects tp be built with the Julia Legacy Build pack 25 | 26 | This no longer works, but try to raise an informative error. 27 | """ 28 | return os.path.exists(self.binder_path("REQUIRE")) and not ( 29 | os.path.exists(self.binder_path("Project.toml")) 30 | or os.path.exists(self.binder_path("JuliaProject.toml")) 31 | ) 32 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/legacy/__init__.py: -------------------------------------------------------------------------------- 1 | """Generates Dockerfiles from the legacy Binder Dockerfiles 2 | based on `andrewosh/binder-base`. 3 | 4 | The Dockerfile is amended to add the contents of the repository 5 | to the image and install a supported version of the notebook 6 | and IPython kernel. 7 | 8 | Note: This buildpack has been deprecated. 9 | """ 10 | 11 | import logging 12 | 13 | 14 | class LegacyBinderDockerBuildPack: 15 | """Legacy build pack for compatibility to first version of Binder. 16 | 17 | This buildpack has been deprecated. 18 | """ 19 | 20 | def __init__(self, *args, **kwargs): 21 | pass 22 | 23 | def detect(self): 24 | """Check if current repo should be built with the Legacy BuildPack.""" 25 | log = logging.getLogger("repo2docker") 26 | try: 27 | with open("Dockerfile") as f: 28 | for line in f: 29 | if line.startswith("FROM"): 30 | if "andrewosh/binder-base" in line.split("#")[0].lower(): 31 | log.error( 32 | "The legacy buildpack was removed in January 2020." 33 | ) 34 | log.error( 35 | "Please see https://repo2docker.readthedocs.io/en/" 36 | "latest/configuration/index.html for alternative ways " 37 | "of configuring your repository." 
38 | ) 39 | raise RuntimeError("The legacy buildpack has been removed.") 40 | else: 41 | return False 42 | except FileNotFoundError: 43 | pass 44 | 45 | return False 46 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/nix/__init__.py: -------------------------------------------------------------------------------- 1 | """BuildPack for nixpkgs environments""" 2 | 3 | import os 4 | from functools import lru_cache 5 | 6 | from ..base import BaseImage 7 | 8 | 9 | class NixBuildPack(BaseImage): 10 | """A nix Package Manager BuildPack""" 11 | 12 | @lru_cache 13 | def get_path(self): 14 | """Return paths to be added to PATH environment variable""" 15 | return super().get_path() + ["/home/${NB_USER}/.nix-profile/bin"] 16 | 17 | @lru_cache 18 | def get_env(self): 19 | """Ordered list of environment variables to be set for this image""" 20 | 21 | return super().get_env() + [ 22 | ("NIX_PATH", "nixpkgs=/home/${NB_USER}/.nix-defexpr/channels/nixpkgs"), 23 | ("NIX_SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt"), 24 | ("GIT_SSL_CAINFO", "/etc/ssl/certs/ca-certificates.crt"), 25 | ] 26 | 27 | @lru_cache 28 | def get_build_scripts(self): 29 | """ 30 | Return series of build-steps common to all nix repositories. 31 | Note that root privileges are needed only for creating the nix 32 | directory and a nix.conf file. 33 | 34 | - create nix directory for user nix installation 35 | - disable sandboxing because it's unsupported inside a Docker container 36 | - install nix package manager for user 37 | 38 | """ 39 | if self.platform == "linux/arm64": 40 | nix_arch = "aarch64" 41 | else: 42 | nix_arch = "x86_64" 43 | return super().get_build_scripts() + [ 44 | ( 45 | "root", 46 | """ 47 | mkdir -m 0755 /nix && \ 48 | chown -R ${NB_USER}:${NB_USER} /nix /usr/local/bin/nix-shell-wrapper /home/${NB_USER} && \ 49 | mkdir -p /etc/nix && \ 50 | touch /etc/nix/nix.conf && \ 51 | echo "sandbox = false" >> /etc/nix/nix.conf 52 | """, 53 | ), 54 | ( 55 | "${NB_USER}", 56 | f""" 57 | NIX_ARCH={nix_arch} bash /home/${{NB_USER}}/.local/bin/install-nix.bash && \ 58 | rm /home/${{NB_USER}}/.local/bin/install-nix.bash 59 | """, 60 | ), 61 | ] 62 | 63 | @lru_cache 64 | def get_build_script_files(self): 65 | """Dict of files to be copied to the container image for use in building""" 66 | return { 67 | "nix/install-nix.bash": "/home/${NB_USER}/.local/bin/install-nix.bash", 68 | "nix/nix-shell-wrapper": "/usr/local/bin/nix-shell-wrapper", 69 | } 70 | 71 | @lru_cache 72 | def get_assemble_scripts(self): 73 | """Return series of build-steps specific to this source repository.""" 74 | return super().get_assemble_scripts() + [ 75 | ( 76 | "${NB_USER}", 77 | f""" 78 | nix-channel --add https://nixos.org/channels/nixpkgs-unstable nixpkgs && \ 79 | nix-channel --update && \ 80 | nix-shell {self.binder_path("default.nix")} 81 | """, 82 | ) 83 | ] 84 | 85 | @lru_cache 86 | def get_start_script(self): 87 | """The path to a script to be executed as ENTRYPOINT""" 88 | # the shell wrapper script duplicates the behaviour of other buildpacks 89 | # when it comes to the `start` script as well as handling a binder/ 90 | # sub-directory when it exists 91 | return "/usr/local/bin/nix-shell-wrapper" 92 | 93 | def detect(self): 94 | """Check if current repo should be built with the nix BuildPack""" 95 | return os.path.exists(self.binder_path("default.nix")) 96 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/nix/install-nix.bash: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This downloads and installs a pinned version of nix 3 | set -ex 4 | 5 | NIX_VERSION="2.13.2" 6 | if [ "$NIX_ARCH" = "aarch64" ]; then 7 | NIX_SHA256="4ae275a46a2441d3459ae389a90ce6e8f7eff12c2a084b2d003ba6f8d0899603" 8 | else 9 | NIX_SHA256="beaec0f28899c22f33adbe30e4ecfceef87b797278c5210ee693e22e9719dfb4" 10 | fi 11 | 12 | # Do all our operations in /tmp, since we can't rely on current directory being writeable yet. 13 | cd /tmp 14 | wget --quiet https://nixos.org/releases/nix/nix-$NIX_VERSION/nix-$NIX_VERSION-$NIX_ARCH-linux.tar.xz 15 | echo "$NIX_SHA256 nix-$NIX_VERSION-$NIX_ARCH-linux.tar.xz" | sha256sum -c 16 | tar xJf nix-*-$NIX_ARCH-linux.tar.xz 17 | sh nix-*-$NIX_ARCH-linux/install 18 | rm -r nix-*-$NIX_ARCH-linux* 19 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/nix/nix-shell-wrapper: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | _term() { 4 | echo "Caught SIGTERM signal!" 5 | # kill -TERM "$PID" 2>/dev/null 6 | exit 0 7 | } 8 | 9 | trap _term SIGTERM 10 | 11 | # if there is a binder/ sub-directory it takes precedence 12 | # files outside it are ignored 13 | # find binder sub-directory (if present) 14 | binder_dir="./" 15 | for dir in "./binder" "./.binder" ; do 16 | if [ -e $dir ]; then 17 | binder_dir=$dir 18 | break 19 | fi 20 | done 21 | 22 | # raise error if both binder and .binder are found 23 | if [[ -d "./binder" && -d "./.binder" ]]; then 24 | echo "Error: Found both binder and .binder directories." 25 | exit 1 26 | fi 27 | 28 | echo "binder_dir is: $binder_dir" 29 | 30 | nixpath="$binder_dir/default.nix"; 31 | if [ -f $binder_dir/start ]; then 32 | chmod u+x $binder_dir/start 33 | # Using `$@`` here which is what the internet recommends leads to 34 | # errors when the command is something like `jupyter --ip=0.0.0.0 ...` 35 | # as nix-shell picks that up as an argument to it instead of the command. 36 | # There are several issues on the nix repos discussing this and adding support 37 | # for -- to indicate "all arguments after this are for the command, not nix-shell" 38 | # but it seems they have stalled/not yet produced an implementation. 39 | # So let's use `$*` for now. 40 | nix-shell $nixpath --command "$binder_dir/start $*" & 41 | else 42 | nix-shell $nixpath --command "$*" & 43 | fi 44 | 45 | PID=$! 46 | wait "$PID" 47 | -------------------------------------------------------------------------------- /repo2docker/buildpacks/python3-login: -------------------------------------------------------------------------------- 1 | #!/bin/bash -l 2 | # This is an executable that launches Python in a login shell 3 | # to ensure that full profile setup occurs. 
4 | # shebang on linux only allows 1 argument, 5 | # so we couldn't pick a login shell in one shebang line 6 | # for a Python script 7 | 8 | # -u means unbuffered, which one ~always wants in a container 9 | # otherwise output can be mysteriously missing 10 | 11 | exec python3 -u "$@" 12 | -------------------------------------------------------------------------------- /repo2docker/contentproviders/__init__.py: -------------------------------------------------------------------------------- 1 | from .base import Local 2 | from .ckan import CKAN 3 | from .dataverse import Dataverse 4 | from .figshare import Figshare 5 | from .git import Git 6 | from .hydroshare import Hydroshare 7 | from .mercurial import Mercurial 8 | from .swhid import Swhid 9 | from .zenodo import Zenodo 10 | -------------------------------------------------------------------------------- /repo2docker/contentproviders/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base classes for repo2docker ContentProviders 3 | 4 | ContentProviders accept a `spec` of various kinds, and 5 | provide the contents from the spec to a given output directory. 6 | """ 7 | 8 | import logging 9 | import os 10 | 11 | 12 | class ContentProviderException(Exception): 13 | """Exception raised when a ContentProvider cannot provide content.""" 14 | 15 | pass 16 | 17 | 18 | class ContentProvider: 19 | def __init__(self): 20 | self.log = logging.getLogger("repo2docker") 21 | 22 | @property 23 | def content_id(self): 24 | """A unique ID to represent the version of the content. 25 | This ID is used to name the built images. If the ID is the same between 26 | two runs of repo2docker we will reuse an existing image (if it exists). 27 | By providing an ID that summarizes the content we can reuse existing 28 | images and speed up build times. A good ID is the revision of a Git 29 | repository or a hash computed from all the content. 30 | The content ID can be any string. 31 | To disable this behaviour set this property to `None`, in which case 32 | a fresh image will always be built. 33 | """ 34 | return None 35 | 36 | def detect(self, repo, ref=None, extra_args=None): 37 | """Determine compatibility between source and this provider. 38 | 39 | If the provider knows how to fetch this source it will return a 40 | `spec` that can be passed to `fetch`. The arguments are the `repo` 41 | string passed on the command-line, the value of the --ref parameter, 42 | if provided, and any provider-specific arguments provided on the 43 | command-line. 44 | 45 | If the provider does not know how to fetch this source it will return 46 | `None`. 47 | """ 48 | raise NotImplementedError() 49 | 50 | def fetch(self, spec, output_dir, yield_output=False): 51 | """Provide the contents of given spec to output_dir 52 | 53 | This generator yields logging information if `yield_output=True`, 54 | otherwise log output is printed to stdout. 55 | 56 | Arguments: 57 | spec -- Dict specification understood by this ContentProvider 58 | output_dir {string} -- Path to output directory (must already exist) 59 | yield_output {bool} -- If True, return output line by line. If not, 60 | output just goes to stdout. 
61 | """ 62 | raise NotImplementedError() 63 | 64 | 65 | class Local(ContentProvider): 66 | def detect(self, source, ref=None, extra_args=None): 67 | if os.path.isdir(source): 68 | return {"path": source} 69 | 70 | def fetch(self, spec, output_dir, yield_output=False): 71 | # nothing to be done if your content is already in the output directory 72 | msg = f'Local content provider assumes {spec["path"]} == {output_dir}' 73 | assert output_dir == spec["path"], msg 74 | yield f'Using local repo {spec["path"]}.\n' 75 | -------------------------------------------------------------------------------- /repo2docker/contentproviders/mercurial.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | from ..utils import R2dState, execute_cmd 4 | from .base import ContentProvider, ContentProviderException 5 | 6 | args_enabling_topic = ["--config", "extensions.topic="] 7 | 8 | 9 | class Mercurial(ContentProvider): 10 | """Provide contents of a remote Mercurial repository.""" 11 | 12 | def detect(self, source, ref=None, extra_args=None): 13 | if "github.com/" in source or source.endswith(".git"): 14 | return None 15 | try: 16 | subprocess.check_output( 17 | ["hg", "identify", source, "--config", "extensions.hggit=!"] 18 | + args_enabling_topic, 19 | stderr=subprocess.DEVNULL, 20 | ) 21 | except subprocess.CalledProcessError: 22 | return None 23 | 24 | return {"repo": source, "ref": ref} 25 | 26 | def fetch(self, spec, output_dir, yield_output=False): 27 | repo = spec["repo"] 28 | ref = spec.get("ref", None) 29 | 30 | # make a clone of the remote repository 31 | try: 32 | cmd = [ 33 | "hg", 34 | "clone", 35 | repo, 36 | output_dir, 37 | "--config", 38 | "phases.publish=False", 39 | ] + args_enabling_topic 40 | if ref is not None: 41 | # don't update so the clone will include an empty working 42 | # directory, the given ref will be updated out later 43 | cmd.extend(["--noupdate"]) 44 | yield from execute_cmd(cmd, capture=yield_output) 45 | 46 | except subprocess.CalledProcessError as error: 47 | msg = f"Failed to clone repository from {repo}" 48 | if ref is not None: 49 | msg += f" (ref {ref})" 50 | msg += "." 
51 | raise ContentProviderException(msg) from error 52 | 53 | # check out the specific ref given by the user 54 | if ref is not None: 55 | try: 56 | yield from execute_cmd( 57 | ["hg", "update", "--clean", ref] + args_enabling_topic, 58 | cwd=output_dir, 59 | capture=yield_output, 60 | ) 61 | except subprocess.CalledProcessError: 62 | self.log.error( 63 | f"Failed to update to ref {ref}", extra=dict(phase=R2dState.FAILED) 64 | ) 65 | raise ValueError(f"Failed to update to ref {ref}") 66 | 67 | cmd = ["hg", "identify", "-i"] + args_enabling_topic 68 | sha1 = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=output_dir) 69 | self._node_id = sha1.stdout.read().decode().strip() 70 | 71 | @property 72 | def content_id(self): 73 | """A unique ID to represent the version of the content.""" 74 | return self._node_id 75 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # used by our travis auto-deployment system 2 | # needs changing if repo2docker ever stops being 3 | # a pure python module 4 | [wheel] 5 | universal = 1 6 | 7 | [metadata] 8 | license_file = LICENSE 9 | 10 | [versioneer] 11 | VCS = git 12 | style = pep440 13 | versionfile_source = repo2docker/_version.py 14 | versionfile_build = repo2docker/_version.py 15 | tag_prefix = 16 | parentdir_prefix = repo2docker- 17 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from distutils.cmd import Command 3 | 4 | from setuptools import find_packages, setup 5 | 6 | import versioneer 7 | 8 | if sys.version_info[0] < 3: 9 | readme = None 10 | else: 11 | with open("README.md", encoding="utf8") as f: 12 | readme = f.read() 13 | 14 | 15 | class GenerateDataverseInstallationsFileCommand(Command): 16 | description = "Generate Dataverse installations data map" 17 | user_options = [] 18 | 19 | def initialize_options(self): 20 | self.url = ( 21 | "https://services.dataverse.harvard.edu/miniverse/map/installations-json" 22 | ) 23 | 24 | def finalize_options(self): 25 | pass 26 | 27 | def run(self): 28 | import json 29 | from urllib.request import urlopen 30 | 31 | resp = urlopen(self.url, timeout=5) 32 | resp_body = resp.read() 33 | data = json.loads(resp_body.decode("utf-8")) 34 | if "installations" not in data: 35 | raise ValueError("Malformed installation map.") 36 | 37 | def get_identifier(json): 38 | return int(json["id"]) 39 | 40 | data["installations"].sort(key=get_identifier) 41 | with open("repo2docker/contentproviders/dataverse.json", "w") as fp: 42 | json.dump(data, fp, indent=4, sort_keys=True) 43 | 44 | 45 | __cmdclass = versioneer.get_cmdclass() 46 | __cmdclass["generate_dataverse_file"] = GenerateDataverseInstallationsFileCommand 47 | 48 | setup( 49 | name="jupyter-repo2docker", 50 | version=versioneer.get_version(), 51 | install_requires=[ 52 | "chardet", 53 | "docker!=5.0.0", 54 | "entrypoints", 55 | "escapism", 56 | "iso8601", 57 | "jinja2", 58 | "python-json-logger", 59 | "requests", 60 | "ruamel.yaml>=0.15", 61 | "semver", 62 | "toml", 63 | "traitlets", 64 | ], 65 | python_requires=">=3.9", 66 | author="Project Jupyter Contributors", 67 | author_email="jupyter@googlegroups.com", 68 | url="https://repo2docker.readthedocs.io/en/latest/", 69 | project_urls={ 70 | "Documentation": "https://repo2docker.readthedocs.io", 71 | "Funding": "https://jupyter.org/about", 72 | 
"Source": "https://github.com/jupyterhub/repo2docker/", 73 | "Tracker": "https://github.com/jupyterhub/repo2docker/issues", 74 | }, 75 | # this should be a whitespace separated string of keywords, not a list 76 | keywords="reproducible science environments docker", 77 | description="Repo2docker: Turn code repositories into Jupyter enabled Docker Images", 78 | long_description=readme, 79 | long_description_content_type="text/markdown", 80 | license="BSD", 81 | classifiers=[ 82 | "Environment :: Console", 83 | "Intended Audience :: Developers", 84 | "Intended Audience :: System Administrators", 85 | "Intended Audience :: Science/Research", 86 | "License :: OSI Approved :: BSD License", 87 | "Programming Language :: Python", 88 | "Programming Language :: Python :: 3", 89 | ], 90 | packages=find_packages(), 91 | include_package_data=True, 92 | cmdclass=__cmdclass, 93 | entry_points={ 94 | "console_scripts": [ 95 | "jupyter-repo2docker = repo2docker.__main__:main", 96 | "repo2docker = repo2docker.__main__:main", 97 | ], 98 | "repo2docker.engines": ["docker = repo2docker.docker:DockerEngine"], 99 | }, 100 | ) 101 | -------------------------------------------------------------------------------- /tests/base/node/README.md: -------------------------------------------------------------------------------- 1 | Test that node 16 and npm 8 are installed and runnable. 2 | -------------------------------------------------------------------------------- /tests/base/node/verify: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -ex 4 | 5 | # check node system package and its version 6 | which node 7 | node --version 8 | node --version | grep v20 9 | 10 | # npm comes with the nodejs conda-forge package that we have pinned, but we 11 | # don't have npm pinned. For example nodejs 18.13.0 brought npm 8.19.3, but 12 | # nodejs 18.14.0 brought npm 9.3.1. 13 | # 14 | # Since we have no npm version pinning, we just test that some version is 15 | # installed. 16 | # 17 | which npm 18 | npm --version 19 | -------------------------------------------------------------------------------- /tests/check-tmp: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Script to check for leftover files 4 | 5 | Checks a collection of temporary or cache directories, 6 | to ensure we aren't wasting image size by forgetting cleanup steps. 7 | 8 | This script is run in every local repo image we test 9 | """ 10 | 11 | import os 12 | import sys 13 | from subprocess import check_output 14 | from textwrap import indent 15 | 16 | # directories larger than this are considered a failure 17 | # a few little files here aren't a problem 18 | THRESHOLD = 1 # in MB 19 | 20 | MB = 1024 * 1024 21 | 22 | # the paths to check 23 | # all of these locations 24 | # should be cleaned up 25 | # missing is okay 26 | PATHS = [ 27 | "/tmp/", 28 | # check whole home? 29 | # this shouldn't be empty, but for our tests (so far) it should be very small 30 | # This is the easiest way to ensure we aren't leaving any unexpected files 31 | # without knowing ahead of time where all possible caches might be (.npm, .cache, etc.) 
32 | "~/", 33 | "/root/", 34 | ] 35 | 36 | 37 | def du(path): 38 | """Return disk usage in megabytes of a path""" 39 | # -ks: get total size, reported in kilobytes 40 | out = check_output(["du", "-Hks", path]) 41 | return int(out.split(None, 1)[0]) / 1024 42 | 43 | 44 | def check_dir_size(path): 45 | """Check the size of a directory 46 | 47 | Returns: 48 | 49 | True: directory size is below THRESHOLD or is missing 50 | False: directory is larger than THRESHOLD 51 | """ 52 | path = os.path.expanduser(path) 53 | 54 | if not os.path.exists(path): 55 | print("{path}: missing OK".format(**locals())) 56 | return True 57 | 58 | size_mb = du(path) 59 | print("{path}: {size_mb:.1f} MB".format(**locals()), end=" ") 60 | if size_mb <= THRESHOLD: 61 | print("OK") 62 | return True 63 | else: 64 | print("FAIL") 65 | # check size of files one-level deep (du only reports dirs) 66 | for name in os.listdir(path): 67 | subpath = os.path.join(path, name) 68 | if os.path.isfile(subpath): 69 | file_sz = os.stat(subpath).st_size / MB 70 | if file_sz > 0.1: 71 | print(" {file_sz:.1f}M {subpath}".format(**locals())) 72 | # get report on all subdirs that are at least 100k 73 | print( 74 | indent( 75 | check_output(["du", "-Hh", "-t", "100000", path]).decode("utf8"), " " 76 | ) 77 | ) 78 | return False 79 | 80 | 81 | def main(): 82 | results = [check_dir_size(path) for path in PATHS] 83 | if not all(results): 84 | sys.exit(1) 85 | 86 | 87 | if __name__ == "__main__": 88 | main() 89 | -------------------------------------------------------------------------------- /tests/conda/README.md: -------------------------------------------------------------------------------- 1 | # Overview of tests for the conda buildpack 2 | 3 | ## Tested configuration files 4 | 5 | - [`.binder/`](https://repo2docker.readthedocs.io/en/latest/usage.html#where-to-put-configuration-files) 6 | - [`requirements.txt`](https://repo2docker.readthedocs.io/en/latest/config_files.html#requirements-txt-install-a-python-environment) 7 | - [`postBuild](https://repo2docker.readthedocs.io/en/latest/config_files.html#postbuild-run-code-after-installing-the-environment) 8 | 9 | ## Tested repo2docker command line flags 10 | 11 | - [`--target-repo-dir`](https://repo2docker.readthedocs.io/en/latest/usage.html#cmdoption-jupyter-repo2docker-target-repo-dir) 12 | 13 | ### py2 14 | 15 | - Test setup of a Python 2 environment by declaring `python=2` in 16 | `environment.yml`. 17 | 18 | ### py35-binder-dir 19 | 20 | - Test setup of a Python 3.5 environment by declaring `python=3.5` in 21 | `environment.yml`. 22 | 23 | The reasons for testing 3.5 specifically is that it is the oldest version of 24 | Python 3 supported by repo2docker's conda buildpack. See 25 | `repo2docker/buildpacks/conda` for details. 26 | 27 | - Test use of a `.binder` directory. 28 | 29 | ### py310-requirements-file 30 | 31 | - Test setup of a Python 3.10 environment by declaring `python=3.10` in 32 | `environment.yml`. 33 | 34 | The reasons for testing 3.10 specifically is that it is the newest version of 35 | Python 3 supported by repo2docker's conda buildpack. See 36 | `repo2docker/buildpacks/conda` for details. 37 | 38 | - Test use of a `requirements.txt` file, where an `environment.yml` file should 39 | take precedence and the `requirements.txt` should be ignored. 40 | 41 | ### py-r-postbuild-file 42 | 43 | - Test setup of the default Python environment by omitting `python` from 44 | `environment.yml` file. 
45 | 46 | - Test setup of the default R environment by specifying `r-base` in 47 | `environment.yml`. 48 | 49 | - Test use of a `postBuild` file. 50 | 51 | ### r3.6-target-repo-dir-flag 52 | 53 | - Test setup of a R 3.6 environment by specifying `r-base=3.6` in 54 | `environment.yml`. 55 | 56 | - Test use of repo2docker with the `--target-repo-dir` flag. 57 | 58 | `--target-repo-dir` is meant to support custom paths where repositories can be 59 | copied to besides `${HOME}`. 60 | 61 | This test makes use of the `test-extra-args.yaml` file to influence additional 62 | arguments passed to `repo2docker` during the test. In this test, specify 63 | `--target-repo-dir=/srv/repo`. 64 | -------------------------------------------------------------------------------- /tests/conda/downgrade/environment.yml: -------------------------------------------------------------------------------- 1 | # originally xeus-cling@0.6.0 2 | # added python=3.9 pin because implicit downgrade of Python from 3.10 to 3.9 isn't allowed 3 | name: xeus-cling 4 | channels: 5 | - conda-forge 6 | dependencies: 7 | - python=3.9 8 | - xeus-cling=0.6.0 9 | - xtensor=0.20.8 10 | - xtensor-blas=0.16.1 11 | - notebook 12 | -------------------------------------------------------------------------------- /tests/conda/downgrade/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import json 3 | import pprint 4 | from subprocess import check_output 5 | 6 | 7 | def json_sh(cmd): 8 | """Run a command that produces JSON on stdout and return the parsed result""" 9 | buf = check_output(cmd) 10 | return json.loads(buf.decode("utf8", "replace")) 11 | 12 | 13 | conda_pkg_list = json_sh(["conda", "list", "--json"]) 14 | pprint.pprint(conda_pkg_list) 15 | # make it a dict 16 | pkgs = {pkg["name"]: pkg for pkg in conda_pkg_list} 17 | 18 | # base env resolved 19 | assert pkgs["python"]["version"].startswith("3.9"), pkgs["python"] 20 | assert "xeus-cling" in pkgs 21 | assert pkgs["xeus-cling"]["version"] == "0.6.0" 22 | 23 | # verify downgrade 24 | # this may be brittle, but it's unlikely any of these old versions 25 | # of packages will be rebuilt 26 | # xeus-cling 0.6.0 pins xeus 0.20, which pins openssl 1.1.1, 27 | # which in turn downgrades Python from >=3.9.16 to 3.9.6 28 | 29 | assert pkgs["openssl"]["version"].startswith("1.1.1"), pkgs["openssl"]["version"] 30 | assert pkgs["python"]["version"] == "3.9.0", pkgs["python"]["version"] 31 | -------------------------------------------------------------------------------- /tests/conda/py-r-postbuild-file/environment.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - pytest 3 | - r-base 4 | - r-digest 5 | -------------------------------------------------------------------------------- /tests/conda/py-r-postbuild-file/postBuild: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | pip install there 3 | -------------------------------------------------------------------------------- /tests/conda/py-r-postbuild-file/verify: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | jupyter serverextension list 2>&1 | grep jupyter_server_proxy 4 | jupyter nbextension list 2>&1 | grep jupyter_server_proxy 5 | 6 | R -e "library('digest')" 7 | 8 | # Fail if version is not at least 4.2 9 | R --version 10 | R -e 'if (!(version$major == "4" && as.double(version$minor) >= 2)) quit("yes", 1)' 11 
| 12 | pytest --verbose --color=yes ./verify.py 13 | -------------------------------------------------------------------------------- /tests/conda/py-r-postbuild-file/verify.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | 6 | def test_sys_version(): 7 | """The default python version should be 3.10""" 8 | assert sys.version_info[:2] == (3, 10) 9 | 10 | 11 | def test_there(): 12 | """there is to be installed via postBuild""" 13 | import there 14 | 15 | 16 | def test_conda_activated(): 17 | assert os.environ.get("CONDA_PREFIX") == os.environ["NB_PYTHON_PREFIX"], dict( 18 | os.environ 19 | ) 20 | -------------------------------------------------------------------------------- /tests/conda/py2/environment.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - python=2 3 | - numpy 4 | -------------------------------------------------------------------------------- /tests/conda/py2/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | 4 | assert sys.version_info[:2] == (3, 10), sys.version 5 | 6 | # verify that we have Python 2 and Python 3 kernelspecs 7 | from jupyter_client.kernelspec import KernelSpecManager 8 | 9 | ksm = KernelSpecManager() 10 | specs = ksm.get_all_specs() 11 | assert sorted(specs) == ["python2", "python3"], specs.keys() 12 | 13 | # verify that we created the kernel env 14 | import json 15 | from subprocess import check_output 16 | 17 | envs = json.loads(check_output(["micromamba", "env", "list", "--json"]).decode("utf8")) 18 | assert envs == { 19 | "envs": ["/srv/conda", "/srv/conda/envs/kernel", "/srv/conda/envs/notebook"] 20 | }, envs 21 | 22 | pkgs = json.loads( 23 | check_output(["micromamba", "list", "-n", "kernel", "--json"]).decode("utf8") 24 | ) 25 | pkg_names = [pkg["name"] for pkg in pkgs] 26 | assert "ipykernel" in pkg_names, pkg_names 27 | assert "numpy" in pkg_names 28 | for pkg in pkgs: 29 | if pkg["name"] == "python": 30 | assert pkg["version"].startswith("2.7.") 31 | break 32 | else: 33 | assert False, f"python not found in {pkg_names}" 34 | -------------------------------------------------------------------------------- /tests/conda/py310-requirements-file/environment.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - python=3.10 3 | - numpy 4 | - pip 5 | - pip: 6 | - simplejson 7 | -------------------------------------------------------------------------------- /tests/conda/py310-requirements-file/requirements.txt: -------------------------------------------------------------------------------- 1 | there 2 | -------------------------------------------------------------------------------- /tests/conda/py310-requirements-file/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | 4 | assert sys.version_info[:2] == (3, 10), sys.version 5 | 6 | import numpy 7 | 8 | try: 9 | import there 10 | except ImportError: 11 | pass 12 | else: 13 | raise Exception("'there' shouldn't have been installed from requirements.txt") 14 | -------------------------------------------------------------------------------- /tests/conda/py312/environment.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - python=3.12 3 | 
-------------------------------------------------------------------------------- /tests/conda/py312/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | 4 | assert sys.version_info[:2] == (3, 12), sys.version 5 | -------------------------------------------------------------------------------- /tests/conda/py35-binder-dir/.binder/environment.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - python=3.5 3 | - numpy 4 | # Without this numpy fails to load with error 5 | # ImportError: libgfortran.so.3: cannot open shared object file: No such file or directory 6 | - libgfortran=3 7 | -------------------------------------------------------------------------------- /tests/conda/py35-binder-dir/Dockerfile: -------------------------------------------------------------------------------- 1 | # This file should be ignored because there's a dedicated binder folder 2 | FROM doesntmatter 3 | -------------------------------------------------------------------------------- /tests/conda/py35-binder-dir/environment.yml: -------------------------------------------------------------------------------- 1 | # This file should be ignored as we have a dedicated binder folder. 2 | dependencies: 3 | - thiswontwork 4 | invalid 5 | -------------------------------------------------------------------------------- /tests/conda/py35-binder-dir/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | from subprocess import STDOUT, check_output 5 | 6 | assert sys.version_info[:2] == (3, 10), sys.version 7 | 8 | 9 | def sh(cmd, **kwargs): 10 | return check_output(cmd, **kwargs).decode("utf8").strip() 11 | 12 | 13 | kernel_python = os.path.join(os.environ["KERNEL_PYTHON_PREFIX"], "bin", "python") 14 | out = sh([kernel_python, "--version"], stderr=STDOUT) 15 | v = out.split()[1] 16 | assert v[:3] == "3.5", out 17 | 18 | out = sh(["micromamba", "--version"]) 19 | assert out == "2.1.0", out 20 | 21 | out = sh(["mamba", "--version"]) 22 | assert out == "2.1.0", out 23 | 24 | 25 | sh([kernel_python, "-c", "import numpy"]) 26 | -------------------------------------------------------------------------------- /tests/conda/py36-postBuild/environment.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - python=3.6 3 | - numpy 4 | -------------------------------------------------------------------------------- /tests/conda/py36-postBuild/postBuild: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex 3 | 4 | # mamba/conda installs in kernel env 5 | mamba install -y make 6 | 7 | # note `pip` on path is _not_ the kernel env! 8 | # is this what we (or users) want? 9 | pip install pytest 10 | -------------------------------------------------------------------------------- /tests/conda/py36-postBuild/verify: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex 3 | pytest -vs ./verify.py 4 | -------------------------------------------------------------------------------- /tests/conda/py36-postBuild/verify.py: -------------------------------------------------------------------------------- 1 | """ 2 | tests to be run with pytest inside the container 3 | 4 | can't be called test_whatever.py because then _host_ pytest will try to run it! 
5 | """ 6 | 7 | import json 8 | import os 9 | import shutil 10 | from subprocess import check_output 11 | 12 | from pytest import fixture 13 | 14 | kernel_prefix = os.environ.get("KERNEL_PYTHON_PREFIX") 15 | server_prefix = os.environ.get("NB_PYTHON_PREFIX") 16 | 17 | 18 | def json_cmd(cmd): 19 | """Run a command and decode its JSON output""" 20 | out = check_output(cmd) 21 | return json.loads(out.decode("utf8", "replace")) 22 | 23 | 24 | def conda_pkgs(prefix): 25 | """Conda package list as a dict""" 26 | conda_json = json_cmd(["conda", "list", "--json", "-p", prefix]) 27 | return {pkg["name"]: pkg for pkg in conda_json} 28 | 29 | 30 | def pip_pkgs(prefix): 31 | """Pip package list as a dict""" 32 | pip_json = json_cmd([f"{prefix}/bin/pip", "list", "--format=json"]) 33 | return {pkg["name"]: pkg for pkg in pip_json} 34 | 35 | 36 | @fixture(scope="session") 37 | def kernel_conda(): 38 | return conda_pkgs(kernel_prefix) 39 | 40 | 41 | @fixture(scope="session") 42 | def server_conda(): 43 | return conda_pkgs(server_prefix) 44 | 45 | 46 | @fixture(scope="session") 47 | def kernel_pip(): 48 | return pip_pkgs(kernel_prefix) 49 | 50 | 51 | @fixture(scope="session") 52 | def server_pip(): 53 | return pip_pkgs(server_prefix) 54 | 55 | 56 | def test_which_python(): 57 | # server python comes first. Is this expected? 58 | assert shutil.which("python3") == f"{server_prefix}/bin/python3" 59 | 60 | 61 | def test_kernel_env(kernel_conda): 62 | assert kernel_prefix != server_prefix 63 | kernel_python = kernel_conda["python"]["version"] 64 | assert kernel_python[:3] == "3.6" 65 | # test environment.yml packages 66 | assert "numpy" in kernel_conda 67 | 68 | 69 | def test_server_env(server_conda): 70 | # this should be the default version 71 | # it will need updating when the default changes 72 | assert server_conda["python"]["version"].split(".")[:2] == ["3", "10"] 73 | 74 | 75 | def test_conda_install(kernel_conda, server_conda): 76 | # test that postBuild conda install went in the kernel env 77 | assert "make" in kernel_conda 78 | assert "make" not in server_conda 79 | 80 | 81 | def test_pip_install(kernel_pip, server_pip): 82 | # server env comes first for pip 83 | # is this expected? 84 | assert "pytest" not in kernel_pip 85 | assert "pytest" in server_pip 86 | -------------------------------------------------------------------------------- /tests/conda/r3.6-target-repo-dir-flag/environment.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - r-base=3.6 3 | - r-digest 4 | -------------------------------------------------------------------------------- /tests/conda/r3.6-target-repo-dir-flag/test-extra-args.yaml: -------------------------------------------------------------------------------- 1 | # This file is respected by repo2docker's test suite, but not repo2docker 2 | # itself. It is used solely to help us test repo2docker's command line flags. 
3 | #
4 | - --target-repo-dir=/srv/repo
5 | 
--------------------------------------------------------------------------------
/tests/conda/r3.6-target-repo-dir-flag/verify:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | jupyter serverextension list 2>&1 | grep jupyter_server_proxy
4 | jupyter nbextension list 2>&1 | grep jupyter_server_proxy
5 | 
6 | R -e "library('digest')"
7 | 
8 | # Fail if R version is not 3.6
9 | R --version
10 | R -e 'if (!(version$major == "3" && as.double(version$minor) >= 6 && as.double(version$minor) < 7)) quit("yes", 1)'
11 | 
12 | ./verify.py
13 | 
--------------------------------------------------------------------------------
/tests/conda/r3.6-target-repo-dir-flag/verify.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 | from glob import glob
5 | 
6 | # conda should still be in /srv/conda
7 | # and Python should still be in $NB_PYTHON_PREFIX
8 | assert sys.executable == os.path.join(
9 |     os.environ["NB_PYTHON_PREFIX"], "bin", "python"
10 | ), sys.executable
11 | assert sys.executable.startswith("/srv/conda/"), sys.executable
12 | 
13 | # Repo should be in /srv/repo
14 | assert os.path.exists("/srv/repo/verify.py")
15 | assert os.path.abspath(__file__) == "/srv/repo/verify.py"
16 | 
17 | # Repo should be writable
18 | assert os.access("/srv/repo", os.W_OK)
19 | 
20 | # All files in repo dir should be readable and writeable
21 | for path in glob("/srv/repo/**/*", recursive=True):
22 |     assert os.access(path, os.R_OK)
23 |     assert os.access(path, os.W_OK)
24 | 
25 | # Should be able to make a new file
26 | with open("/srv/repo/writeable", "w") as fp:
27 |     fp.write("writeable")
28 | 
--------------------------------------------------------------------------------
/tests/dockerfile/binder-dir/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.5
2 | 
3 | RUN exit 1
4 | 
--------------------------------------------------------------------------------
/tests/dockerfile/binder-dir/binder/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10
2 | 
3 | RUN pip install --no-cache notebook
4 | 
5 | CMD "/bin/sh"
6 | 
7 | ADD sayhi.sh /usr/local/bin/sayhi.sh
8 | ADD verify verify
9 | 
10 | ARG NB_UID
11 | ENV HOME /tmp
12 | USER $NB_UID
13 | 
--------------------------------------------------------------------------------
/tests/dockerfile/binder-dir/sayhi.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | echo hi
3 | exit 0
4 | 
--------------------------------------------------------------------------------
/tests/dockerfile/binder-dir/verify:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 | /usr/local/bin/sayhi.sh
4 | 
--------------------------------------------------------------------------------
/tests/dockerfile/editable/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.5
2 | 
3 | RUN pip install --no-cache notebook
4 | 
5 | CMD "/bin/sh"
6 | 
7 | ADD change.sh /usr/local/bin/change.sh
8 | 
9 | ARG NB_UID
10 | ENV HOME /tmp
11 | WORKDIR ${HOME}
12 | 
13 | USER $NB_UID
14 | 
--------------------------------------------------------------------------------
/tests/dockerfile/editable/README.rst:
--------------------------------------------------------------------------------
1 | Docker - Edit mode
2 | ------------------
3 | 
4 | Using the --editable option with a local repository, one can modify a
5 | file or create a new file in the container, and this change is
6 | reflected in the respective host directory. It is essentially a
7 | shortcut for `--mount
8 | type=bind,source=<local-repository>,target=.` (where the target
9 | resolves into the container working directory).
10 | 
11 | This is tested by running the change.sh script inside the container
12 | (using the 'cmd' argument to the Repo2Docker app), which creates a new
13 | file, and then verifying on the host side the new file is created with
14 | the proper contents.
15 | 
16 | In practice, this can be used to run a notebook from inside a
17 | container (which provides the proper environment), making changes as
18 | necessary, which are then immediately reflected in the host
19 | repository.
20 | 
--------------------------------------------------------------------------------
/tests/dockerfile/editable/change.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | cat <<EOF > newfile
4 | new contents
5 | EOF
6 | exit 0
7 | 
--------------------------------------------------------------------------------
/tests/dockerfile/jupyter-stack/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM jupyter/base-notebook:b4dd11e16ae4
2 | 
3 | RUN pip install there
4 | ADD verify verify
5 | 
--------------------------------------------------------------------------------
/tests/dockerfile/jupyter-stack/README.rst:
--------------------------------------------------------------------------------
1 | Docker - Specifying dependencies
2 | --------------------------------
3 | 
4 | You can use a Dockerfile to build on a "source" Docker image that has a
5 | pre-built environment. This can be more flexible for running non-standard code.
6 | 
7 | We recommend sourcing your Dockerfile from one of the Jupyter base images
8 | to ensure that it works with JupyterHub. In this case, we use a stripped-down
9 | image that has minimal dependencies installed.
10 | 
--------------------------------------------------------------------------------
/tests/dockerfile/jupyter-stack/verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 | 
5 | assert sys.version_info[:2] == (3, 6)
6 | 
7 | import jupyter
8 | import jupyterhub
9 | import there
10 | 
--------------------------------------------------------------------------------
/tests/dockerfile/simple/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10
2 | 
3 | RUN pip install --no-cache notebook
4 | 
5 | CMD "/bin/sh"
6 | 
7 | ADD sayhi.sh /usr/local/bin/sayhi.sh
8 | ADD verify verify
9 | 
10 | ARG NB_UID
11 | ENV HOME /tmp
12 | USER $NB_UID
13 | 
--------------------------------------------------------------------------------
/tests/dockerfile/simple/README.rst:
--------------------------------------------------------------------------------
1 | Docker - Running scripts
2 | ------------------------
3 | 
4 | It's possible to run scripts using Docker in your build. In this case, we run
5 | a simple shell script after installing dependencies.
6 | 
7 | While it's possible to run code with Dockerfiles, we recommend
8 | trying to accomplish the same thing with ``apt.txt`` and
9 | ``postBuild`` files.
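For instance, the ``pip install --no-cache notebook`` step from the Dockerfile
above could live in a two-line ``postBuild`` script instead (a sketch, not part
of this fixture)::

    #!/bin/bash
    # postBuild runs once at image build time, from the repository root
    pip install --no-cache notebook
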
Only use Dockerfiles when necessary. 10 | -------------------------------------------------------------------------------- /tests/dockerfile/simple/sayhi.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo hi 3 | exit 0 4 | -------------------------------------------------------------------------------- /tests/dockerfile/simple/verify: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | /usr/local/bin/sayhi.sh 4 | -------------------------------------------------------------------------------- /tests/external/reproductions.repos.yaml: -------------------------------------------------------------------------------- 1 | # A bunch of external repos that reproduce something cool we care about 2 | # Test that a full remote/hash works 3 | - name: LIGO Gravitational Waves 4 | url: https://github.com/minrk/ligo-binder/ 5 | ref: origin/b8259dac9eb 6 | verify: python -c 'import matplotlib' 7 | # Test that ref is added if not present 8 | - name: LIGO Gravitational Waves 9 | url: https://github.com/minrk/ligo-binder/ 10 | ref: b8259dac9eb 11 | verify: python -c 'import matplotlib' 12 | # Test that a full remote/ref works 13 | - name: Binder Examples - Requirements - origin/main 14 | url: https://github.com/binder-examples/requirements 15 | ref: origin/main 16 | verify: python -c 'import matplotlib' 17 | # Test that ref is added to branch if not present 18 | - name: Binder Examples - Requirements - main 19 | url: https://github.com/binder-examples/requirements 20 | ref: main 21 | verify: python -c 'import matplotlib' 22 | # Test that tags work + ref is added to tag if not present 23 | - name: Binder Examples - Requirements - tag 24 | url: https://github.com/binder-examples/requirements 25 | ref: python-3.8 26 | verify: python -c 'import matplotlib' 27 | # Zenodo record of https://github.com/binder-examples/requirements 28 | - name: 10.5281/zenodo.3242074 29 | url: 10.5281/zenodo.3242074 30 | verify: /srv/conda/envs/kernel/bin/python -c 'import matplotlib' 31 | # Test that files in git-lfs are properly cloned 32 | - name: LFS 33 | url: https://github.com/binderhub-ci-repos/lfs 34 | ref: 9abf54a 35 | verify: grep "I am stored in git lfs" in-lfs.dat 36 | -------------------------------------------------------------------------------- /tests/julia/README.md: -------------------------------------------------------------------------------- 1 | # Overview of tests for the julia buildpack 2 | 3 | ## Tested configuration files 4 | 5 | - [`Project.toml`](https://repo2docker.readthedocs.io/en/latest/config_files.html#project-toml-install-a-julia-environment) 6 | - [`REQUIRE`](https://repo2docker.readthedocs.io/en/latest/config_files.html#require-install-a-julia-environment-legacy) 7 | - [`requirements.txt`](https://repo2docker.readthedocs.io/en/latest/config_files.html#requirements-txt-install-a-python-environment) 8 | 9 | ## Test folders 10 | 11 | ### project 12 | 13 | - Tests use of a `Project.toml` file for Julia, using the repo2docker default 14 | version of Julia as specified in `julia_project.py`. 15 | 16 | ### project-1.0.2-binder-dir 17 | 18 | - Tests use of a `Project.toml` file for Julia, using a version of Julia 19 | specified via `julia = "=1.0.2"` in `Project.toml`'s `[compat]` section. 20 | 21 | - Test use of a `.binder` directory. 
22 | 
23 | ### require
24 | 
25 | - Tests use of a `REQUIRE` file for Julia, using the repo2docker default version
26 |   of Julia as specified in `julia_require.py`. Note that this default version
27 |   is currently 0.6.4!
28 | 
29 | - Starting with Julia v0.7, the package manager changed, so this also tests
30 |   that a Julia version below v0.7 can be installed correctly.
31 | 
32 | ### require-1-requirements-file
33 | 
34 | - Tests use of a `REQUIRE` file for Julia, using a major version
35 |   specification. Note that this major version specification is currently
36 |   resolving to a pinned minor and patch version as declared in
37 |   `julia_require.py`.
38 | 
39 | - Test use of a `requirements.txt` file, where it is expected to be respected
40 |   alongside the `REQUIRE` file.
41 | 
--------------------------------------------------------------------------------
/tests/julia/project-binder-dir/.binder/Project.toml:
--------------------------------------------------------------------------------
1 | [deps]
2 | IteratorInterfaceExtensions = "82899510-4779-5014-852e-03e436cf321d"
3 | 
4 | [compat]
5 | julia = "=1.3"
6 | 
--------------------------------------------------------------------------------
/tests/julia/project-binder-dir/Project.toml:
--------------------------------------------------------------------------------
1 | # A broken Project.toml, but the idea is that repo2docker should ignore this
2 | # file anyhow as there is a .binder folder.
3 | #
4 | [deps]
5 | IteratorInterfaceExtensions = "invalid"
6 | 
7 | [compat]
8 | julia = "=1.0.invalid"
9 | 
--------------------------------------------------------------------------------
/tests/julia/project-binder-dir/verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env julia
2 | 
3 | if VERSION != v"1.3"
4 |     println("Julia version should be 1.3, got $VERSION")
5 |     exit(1)
6 | end
7 | 
8 | try
9 |     # Test that the package was installed.
10 |     using IteratorInterfaceExtensions
11 | 
12 |     # Verify that the environment variables are set correctly for julia 1.0+
13 |     @assert "julia" ∈ readdir(Sys.BINDIR)
14 | catch
15 |     exit(1)
16 | end
17 | 
18 | # Verify that kernels are not installed in home directory (issue #620)
19 | try
20 |     using IJulia
21 |     @assert IJulia.kerneldir() == ENV["NB_PYTHON_PREFIX"] * "/share/jupyter/kernels"
22 | catch
23 |     exit(1)
24 | end
25 | 
26 | exit(0)
27 | 
--------------------------------------------------------------------------------
/tests/julia/project/Project.toml:
--------------------------------------------------------------------------------
1 | [deps]
2 | IteratorInterfaceExtensions = "82899510-4779-5014-852e-03e436cf321d"
3 | 
--------------------------------------------------------------------------------
/tests/julia/project/verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env julia
2 | 
3 | # FIXME: The default version set by repo2docker is in practice the latest
4 | #        available, but from the julia_project.py file that doesn't seem
5 | #        intended.
6 | #
7 | # if ! (VERSION >= v"1.6" && VERSION < v"1.7")
8 | #     println("Default Julia version should be at 1.6.x")
9 | #     exit(1)
10 | # end
11 | 
12 | try
13 |     # Test that the package was installed.
14 |     using IteratorInterfaceExtensions
15 | 
16 |     # Verify that the environment variables are set correctly for julia 1.0+
17 |     @assert "julia" ∈ readdir(Sys.BINDIR)
18 | catch
19 |     exit(1)
20 | end
21 | 
22 | # Verify that kernels are not installed in home directory (issue #620)
23 | try
24 |     using IJulia
25 |     @assert IJulia.kerneldir() == ENV["NB_PYTHON_PREFIX"] * "/share/jupyter/kernels"
26 | catch
27 |     exit(1)
28 | end
29 | 
30 | exit(0)
31 | 
--------------------------------------------------------------------------------
/tests/nix/binder-dir/README.rst:
--------------------------------------------------------------------------------
1 | default.nix in a binder/ directory
2 | ----------------------------------
3 | 
4 | Check we find and use ``default.nix`` when it is in a ``binder/`` sub-directory.
5 | 
--------------------------------------------------------------------------------
/tests/nix/binder-dir/binder/default.nix:
--------------------------------------------------------------------------------
1 | let
2 |   # Pinning nixpkgs to specific release
3 |   # To get sha256 use "nix-prefetch-git --rev <commitRev>"
4 |   commitRev="5574b6a152b1b3ae5f93ba37c4ffd1981f62bf5a";
5 |   nixpkgs = builtins.fetchTarball {
6 |     url = "https://github.com/NixOS/nixpkgs/archive/${commitRev}.tar.gz";
7 |     sha256 = "1pqdddp4aiz726c7qs1dwyfzixi14shp0mbzi1jhapl9hrajfsjg";
8 |   };
9 |   pkgs = import nixpkgs { config = { allowUnfree = true; }; };
10 | in
11 | pkgs.mkShell {
12 |   buildInputs = with pkgs; [
13 |     python36Packages.numpy
14 |     python36Packages.notebook
15 |   ];
16 | 
17 |   shellHook = ''
18 |     export NIX_PATH="nixpkgs=${nixpkgs}:."
19 |   '';
20 | }
21 | 
--------------------------------------------------------------------------------
/tests/nix/binder-dir/verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | import numpy
4 | 
--------------------------------------------------------------------------------
/tests/nix/ignore-outside/README.rst:
--------------------------------------------------------------------------------
1 | Ignore files outside the binder/ directory
2 | -------------------------------------------
3 | 
4 | When a ``binder/`` directory is present, the ``default.nix`` and ``start``
5 | files in the repository root should be ignored in favour of ``binder/``'s.
--------------------------------------------------------------------------------
/tests/nix/ignore-outside/binder/default.nix:
--------------------------------------------------------------------------------
1 | let
2 |   # Pinning nixpkgs to specific release
3 |   # To get sha256 use "nix-prefetch-git --rev <commitRev>"
4 |   commitRev="5574b6a152b1b3ae5f93ba37c4ffd1981f62bf5a";
5 |   nixpkgs = builtins.fetchTarball {
6 |     url = "https://github.com/NixOS/nixpkgs/archive/${commitRev}.tar.gz";
7 |     sha256 = "1pqdddp4aiz726c7qs1dwyfzixi14shp0mbzi1jhapl9hrajfsjg";
8 |   };
9 |   pkgs = import nixpkgs { config = { allowUnfree = true; }; };
10 | in
11 | pkgs.mkShell {
12 |   buildInputs = with pkgs; [
13 |     python36Packages.notebook
14 |   ];
15 | 
16 |   shellHook = ''
17 |     export NIX_PATH="nixpkgs=${nixpkgs}:."
18 |   '';
19 | }
20 | 
--------------------------------------------------------------------------------
/tests/nix/ignore-outside/default.nix:
--------------------------------------------------------------------------------
1 | let
2 |   # Pinning nixpkgs to specific release
3 |   # To get sha256 use "nix-prefetch-git --rev <commitRev>"
4 |   commitRev="5574b6a152b1b3ae5f93ba37c4ffd1981f62bf5a";
5 |   nixpkgs = builtins.fetchTarball {
6 |     url = "https://github.com/NixOS/nixpkgs/archive/${commitRev}.tar.gz";
7 |     sha256 = "1pqdddp4aiz726c7qs1dwyfzixi14shp0mbzi1jhapl9hrajfsjg";
8 |   };
9 |   pkgs = import nixpkgs { config = { allowUnfree = true; }; };
10 | in
11 | pkgs.mkShell {
12 |   buildInputs = with pkgs; [
13 |     python36Packages.numpy
14 |     __THIS_IS_A_SYNTAX_ERROR_THAT_SHOULD_NOT_MATTER_AS_IT_ISNT_EXECUTED__
15 |     python36Packages.notebook
16 |   ];
17 | 
18 |   shellHook = ''
19 |     export NIX_PATH="nixpkgs=${nixpkgs}:."
20 |   '';
21 | }
22 | 
--------------------------------------------------------------------------------
/tests/nix/ignore-outside/start:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | # this script should not be executed
4 | echo "The start script in the top level directory should not be executed"
5 | exit 1
6 | 
--------------------------------------------------------------------------------
/tests/nix/ignore-outside/verify:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 | # check that numpy isn't installed
4 | test -z "$(pip list | grep numpy | awk '{print $1}')"
5 | 
--------------------------------------------------------------------------------
/tests/nix/simple/README.rst:
--------------------------------------------------------------------------------
1 | Nix environment - default.nix
2 | -----------------------------
3 | 
4 | You can install a nix shell environment using the traditional default.nix.
5 | 
6 | Documentation on the syntax and typical setup of a ``nix-shell`` environment can be found `here `_.
7 | 
--------------------------------------------------------------------------------
/tests/nix/simple/default.nix:
--------------------------------------------------------------------------------
1 | let
2 |   # Pinning nixpkgs to specific release
3 |   # To get sha256 use "nix-prefetch-git --rev <commitRev>"
4 |   commitRev="5574b6a152b1b3ae5f93ba37c4ffd1981f62bf5a";
5 |   nixpkgs = builtins.fetchTarball {
6 |     url = "https://github.com/NixOS/nixpkgs/archive/${commitRev}.tar.gz";
7 |     sha256 = "1pqdddp4aiz726c7qs1dwyfzixi14shp0mbzi1jhapl9hrajfsjg";
8 |   };
9 |   pkgs = import nixpkgs { config = { allowUnfree = true; }; };
10 | in
11 | pkgs.mkShell {
12 |   buildInputs = with pkgs; [
13 |     python36Packages.numpy
14 |     python36Packages.notebook
15 |   ];
16 | 
17 |   shellHook = ''
18 |     export NIX_PATH="nixpkgs=${nixpkgs}:."
19 |   '';
20 | }
21 | 
--------------------------------------------------------------------------------
/tests/nix/simple/verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | import numpy
4 | 
--------------------------------------------------------------------------------
/tests/nix/start/README.rst:
--------------------------------------------------------------------------------
1 | Check `start` works with nix
2 | ----------------------------
3 | 
4 | In this example we set an environment variable in the `start` script and check
5 | that it works when using the nix buildpack.
6 | 
--------------------------------------------------------------------------------
/tests/nix/start/default.nix:
--------------------------------------------------------------------------------
1 | let
2 |   # Pinning nixpkgs to specific release
3 |   # To get sha256 use "nix-prefetch-git --rev <commitRev>"
4 |   commitRev="5574b6a152b1b3ae5f93ba37c4ffd1981f62bf5a";
5 |   nixpkgs = builtins.fetchTarball {
6 |     url = "https://github.com/NixOS/nixpkgs/archive/${commitRev}.tar.gz";
7 |     sha256 = "1pqdddp4aiz726c7qs1dwyfzixi14shp0mbzi1jhapl9hrajfsjg";
8 |   };
9 |   pkgs = import nixpkgs { config = { allowUnfree = true; }; };
10 | in
11 | pkgs.mkShell {
12 |   buildInputs = with pkgs; [
13 |     python36Packages.numpy
14 |     python36Packages.notebook
15 |   ];
16 | 
17 |   shellHook = ''
18 |     export NIX_PATH="nixpkgs=${nixpkgs}:."
19 |   '';
20 | }
21 | 
--------------------------------------------------------------------------------
/tests/nix/start/start:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | export TEST_START_VAR="var is set"
4 | 
5 | exec "$@"
6 | 
--------------------------------------------------------------------------------
/tests/nix/start/verify:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 | 
4 | # TEST_START_VAR falls back to the empty string when it is not defined
5 | if [ "${TEST_START_VAR:-}" != "var is set" ]
6 | then
7 |     echo "TEST_START_VAR is not set"
8 |     exit 1
9 | fi
10 | 
--------------------------------------------------------------------------------
/tests/nix/test-building/README.rst:
--------------------------------------------------------------------------------
1 | Check that we can build
2 | -----------------------
3 | 
4 | Test that actual building instead of substituting (downloading an existing build) works.
5 | 
--------------------------------------------------------------------------------
/tests/nix/test-building/default.nix:
--------------------------------------------------------------------------------
1 | let
2 |   # Pinning nixpkgs to specific release
3 |   # To get sha256 use "nix-prefetch-git --rev <commitRev>"
4 |   commitRev="5574b6a152b1b3ae5f93ba37c4ffd1981f62bf5a";
5 |   nixpkgs = builtins.fetchTarball {
6 |     url = "https://github.com/NixOS/nixpkgs/archive/${commitRev}.tar.gz";
7 |     sha256 = "1pqdddp4aiz726c7qs1dwyfzixi14shp0mbzi1jhapl9hrajfsjg";
8 |   };
9 |   pkgs = import nixpkgs { config = { allowUnfree = true; }; };
10 | 
11 |   # Test that we can actually build
12 |   test-build = pkgs.runCommand "test-build" { } ''
13 |     touch $out
14 |   '';
15 | 
16 | in
17 | pkgs.mkShell {
18 |   buildInputs = with pkgs; [
19 |     python36Packages.numpy
20 |     python36Packages.notebook
21 |     test-build
22 |   ];
23 | 
24 |   shellHook = ''
25 |     export NIX_PATH="nixpkgs=${nixpkgs}:."
26 | ''; 27 | } -------------------------------------------------------------------------------- /tests/nix/test-building/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import numpy 4 | -------------------------------------------------------------------------------- /tests/norun/.gitignore: -------------------------------------------------------------------------------- 1 | tmp-certs-* 2 | -------------------------------------------------------------------------------- /tests/norun/Dockerfile: -------------------------------------------------------------------------------- 1 | # Smallest possible dockerfile, used only for building images to be tested 2 | FROM scratch -------------------------------------------------------------------------------- /tests/norun/test_find.py: -------------------------------------------------------------------------------- 1 | import secrets 2 | from pathlib import Path 3 | 4 | from repo2docker.__main__ import make_r2d 5 | 6 | HERE = Path(__file__).parent 7 | 8 | 9 | def test_find_image(): 10 | image_name = f"{secrets.token_hex(8)}:latest" 11 | r2d = make_r2d(["--image", image_name, str(HERE)]) 12 | 13 | r2d.build() 14 | 15 | assert r2d.find_image() 16 | 17 | 18 | def test_dont_find_image(): 19 | image_name = f"{secrets.token_hex(8)}:latest" 20 | r2d = make_r2d(["--image", image_name, str(HERE)]) 21 | 22 | # Just don't actually start the build, so image won't be found 23 | assert not r2d.find_image() 24 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder-lock/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | there = "*" 8 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder-lock/Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "12bcb8f7793a4452f84fe65d8714098183d7ddcc6d6d80c542b411f352cf70d8" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": {}, 8 | "sources": [ 9 | { 10 | "name": "pypi", 11 | "url": "https://pypi.python.org/simple", 12 | "verify_ssl": true 13 | } 14 | ] 15 | }, 16 | "default": { 17 | "there": { 18 | "hashes": [ 19 | "sha256:812638ab3683286bf677f273dd6658de93ebbc99fb74ef2548dc8c045d34e8a6", 20 | "sha256:a65e4b050b14a57ede8bb852c36eb61f0e9d9e1352039d2f9723f8d0601e146a" 21 | ], 22 | "index": "pypi", 23 | "version": "==0.0.9" 24 | } 25 | }, 26 | "develop": {} 27 | } 28 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder-lock/README.rst: -------------------------------------------------------------------------------- 1 | Python - binder/Pipfile.lock + Pipfile.lock 2 | ------------------------------------------- 3 | 4 | We should make ``binder/Pipfile.lock`` take precedence over ``Pipfile.lock``. 
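One way to see the precedence by hand is to build this fixture and attempt the
import that only ``binder/Pipfile.lock`` provides (a sketch, assuming the
``repo2docker <repo> <cmd>`` form for running a command in the built image)::

    repo2docker tests/pipfile/binder-folder-lock/ python -c 'import pypi_pkg_test'
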
5 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder-lock/binder/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | pypi-pkg-test = "*" 8 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder-lock/binder/Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "760e5b427525256c5a02ebbfa2cfe695d89cdb5049859ae442c9c395930b3326" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": {}, 8 | "sources": [ 9 | { 10 | "name": "pypi", 11 | "url": "https://pypi.python.org/simple", 12 | "verify_ssl": true 13 | } 14 | ] 15 | }, 16 | "default": { 17 | "pypi-pkg-test": { 18 | "hashes": [ 19 | "sha256:3bc6d0d3d671f5da379eb53cc13ecbb97ce66b1d11916ea2035f6f738b963cc3", 20 | "sha256:ae89e01320aa248e9b626218ecc04c0fb4ae0b3b2a7895404277b488961edc25" 21 | ], 22 | "index": "pypi", 23 | "version": "==0.0.4" 24 | } 25 | }, 26 | "develop": {} 27 | } 28 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder-lock/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import pypi_pkg_test 3 | 4 | # pypi_pkg_test is installed from the binder folder's Pipfile, but not from the 5 | # root folder's Pipfile! 6 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | there = "*" 8 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder/README.rst: -------------------------------------------------------------------------------- 1 | Python - binder/Pipfile + Pipfile 2 | --------------------------------- 3 | 4 | We should make ``binder/Pipfile`` take precedence over ``Pipfile``. 5 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder/binder/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | pypi-pkg-test = "*" 8 | -------------------------------------------------------------------------------- /tests/pipfile/binder-folder/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import pypi_pkg_test 3 | 4 | # pypi_pkg_test is installed from the binder folder's Pipfile, but not from the 5 | # root folder's Pipfile! 
6 | -------------------------------------------------------------------------------- /tests/pipfile/environment-yml/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | requests = "*" 8 | 9 | [dev-packages] 10 | there = "*" 11 | -------------------------------------------------------------------------------- /tests/pipfile/environment-yml/README.rst: -------------------------------------------------------------------------------- 1 | Python - Pipfile(.lock) + environment.yml 2 | ----------------------------------------- 3 | 4 | We should ignore the ``Pipfile`` or ``Pipfile.lock`` if there is an 5 | ``environment.yml`` alongside it. Conda can install more things than ``pip`` or 6 | ``pipenv`` can so we would limit ourselves if we prioritized the ``Pipfile``s. 7 | -------------------------------------------------------------------------------- /tests/pipfile/environment-yml/environment.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - pip 3 | - pip: 4 | - pypi-pkg-test 5 | -------------------------------------------------------------------------------- /tests/pipfile/environment-yml/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import pypi_pkg_test 3 | -------------------------------------------------------------------------------- /tests/pipfile/pipfile-lock/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | numpy = "*" 8 | 9 | [dev-packages] 10 | there = "*" 11 | 12 | # The Pipfile.lock was generated when the Pipfile looked like this 13 | # ---------------------------------------------------------------- 14 | # [packages] 15 | # pypi-pkg-test = "*" 16 | # 17 | # [dev-packages] 18 | # there = "*" 19 | -------------------------------------------------------------------------------- /tests/pipfile/pipfile-lock/Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "43abc56b3f80c806377843b5636db099809298ce32cff3d9cb043adca59b7e9d" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": {}, 8 | "sources": [ 9 | { 10 | "name": "pypi", 11 | "url": "https://pypi.python.org/simple", 12 | "verify_ssl": true 13 | } 14 | ] 15 | }, 16 | "default": { 17 | "pypi-pkg-test": { 18 | "hashes": [ 19 | "sha256:3bc6d0d3d671f5da379eb53cc13ecbb97ce66b1d11916ea2035f6f738b963cc3", 20 | "sha256:ae89e01320aa248e9b626218ecc04c0fb4ae0b3b2a7895404277b488961edc25" 21 | ], 22 | "index": "pypi", 23 | "version": "==0.0.4" 24 | } 25 | }, 26 | "develop": { 27 | "there": { 28 | "hashes": [ 29 | "sha256:812638ab3683286bf677f273dd6658de93ebbc99fb74ef2548dc8c045d34e8a6", 30 | "sha256:a65e4b050b14a57ede8bb852c36eb61f0e9d9e1352039d2f9723f8d0601e146a" 31 | ], 32 | "index": "pypi", 33 | "version": "==0.0.9" 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /tests/pipfile/pipfile-lock/README.rst: -------------------------------------------------------------------------------- 1 | Python - Pipfile + Pipfile.lock 2 | ------------------------------- 3 | 4 | We should make ``Pipfile.lock`` take precedence over ``Pipfile``. 
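The ``Pipfile`` above has deliberately drifted from its lock file (``numpy``
appears only in the ``Pipfile``), so the build must follow the lock and leave
``numpy`` out. Outside of a test you would regenerate the lock instead (a
sketch, assuming pipenv is installed)::

    pipenv lock    # rewrites Pipfile.lock from the current Pipfile
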
5 | -------------------------------------------------------------------------------- /tests/pipfile/pipfile-lock/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import pypi_pkg_test 3 | import there 4 | 5 | try: 6 | import numpy 7 | except ImportError: 8 | # We want an ImportError to be thrown for this test to pass 9 | pass 10 | else: 11 | raise Exception( 12 | "'numpy' shouldn't have been installed! It was listed in Pipfile but not in the Pipfile.lock." 13 | ) 14 | -------------------------------------------------------------------------------- /tests/pipfile/py2-with-server-and-kernel-req/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | numpy = "*" 8 | 9 | [dev-packages] 10 | parse = "*" 11 | 12 | [requires] 13 | python_version = "2.7" 14 | -------------------------------------------------------------------------------- /tests/pipfile/py2-with-server-and-kernel-req/Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "77f8627bb6f7914babc301ec0de3e35d7382677167cab27e1a00cc30b37f4e5f" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "2.7" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.python.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "numpy": { 20 | "hashes": [ 21 | "sha256:0778076e764e146d3078b17c24c4d89e0ecd4ac5401beff8e1c87879043a0633", 22 | "sha256:141c7102f20abe6cf0d54c4ced8d565b86df4d3077ba2343b61a6db996cefec7", 23 | "sha256:14270a1ee8917d11e7753fb54fc7ffd1934f4d529235beec0b275e2ccf00333b", 24 | "sha256:27e11c7a8ec9d5838bc59f809bfa86efc8a4fd02e58960fa9c49d998e14332d5", 25 | "sha256:2a04dda79606f3d2f760384c38ccd3d5b9bb79d4c8126b67aff5eb09a253763e", 26 | "sha256:3c26010c1b51e1224a3ca6b8df807de6e95128b0908c7e34f190e7775455b0ca", 27 | "sha256:52c40f1a4262c896420c6ea1c6fda62cf67070e3947e3307f5562bd783a90336", 28 | "sha256:6e4f8d9e8aa79321657079b9ac03f3cf3fd067bf31c1cca4f56d49543f4356a5", 29 | "sha256:7242be12a58fec245ee9734e625964b97cf7e3f2f7d016603f9e56660ce479c7", 30 | "sha256:7dc253b542bfd4b4eb88d9dbae4ca079e7bf2e2afd819ee18891a43db66c60c7", 31 | "sha256:94f5bd885f67bbb25c82d80184abbf7ce4f6c3c3a41fbaa4182f034bba803e69", 32 | "sha256:a89e188daa119ffa0d03ce5123dee3f8ffd5115c896c2a9d4f0dbb3d8b95bfa3", 33 | "sha256:ad3399da9b0ca36e2f24de72f67ab2854a62e623274607e37e0ce5f5d5fa9166", 34 | "sha256:b0348be89275fd1d4c44ffa39530c41a21062f52299b1e3ee7d1c61f060044b8", 35 | "sha256:b5554368e4ede1856121b0dfa35ce71768102e4aa55e526cb8de7f374ff78722", 36 | "sha256:cbddc56b2502d3f87fda4f98d948eb5b11f36ff3902e17cb6cc44727f2200525", 37 | "sha256:d79f18f41751725c56eceab2a886f021d70fd70a6188fd386e29a045945ffc10", 38 | "sha256:dc2ca26a19ab32dc475dbad9dfe723d3a64c835f4c23f625c2b6566ca32b9f29", 39 | "sha256:dd9bcd4f294eb0633bb33d1a74febdd2b9018b8b8ed325f861fffcd2c7660bb8", 40 | "sha256:e8baab1bc7c9152715844f1faca6744f2416929de10d7639ed49555a85549f52", 41 | "sha256:ec31fe12668af687b99acf1567399632a7c47b0e17cfb9ae47c098644ef36797", 42 | "sha256:f12b4f7e2d8f9da3141564e6737d79016fe5336cc92de6814eba579744f65b0a", 43 | "sha256:f58ac38d5ca045a377b3b377c84df8175ab992c970a53332fa8ac2373df44ff7" 44 | ], 45 | "index": "pypi", 46 | "version": "==1.16.4" 47 | } 48 | }, 49 | "develop": { 50 | "parse": { 51 | 
"hashes": [ 52 | "sha256:1b68657434d371e5156048ca4a0c5aea5afc6ca59a2fea4dd1a575354f617142", 53 | "sha256:7cc123e89f39b7374bd6fd1cbe4e1a6616a9ce328163fbe9caef474d6aba380c" 54 | ], 55 | "index": "pypi", 56 | "version": "==1.12.0" 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /tests/pipfile/py2-with-server-and-kernel-req/README.rst: -------------------------------------------------------------------------------- 1 | Python 2 - Pipfile and requirements3.txt 2 | ---------------------------------------- 3 | 4 | - We should get Python 2 setup for the Kernel 5 | - with 'numpy' installed through Pipfile as listed in 'packages' 6 | - with 'parse' installed through Pipfile as listed in 'dev-packages' 7 | - We should get Python 3 setup for the Notebook Server, with 'nbgitpuller' installed through requirements3.txt 8 | -------------------------------------------------------------------------------- /tests/pipfile/py2-with-server-and-kernel-req/requirements3.txt: -------------------------------------------------------------------------------- 1 | # install this in the environment in which the notebook server is running 2 | # nbgitpuller does not work with Python 2 3 | nbgitpuller 4 | -------------------------------------------------------------------------------- /tests/pipfile/py2-with-server-and-kernel-req/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python2 2 | import os 3 | import sys 4 | 5 | # Verify - kernel's Python: use Python 2 6 | print(sys.version_info) 7 | assert sys.version_info[:2] == (2, 7) 8 | 9 | 10 | # Verify - notebook server's Python: 'numpy' isn't installed 11 | status_code = os.system("python3 -c 'import numpy'") 12 | if status_code == 0: 13 | raise Exception("notebook server's Python: 'numpy' IS installed") 14 | 15 | # Verify - notebook server's Python: 'parse' isn't installed 16 | status_code = os.system("python3 -c 'import parse'") 17 | if status_code == 0: 18 | raise Exception("notebook server's Python: 'parse' IS installed") 19 | 20 | # Verify - notebook server's Python: 'nbgitpuller' is installed 21 | status_code = os.system("python3 -c 'import nbgitpuller'") 22 | if not status_code == 0: 23 | raise Exception("notebook server's Python: 'nbgitpuller' ISN'T installed") 24 | 25 | 26 | # Verify - kernel's Python: 'nbgitpuller' isn't installed 27 | try: 28 | import nbgitpuller 29 | except ImportError: 30 | pass 31 | else: 32 | raise Exception("kernel's Python: 'nbgitpuller' IS installed") 33 | 34 | # Verify - kernel's Python: 'numpy' is installed 35 | try: 36 | import numpy 37 | except ImportError: 38 | raise Exception( 39 | "kernel's Python: 'numpy' ISN'T installed --- We probably setup a virtual env through pipenv but failed to enter it." 
40 | ) 41 | 42 | # Verify - kernel's Python: 'parse' is installed 43 | try: 44 | import parse 45 | except ImportError: 46 | raise Exception("kernel's Python: 'parse' ISN'T installed") 47 | -------------------------------------------------------------------------------- /tests/pipfile/py36/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | pypi-pkg-test = "*" 8 | 9 | [dev-packages] 10 | there = "*" 11 | 12 | [requires] 13 | python_version = "3.6" 14 | -------------------------------------------------------------------------------- /tests/pipfile/py36/Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "6cdb6013c1cba848c7d9070f8cdac089ce88c98f2910dd08ac98acce77811cb7" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.6" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.python.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "pypi-pkg-test": { 20 | "hashes": [ 21 | "sha256:3bc6d0d3d671f5da379eb53cc13ecbb97ce66b1d11916ea2035f6f738b963cc3", 22 | "sha256:ae89e01320aa248e9b626218ecc04c0fb4ae0b3b2a7895404277b488961edc25" 23 | ], 24 | "index": "pypi", 25 | "version": "==0.0.4" 26 | } 27 | }, 28 | "develop": { 29 | "there": { 30 | "hashes": [ 31 | "sha256:812638ab3683286bf677f273dd6658de93ebbc99fb74ef2548dc8c045d34e8a6", 32 | "sha256:a65e4b050b14a57ede8bb852c36eb61f0e9d9e1352039d2f9723f8d0601e146a" 33 | ], 34 | "index": "pypi", 35 | "version": "==0.0.9" 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /tests/pipfile/py36/README.rst: -------------------------------------------------------------------------------- 1 | Python - Pipfile with python_version and runtime.txt 2 | ---------------------------------------------------- 3 | 4 | We are ignoring the runtime.txt if there is a Pipfile or Pipfile.lock available. 5 | And since `python_version = "3.6"` in the Pipfile, the `python-3.7` in 6 | runtime.txt should be ignored. Is it? 
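A quick manual check (a sketch, again assuming the ``repo2docker <repo> <cmd>``
form) is to print the kernel interpreter's version from the built image; the
``python-3.7`` from ``runtime.txt`` should lose out to the Pipfile's 3.6::

    repo2docker tests/pipfile/py36/ /srv/conda/envs/kernel/bin/python --version
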
7 | -------------------------------------------------------------------------------- /tests/pipfile/py36/runtime.txt: -------------------------------------------------------------------------------- 1 | python-3.7 -------------------------------------------------------------------------------- /tests/pipfile/py36/verify: -------------------------------------------------------------------------------- 1 | #!/srv/conda/envs/kernel/bin/python 2 | import sys 3 | 4 | print(sys.version_info) 5 | assert sys.version_info[:2] == (3, 6) 6 | 7 | import pypi_pkg_test # noqa 8 | import there # noqa 9 | -------------------------------------------------------------------------------- /tests/pipfile/requirements-txt/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | pypi-pkg-test = "*" 8 | 9 | [dev-packages] 10 | there = "*" 11 | -------------------------------------------------------------------------------- /tests/pipfile/requirements-txt/README.rst: -------------------------------------------------------------------------------- 1 | Python - Pipfile + requirements.txt 2 | ----------------------------------- 3 | 4 | We should make ``Pipfile`` take precedence over ``requirements.txt``. 5 | -------------------------------------------------------------------------------- /tests/pipfile/requirements-txt/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | -------------------------------------------------------------------------------- /tests/pipfile/requirements-txt/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import pypi_pkg_test 3 | import there 4 | 5 | try: 6 | import numpy 7 | except ImportError: 8 | # We want an ImportError to be thrown for this test to pass 9 | pass 10 | else: 11 | raise Exception( 12 | "'numpy' shouldn't have been installed! It was listed in requirements.txt but not in the Pipfile that has precedence." 13 | ) 14 | -------------------------------------------------------------------------------- /tests/pipfile/setup-py-explicit-in-binder-dir/.binder/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | there = "*" 8 | dummy = {path="..", editable=true} 9 | -------------------------------------------------------------------------------- /tests/pipfile/setup-py-explicit-in-binder-dir/README.rst: -------------------------------------------------------------------------------- 1 | Python - Pipfile + setup.py 2 | --------------------------- 3 | 4 | The Pipfile asked the local package in the parent directory to be installed with 5 | ``setup.py``, was it? 
6 | -------------------------------------------------------------------------------- /tests/pipfile/setup-py-explicit-in-binder-dir/dummy/__init__.py: -------------------------------------------------------------------------------- 1 | def dummy(): 2 | pass 3 | -------------------------------------------------------------------------------- /tests/pipfile/setup-py-explicit-in-binder-dir/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | 3 | setup( 4 | name="Dummy", 5 | version="1.0.0", 6 | url="https://git-place.org/dummy/dummy.git", 7 | author="Dummy Name", 8 | author_email="dummy@my-email.com", 9 | description="Dummy package for testing purposes only", 10 | packages=find_packages(), 11 | install_requires=["pypi-pkg-test==0.0.4"], 12 | ) 13 | -------------------------------------------------------------------------------- /tests/pipfile/setup-py-explicit-in-binder-dir/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import dummy 3 | 4 | # This package should be available, as it was a dependency for dummy 5 | import pypi_pkg_test 6 | import there 7 | -------------------------------------------------------------------------------- /tests/pipfile/setup-py-explicit/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | there = "*" 8 | dummy = {path=".", editable=true} 9 | -------------------------------------------------------------------------------- /tests/pipfile/setup-py-explicit/README.rst: -------------------------------------------------------------------------------- 1 | Python - Pipfile + setup.py 2 | --------------------------- 3 | 4 | The Pipfile asked the local package to be installed with ``setup.py``, was it? 
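The ``dummy = {path=".", editable=true}`` entry in the Pipfile above is roughly
what pipenv records when the local directory is installed as an editable
package (a sketch; this command is not run by the test)::

    pipenv install -e .    # records an editable path entry for the local setup.py package
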
5 | 
--------------------------------------------------------------------------------
/tests/pipfile/setup-py-explicit/dummy/__init__.py:
--------------------------------------------------------------------------------
1 | def dummy():
2 |     pass
3 | 
--------------------------------------------------------------------------------
/tests/pipfile/setup-py-explicit/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import find_packages, setup
2 | 
3 | setup(
4 |     name="Dummy",
5 |     version="1.0.0",
6 |     url="https://git-place.org/dummy/dummy.git",
7 |     author="Dummy Name",
8 |     author_email="dummy@my-email.com",
9 |     description="Dummy package for testing purposes only",
10 |     packages=find_packages(),
11 |     install_requires=["pypi-pkg-test==0.0.4"],
12 | )
13 | 
--------------------------------------------------------------------------------
/tests/pipfile/setup-py-explicit/verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import dummy
3 | 
4 | # This package should be available, as it was a dependency for dummy
5 | import pypi_pkg_test
6 | import there
7 | 
--------------------------------------------------------------------------------
/tests/pipfile/setup-py-implicit/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.python.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 | 
6 | [packages]
7 | there = "*"
8 | 
--------------------------------------------------------------------------------
/tests/pipfile/setup-py-implicit/README.rst:
--------------------------------------------------------------------------------
1 | Python - Pipfile + setup.py
2 | ---------------------------
3 | 
4 | The Pipfile did not ask for the local package to be installed with ``setup.py``,
5 | so let's ensure it wasn't.
6 | 
--------------------------------------------------------------------------------
/tests/pipfile/setup-py-implicit/dummy/__init__.py:
--------------------------------------------------------------------------------
1 | def dummy():
2 |     pass
3 | 
--------------------------------------------------------------------------------
/tests/pipfile/setup-py-implicit/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import find_packages, setup
2 | 
3 | setup(
4 |     name="Dummy",
5 |     version="1.0.0",
6 |     url="https://git-place.org/dummy/dummy.git",
7 |     author="Dummy Name",
8 |     author_email="dummy@my-email.com",
9 |     description="Dummy package for testing purposes only",
10 |     packages=find_packages(),
11 |     install_requires=["pypi-pkg-test==0.0.4"],
12 | )
13 | 
--------------------------------------------------------------------------------
/tests/pipfile/setup-py-implicit/verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import there
3 | 
4 | try:
5 |     import dummy
6 |     import pypi_pkg_test
7 | except ImportError:
8 |     # We want an ImportError to be thrown for this test to pass
9 |     pass
10 | else:
11 |     raise Exception(
12 |         "'dummy' and its dependency 'pypi_pkg_test' shouldn't have been installed! A Pipfile was present without explicit instructions to install the local package 'dummy'."
13 |     )
14 | 
--------------------------------------------------------------------------------
/tests/r/README.md:
--------------------------------------------------------------------------------
1 | # Overview of tests for the R buildpack
2 | 
3 | ## Tested configuration files
4 | 
5 | - [`runtime.txt`](https://repo2docker.readthedocs.io/en/latest/config_files.html#runtime-txt-specifying-runtimes).
6 | - [`DESCRIPTION`](https://repo2docker.readthedocs.io/en/latest/config_files.html#description-install-an-r-package).
7 | - [`install.R`](https://repo2docker.readthedocs.io/en/latest/config_files.html#install-r-install-an-r-rstudio-environment).
8 | - [`requirements.txt`](https://repo2docker.readthedocs.io/en/latest/config_files.html#requirements-txt-install-a-python-environment)
9 | - [`apt.txt`](https://repo2docker.readthedocs.io/en/latest/config_files.html#apt-txt-install-packages-with-apt-get)
10 | 
11 | ## Test folders
12 | 
13 | ### r-rspm-apt-file
14 | 
15 | - Test setup of the default R environment by omitting a version specification in
16 |   `runtime.txt`, where the date provided in `runtime.txt` is recent enough for an
17 |   RSPM snapshot of CRAN to be used.
18 | 
19 | - Test use of an `apt.txt` file.
20 | 
21 | ### r-rspm-description-file
22 | 
23 | - Test use of a `DESCRIPTION` file instead of an `install.R` file, where
24 |   `runtime.txt` is omitted and a recent enough snapshot date is assumed, so an
25 |   RSPM snapshot of CRAN is used.
26 | 
27 | ### r4.0-rspm
28 | 
29 | - Test setup of an R 4.0 environment by specifying `r-4.0-...` in `runtime.txt`,
30 |   where the date provided in `runtime.txt` is recent enough for an RSPM snapshot
31 |   of CRAN to be used.
--------------------------------------------------------------------------------
/tests/r/r-rspm-apt-file/apt.txt:
--------------------------------------------------------------------------------
1 | libsodium-dev
2 | 
--------------------------------------------------------------------------------
/tests/r/r-rspm-apt-file/install.R:
--------------------------------------------------------------------------------
1 | install.packages("digest")
2 | 
--------------------------------------------------------------------------------
/tests/r/r-rspm-apt-file/runtime.txt:
--------------------------------------------------------------------------------
1 | r-2022-06-23
2 | 
--------------------------------------------------------------------------------
/tests/r/r-rspm-apt-file/verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -euo pipefail
3 | 
4 | ./verify.sh
5 | ./verify.r
6 | 
--------------------------------------------------------------------------------
/tests/r/r-rspm-apt-file/verify.r:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env Rscript
2 | library('digest')
3 | 
4 | 
5 | # Fail if version isn't 4.2, the default version for the RBuildPack
6 | print(version)
7 | if (!(version$major == "4" && as.double(version$minor) >= 2 && as.double(version$minor) < 3)) {
8 |   quit("yes", 1)
9 | }
10 | 
--------------------------------------------------------------------------------
/tests/r/r-rspm-apt-file/verify.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -euo pipefail
3 | 
4 | apt list
5 | apt list | grep libsodium-dev
6 | 
--------------------------------------------------------------------------------
/tests/r/r-rspm-description-file/DESCRIPTION:
-------------------------------------------------------------------------------- 1 | Package: binderdescription 2 | Version: 0.1 3 | Date: 2022-06-23 4 | Title: Binder R DESCRIPTION support 5 | Description: Test that automatically building R packages works 6 | Author: Bastian Greshake Tzovaras 7 | Maintainer: Bastian Greshake Tzovaras 8 | -------------------------------------------------------------------------------- /tests/r/r-rspm-description-file/NAMESPACE: -------------------------------------------------------------------------------- 1 | # Export all names 2 | exportPattern(".") 3 | -------------------------------------------------------------------------------- /tests/r/r-rspm-description-file/R/print_something.R: -------------------------------------------------------------------------------- 1 | print_something <- function() 2 | { 3 | print('blah'); 4 | } 5 | -------------------------------------------------------------------------------- /tests/r/r-rspm-description-file/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env Rscript 2 | library('binderdescription') 3 | -------------------------------------------------------------------------------- /tests/r/r3.6-rspm/install.R: -------------------------------------------------------------------------------- 1 | install.packages("digest") 2 | -------------------------------------------------------------------------------- /tests/r/r3.6-rspm/runtime.txt: -------------------------------------------------------------------------------- 1 | r-3.6-2018-12-07 2 | -------------------------------------------------------------------------------- /tests/r/r3.6-rspm/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env Rscript 2 | library('digest') 3 | 4 | # Fail if version is not 3.6 5 | print(version) 6 | if (!(version$major == "3" && as.double(version$minor) >= 6 && as.double(version$minor) < 7)) { 7 | quit("yes", 1) 8 | } 9 | 10 | # Fail if RSPM isn't the configured CRAN mirror 11 | if (!(startsWith(options()$repos["CRAN"], "https://packagemanager.posit.co"))) { 12 | quit("yes", 1) 13 | } 14 | -------------------------------------------------------------------------------- /tests/r/r4.3.2-rspm/install.R: -------------------------------------------------------------------------------- 1 | install.packages("digest") 2 | -------------------------------------------------------------------------------- /tests/r/r4.3.2-rspm/runtime.txt: -------------------------------------------------------------------------------- 1 | r-4.3.2-2024-01-10 2 | -------------------------------------------------------------------------------- /tests/r/r4.3.2-rspm/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env Rscript 2 | library('digest') 3 | 4 | # Fail if version is not 4.3.2 5 | print(version) 6 | if (!(version$major == "4" && version$minor == "3.2")) { 7 | quit("yes", 1) 8 | } 9 | 10 | # The date we have chosen should give us an rspm mirror 11 | if (!(startsWith(options()$repos["CRAN"], "https://packagemanager.posit.co"))) { 12 | quit("yes", 1) 13 | } 14 | -------------------------------------------------------------------------------- /tests/r/r4.4-rspm/install.R: -------------------------------------------------------------------------------- 1 | install.packages("digest") 2 | -------------------------------------------------------------------------------- /tests/r/r4.4-rspm/runtime.txt: 
--------------------------------------------------------------------------------
1 | r-4.4-2025-01-01
2 | 
--------------------------------------------------------------------------------
/tests/r/r4.4-rspm/verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env Rscript
2 | library('digest')
3 | 
4 | # Fail if version is not 4.4
5 | print(version)
6 | if (!(version$major == "4" && as.double(version$minor) >= 4 && as.double(version$minor) < 5)) {
7 |   quit("yes", 1)
8 | }
9 | 
10 | # The date we have chosen should give us an rspm mirror
11 | if (!(startsWith(options()$repos["CRAN"], "https://packagemanager.posit.co"))) {
12 |   quit("yes", 1)
13 | }
14 | 
--------------------------------------------------------------------------------
/tests/ui/README.md:
--------------------------------------------------------------------------------
1 | # User interface tests
2 | 
3 | This contains very basic [Playwright](https://playwright.dev/python/) tests to check that the
4 | 
5 | - JupyterLab
6 | - RStudio
7 | - RShiny
8 | 
9 | interfaces can be accessed.
10 | 
--------------------------------------------------------------------------------
/tests/ui/browser/environment.yml:
--------------------------------------------------------------------------------
1 | dependencies:
2 |   - r-base
3 | 
--------------------------------------------------------------------------------
/tests/ui/browser/external-verify:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # This script is run outside the container
3 | 
4 | set -eux
5 | 
6 | export TEST_REPO2DOCKER_URL="${1}/?token=token"
7 | pytest --verbose --color=yes --browser=firefox tests/ui/browser/external-verify.py
8 | 
--------------------------------------------------------------------------------
/tests/ui/browser/external-verify.py:
--------------------------------------------------------------------------------
1 | import os
2 | from subprocess import check_output
3 | from urllib.parse import urlsplit
4 | 
5 | import pytest
6 | from playwright.sync_api import Page, expect
7 | 
8 | 
9 | # To run this test manually:
10 | # - Run: repo2docker tests/ui/browser/
11 | # - Run: TEST_REPO2DOCKER_URL=<url of the running container> python -mpytest --browser=firefox tests/ui/browser/external-verify.py [--headed]
12 | def test_user_interfaces(page: Page) -> None:
13 |     url = os.getenv("TEST_REPO2DOCKER_URL")
14 |     u = urlsplit(url)
15 | 
16 |     # Includes token
17 |     page.goto(url)
18 | 
19 |     # Initial page should be Jupyter Notebook
20 |     page.wait_for_url(f"{u.scheme}://{u.netloc}/tree")
21 | 
22 |     # Check JupyterLab
23 |     page.goto(f"{u.scheme}://{u.netloc}/lab")
24 |     expect(page.get_by_text("Python 3 (ipykernel)").nth(1)).to_be_visible()
25 | 
26 |     # Check JupyterLab RStudio launcher
27 |     with page.expect_popup() as page1_info:
28 |         page.get_by_text("RStudio [↗]").click()
29 |     page1 = page1_info.value
30 |     page1.wait_for_url(f"{u.scheme}://{u.netloc}/rstudio/")
31 |     # Top-left logo
32 |     expect(page1.locator("#rstudio_rstudio_logo")).to_be_visible()
33 |     # Initial RStudio console text
34 |     expect(page1.get_by_text("R version ")).to_be_visible()
35 | 
36 |     # Check JupyterLab RShiny launcher
37 |     with page.expect_popup() as page2_info:
38 |         page.get_by_text("Shiny [↗]").click()
39 |     page2 = page2_info.value
40 |     page2.wait_for_url(f"{u.scheme}://{u.netloc}/shiny/")
41 |     expect(page2.get_by_text("Index of /")).to_be_visible()
--------------------------------------------------------------------------------
/tests/ui/browser/test-extra-args.yaml: -------------------------------------------------------------------------------- 1 | - --env 2 | - JUPYTER_TOKEN=token 3 | -------------------------------------------------------------------------------- /tests/ui/browser/verify: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | -------------------------------------------------------------------------------- /tests/unit/contentproviders/test_ckan.py: -------------------------------------------------------------------------------- 1 | import os 2 | from contextlib import contextmanager 3 | from tempfile import NamedTemporaryFile, TemporaryDirectory 4 | 5 | from repo2docker.contentproviders import CKAN 6 | 7 | 8 | def test_detect_ckan(requests_mock): 9 | mock_response = {"result": {"metadata_modified": "2024-02-27T14:15:54.573058"}} 10 | requests_mock.get("http://demo.ckan.org/api/3/action/status_show", status_code=200) 11 | requests_mock.get( 12 | "http://demo.ckan.org/api/3/action/package_show?id=1234", json=mock_response 13 | ) 14 | 15 | expected = { 16 | "dataset_id": "1234", 17 | "activity_id": None, 18 | "api_url": "http://demo.ckan.org/api/3/action/", 19 | "version": "1709043354", 20 | } 21 | 22 | expected_activity = expected.copy() 23 | expected_activity["activity_id"] = "5678" 24 | 25 | assert CKAN().detect("http://demo.ckan.org/dataset/1234") == expected 26 | assert ( 27 | CKAN().detect("http://demo.ckan.org/dataset/1234?activity_id=5678") 28 | == expected_activity 29 | ) 30 | assert ( 31 | CKAN().detect("http://demo.ckan.org/dataset/1234/history/5678") 32 | == expected_activity 33 | ) 34 | 35 | 36 | def test_detect_not_ckan(): 37 | # Don't trigger the CKAN content provider 38 | assert CKAN().detect("/some/path/here") is None 39 | assert CKAN().detect("https://example.com/path/here") is None 40 | assert CKAN().detect("https://data.gov.tw/dataset/6564") is None 41 | 42 | 43 | @contextmanager 44 | def ckan_file(): 45 | with NamedTemporaryFile() as file: 46 | file.write(b"some content") 47 | yield file.name 48 | 49 | 50 | def test_ckan_fetch(requests_mock): 51 | with ckan_file() as ckan_path: 52 | mock_response = {"result": {"resources": [{"url": f"file://{ckan_path}"}]}} 53 | requests_mock.get( 54 | "http://demo.ckan.org/api/3/action/package_show?id=1234", json=mock_response 55 | ) 56 | requests_mock.get( 57 | "http://demo.ckan.org/api/3/action/activity_data_show?id=5678", 58 | json=mock_response, 59 | ) 60 | requests_mock.get(f"file://{ckan_path}", content=open(ckan_path, "rb").read()) 61 | 62 | ckan = CKAN() 63 | spec = {"dataset_id": "1234", "api_url": "http://demo.ckan.org/api/3/action/"} 64 | 65 | expected = {ckan_path.rsplit("/", maxsplit=1)[1]} 66 | 67 | with TemporaryDirectory() as d: 68 | spec["activity_id"] = None 69 | output = [] 70 | for l in ckan.fetch(spec, d): 71 | output.append(l) 72 | assert expected == set(os.listdir(d)) 73 | 74 | with TemporaryDirectory() as d: 75 | spec["activity_id"] = "5678" 76 | output = [] 77 | for l in ckan.fetch(spec, d): 78 | output.append(l) 79 | assert expected == set(os.listdir(d)) 80 | -------------------------------------------------------------------------------- /tests/unit/contentproviders/test_doi.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import os 4 | import re 5 | import tempfile 6 | import urllib 7 | from unittest.mock import MagicMock, mock_open, patch 8 | from zipfile import ZipFile 9 | 10 | import pytest 11 
| 12 | from repo2docker import __version__ 13 | from repo2docker.contentproviders.base import ContentProviderException 14 | from repo2docker.contentproviders.doi import DoiProvider 15 | 16 | 17 | def test_content_id(): 18 | doi = DoiProvider() 19 | assert doi.content_id is None 20 | 21 | 22 | def test_url_headers(requests_mock): 23 | requests_mock.get("https://mybinder.org", text="resp") 24 | doi = DoiProvider() 25 | 26 | headers = {"test1": "value1", "Test2": "value2"} 27 | result = doi.urlopen("https://mybinder.org", headers=headers) 28 | assert "test1" in result.request.headers 29 | assert "Test2" in result.request.headers 30 | assert result.request.headers["User-Agent"] == f"repo2docker {__version__}" 31 | 32 | 33 | @pytest.mark.parametrize( 34 | "requested_doi, expected", 35 | [ 36 | ("10.5281/zenodo.3242074", "https://zenodo.org/record/3242074"), 37 | # Unresolving DOI: 38 | ("10.1/1234", "10.1/1234"), 39 | ], 40 | ) 41 | def test_doi2url(requested_doi, expected): 42 | doi = DoiProvider() 43 | assert doi.doi2url(requested_doi) == expected 44 | -------------------------------------------------------------------------------- /tests/unit/contentproviders/test_git.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | from tempfile import TemporaryDirectory 4 | 5 | import pytest 6 | 7 | from repo2docker.contentproviders import Git 8 | 9 | 10 | def test_clone(repo_with_content): 11 | """Test simple git clone to a target dir""" 12 | upstream, sha1 = repo_with_content 13 | 14 | with TemporaryDirectory() as clone_dir: 15 | spec = {"repo": upstream} 16 | git_content = Git() 17 | for _ in git_content.fetch(spec, clone_dir): 18 | pass 19 | assert os.path.exists(os.path.join(clone_dir, "test")) 20 | 21 | assert git_content.content_id == sha1[:7] 22 | 23 | 24 | def test_submodule_clone(repo_with_submodule): 25 | """Test git clone containing a git submodule.""" 26 | upstream, expected_sha1_upstream, expected_sha1_submod = repo_with_submodule 27 | 28 | # check that checking out a branch where there are no submodule 29 | # indeed doesn't get any submodule, even though they are in master 30 | with TemporaryDirectory() as clone_dir2: 31 | submod_dir = os.path.join(clone_dir2, "submod") # set by fixture 32 | spec = {"repo": upstream, "ref": "branch-without-submod"} 33 | git_content = Git() 34 | for _ in git_content.fetch(spec, clone_dir2): 35 | pass 36 | 37 | assert os.path.exists(os.path.join(clone_dir2, "test")) 38 | assert not os.path.exists(os.path.join(submod_dir, "requirements.txt")) 39 | 40 | with TemporaryDirectory() as clone_dir: 41 | submod_dir = os.path.join(clone_dir, "submod") # set by fixture 42 | spec = {"repo": upstream} 43 | git_content = Git() 44 | for _ in git_content.fetch(spec, clone_dir): 45 | pass 46 | assert os.path.exists(os.path.join(clone_dir, "test")) 47 | assert os.path.exists(os.path.join(submod_dir, "requirements.txt")) 48 | 49 | # get current sha1 of submodule 50 | cmd = ["git", "rev-parse", "HEAD"] 51 | sha1 = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=submod_dir) 52 | submod_sha1 = sha1.stdout.read().decode().strip() 53 | 54 | assert git_content.content_id == expected_sha1_upstream[:7] 55 | assert submod_sha1[:7] == expected_sha1_submod[:7] 56 | 57 | 58 | def test_bad_ref(repo_with_content): 59 | """ 60 | Test trying to checkout a ref that doesn't exist 61 | """ 62 | upstream, sha1 = repo_with_content 63 | with TemporaryDirectory() as clone_dir: 64 | spec = {"repo": upstream, "ref": 
"does-not-exist"} 65 | with pytest.raises(ValueError): 66 | for _ in Git().fetch(spec, clone_dir): 67 | pass 68 | 69 | 70 | def test_always_accept(): 71 | # The git content provider should always accept a spec 72 | assert Git().detect("/tmp/doesnt-exist", ref="1234") 73 | assert Git().detect("/tmp/doesnt-exist") 74 | # a path that exists 75 | assert Git().detect("/etc", ref="1234") 76 | # a remote URL 77 | assert Git().detect("https://example.com/path/here") 78 | -------------------------------------------------------------------------------- /tests/unit/contentproviders/test_local.py: -------------------------------------------------------------------------------- 1 | import os 2 | from tempfile import NamedTemporaryFile, TemporaryDirectory 3 | 4 | from repo2docker.contentproviders import Local 5 | 6 | 7 | def test_detect_local_dir(): 8 | with TemporaryDirectory() as d: 9 | local = Local() 10 | spec = local.detect(d) 11 | 12 | # should accept a local directory 13 | assert spec is not None, spec 14 | assert "path" in spec, spec 15 | assert spec["path"] == d 16 | 17 | 18 | def test_not_detect_local_file(): 19 | with NamedTemporaryFile() as f: 20 | local = Local() 21 | spec = local.detect(f.name) 22 | 23 | # should NOT accept a local file 24 | assert spec is None, spec 25 | 26 | 27 | def test_content_id_is_None(): 28 | # content_id property should always be None for local content provider 29 | # as we rely on the caching done by docker 30 | local = Local() 31 | assert local.content_id is None 32 | 33 | 34 | def test_content_available(): 35 | # create a directory with files, check they are available in the output 36 | # directory 37 | with TemporaryDirectory() as d: 38 | with open(os.path.join(d, "test"), "w") as f: 39 | f.write("Hello") 40 | 41 | local = Local() 42 | spec = {"path": d} 43 | for _ in local.fetch(spec, d): 44 | pass 45 | assert os.path.exists(os.path.join(d, "test")) 46 | # content_id property should always be None for local content provider 47 | # as we rely on the caching done by docker 48 | assert local.content_id is None 49 | -------------------------------------------------------------------------------- /tests/unit/test_app.py: -------------------------------------------------------------------------------- 1 | from tempfile import TemporaryDirectory 2 | from unittest.mock import patch 3 | 4 | import escapism 5 | 6 | import docker 7 | from repo2docker.__main__ import make_r2d 8 | from repo2docker.app import Repo2Docker 9 | from repo2docker.utils import chdir 10 | 11 | 12 | def test_image_name_remains_unchanged(): 13 | # if we specify an image name, it should remain unmodified 14 | with TemporaryDirectory() as src: 15 | app = Repo2Docker() 16 | argv = ["--image-name", "a-special-name", "--no-build", src] 17 | app = make_r2d(argv) 18 | 19 | app.start() 20 | 21 | assert app.output_image_spec == "a-special-name" 22 | 23 | 24 | def test_image_name_contains_sha1(repo_with_content): 25 | upstream, sha1 = repo_with_content 26 | app = Repo2Docker() 27 | # force selection of the git content provider by prefixing path with 28 | # file://. 
This is important as the Local content provider does not 29 | # store the SHA1 in the repo spec 30 | argv = ["--no-build", "file://" + upstream] 31 | app = make_r2d(argv) 32 | 33 | app.start() 34 | 35 | assert app.output_image_spec.endswith(sha1[:7]) 36 | 37 | 38 | def test_local_dir_image_name(repo_with_content): 39 | upstream, sha1 = repo_with_content 40 | app = Repo2Docker() 41 | argv = ["--no-build", upstream] 42 | app = make_r2d(argv) 43 | 44 | app.start() 45 | 46 | assert app.output_image_spec.startswith( 47 | "r2d" + escapism.escape(upstream, escape_char="-").lower() 48 | ) 49 | 50 | 51 | def test_extra_buildx_build_args(repo_with_content): 52 | upstream, sha1 = repo_with_content 53 | argv = ["--DockerEngine.extra_buildx_build_args=--check", upstream] 54 | app = make_r2d(argv) 55 | with patch("repo2docker.docker.execute_cmd") as execute_cmd: 56 | app.build() 57 | 58 | args, kwargs = execute_cmd.call_args 59 | cmd = args[0] 60 | assert cmd[:3] == ["docker", "buildx", "build"] 61 | # make sure it's inserted before the end 62 | assert "--check" in cmd[:-1] 63 | 64 | 65 | def test_run_kwargs(repo_with_content): 66 | upstream, sha1 = repo_with_content 67 | argv = [upstream] 68 | app = make_r2d(argv) 69 | app.extra_run_kwargs = {"somekey": "somevalue"} 70 | 71 | with patch.object(docker.DockerClient, "containers") as containers: 72 | app.start_container() 73 | containers.run.assert_called_once() 74 | args, kwargs = containers.run.call_args 75 | assert "somekey" in kwargs 76 | assert kwargs["somekey"] == "somevalue" 77 | -------------------------------------------------------------------------------- /tests/unit/test_binder_dir.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | from repo2docker import buildpacks 6 | 7 | 8 | @pytest.mark.parametrize("binder_dir", ["binder", ".binder", ""]) 9 | def test_binder_dir(tmpdir, binder_dir, base_image): 10 | tmpdir.chdir() 11 | if binder_dir: 12 | os.mkdir(binder_dir) 13 | 14 | bp = buildpacks.BuildPack(base_image) 15 | assert binder_dir == bp.binder_dir 16 | assert bp.binder_path("foo.yaml") == os.path.join(binder_dir, "foo.yaml") 17 | 18 | 19 | def test_exclusive_binder_dir(tmpdir, base_image): 20 | tmpdir.chdir() 21 | os.mkdir("./binder") 22 | os.mkdir("./.binder") 23 | 24 | bp = buildpacks.BuildPack(base_image) 25 | with pytest.raises(RuntimeError): 26 | _ = bp.binder_dir 27 | -------------------------------------------------------------------------------- /tests/unit/test_buildpack.py: -------------------------------------------------------------------------------- 1 | from os.path import join as pjoin 2 | from tempfile import TemporaryDirectory 3 | 4 | import pytest 5 | 6 | from repo2docker.buildpacks import LegacyBinderDockerBuildPack, PythonBuildPack 7 | from repo2docker.utils import chdir 8 | 9 | 10 | def test_legacy_raises(base_image): 11 | # check legacy buildpack raises on a repo that triggers it 12 | with TemporaryDirectory() as repodir: 13 | with open(pjoin(repodir, "Dockerfile"), "w") as d: 14 | d.write("FROM andrewosh/binder-base") 15 | 16 | with chdir(repodir): 17 | bp = LegacyBinderDockerBuildPack(base_image) 18 | with pytest.raises(RuntimeError): 19 | bp.detect() 20 | 21 | 22 | def test_legacy_doesnt_detect(base_image): 23 | # check legacy buildpack doesn't trigger 24 | with TemporaryDirectory() as repodir: 25 | with open(pjoin(repodir, "Dockerfile"), "w") as d: 26 | d.write("FROM andrewosh/some-image") 27 | 28 | with chdir(repodir): 29 | bp = 
LegacyBinderDockerBuildPack(base_image) 30 | assert not bp.detect() 31 | 32 | 33 | def test_legacy_on_repo_without_dockerfile(base_image): 34 | # check legacy buildpack doesn't trigger on a repo w/o Dockerfile 35 | with TemporaryDirectory() as repodir: 36 | with chdir(repodir): 37 | bp = LegacyBinderDockerBuildPack(base_image) 38 | assert not bp.detect() 39 | 40 | 41 | @pytest.mark.parametrize("python_version", ["2.6", "3.0", "4.10", "3.99"]) 42 | def test_unsupported_python(tmpdir, python_version, base_image): 43 | tmpdir.chdir() 44 | bp = PythonBuildPack(base_image) 45 | bp._python_version = python_version 46 | assert bp.python_version == python_version 47 | with pytest.raises(ValueError): 48 | bp.render() 49 | -------------------------------------------------------------------------------- /tests/unit/test_cache_from.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test that --cache-from is passed in to docker API properly. 3 | """ 4 | 5 | from unittest.mock import MagicMock 6 | 7 | import docker 8 | from repo2docker.buildpacks import ( 9 | BaseImage, 10 | DockerBuildPack, 11 | LegacyBinderDockerBuildPack, 12 | ) 13 | 14 | 15 | def test_cache_from_base(tmpdir, base_image): 16 | cache_from = ["image-1:latest"] 17 | fake_log_value = {"stream": "fake"} 18 | fake_client = MagicMock(spec=docker.APIClient) 19 | fake_client.build.return_value = iter([fake_log_value]) 20 | extra_build_kwargs = {"somekey": "somevalue"} 21 | 22 | # Test base image build pack 23 | tmpdir.chdir() 24 | for line in BaseImage(base_image).build( 25 | fake_client, "image-2", 100, {}, cache_from, extra_build_kwargs 26 | ): 27 | assert line == fake_log_value 28 | called_args, called_kwargs = fake_client.build.call_args 29 | assert "cache_from" in called_kwargs 30 | assert called_kwargs["cache_from"] == cache_from 31 | 32 | 33 | def test_cache_from_docker(tmpdir, base_image): 34 | cache_from = ["image-1:latest"] 35 | fake_log_value = {"stream": "fake"} 36 | fake_client = MagicMock(spec=docker.APIClient) 37 | fake_client.build.return_value = iter([fake_log_value]) 38 | extra_build_kwargs = {"somekey": "somevalue"} 39 | tmpdir.chdir() 40 | 41 | # test dockerfile 42 | with tmpdir.join("Dockerfile").open("w") as f: 43 | f.write("FROM scratch\n") 44 | 45 | for line in DockerBuildPack(base_image).build( 46 | fake_client, "image-2", 100, {}, cache_from, extra_build_kwargs 47 | ): 48 | assert line == fake_log_value 49 | called_args, called_kwargs = fake_client.build.call_args 50 | assert "cache_from" in called_kwargs 51 | assert called_kwargs["cache_from"] == cache_from 52 | -------------------------------------------------------------------------------- /tests/unit/test_connect_url.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test if the explict hostname is supplied correctly to the container 3 | """ 4 | 5 | import time 6 | 7 | import requests 8 | 9 | from repo2docker.app import Repo2Docker 10 | 11 | # Minimal Dockerfile to make build as fast as possible 12 | DOCKER_FILE = """ 13 | FROM python:3.7-slim 14 | # install the notebook package 15 | RUN pip install --no-cache --upgrade pip && \ 16 | pip install --no-cache notebook 17 | 18 | # create user with a home directory 19 | ARG NB_USER 20 | ARG NB_UID 21 | ENV USER ${NB_USER} 22 | ENV HOME /home/${NB_USER} 23 | 24 | RUN adduser --disabled-password \ 25 | --gecos "Default user" \ 26 | --uid ${NB_UID} \ 27 | ${NB_USER} 28 | WORKDIR ${HOME} 29 | USER ${NB_USER} 30 | """ 31 | 32 | 33 | 
def test_connect_url(tmpdir): 34 | tmpdir.chdir() 35 | p = tmpdir.join("Dockerfile") 36 | p.write(DOCKER_FILE) 37 | 38 | # we set run=False so that we can start the container ourselves and 39 | # get a handle to the container, used to inspect the logs 40 | app = Repo2Docker(repo=str(tmpdir), run=False) 41 | app.initialize() 42 | app.start() 43 | container = app.start_container() 44 | 45 | container_url = f"http://{app.hostname}:{app.port}/api" 46 | expected_url = f"http://{app.hostname}:{app.port}" 47 | 48 | # wait a bit for the container to be ready 49 | # give the container a chance to start 50 | time.sleep(1) 51 | 52 | try: 53 | # try a few times to connect 54 | success = False 55 | for i in range(1, 4): 56 | container.reload() 57 | assert container.status == "running" 58 | if expected_url not in container.logs().decode("utf8"): 59 | time.sleep(i * 3) 60 | continue 61 | try: 62 | info = requests.get(container_url).json() 63 | except Exception as e: 64 | print(f"Error: {e}") 65 | time.sleep(i * 3) 66 | else: 67 | print(info) 68 | success = True 69 | break 70 | assert success, f"Notebook never started in {container}" 71 | finally: 72 | # stop the container 73 | container.stop() 74 | app.wait_for_container(container) 75 | -------------------------------------------------------------------------------- /tests/unit/test_docker.py: -------------------------------------------------------------------------------- 1 | """Tests for docker bits""" 2 | 3 | import os 4 | from subprocess import check_output 5 | 6 | repo_root = os.path.abspath( 7 | os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) 8 | ) 9 | 10 | 11 | def test_git_credential_env(): 12 | credential_env = "username=abc\npassword=def" 13 | out = ( 14 | check_output( 15 | os.path.join(repo_root, "docker", "git-credential-env"), 16 | env={"GIT_CREDENTIAL_ENV": credential_env}, 17 | ) 18 | .decode() 19 | .strip() 20 | ) 21 | assert out == credential_env 22 | -------------------------------------------------------------------------------- /tests/unit/test_editable.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import tempfile 4 | import time 5 | 6 | from repo2docker.__main__ import make_r2d 7 | 8 | DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "dockerfile", "editable") 9 | 10 | 11 | def test_editable(run_repo2docker): 12 | """Run a local repository in edit mode. 
Verify a new file has been 13 | created afterwards""" 14 | newfile = os.path.join(DIR, "newfile") 15 | try: 16 | # If the file didn't get properly cleaned up last time, we 17 | # need to do that now 18 | os.remove(newfile) 19 | except FileNotFoundError: 20 | pass 21 | argv = ["--editable", DIR, "/usr/local/bin/change.sh"] 22 | run_repo2docker(argv) 23 | try: 24 | with open(newfile) as fp: 25 | contents = fp.read() 26 | assert contents == "new contents\n" 27 | finally: 28 | os.remove(newfile) 29 | 30 | 31 | def test_editable_by_host(): 32 | """Test whether a new file created by the host environment, is 33 | detected in the container""" 34 | 35 | app = make_r2d(["--editable", DIR]) 36 | app.initialize() 37 | app.build() 38 | container = app.start_container() 39 | 40 | # give the container a chance to start 41 | while container.status != "running": 42 | time.sleep(1) 43 | 44 | try: 45 | with tempfile.NamedTemporaryFile(dir=DIR, prefix="testfile", suffix=".txt"): 46 | status, output = container._c.exec_run( 47 | ["sh", "-c", "ls testfile????????.txt"] 48 | ) 49 | assert status == 0 50 | assert re.match(rb"^testfile\w{8}\.txt\n$", output) is not None 51 | # After exiting the with block the file should stop existing 52 | # in the container as well as locally 53 | status, output = container._c.exec_run(["sh", "-c", "ls testfile????????.txt"]) 54 | assert status == 2 55 | assert re.match(rb"^testfile\w{8}\.txt\n$", output) is None 56 | 57 | finally: 58 | # stop the container, we don't care how it stops or 59 | # what the exit code is. 60 | container.stop(timeout=1) 61 | container.reload() 62 | assert container.status == "exited", container.status 63 | container.remove() 64 | -------------------------------------------------------------------------------- /tests/unit/test_engine.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from traitlets import TraitError 3 | 4 | from repo2docker.engine import ContainerEngine 5 | 6 | 7 | def test_registry_credentials(): 8 | e = ContainerEngine(parent=None) 9 | 10 | # This should be fine 11 | e.registry_credentials = { 12 | "registry": "something", 13 | "username": "something", 14 | "password": "something", 15 | } 16 | 17 | with pytest.raises(TraitError): 18 | e.registry_credentials = {"hi": "bye"} 19 | -------------------------------------------------------------------------------- /tests/unit/test_env.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test that environment variables may be defined 3 | """ 4 | 5 | import os 6 | import subprocess 7 | import sys 8 | import tempfile 9 | import time 10 | from getpass import getuser 11 | 12 | 13 | def test_env(capfd): 14 | """ 15 | Validate that you can define environment variables 16 | 17 | See https://gist.github.com/hwine/9f5b02c894427324fafcf12f772b27b7 18 | for how docker handles its -e & --env argument values 19 | """ 20 | ts = str(time.time()) 21 | # There appear to be some odd combinations of default dir that do 22 | # not work on macOS Catalina with Docker CE 2.2.0.5, so use 23 | # the current dir -- it'll be deleted immediately 24 | 25 | with tempfile.TemporaryDirectory(dir=os.path.abspath(os.curdir)) as tmpdir: 26 | username = getuser() 27 | os.environ["SPAM"] = "eggs" 28 | os.environ["SPAM_2"] = "ham" 29 | result = subprocess.run( 30 | [ 31 | "repo2docker", 32 | # 'key=value' are exported as is in docker 33 | "-e", 34 | f"FOO={ts}", 35 | "--env", 36 | "BAR=baz", 37 | # 'key' is exported with the 
currently exported value 38 | "--env", 39 | "SPAM", 40 | # 'key' is not exported if it is not set in the host environment. 41 | "-e", 42 | "NO_SPAM", 43 | # 'key=' is exported in docker with an empty string as 44 | # value 45 | "--env", 46 | "SPAM_2=", 47 | tmpdir, 48 | "--", 49 | "/bin/bash", 50 | "-c", 51 | # Docker exports all passed env variables, so we can 52 | # just look at exported variables. 53 | "export", 54 | ], 55 | ) 56 | captured = capfd.readouterr() 57 | print(captured.out, end="") 58 | print(captured.err, file=sys.stderr, end="") 59 | 60 | assert result.returncode == 0 61 | 62 | # all docker output is returned by repo2docker on stderr 63 | # extract just the declare for better failure message formatting 64 | # stdout should be empty 65 | assert not result.stdout 66 | 67 | # stderr should contain lines of output 68 | declares = [x for x in captured.err.splitlines() if x.startswith("declare")] 69 | assert f'declare -x FOO="{ts}"' in declares 70 | assert 'declare -x BAR="baz"' in declares 71 | assert 'declare -x SPAM="eggs"' in declares 72 | assert "declare -x NO_SPAM" not in declares 73 | assert 'declare -x SPAM_2=""' in declares 74 | -------------------------------------------------------------------------------- /tests/unit/test_env_yml.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test if the environment.yml is empty or contains a data structure other than a dictionary 3 | """ 4 | 5 | import pytest 6 | 7 | from repo2docker import buildpacks 8 | 9 | 10 | def test_empty_env_yml(tmpdir, base_image): 11 | tmpdir.chdir() 12 | p = tmpdir.join("environment.yml") 13 | p.write("") 14 | bp = buildpacks.CondaBuildPack(base_image) 15 | py_ver = bp.python_version 16 | # If the environment.yml is empty, python_version will get the default Python version 17 | assert py_ver == bp.major_pythons["3"] 18 | 19 | 20 | def test_no_dict_env_yml(tmpdir, base_image): 21 | tmpdir.chdir() 22 | q = tmpdir.join("environment.yml") 23 | q.write("numpy\n " "matplotlib\n") 24 | bq = buildpacks.CondaBuildPack(base_image) 25 | with pytest.raises(TypeError): 26 | py_ver = bq.python_version 27 | -------------------------------------------------------------------------------- /tests/unit/test_external_scripts.py: -------------------------------------------------------------------------------- 1 | """Test if assemble scripts from outside of r2d repo are accepted.""" 2 | 3 | import time 4 | 5 | from repo2docker.app import Repo2Docker 6 | from repo2docker.buildpacks import PythonBuildPack 7 | 8 | 9 | def test_Repo2Docker_external_build_scripts(tmpdir): 10 | tempfile = tmpdir.join("absolute-script") 11 | tempfile.write("Hello World of Absolute Paths!") 12 | 13 | class MockBuildPack(PythonBuildPack): 14 | def detect(self): 15 | return True 16 | 17 | def get_build_script_files(self): 18 | files = {str(tempfile): "/tmp/my_extra_script"} 19 | files.update(super().get_build_script_files()) 20 | return files 21 | 22 | app = Repo2Docker(repo=str(tmpdir)) 23 | app.buildpacks = [MockBuildPack] 24 | app.initialize() 25 | app.build() 26 | container = app.start_container() 27 | 28 | # give the container a chance to start, waiting up to 180 seconds 29 | tic = 180 30 | while container.status != "running" and tic > 0: 31 | time.sleep(1) 32 | tic -= 1 33 | 34 | assert container.status == "running" 35 | 36 | try: 37 | status, output = container._c.exec_run(["sh", "-c", "cat /tmp/my_extra_script"]) 38 | assert status == 0 39 | assert output.decode("utf-8") == "Hello World of Absolute Paths!"
40 | finally: 41 | container.stop(timeout=1) 42 | container.reload() 43 | assert container.status == "exited", container.status 44 | container.remove() 45 | -------------------------------------------------------------------------------- /tests/unit/test_freeze.py: -------------------------------------------------------------------------------- 1 | import os 2 | from tempfile import TemporaryDirectory 3 | from unittest.mock import patch 4 | 5 | import pytest 6 | from ruamel.yaml import YAML 7 | 8 | from repo2docker.buildpacks.conda.freeze import set_python 9 | 10 | V = "3.7" 11 | yaml = YAML(typ="rt") 12 | 13 | 14 | def test_set_python(): 15 | with TemporaryDirectory() as d: 16 | env_fname = os.path.join(d, "some-env.yml") 17 | 18 | # function being tested 19 | set_python(env_fname, V) 20 | 21 | # check that set_python() did its job 22 | with open(env_fname) as f: 23 | env = yaml.load(f) 24 | f.seek(0) 25 | assert "AUTO GENERATED FROM" in f.readline() 26 | 27 | for dep in env["dependencies"]: 28 | # the "- pip:" entry isn't a string, hence this complex if 29 | # statement 30 | if isinstance(dep, str) and dep.startswith("python="): 31 | assert dep == f"python={V}.*", f"Unexpected dependency spec: '{dep}'" 32 | break 33 | else: 34 | assert False, f"Did not find 'python={V}.*' listed in the generated file" 35 | 36 | 37 | def test_doesnt_clobber(): 38 | # check a file not containing the word GENERATED on the first line is 39 | # left unchanged 40 | with TemporaryDirectory() as d: 41 | env_fname = os.path.join(d, "some-env.yml") 42 | with open(env_fname, "w") as f: 43 | f.write("some text here") 44 | 45 | set_python(env_fname, V) 46 | 47 | with open(env_fname) as f: 48 | assert f.read() == "some text here" 49 | 50 | 51 | def test_python_missing_in_source_env(): 52 | # check we raise an exception when python isn't in the source environment 53 | with TemporaryDirectory() as d: 54 | # prep our source environment 55 | source_env_fname = os.path.join(d, "source-env.yml") 56 | with open(source_env_fname, "w") as f: 57 | yaml.dump({"dependencies": ["a_package_name=1.2.3"]}, f) 58 | 59 | with patch("repo2docker.buildpacks.conda.freeze.ENV_FILE", source_env_fname): 60 | target_env_fname = os.path.join(d, "some-env.yml") 61 | 62 | with pytest.raises(ValueError) as e: 63 | set_python(target_env_fname, V) 64 | 65 | assert "python dependency not found" in str(e.value) 66 | -------------------------------------------------------------------------------- /tests/unit/test_labels.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test if labels are supplied correctly to the container 3 | """ 4 | 5 | from unittest.mock import Mock 6 | 7 | import pytest 8 | 9 | from repo2docker import __version__ 10 | from repo2docker.app import Repo2Docker 11 | from repo2docker.buildpacks import BuildPack 12 | 13 | URL = "https://github.com/binderhub-ci-repos/repo2docker-ci-clone-depth" 14 | 15 | 16 | def test_buildpack_labels_rendered(base_image): 17 | bp = BuildPack(base_image) 18 | assert "LABEL" not in bp.render() 19 | bp.labels["first_label"] = "firstlabel" 20 | assert 'LABEL first_label="firstlabel"\n' in bp.render() 21 | bp.labels["second_label"] = "anotherlabel" 22 | assert 'LABEL second_label="anotherlabel"\n' in bp.render() 23 | 24 | 25 | @pytest.mark.parametrize( 26 | "ref, repo, expected_repo_label", 27 | [(None, URL, URL), ("some-ref", None, "local"), (None, None, "local")], 28 | ) 29 | def test_Repo2Docker_labels(ref, repo, expected_repo_label, tmpdir): 30 | app =
Repo2Docker(dry_run=True) 31 | # Add mock BuildPack to app 32 | mock_buildpack = Mock() 33 | mock_buildpack.return_value.labels = {} 34 | app.buildpacks = [mock_buildpack] 35 | 36 | if repo is None: 37 | repo = str(tmpdir) 38 | app.repo = repo 39 | if ref is not None: 40 | app.ref = ref 41 | 42 | app.initialize() 43 | app.start() 44 | expected_labels = { 45 | "repo2docker.ref": ref, 46 | "repo2docker.repo": expected_repo_label, 47 | "repo2docker.version": __version__, 48 | } 49 | 50 | assert mock_buildpack().labels == expected_labels 51 | -------------------------------------------------------------------------------- /tests/unit/test_memlimit.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test that build time memory limits are enforced 3 | """ 4 | 5 | import os 6 | from unittest.mock import MagicMock 7 | 8 | import pytest 9 | 10 | import docker 11 | from repo2docker.buildpacks import BaseImage, DockerBuildPack 12 | 13 | basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 14 | 15 | 16 | def test_memory_limit_enforced(tmpdir, base_image): 17 | fake_cache_from = ["image-1:latest"] 18 | fake_log_value = {"stream": "fake"} 19 | fake_client = MagicMock(spec=docker.APIClient) 20 | fake_client.build.return_value = iter([fake_log_value]) 21 | fake_extra_build_kwargs = {"somekey": "somevalue"} 22 | 23 | # some memory limit value, the important bit is that this value is 24 | # later passed to the `build` method of the Docker API client 25 | memory_limit = 128 * 1024 26 | 27 | # Test that the buildpack passes the right arguments to the docker 28 | # client in order to enforce the memory limit 29 | tmpdir.chdir() 30 | for line in BaseImage(base_image).build( 31 | fake_client, 32 | "image-2", 33 | memory_limit, 34 | {}, 35 | fake_cache_from, 36 | fake_extra_build_kwargs, 37 | ): 38 | pass 39 | 40 | # check that we pass arguments asking for memory limiting 41 | # to the Docker API client 42 | args, kwargs = fake_client.build.call_args 43 | assert "container_limits" in kwargs 44 | assert kwargs["container_limits"] == { 45 | "memory": memory_limit, 46 | "memswap": memory_limit, 47 | } 48 | 49 | 50 | @pytest.mark.parametrize("BuildPack", [BaseImage, DockerBuildPack]) 51 | def test_memlimit_argument_type(BuildPack, base_image): 52 | # check that an exception is raised when the memory limit isn't an int 53 | fake_log_value = {"stream": "fake"} 54 | fake_client = MagicMock(spec=docker.APIClient) 55 | fake_client.build.return_value = iter([fake_log_value]) 56 | 57 | with pytest.raises(ValueError) as exc_info: 58 | for line in BuildPack(base_image).build( 59 | fake_client, "image-2", "10Gi", {}, [], {} 60 | ): 61 | pass 62 | 63 | assert "The memory limit has to be specified as an" in str(exc_info.value) 64 | -------------------------------------------------------------------------------- /tests/unit/test_preassemble.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | from repo2docker import buildpacks 6 | 7 | 8 | @pytest.mark.parametrize("binder_dir", ["", ".binder", "binder"]) 9 | def test_combine_preassemble_steps(tmpdir, binder_dir, base_image): 10 | tmpdir.chdir() 11 | if binder_dir: 12 | os.mkdir(binder_dir) 13 | 14 | # create two empty files for the build pack to use for pre-assembly 15 | open(os.path.join(binder_dir, "requirements.txt"), "w").close() 16 | open(os.path.join(binder_dir, "install.R"), "w").close() 17 | 18 | # trigger R build pack detection 19 | 
with open(os.path.join(binder_dir, "runtime.txt"), "w") as f: 20 | f.write("r-2019-01-30") 21 | 22 | bp = buildpacks.RBuildPack(base_image) 23 | files = bp.get_preassemble_script_files() 24 | 25 | assert len(files) == 2 26 | assert os.path.join(binder_dir, "requirements.txt") in files 27 | assert os.path.join(binder_dir, "install.R") in files 28 | -------------------------------------------------------------------------------- /tests/unit/test_r.py: -------------------------------------------------------------------------------- 1 | from datetime import date 2 | from unittest.mock import patch 3 | 4 | import pytest 5 | from requests.models import Response 6 | 7 | from repo2docker import buildpacks 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "runtime_version, expected", [("", "4.2"), ("3.6", "3.6"), ("3.5.1", "3.5")] 12 | ) 13 | def test_version_specification(tmpdir, runtime_version, expected, base_image): 14 | tmpdir.chdir() 15 | 16 | with open("runtime.txt", "w") as f: 17 | if runtime_version: 18 | runtime_version += "-" 19 | f.write(f"r-{runtime_version}2019-01-01") 20 | 21 | r = buildpacks.RBuildPack(base_image) 22 | assert r.r_version.startswith(expected) 23 | 24 | 25 | def test_version_completion(tmpdir, base_image): 26 | tmpdir.chdir() 27 | 28 | with open("runtime.txt", "w") as f: 29 | f.write("r-3.6-2019-01-01") 30 | 31 | r = buildpacks.RBuildPack(base_image) 32 | assert r.r_version == "3.6.3" 33 | 34 | 35 | @pytest.mark.parametrize( 36 | "runtime, expected", 37 | [ 38 | ("r-2019-01-01", (2019, 1, 1)), 39 | ("r-3.6.1-2019-01-01", (2019, 1, 1)), 40 | ("r-3.5-2019-01-01", (2019, 1, 1)), 41 | ], 42 | ) 43 | def test_cran_date(tmpdir, runtime, expected, base_image): 44 | tmpdir.chdir() 45 | 46 | with open("runtime.txt", "w") as f: 47 | f.write(runtime) 48 | 49 | r = buildpacks.RBuildPack(base_image) 50 | assert r.checkpoint_date == date(*expected) 51 | 52 | 53 | def test_snapshot_rspm_date(base_image): 54 | test_dates = { 55 | # Even though there is no snapshot specified in the interface at https://packagemanager.posit.co/client/#/repos/1/overview 56 | # For 2021 Oct 22, the API still returns a valid URL that one can install 57 | # packages from - probably some server side magic that repeats our client side logic. 
58 | # No snapshot for this date from 59 | date(2021, 10, 22): date(2021, 10, 22), 60 | # Snapshot exists for this date 61 | date(2022, 1, 1): date(2022, 1, 1), 62 | } 63 | 64 | r = buildpacks.RBuildPack(base_image) 65 | for requested, expected in test_dates.items(): 66 | snapshot_url = r.get_rspm_snapshot_url(requested) 67 | assert snapshot_url.startswith( 68 | # VERSION_CODENAME is handled at runtime during the build 69 | "https://packagemanager.posit.co/all/__linux__/${VERSION_CODENAME}/" 70 | + expected.strftime("%Y-%m-%d") 71 | ) 72 | 73 | with pytest.raises(ValueError): 74 | r.get_rspm_snapshot_url(date(1691, 9, 5)) 75 | 76 | 77 | def test_mran_dead(tmpdir, base_image): 78 | tmpdir.chdir() 79 | 80 | with open("runtime.txt", "w") as f: 81 | f.write("r-3.6-2017-06-04") 82 | 83 | r = buildpacks.RBuildPack(base_image) 84 | with pytest.raises( 85 | RuntimeError, 86 | match=r"^Microsoft killed MRAN, the source of R package snapshots before 2018-12-07.*", 87 | ): 88 | r.get_build_scripts() 89 | -------------------------------------------------------------------------------- /tests/unit/test_subdir.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test if the subdirectory is correctly navigated to 3 | """ 4 | 5 | import os 6 | 7 | import escapism 8 | import pytest 9 | 10 | from repo2docker.app import Repo2Docker 11 | 12 | TEST_REPO = "https://github.com/binderhub-ci-repos/repo2docker-subdir-support" 13 | 14 | 15 | def test_subdir(run_repo2docker): 16 | # Build from a subdirectory 17 | # if subdir support is broken this will fail as the instructions in the 18 | # root of the test repo are invalid 19 | cwd = os.getcwd() 20 | 21 | argv = ["--subdir", "a directory", TEST_REPO] 22 | run_repo2docker(argv) 23 | 24 | # check that we restored the current working directory 25 | assert cwd == os.getcwd(), f"We should be back in {cwd}" 26 | 27 | 28 | def test_subdir_in_image_name(): 29 | app = Repo2Docker(repo=TEST_REPO, subdir="a directory") 30 | app.initialize() 31 | app.build() 32 | 33 | escaped_dirname = escapism.escape("a directory", escape_char="-").lower() 34 | assert escaped_dirname in app.output_image_spec 35 | 36 | 37 | def test_subdir_invalid(): 38 | # test an error is raised when requesting a non existent subdir 39 | app = Repo2Docker(repo=TEST_REPO, subdir="invalid-sub-dir") 40 | app.initialize() 41 | with pytest.raises(FileNotFoundError): 42 | app.build() # Just build the image and do not run it. 43 | -------------------------------------------------------------------------------- /tests/unit/test_users.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test that User name and ID mapping works 3 | """ 4 | 5 | import os 6 | import subprocess 7 | import tempfile 8 | import time 9 | from getpass import getuser 10 | from unittest import mock 11 | 12 | from repo2docker import Repo2Docker 13 | 14 | 15 | def test_automatic_username_deduction(): 16 | # check we pickup the right username 17 | with mock.patch("os.environ") as mock_env: 18 | expected = "someusername" 19 | mock_env.get.return_value = expected 20 | 21 | r2d = Repo2Docker() 22 | assert r2d.user_name == expected 23 | 24 | 25 | def test_user(): 26 | """ 27 | Validate user id and name setting 28 | """ 29 | ts = str(time.time()) 30 | # FIXME: Use arbitrary login here, We need it now since we wanna put things to volume. 
31 | username = getuser() 32 | userid = str(os.geteuid()) 33 | with tempfile.TemporaryDirectory() as tmpdir: 34 | tmpdir = os.path.realpath(tmpdir) 35 | subprocess.check_call( 36 | [ 37 | "repo2docker", 38 | f"--volume={tmpdir}:/home/{username}", 39 | f"--user-id={userid}", 40 | f"--user-name={username}", 41 | tmpdir, 42 | "--", 43 | "/bin/bash", 44 | "-c", 45 | "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user", 46 | ] 47 | ) 48 | 49 | with open(os.path.join(tmpdir, "id")) as f: 50 | assert f.read().strip() == userid 51 | with open(os.path.join(tmpdir, "pwd")) as f: 52 | assert f.read().strip() == f"/home/{username}" 53 | with open(os.path.join(tmpdir, "name")) as f: 54 | assert f.read().strip() == username 55 | with open(os.path.join(tmpdir, "env_user")) as f: 56 | assert f.read().strip() == username 57 | -------------------------------------------------------------------------------- /tests/unit/test_volumes.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test that volume mounts work when running 3 | """ 4 | 5 | import os 6 | import subprocess 7 | import tempfile 8 | import time 9 | from getpass import getuser 10 | 11 | 12 | def test_volume_abspath(): 13 | """ 14 | Validate that you can bind mount a volume onto an absolute dir & write to it 15 | """ 16 | ts = str(time.time()) 17 | with tempfile.TemporaryDirectory() as tmpdir: 18 | tmpdir = os.path.realpath(tmpdir) 19 | 20 | username = getuser() 21 | subprocess.check_call( 22 | [ 23 | "repo2docker", 24 | "-v", 25 | f"{tmpdir}:/home/{username}", 26 | "--user-id", 27 | str(os.geteuid()), 28 | "--user-name", 29 | username, 30 | tmpdir, 31 | "--", 32 | "/bin/bash", 33 | "-c", 34 | f"echo -n {ts} > ts", 35 | ] 36 | ) 37 | 38 | with open(os.path.join(tmpdir, "ts")) as f: 39 | assert f.read() == ts 40 | 41 | 42 | def test_volume_relpath(): 43 | """ 44 | Validate that you can bind mount a volume onto a relative path & write to it 45 | """ 46 | curdir = os.getcwd() 47 | try: 48 | ts = str(time.time()) 49 | with tempfile.TemporaryDirectory() as tmpdir: 50 | os.chdir(tmpdir) 51 | subprocess.check_call( 52 | [ 53 | "repo2docker", 54 | "-v", 55 | ".:.", 56 | "--user-id", 57 | str(os.geteuid()), 58 | "--user-name", 59 | getuser(), 60 | tmpdir, 61 | "--", 62 | "/bin/bash", 63 | "-c", 64 | f"echo -n {ts} > ts", 65 | ] 66 | ) 67 | 68 | with open(os.path.join(tmpdir, "ts")) as f: 69 | assert f.read() == ts 70 | finally: 71 | os.chdir(curdir) 72 | -------------------------------------------------------------------------------- /tests/venv/apt-packages/README.rst: -------------------------------------------------------------------------------- 1 | System - APT Packages 2 | --------------------- 3 | 4 | It is possible to install system packages using ``apt`` with the ``apt.txt`` file. 5 | This allows you to install libraries that aren't easy to install with package 6 | managers such as ``pip`` or ``conda``. This can be useful if you must install 7 | something that depends on a low-level library already being present. 8 | 9 | In this case we install ``gfortran``, which does not have an easy Python 10 | install.
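The apt.txt that follows deliberately mixes package names with comments and blank lines, because the build is expected to skip both. The filtering amounts to something like this sketch; parse_apt_txt is an illustrative helper, not repo2docker's actual parser.

def parse_apt_txt(text: str) -> list[str]:
    """Keep non-empty lines that are not '#' comments."""
    packages = []
    for line in text.splitlines():
        line = line.strip()
        if line and not line.startswith("#"):
            packages.append(line)
    return packages


# Mirrors the test fixture below: comments and blanks are dropped.
assert parse_apt_txt("# skip me\n\ngfortran\nunp\nbyacc\n") == ["gfortran", "unp", "byacc"]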
11 | -------------------------------------------------------------------------------- /tests/venv/apt-packages/apt.txt: -------------------------------------------------------------------------------- 1 | # testing to skip comments in this file 2 | 3 | gfortran 4 | 5 | # testing to see if all packages get installed 6 | unp 7 | byacc 8 | 9 | -------------------------------------------------------------------------------- /tests/venv/apt-packages/verify: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -exuo pipefail 3 | which gfortran 4 | which unp 5 | which byacc 6 | -------------------------------------------------------------------------------- /tests/venv/binder-dir/.containerignore: -------------------------------------------------------------------------------- 1 | binder/ 2 | -------------------------------------------------------------------------------- /tests/venv/binder-dir/.dockerignore: -------------------------------------------------------------------------------- 1 | binder/ 2 | -------------------------------------------------------------------------------- /tests/venv/binder-dir/README.rst: -------------------------------------------------------------------------------- 1 | Binder Directory for configuration files 2 | ---------------------------------------- 3 | 4 | If a directory called ``binder/`` exists in the top level of the repository, 5 | then all configuration files that are **not** in ``binder/`` will be ignored. 6 | This is particularly useful if you have a ``Dockerfile`` defined in a 7 | repository, but don't want ``repo2docker`` to use it for building the 8 | environment. 9 | -------------------------------------------------------------------------------- /tests/venv/binder-dir/apt.txt: -------------------------------------------------------------------------------- 1 | thiswontwork 2 | -------------------------------------------------------------------------------- /tests/venv/binder-dir/binder/apt.txt: -------------------------------------------------------------------------------- 1 | gfortran 2 | -------------------------------------------------------------------------------- /tests/venv/binder-dir/binder/postBuild: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo 'Done!' > $HOME/postbuild.txt -------------------------------------------------------------------------------- /tests/venv/binder-dir/binder/requirements.txt: -------------------------------------------------------------------------------- 1 | ipyleaflet 2 | -------------------------------------------------------------------------------- /tests/venv/binder-dir/environment.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - thiswontwork 3 | invalid 4 | -------------------------------------------------------------------------------- /tests/venv/binder-dir/postBuild: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "This should never run" 3 | exit 1 4 | -------------------------------------------------------------------------------- /tests/venv/binder-dir/verify: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | which gfortran 4 | test -z $(pip list | grep scipy) 5 | pip list | grep leaflet 6 | 7 | grep 'Done!'
$HOME/postbuild.txt 8 | -------------------------------------------------------------------------------- /tests/venv/default/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Verify that the default just provides a py3 environment with jupyter 3 | import sys 4 | 5 | assert sys.version_info[:2] == (3, 10), sys.version 6 | import jupyter 7 | 8 | with open("/tmp/appendix") as f: 9 | assert f.read().strip() == "appendix" 10 | -------------------------------------------------------------------------------- /tests/venv/numpy/README.rst: -------------------------------------------------------------------------------- 1 | Python - Requirements.txt 2 | ------------------------- 3 | 4 | The simplest way to specify Python packages is with a ``requirements.txt`` file 5 | that is compatible with ``pip install``. In this case, we install ``numpy``. 6 | -------------------------------------------------------------------------------- /tests/venv/numpy/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | -------------------------------------------------------------------------------- /tests/venv/numpy/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | 4 | assert sys.version_info[:2] == (3, 10) 5 | 6 | import numpy 7 | -------------------------------------------------------------------------------- /tests/venv/postBuild/README.rst: -------------------------------------------------------------------------------- 1 | System - Post-build scripts 2 | --------------------------- 3 | 4 | It is possible to run scripts after you've built the environment specified in 5 | your other files. This could be used to, for example, download data or run 6 | some configuration scripts. 7 | 8 | In this example, we disable a Jupyter server extension and install a global npm package.
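The verify script for this directory greps the output of `jupyter server extension list`; the equivalent check in Python looks roughly like the sketch below. extension_line is an illustrative helper; jupyter_server may print the listing on stderr, which is why the shell version pipes 2>&1 into grep and why both streams are scanned here.

import subprocess


def extension_line(name: str) -> str:
    """Return the `jupyter server extension list` line mentioning `name`."""
    result = subprocess.run(
        ["jupyter", "server", "extension", "list"],
        capture_output=True,
        text=True,
    )
    for line in (result.stdout + result.stderr).splitlines():
        if name in line:
            return line.strip()
    raise LookupError(f"{name} not found in extension list")


assert "disabled" in extension_line("jupyter_collaboration")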
9 | -------------------------------------------------------------------------------- /tests/venv/postBuild/postBuild: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | jupyter server extension disable --sys-prefix jupyter_collaboration 3 | npm install --global configurable-http-proxy 4 | npm cache clean --force 5 | -------------------------------------------------------------------------------- /tests/venv/postBuild/requirements.txt: -------------------------------------------------------------------------------- 1 | jupyter-collaboration 2 | -------------------------------------------------------------------------------- /tests/venv/postBuild/verify: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | jupyter server extension list 2>&1 | grep 'jupyter_collaboration' | grep disabled 4 | which configurable-http-proxy 5 | -------------------------------------------------------------------------------- /tests/venv/py2-with-kernel-requirements/requirements.txt: -------------------------------------------------------------------------------- 1 | # install this in the kernel's environment, the user wants Python 2 there 2 | numpy 3 | -------------------------------------------------------------------------------- /tests/venv/py2-with-kernel-requirements/requirements3.txt: -------------------------------------------------------------------------------- 1 | # install this in the environment in which the notebook server 2 | # is running nbgitpuller does not work with Python 2 3 | nbgitpuller==0.6.1 4 | -------------------------------------------------------------------------------- /tests/venv/py2-with-kernel-requirements/runtime.txt: -------------------------------------------------------------------------------- 1 | python-2.7 2 | -------------------------------------------------------------------------------- /tests/venv/py2-with-kernel-requirements/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python2 2 | import os 3 | import sys 4 | 5 | print(sys.version_info) 6 | assert sys.version_info[:2] == (2, 7) 7 | 8 | import numpy 9 | 10 | try: 11 | import nbgitpuller 12 | except ImportError: 13 | pass 14 | else: 15 | raise Exception( 16 | "'nbgitpuller' shouldn't have been installed from requirements3.txt" 17 | ) 18 | 19 | # Python 3 is the executable used for the notebook server, this should 20 | # have nbgitpuller installed 21 | os.system("python3 -c 'import nbgitpuller'") 22 | -------------------------------------------------------------------------------- /tests/venv/py2/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | -------------------------------------------------------------------------------- /tests/venv/py2/runtime.txt: -------------------------------------------------------------------------------- 1 | python-2.7 2 | -------------------------------------------------------------------------------- /tests/venv/py2/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python2 2 | import sys 3 | 4 | print(sys.version_info) 5 | assert sys.version_info[:2] == (2, 7) 6 | 7 | import numpy 8 | -------------------------------------------------------------------------------- /tests/venv/py3/runtime.txt: -------------------------------------------------------------------------------- 1 | python-3 2 | 
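These runtime.txt files pin an interpreter with a compact spec: python-2.7, python-3, python-3.5, or, for R, an optional version plus a snapshot date such as r-3.6-2018-12-07 or r-2019-01-01. A sketch of parsing that spec follows; the regex and the parse_runtime helper are illustrative, not repo2docker's implementation.

import re

RUNTIME_RE = re.compile(
    r"^(?P<lang>python|r)"
    r"(?:-(?P<version>\d+(?:\.\d+){0,2}))?"
    r"(?:-(?P<date>\d{4}-\d{2}-\d{2}))?$"
)


def parse_runtime(spec: str) -> dict:
    """Split a runtime.txt spec into language, version, and snapshot date."""
    match = RUNTIME_RE.match(spec.strip())
    if match is None:
        raise ValueError(f"unrecognised runtime spec: {spec!r}")
    return match.groupdict()


assert parse_runtime("python-3")["version"] == "3"
assert parse_runtime("r-3.6-2018-12-07")["date"] == "2018-12-07"
assert parse_runtime("r-2019-01-01")["version"] is None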
-------------------------------------------------------------------------------- /tests/venv/py3/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import sys 3 | 4 | print(sys.version_info) 5 | assert sys.version_info[:1] == (3,) 6 | -------------------------------------------------------------------------------- /tests/venv/py35/runtime.txt: -------------------------------------------------------------------------------- 1 | python-3.5 2 | -------------------------------------------------------------------------------- /tests/venv/py35/verify: -------------------------------------------------------------------------------- 1 | #!/srv/conda/envs/kernel/bin/python 2 | import sys 3 | 4 | print(sys.version_info) 5 | assert sys.version_info[:2] == (3, 5), sys.version 6 | -------------------------------------------------------------------------------- /tests/venv/start/postBuild/README.rst: -------------------------------------------------------------------------------- 1 | postBuild and start 2 | ------------------- 3 | 4 | This test checks that we can use a postBuild and start script 5 | at the same time. 6 | 7 | It also checks that exit on error (set -e) has not leaked into the main shell. 8 | -------------------------------------------------------------------------------- /tests/venv/start/postBuild/postBuild: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # this value should not be visible in `verify` 4 | export TEST_START_VAR="var is set by postBuild" 5 | 6 | echo 'Done!' > $HOME/postbuild.txt 7 | -------------------------------------------------------------------------------- /tests/venv/start/postBuild/requirements.txt: -------------------------------------------------------------------------------- 1 | ipyleaflet 2 | -------------------------------------------------------------------------------- /tests/venv/start/postBuild/start: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | export TEST_START_VAR="var is set" 4 | exec "$@" 5 | -------------------------------------------------------------------------------- /tests/venv/start/postBuild/verify: -------------------------------------------------------------------------------- 1 | #!/bin/bash -il 2 | # Run this as an interactive login shell so that the conda profile is sourced 3 | 4 | # Test that `set -e` isn't incorrectly set in a profile 5 | false 6 | 7 | set -euo pipefail 8 | grep 'Done!' $HOME/postbuild.txt 9 | # set value of TEST_START_VAR to empty string when it is not defined 10 | if [ "${TEST_START_VAR:-}" != "var is set" ] 11 | then 12 | echo "TEST_START_VAR is not set" 13 | exit 1 14 | fi 15 | -------------------------------------------------------------------------------- /tests/venv/start/start-script/README.rst: -------------------------------------------------------------------------------- 1 | System - launch scripts 2 | ----------------------- 3 | 4 | It is possible to run `start` scripts before your notebook server starts. 5 | This is useful to set environment variables or perform last-minute 6 | configuration. 7 | 8 | In this example we set an environment variable in the `start` script.
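A start script has to end by handing control to the command it was given, so that the environment it exported is inherited by the notebook server; the fixtures here do that with exec "$@". The same wrapper written in Python would look roughly like this illustrative equivalent (a sketch, not one of the test fixtures):

#!/usr/bin/env python3
import os
import sys

# Export the value the verify script checks for, then replace this process
# with whatever command repo2docker asked the start script to run.
os.environ["TEST_START_VAR"] = "var is set"
os.execvp(sys.argv[1], sys.argv[1:])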
9 | -------------------------------------------------------------------------------- /tests/venv/start/start-script/start: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | export TEST_START_VAR="var is set" 4 | 5 | exec "$@" 6 | -------------------------------------------------------------------------------- /tests/venv/start/start-script/verify: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | # set value of TEST_START_VAR to empty string when it is not defined 5 | if [ "${TEST_START_VAR:-}" != "var is set" ] 6 | then 7 | echo "TEST_START_VAR is not set" 8 | exit 1 9 | fi 10 | -------------------------------------------------------------------------------- /tests/venv/usr-bin/verify: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Verify that ~/.local/bin & REPO_DIR/.local/bin is on the PATH 3 | import os 4 | 5 | assert os.path.expanduser("~/.local/bin") in os.getenv("PATH"), os.getenv("PATH") 6 | assert os.getcwd() == os.environ["REPO_DIR"] 7 | assert f'{os.environ["REPO_DIR"]}/.local/bin' in os.getenv("PATH") 8 | --------------------------------------------------------------------------------
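One last observation on the usr-bin verify above: `in os.getenv("PATH")` is a substring test, so it would also pass if some longer PATH entry merely contained ~/.local/bin. A stricter variant splits PATH into entries first; assert_on_path is an illustrative helper, not part of the suite.

import os


def assert_on_path(directory: str) -> None:
    """Require `directory` to be a whole PATH entry, not just a substring."""
    entries = os.getenv("PATH", "").split(os.pathsep)
    assert directory in entries, f"{directory} not in PATH entries: {entries}"


assert_on_path(os.path.expanduser("~/.local/bin"))
assert_on_path(os.path.join(os.environ["REPO_DIR"], ".local", "bin"))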