├── .flake8 ├── .github ├── dependabot.yaml └── workflows │ ├── pre-commit.yaml │ ├── publish-chart.yaml │ └── test-chart.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── LICENSE ├── README.md ├── dev-requirements.txt ├── docs ├── Makefile ├── make.bat ├── requirements.txt └── source │ ├── _static │ ├── images │ │ ├── logo │ │ │ ├── favicon.ico │ │ │ └── logo.png │ │ └── technical-overview.png │ └── logo │ │ ├── favicon.ico │ │ └── logo.png │ ├── api │ └── index.rst │ ├── conf.py │ └── index.rst ├── helm-chart ├── .helmignore ├── chartpress.yaml ├── images │ └── jupyterhub-ssh │ │ ├── Dockerfile │ │ └── jupyterhub_ssh_config.py ├── jupyterhub-ssh │ ├── .helmignore │ ├── Chart.yaml │ ├── schema.yaml │ ├── templates │ │ ├── NOTES.txt │ │ ├── _helpers.tpl │ │ ├── secret.yaml │ │ ├── sftp │ │ │ ├── deployment.yaml │ │ │ ├── netpol.yaml │ │ │ └── service.yaml │ │ └── ssh │ │ │ ├── deployment.yaml │ │ │ ├── netpol.yaml │ │ │ └── service.yaml │ ├── values.dev.yaml │ ├── values.lint.yaml │ └── values.yaml └── tools │ └── generate-json-schema.py ├── jupyterhub-sftp ├── Dockerfile ├── etc │ ├── libnss-ato.conf │ ├── nsswitch.conf │ ├── pam.d │ │ └── common-auth │ └── ssh │ │ └── sshd_config ├── jupyterhub-token-verify.py ├── requirements.in ├── requirements.txt └── setup-nss.bash ├── jupyterhub_ssh ├── __init__.py ├── __main__.py └── terminado.py └── setup.py /.flake8: -------------------------------------------------------------------------------- 1 | ; A flake8 linting config adjusted for black autoformatting, defined mainly 2 | ; based on the following documentation: 3 | ; 4 | ; https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-length 5 | ; 6 | [flake8] 7 | select = C,E,F,W,B,B950 8 | extend-ignore = E203, E501 9 | 10 | # Let's ignore the traitlets config object 11 | builtins = c 12 | -------------------------------------------------------------------------------- /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | # dependabot.yml reference: https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/configuration-options-for-dependency-updates 2 | # 3 | # Notes: 4 | # - Status and logs from dependabot are provided at 5 | # https://github.com/yuvipanda/jupyterhub-ssh/network/updates. 6 | # - YAML anchors are not supported here or in GitHub Workflows. 7 | # - We explicitly set the "maintenance" label to help our changelog generator 8 | # tool github-activity to categorize PRs. 9 | # 10 | 11 | version: 2 12 | updates: 13 | # Maintain Python dependencies for the jupyterhub-sftp image 14 | - package-ecosystem: pip 15 | directory: "/jupyterhub-sftp" 16 | schedule: 17 | interval: daily 18 | time: "05:00" 19 | timezone: "Etc/UTC" 20 | versioning-strategy: lockfile-only 21 | labels: 22 | - maintenance 23 | - dependencies 24 | 25 | # Maintain dependencies in our GitHub Workflows 26 | - package-ecosystem: github-actions 27 | directory: / # This should be / rather than .github/workflows 28 | schedule: 29 | interval: weekly 30 | day: monday 31 | time: 05:00 32 | timezone: Etc/UTC 33 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yaml: -------------------------------------------------------------------------------- 1 | # This is a GitHub workflow defining a set of jobs with a set of steps. 
2 | # ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions 3 | # 4 | name: Run pre-commit 5 | 6 | on: 7 | pull_request: 8 | push: 9 | branches: 10 | - main 11 | workflow_dispatch: 12 | 13 | jobs: 14 | run-hooks: 15 | name: Run pre-commit 16 | runs-on: ubuntu-20.04 17 | 18 | steps: 19 | - uses: actions/checkout@v2 20 | - name: Set Up Python 21 | uses: actions/setup-python@v2 22 | with: 23 | python-version: 3.9 24 | 25 | - name: Install pre-commit 26 | run: pip install --upgrade pre-commit 27 | 28 | - name: pre-commit run --all 29 | run: pre-commit run --all 30 | -------------------------------------------------------------------------------- /.github/workflows/publish-chart.yaml: -------------------------------------------------------------------------------- 1 | # This is a GitHub workflow defining a set of jobs with a set of steps. 2 | # ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions 3 | # 4 | name: Publish chart 5 | 6 | # Trigger the workflow on pushed tags or commits to main branch. 7 | on: 8 | pull_request: 9 | paths-ignore: 10 | - "docs/**" 11 | - "**.md" 12 | - ".github/workflows/*" 13 | - "!.github/workflows/publish-chart.yaml" 14 | push: 15 | paths-ignore: 16 | - "docs/**" 17 | - "**.md" 18 | - ".github/workflows/*" 19 | - "!.github/workflows/publish-chart.yaml" 20 | branches-ignore: 21 | - "dependabot/**" 22 | tags: 23 | - "**" 24 | 25 | jobs: 26 | # Builds and pushes docker images to Quay.io, package the Helm chart, and 27 | # pushes it to yuvipanda/jupyterhub-ssh@gh-pages where index.yaml represents 28 | # the JupyterHub organization Helm chart repository. 29 | # 30 | publish: 31 | runs-on: ubuntu-20.04 32 | 33 | # Explicitly request permissions to push to this git repository's gh-pages 34 | # branch via the the GITHUB_TOKEN we can have access to. 
35 | permissions: 36 | contents: write 37 | 38 | steps: 39 | - uses: actions/checkout@v2 40 | with: 41 | # chartpress requires the full history 42 | fetch-depth: 0 43 | 44 | - uses: actions/setup-python@v2 45 | with: 46 | python-version: "3.8" 47 | 48 | - name: store whether we are publishing the chart 49 | id: publishing 50 | shell: python 51 | run: | 52 | import os 53 | publishing = "" 54 | if ( 55 | "${{ github.repository }}" == "yuvipanda/jupyterhub-ssh" 56 | and "${{ github.event_name }}" == "push" 57 | and ( 58 | "${{ github.event.ref }}".startswith("refs/tags/") 59 | or 60 | "${{ github.event.ref }}" == "refs/heads/main" 61 | ) 62 | ): 63 | publishing = "true" 64 | print("Publishing chart") 65 | with open(os.environ['GITHUB_OUTPUT'], 'a') as fh: 66 | print(f'publishing={publishing}', file=fh) 67 | 68 | - name: Set up QEMU (for docker buildx) 69 | uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 70 | 71 | - name: Set up Docker Buildx (for chartpress multi-arch builds) 72 | uses: docker/setup-buildx-action@abe5d8f79a1606a2d3e218847032f3f2b1726ab0 73 | 74 | - name: Install dependencies 75 | run: | 76 | pip install -r dev-requirements.txt 77 | pip list 78 | helm version 79 | 80 | - name: Setup push rights to container registry 81 | if: steps.publishing.outputs.publishing 82 | run: docker login quay.io -u "${{ secrets.QUAY_USERNAME }}" -p "${{ secrets.QUAY_PASSWORD }}" 83 | 84 | - name: Configure a git user 85 | run: | 86 | git config --global user.email "github-actions@example.local" 87 | git config --global user.name "GitHub Actions user" 88 | 89 | - name: Generate values.schema.json 90 | run: helm-chart/tools/generate-json-schema.py 91 | 92 | - name: Update Chart.yaml and values.yaml 93 | run: | 94 | cd helm-chart 95 | chartpress --no-build 96 | 97 | - name: Publish images and chart with chartpress 98 | if: steps.publishing.outputs.publishing 99 | env: 100 | GITHUB_TOKEN: "${{ github.token }}" 101 | run: | 102 | set -eux 103 | cd helm-chart 104 | 105 | PUBLISH_ARGS=" \ 106 | --push --publish-chart \ 107 | --builder docker-buildx \ 108 | --platform linux/amd64 --platform linux/arm64 \ 109 | " 110 | 111 | if [[ $GITHUB_REF != refs/tags/* ]]; then 112 | PR_OR_HASH=$(git log -1 --pretty=%h-%B | head -n1 | sed 's/^.*\(#[0-9]*\).*/\1/' | sed 's/^\([0-9a-f]*\)-.*/@\1/') 113 | LATEST_COMMIT_TITLE=$(git log -1 --pretty=%B | head -n1) 114 | EXTRA_MESSAGE="${{ github.repository }}${PR_OR_HASH} ${LATEST_COMMIT_TITLE}" 115 | chartpress $PUBLISH_ARGS --extra-message "${EXTRA_MESSAGE}" 116 | else 117 | chartpress $PUBLISH_ARGS --tag "${GITHUB_REF:10}" 118 | fi 119 | 120 | git --no-pager diff --color 121 | 122 | - name: CI artifact - Package Helm chart 123 | if: steps.publishing.outputs.publishing == '' 124 | run: helm package helm-chart/jupyterhub-ssh 125 | 126 | # ref: https://github.com/actions/upload-artifact 127 | - name: CI artifact - Upload Helm chart 128 | uses: actions/upload-artifact@v2 129 | if: steps.publishing.outputs.publishing == '' 130 | with: 131 | name: jupyterhub-ssh-${{ github.sha }} 132 | path: jupyterhub-ssh-*.tgz 133 | if-no-files-found: error 134 | -------------------------------------------------------------------------------- /.github/workflows/test-chart.yaml: -------------------------------------------------------------------------------- 1 | # This is a GitHub workflow defining a set of jobs with a set of steps. 
2 | # ref: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions 3 | # 4 | name: Test chart 5 | 6 | # Trigger the workflow's on all PRs and pushes so that other contributors can 7 | # run tests in their own forks. Avoid triggering these tests on changes to 8 | # documentation only changes. 9 | on: 10 | pull_request: 11 | paths-ignore: 12 | - "docs/**" 13 | - "**.md" 14 | - ".github/workflows/*" 15 | - "!.github/workflows/test-chart.yaml" 16 | push: 17 | paths-ignore: 18 | - "docs/**" 19 | - "**.md" 20 | - ".github/workflows/*" 21 | - "!.github/workflows/test-chart.yaml" 22 | branches-ignore: 23 | - "dependabot/**" 24 | workflow_dispatch: 25 | 26 | jobs: 27 | test-chart-templates: 28 | runs-on: ubuntu-20.04 29 | steps: 30 | - uses: actions/checkout@v2 31 | - uses: actions/setup-python@v2 32 | with: 33 | python-version: "3.8" 34 | 35 | - name: Install dependencies 36 | run: | 37 | pip install -r dev-requirements.txt 38 | pip list 39 | helm version 40 | 41 | - name: Generate values.schema.json from schema.yaml 42 | run: helm-chart/tools/generate-json-schema.py 43 | 44 | - name: Helm lint (values.yaml) 45 | run: helm lint helm-chart/jupyterhub-ssh 46 | 47 | - name: Helm lint --strict (values.yaml) 48 | run: helm lint --strict helm-chart/jupyterhub-ssh 49 | continue-on-error: true 50 | 51 | - name: Helm lint (values.lint.yaml) 52 | run: helm lint helm-chart/jupyterhub-ssh --values helm-chart/jupyterhub-ssh/values.lint.yaml 53 | 54 | - name: Helm lint --strict (values.lint.yaml) 55 | run: helm lint --strict helm-chart/jupyterhub-ssh 56 | continue-on-error: true 57 | 58 | - name: Helm template (values.yaml) 59 | run: helm template helm-chart/jupyterhub-ssh --set hubUrl=http://hub:8081 60 | 61 | - name: Helm template (values.lint.yaml) 62 | run: helm template helm-chart/jupyterhub-ssh --values helm-chart/jupyterhub-ssh/values.lint.yaml 63 | 64 | test-chart-install: 65 | runs-on: ubuntu-20.04 66 | timeout-minutes: 20 67 | 68 | strategy: 69 | # Keep running even if one variation of the job fail 70 | fail-fast: false 71 | matrix: 72 | # We run this job multiple times with different parameterization 73 | # specified below, these parameters have no meaning on their own and 74 | # gain meaning on how job steps use them. 
75 | # 76 | # k3s-version: https://github.com/rancher/k3s/tags 77 | # k3s-channel: https://update.k3s.io/v1-release/channels 78 | include: 79 | - k3s-channel: v1.21 80 | debuggable: debuggable 81 | - k3s-channel: v1.20 82 | - k3s-channel: v1.19 83 | - k3s-channel: v1.18 84 | - k3s-channel: v1.17 85 | 86 | steps: 87 | - uses: actions/checkout@v2 88 | with: 89 | # chartpress requires the full history 90 | fetch-depth: 0 91 | 92 | # Starts a k8s cluster with NetworkPolicy enforcement and installs both 93 | # kubectl and helm 94 | # 95 | # action reference: https://github.com/jupyterhub/action-k3s-helm/ 96 | - uses: jupyterhub/action-k3s-helm@v1 97 | with: 98 | k3s-channel: ${{ matrix.k3s-channel }} 99 | metrics-enabled: false 100 | traefik-enabled: false 101 | docker-enabled: true 102 | 103 | - uses: actions/setup-python@v2 104 | with: 105 | python-version: "3.8" 106 | 107 | - name: Install dependencies 108 | run: | 109 | pip install -r dev-requirements.txt 110 | pip list 111 | helm version 112 | 113 | # Build our images if needed and update values.yaml with the tags 114 | - name: Run chartpress 115 | run: | 116 | cd helm-chart 117 | chartpress 118 | 119 | - name: Generate values.schema.json from schema.yaml 120 | run: helm-chart/tools/generate-json-schema.py 121 | 122 | # Validate rendered helm templates against the k8s api-server with the 123 | # dedicated values.lint.yaml config. 124 | - name: Helm template --validate (with lint config) 125 | run: helm template --validate jupyterhub-ssh helm-chart/jupyterhub-ssh --values helm-chart/jupyterhub-ssh/values.lint.yaml 126 | 127 | # FIXME: Add back when we are using pytest do test something more 128 | # complicated than just installing the chart by itself. 129 | # - name: Install jupyterhub 130 | # run: helm install jupyterhub --repo https://jupyterhub.github.io/helm-chart/ jupyterhub 131 | 132 | - name: Install jupyterhub-ssh 133 | run: helm install jupyterhub-ssh helm-chart/jupyterhub-ssh --values helm-chart/jupyterhub-ssh/values.dev.yaml 134 | 135 | - name: Await readiness 136 | uses: jupyterhub/action-k8s-await-workloads@v1 137 | with: 138 | timeout: 150 139 | max-restarts: 0 140 | 141 | # FIXME: Setup pytest tests against our deployed Helm chart 142 | # 143 | # - name: Run tests 144 | # run: | 145 | # pytest --verbose --maxfail=2 --color=yes ./helm-chart/tests 146 | 147 | # action reference: https://github.com/jupyterhub/action-k8s-namespace-report 148 | - name: Kubernetes namespace report 149 | uses: jupyterhub/action-k8s-namespace-report@v1 150 | if: always() 151 | with: 152 | important-workloads: deploy/jupyterhub-ssh deploy/jupyterhub-sftp deploy/hub 153 | 154 | # WARNING: Only allow this for pull_request runs that doesn't contain 155 | # sensitive information. 
156 | # 157 | # action reference: https://github.com/mxschmitt/action-tmate@v3 158 | - name: To enter a SSH debugging session, read these logs 159 | if: failure() && github.event_name == 'pull_request' && matrix.debuggable == 'debuggable' 160 | uses: mxschmitt/action-tmate@v3 161 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | values.schema.json 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | pip-wheel-metadata/ 26 | share/python-wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | *.py,cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | 56 | # Translations 57 | *.mo 58 | *.pot 59 | 60 | # Django stuff: 61 | *.log 62 | local_settings.py 63 | db.sqlite3 64 | db.sqlite3-journal 65 | 66 | # Flask stuff: 67 | instance/ 68 | .webassets-cache 69 | 70 | # Scrapy stuff: 71 | .scrapy 72 | 73 | # Sphinx documentation 74 | docs/_build/ 75 | 76 | # PyBuilder 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | .python-version 88 | 89 | # pipenv 90 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 91 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 92 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 93 | # install all needed dependencies. 94 | #Pipfile.lock 95 | 96 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 97 | __pypackages__/ 98 | 99 | # Celery stuff 100 | celerybeat-schedule 101 | celerybeat.pid 102 | 103 | # SageMath parsed files 104 | *.sage.py 105 | 106 | # Environments 107 | .env 108 | .venv 109 | env/ 110 | venv/ 111 | ENV/ 112 | env.bak/ 113 | venv.bak/ 114 | 115 | # Spyder project settings 116 | .spyderproject 117 | .spyproject 118 | 119 | # Rope project settings 120 | .ropeproject 121 | 122 | # mkdocs documentation 123 | /site 124 | 125 | # mypy 126 | .mypy_cache/ 127 | .dmypy.json 128 | dmypy.json 129 | 130 | # Pyre type checker 131 | .pyre/ 132 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # pre-commit is a tool to perform a predefined set of tasks manually and/or 2 | # automatically before git commits are made. 
3 | # 4 | # Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level 5 | # 6 | # Common tasks 7 | # 8 | # - Run on all files: pre-commit run --all-files 9 | # - Register git hooks: pre-commit install --install-hooks 10 | # 11 | repos: 12 | # Autoformat: Python code 13 | - repo: https://github.com/psf/black 14 | rev: 22.3.0 15 | hooks: 16 | - id: black 17 | args: [--target-version=py39] 18 | 19 | # Autoformat: Python code 20 | - repo: https://github.com/asottile/reorder_python_imports 21 | rev: v2.5.0 22 | hooks: 23 | - id: reorder-python-imports 24 | 25 | # Autoformat: markdown, yaml (but not Helm templates) 26 | - repo: https://github.com/pre-commit/mirrors-prettier 27 | rev: v2.3.2 28 | hooks: 29 | - id: prettier 30 | exclude: helm-chart/jupyterhub-ssh/templates 31 | 32 | # Misc... 33 | - repo: https://github.com/pre-commit/pre-commit-hooks 34 | rev: v4.0.1 35 | # ref: https://github.com/pre-commit/pre-commit-hooks#hooks-available 36 | hooks: 37 | # Autoformat: Makes sure files end in a newline and only a newline. 38 | - id: end-of-file-fixer 39 | 40 | # Autoformat: Sorts entries in requirements.txt. 41 | - id: requirements-txt-fixer 42 | 43 | # Lint: Check for files with names that would conflict on a 44 | # case-insensitive filesystem like MacOS HFS+ or Windows FAT. 45 | - id: check-case-conflict 46 | 47 | # Lint: Checks that non-binary executables have a proper shebang. 48 | - id: check-executables-have-shebangs 49 | 50 | # Lint: Python code 51 | - repo: https://github.com/pycqa/flake8 52 | rev: "3.9.2" 53 | hooks: 54 | - id: flake8 55 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Configuration on how ReadTheDocs (RTD) builds our documentation 2 | # ref: https://readthedocs.org/projects/jupyterhub-ssh/ 3 | # ref: https://docs.readthedocs.io/en/stable/config-file/v2.html 4 | # 5 | version: 2 6 | 7 | sphinx: 8 | configuration: docs/source/conf.py 9 | 10 | build: 11 | os: ubuntu-22.04 12 | tools: 13 | python: "3.11" 14 | 15 | python: 16 | install: 17 | - requirements: docs/requirements.txt 18 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2020, Yuvi Panda 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# JupyterHub SSH and SFTP

[![Documentation build status](https://img.shields.io/readthedocs/jupyterhub-ssh?logo=read-the-docs)](https://jupyterhub-ssh.readthedocs.io/en/latest/)
[![GitHub Workflow Status - Test](https://img.shields.io/github/workflow/status/jupyterhub/zero-to-jupyterhub-k8s/Test%20chart?logo=github&label=tests)](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/actions)

Access any JupyterHub through [SSH](https://www.ssh.com/ssh), regardless of how it was deployed, and easily transfer files through [SFTP](https://www.ssh.com/ssh/sftp).
With a JupyterHub [SSH](https://www.ssh.com/ssh) server deployed, you can start and access your JupyterHub user environment through SSH. With a JupyterHub
[SFTP](https://www.ssh.com/ssh/sftp) server deployed alongside the JupyterHub's user storage, you can use SFTP to work against your JupyterHub user's home directory.
These services are authenticated using an access token acquired from your JupyterHub's user interface under `/hub/token`.

## Development Status

This project is under active development :tada:, so expect a few changes along the way.

## Technical Overview

The JupyterHub SSH service provides SSH access to your user environment in a JupyterHub. JupyterHub SSH is made up of two main components:

- an SSH server that maps an SSH connection to a Notebook server on a JupyterHub.
- a [Terminado](https://github.com/jupyter/terminado) client that knows how to connect and communicate with a Jupyter terminal.

![Overview](https://raw.githubusercontent.com/yuvipanda/jupyterhub-ssh/main/docs/source/_static/images/technical-overview.png)

Apart from SSH access to JupyterHub, once `jupyterhub-ssh` is deployed, you can also use it to transfer files from your local
home directory into your remote hub home directory. This is achieved through `jupyterhub-sftp`, a service that provides an SFTP
setup using [OpenSSH](https://www.openssh.com/). `jupyterhub-sftp` currently supports only [NFS](https://tldp.org/LDP/nag/node140.html)
based home directories.

## Installation

Instructions on how to install and deploy the JupyterHub SSH & SFTP services.

### Regular deployment (jupyterhub ssh only)

1. Clone the repo and install the jupyterhub-ssh package:

   ```bash
   $ git clone https://github.com/yuvipanda/jupyterhub-ssh.git
   $ cd jupyterhub-ssh
   $ pip install -e .
   ```

1. Or install the package directly:

   ```bash
   $ pip install git+https://github.com/yuvipanda/jupyterhub-ssh.git
   ```

1. Create the config file:

   ```bash
   $ touch jupyterhub_ssh_config.py
   ```

1.
Put in the config file at least the following two config options: 56 | 57 | - `c.JupyterHubSSH.hub_url`: URL of JupyterHub to connect to. 58 | - `c.JupyterHubSSH.host_key_path`: Path to host's private SSH Key. 59 | 60 | More configuration options can be found in the docs [here](https://jupyterhub-ssh.readthedocs.io/en/latest/api/index.html#module-jupyterhub_ssh). 61 | 62 | 1. Start the JupyterHubSSH app from the directory where the config file 63 | `jupyterhub_ssh_config.py` is located: 64 | 65 | ```bash 66 | python -m jupyterhub_ssh 67 | ``` 68 | 69 | ### Kubernetes based deployment (jupyterhub ssh and/or sftp) 70 | 71 | If your JupyterHub has been deployed to Kubernetes, you can use the Helm chart 72 | available in this repo to deploy JupyterHub SSH and/or JupyterHub SFTP directly 73 | into your Kubernetes cluster. 74 | 75 | ```bash 76 | helm install \ 77 | --repo https://yuvipanda.github.io/jupyterhub-ssh/ jupyterhub-ssh \ 78 | --version \ 79 | --set hubUrl=https://jupyter.example.org \ 80 | --set ssh.enabled=true \ 81 | --set sftp.enabled=false 82 | ``` 83 | 84 | If you install JupyterHub SFTP, then it needs access to the home folders. These 85 | home folders are assumed to be exposed via a k8s PVC resource that you should 86 | name via the `sftp.pvc.name` configuration. 87 | 88 | If your JupyterHub has been deployed using [the official JupyterHub Helm 89 | chart](https://z2jh.jupyter.org) version 1.1.0 or later, and you have 90 | _configured the official JupyterHub Helm chart_ with `proxy.https.enabled=true` 91 | and `proxy.https.type=letsencrypt`, then you can add the following to to acquire 92 | access to the jupyterhub-ssh and jupyterhub-sftp services via that setup. 93 | 94 | ```yaml 95 | # Configuration for the official JupyterHub Helm chart to accept traffic via 96 | proxy: 97 | https: 98 | enabled: true 99 | type: letsencrypt 100 | letsencryptEmail: 101 | 102 | service: 103 | # jupyterhub-ssh/sftp integration part 1/3: 104 | # 105 | # We must accept traffic to the k8s Service (proxy-public) receiving traffic 106 | # from the internet. Port 22 is typically used for both SSH and SFTP, but we 107 | # can't use the same port for both so we use 2222 for SFTP in this example. 108 | # 109 | extraPorts: 110 | - name: ssh 111 | port: 22 112 | targetPort: ssh 113 | - name: sftp 114 | port: 2222 115 | targetPort: sftp 116 | 117 | traefik: 118 | # jupyterhub-ssh/sftp integration part 2/3: 119 | # 120 | # We must accept traffic arriving to the autohttps pod (traefik) from the 121 | # proxy-public service. Expose a port and update the NetworkPolicy 122 | # to tolerate incoming (ingress) traffic on the exposed port. 123 | # 124 | extraPorts: 125 | - name: ssh 126 | containerPort: 8022 127 | - name: sftp 128 | containerPort: 2222 129 | networkPolicy: 130 | allowedIngressPorts: [http, https, ssh, sftp] 131 | 132 | # jupyterhub-ssh/sftp integration part 3/3: 133 | # 134 | # We must let traefik know it should listen for traffic (traefik entrypoint) 135 | # and route it (traefik router) onwards to the jupyterhub-ssh k8s Service 136 | # (traefik service). 
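  # (Putting the three parts together, the intended traffic path is: internet
  # -> proxy-public Service ports 22/2222 -> autohttps (traefik) pod ports
  # 8022/2222 -> the jupyterhub-ssh / jupyterhub-sftp Services on port 22.)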
137 | # 138 | extraStaticConfig: 139 | entryPoints: 140 | ssh-entrypoint: 141 | address: :8022 142 | sftp-entrypoint: 143 | address: :2222 144 | extraDynamicConfig: 145 | tcp: 146 | services: 147 | ssh-service: 148 | loadBalancer: 149 | servers: 150 | - address: jupyterhub-ssh:22 151 | sftp-service: 152 | loadBalancer: 153 | servers: 154 | - address: jupyterhub-sftp:22 155 | routers: 156 | ssh-router: 157 | entrypoints: [ssh-entrypoint] 158 | rule: HostSNI(`*`) 159 | service: ssh-service 160 | sftp-router: 161 | entrypoints: [sftp-entrypoint] 162 | rule: HostSNI(`*`) 163 | service: sftp-service 164 | ``` 165 | 166 | ## How to use it 167 | 168 | ### How to SSH 169 | 170 | 1. Login into your JupyterHub and go to `https:///hub/token`. 171 | 172 | 2. Copy the token from JupyterHub. 173 | 174 | 3. SSH into JupyterHub: 175 | 176 | ```bash 177 | ssh @ 178 | ``` 179 | 180 | The `` should be what you see in the top right corner when 181 | viewing `https:///hub/home`. This may different from the UNIX 182 | user you see within the started server, for example in a terminal prompt. 183 | 184 | 4. Enter the token received from JupyterHub as a password. 185 | 186 | 5. TADA :tada: Now you have an interactive terminal! You can do anything you would generally interactively do via ssh: run editors, 187 | fully interactive programs, use the commandline, etc. Some features like non-interactive command running, tunneling, etc are currently 188 | unavailable. 189 | 190 | ### How to SFTP 191 | 192 | 1. Login into your JupyterHub and go to `https:///hub/token`. 193 | 194 | 2. Copy the token from JupyterHub. 195 | 196 | 3. Transfer file into Jupyterhub: 197 | 198 | - Using the `sftp` command: 199 | 200 | ```bash 201 | sftp @ 202 | ``` 203 | 204 | 4. Enter the token received from JupyterHub as a password. 205 | 206 | 5. TADA :tada: Now you can transfer files to and from your home directory on the hubs. 207 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | chartpress 2 | pytest 3 | pyyaml 4 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | autodoc-traits 2 | myst_parser 3 | sphinx-book-theme 4 | sphinx-copybutton 5 | -------------------------------------------------------------------------------- /docs/source/_static/images/logo/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yuvipanda/jupyterhub-ssh/a054eda03fcc609b42c304a02b7a22dcf2b6c62b/docs/source/_static/images/logo/favicon.ico -------------------------------------------------------------------------------- /docs/source/_static/images/logo/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yuvipanda/jupyterhub-ssh/a054eda03fcc609b42c304a02b7a22dcf2b6c62b/docs/source/_static/images/logo/logo.png -------------------------------------------------------------------------------- /docs/source/_static/images/technical-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yuvipanda/jupyterhub-ssh/a054eda03fcc609b42c304a02b7a22dcf2b6c62b/docs/source/_static/images/technical-overview.png -------------------------------------------------------------------------------- /docs/source/_static/logo/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yuvipanda/jupyterhub-ssh/a054eda03fcc609b42c304a02b7a22dcf2b6c62b/docs/source/_static/logo/favicon.ico -------------------------------------------------------------------------------- /docs/source/_static/logo/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yuvipanda/jupyterhub-ssh/a054eda03fcc609b42c304a02b7a22dcf2b6c62b/docs/source/_static/logo/logo.png -------------------------------------------------------------------------------- /docs/source/api/index.rst: -------------------------------------------------------------------------------- 1 | ================== 2 | JupyterHub-SSH API 3 | ================== 4 | 5 | Module: :mod:`jupyterhub_ssh` 6 | ============================= 7 | 8 | .. automodule:: jupyterhub_ssh 9 | 10 | .. currentmodule:: jupyterhub_ssh 11 | 12 | .. :class:`Terminado` 13 | .. ------------------ 14 | 15 | .. .. autoconfigurable:: Terminado 16 | .. :members: 17 | 18 | .. :class:`NotebookSSHServer` 19 | .. -------------------------- 20 | 21 | .. .. autoconfigurable:: NotebookSSHServer 22 | .. :members: 23 | 24 | :class:`JupyterHubSSH` 25 | ---------------------- 26 | 27 | .. 
autoconfigurable:: JupyterHubSSH
   :members:
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------

project = "JupyterHub-SSH"
copyright = "2020, Yuvi Panda"
author = "Yuvi Panda"


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx_copybutton",
    "sphinx.ext.napoleon",
    "autodoc_traits",
    "myst_parser",
]

source_suffix = [".rst", ".md"]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_book_theme"
html_logo = "_static/images/logo/logo.png"
html_favicon = "_static/images/logo/favicon.ico"
html_title = ""
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
.. JupyterHub-SSH documentation master file, created by
   sphinx-quickstart on Fri Oct 23 15:23:33 2020.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

=======================
JupyterHub SSH and SFTP
=======================

- An SSH interface to JupyterHub
- A file transfer utility using SFTP

Regardless of how JupyterHub was deployed, JupyterHub SSH can be deployed on the same infrastructure and used as an
SSH interface to your Hub. SSH access provides the exact same environment (packages, home directory, etc) as web-based
access to JupyterHub.
You can do anything you would generally interactively do via `ssh` like use the commandline, or 16 | run editors or fully interactive programs, etc. Some features, like non-interactive command running, tunneling, etc are 17 | not yet available, though. File transfer to and from your home directory on the hub is also possible through SFTP. 18 | 19 | Contents 20 | ======== 21 | 22 | API Reference 23 | ------------- 24 | 25 | .. toctree:: 26 | :maxdepth: 3 27 | 28 | api/index 29 | 30 | Indices and tables 31 | ================== 32 | 33 | * :ref:`genindex` 34 | * :ref:`modindex` 35 | * :ref:`search` 36 | -------------------------------------------------------------------------------- /helm-chart/.helmignore: -------------------------------------------------------------------------------- 1 | # Patterns to ignore when building packages. 2 | # This supports shell glob matching, relative path matching, and 3 | # negation (prefixed with !). Only one pattern per line. 4 | .DS_Store 5 | # Common VCS dirs 6 | .git/ 7 | .gitignore 8 | .bzr/ 9 | .bzrignore 10 | .hg/ 11 | .hgignore 12 | .svn/ 13 | # Common backup files 14 | *.swp 15 | *.bak 16 | *.tmp 17 | *~ 18 | # Various IDEs 19 | .project 20 | .idea/ 21 | *.tmproj 22 | -------------------------------------------------------------------------------- /helm-chart/chartpress.yaml: -------------------------------------------------------------------------------- 1 | charts: 2 | - name: jupyterhub-ssh 3 | imagePrefix: quay.io/jupyterhub-ssh/ 4 | repo: 5 | git: yuvipanda/jupyterhub-ssh 6 | published: https://yuvipanda.github.io/jupyterhub-ssh/ 7 | images: 8 | sftp: 9 | contextPath: ../jupyterhub-sftp 10 | valuesPath: sftp.image 11 | ssh: 12 | # Context to send to docker build for use by the Dockerfile 13 | contextPath: .. 14 | rebuildOnContextPathChanges: false 15 | # Dockerfile path relative to chartpress.yaml 16 | dockerfilePath: images/jupyterhub-ssh/Dockerfile 17 | valuesPath: ssh.image 18 | # Paths that when changed should trigger a rebuild 19 | paths: 20 | - ../setup.py 21 | - ../jupyterhub_ssh 22 | - images/jupyterhub-ssh 23 | -------------------------------------------------------------------------------- /helm-chart/images/jupyterhub-ssh/Dockerfile: -------------------------------------------------------------------------------- 1 | # To build this Dockerfile locally: 2 | # 3 | # docker build --tag jupyterhub-ssh ../../.. -f Dockerfile 4 | # 5 | FROM python:3.8-slim 6 | 7 | RUN apt-get update -y > /dev/null \ 8 | && apt-get upgrade -y > /dev/null \ 9 | && apt-get install -y \ 10 | wget \ 11 | && rm -rf /var/lib/apt/lists/* 12 | 13 | # Setup tini 14 | # - tini helps ensure SIGTERM propegate to python3 when "docker stop 15 | # " or "kubectl delete " sends the SIGTERM signal, which makes 16 | # the container terminate quickly in a controlled manner. 
17 | # - tini reference: https://github.com/krallin/tini 18 | # 19 | RUN ARCH=`uname -m`; \ 20 | if [ "$ARCH" = x86_64 ]; then ARCH=amd64; fi; \ 21 | if [ "$ARCH" = aarch64 ]; then ARCH=arm64; fi; \ 22 | wget -qO /tini "https://github.com/krallin/tini/releases/download/v0.19.0/tini-$ARCH" \ 23 | && chmod +x /tini 24 | ENTRYPOINT ["/tini", "--"] 25 | 26 | # Prepare a user to run as 27 | ENV NB_UID=1000 \ 28 | NB_USER=jovyan 29 | RUN adduser \ 30 | --disabled-password \ 31 | --shell "/sbin/nologin" \ 32 | --gecos "Default Jupyter user" \ 33 | --uid ${NB_UID} \ 34 | ${NB_USER} 35 | USER $NB_UID 36 | 37 | # Install jupyterhub_ssh the Python package 38 | WORKDIR /srv/jupyterhub-ssh 39 | COPY jupyterhub_ssh ./jupyterhub_ssh 40 | COPY setup.py LICENSE README.md ./ 41 | COPY helm-chart/images/jupyterhub-ssh/jupyterhub_ssh_config.py ./ 42 | RUN pip3 install --no-cache-dir . 43 | 44 | # Configure to run jupyterhub_ssh 45 | # - PYTHONUNBUFFERED help make Python logs made available for k8s directly 46 | # 47 | ENV PYTHONUNBUFFERED=1 48 | CMD ["python3", "-m", "jupyterhub_ssh"] 49 | -------------------------------------------------------------------------------- /helm-chart/images/jupyterhub-ssh/jupyterhub_ssh_config.py: -------------------------------------------------------------------------------- 1 | from ruamel.yaml import YAML 2 | 3 | yaml = YAML() 4 | with open("/etc/jupyterhub-ssh/config/values.yaml") as f: 5 | config = yaml.load(f) 6 | 7 | # FIXME: help this config migrate to ssh.config as well 8 | c.JupyterHubSSH.hub_url = config["hubUrl"] 9 | 10 | ssh_config = config.get("ssh", {}).get("config", {}) 11 | # load generic configuration 12 | for app, cfg in ssh_config.items(): 13 | c[app].update(cfg) 14 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/.helmignore: -------------------------------------------------------------------------------- 1 | schema.yaml 2 | values.dev.yaml 3 | values.lint.yaml 4 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/Chart.yaml: -------------------------------------------------------------------------------- 1 | # Chart.yaml v2 reference: https://helm.sh/docs/topics/charts/#the-chartyaml-file 2 | apiVersion: v2 3 | name: jupyterhub-ssh 4 | version: 0.0.1-set.by.chartpress 5 | appVersion: 0.1.0 6 | description: SSH interface to JupyterHub 7 | keywords: 8 | - jupyterhub 9 | - ssh 10 | - sftp 11 | home: https://github.com/yuvipanda/jupyterhub-ssh 12 | sources: 13 | - https://github.com/yuvipanda/jupyterhub-ssh 14 | icon: https://jupyter.org/assets/hublogo.svg 15 | maintainers: 16 | # Since it is a requirement of Artifact Hub to have specific maintainers 17 | # listed, we have added some below, but in practice the entire JupyterHub team 18 | # contributes to the maintenance of this Helm chart. 19 | - name: Erik Sundell 20 | email: erik@sundellopensource.se 21 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/schema.yaml: -------------------------------------------------------------------------------- 1 | # This schema (a jsonschema in YAML format) is used to generate 2 | # values.schema.json which is packaged with the Helm chart for client side 3 | # validation by Helm of values before template rendering. 4 | # 5 | # This schema is also used by our documentation system to build the 6 | # configuration reference section based on the description fields. 
See 7 | # doc/source/conf.py for that logic! 8 | # 9 | # We look to document everything we have default values for in values.yaml, but 10 | # we don't look to enforce the perfect validation logic within this file. 11 | # 12 | # ref: https://json-schema.org/learn/getting-started-step-by-step.html 13 | # 14 | $schema": http://json-schema.org/draft-07/schema# 15 | type: object 16 | additionalProperties: false 17 | required: 18 | - ssh 19 | - sftp 20 | - global 21 | properties: 22 | hubUrl: 23 | type: [string, "null"] 24 | description: TODO 25 | 26 | hostKey: 27 | type: [string, "null"] 28 | description: | 29 | This private SSH key is automatically generated unless explicitly set. It 30 | is used by the jupyterhub-ssh and jupyterhub-sftp pods to encrypt outbound 31 | traffic via SSH. 32 | 33 | fullnameOverride: 34 | type: [string, "null"] 35 | description: TODO 36 | 37 | nameOverride: 38 | type: [string, "null"] 39 | description: TODO 40 | 41 | ssh: 42 | type: object 43 | description: TODO 44 | properties: 45 | config: 46 | type: object 47 | description: | 48 | The Jupyter ecosystem's Python classes expose configuration through 49 | [_traitlets_](https://traitlets.readthedocs.io/en/stable/), and this 50 | Helm chart config represented as _static_ YAML values will be directly 51 | mapped to updating such traitlet configuration. 52 | 53 | __Example__ 54 | 55 | If you inspect documentation for JupyterHubSSH the Python class to 56 | describing it can be configured like: 57 | 58 | ```python 59 | c.JupyterHubSSH.debug = true 60 | ``` 61 | 62 | Then in this Helm chart, the equivalent configuration would be like 63 | this: 64 | 65 | ```yaml 66 | hub: 67 | config: 68 | JupyterHubSSH: 69 | debug: true 70 | ``` 71 | 72 | sftp: 73 | type: object 74 | description: TODO 75 | 76 | global: 77 | type: object 78 | description: Global scoped config for all Helm charts to read. 79 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/templates/NOTES.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yuvipanda/jupyterhub-ssh/a054eda03fcc609b42c304a02b7a22dcf2b6c62b/helm-chart/jupyterhub-ssh/templates/NOTES.txt -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* 2 | name is used to set the app.kubernetes.io/name label and influences the fullname 3 | function if fullnameOverride isn't specified. 4 | */}} 5 | {{- define "jupyterhub-ssh.name" -}} 6 | {{- .Values.nameOverride | default .Chart.Name | trunc 63 | trimSuffix "-" }} 7 | {{- end }} 8 | 9 | {{/* 10 | fullname is used to name k8s resources either directly based on fullnameOverride, 11 | or by combining the helm release name with the chart name. If the release name 12 | contains the chart name, the chart name won't be repeated. 
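As an illustrative example: a release named "myssh" installed from this chart
renders the fullname "myssh-jupyterhub-ssh", a release named "jupyterhub-ssh"
renders just "jupyterhub-ssh", and setting fullnameOverride: "foo" renders
"foo". The ssh/sftp helpers below then append "-ssh" or "-sftp" to this name.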
13 | */}} 14 | {{- define "jupyterhub-ssh.fullname" -}} 15 | {{- if .Values.fullnameOverride }} 16 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} 17 | {{- else }} 18 | {{- $name := .Values.nameOverride | default .Chart.Name }} 19 | {{- if contains $name .Release.Name }} 20 | {{- .Release.Name | trunc 63 | trimSuffix "-" }} 21 | {{- else }} 22 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} 23 | {{- end }} 24 | {{- end }} 25 | {{- end }} 26 | 27 | {{- define "jupyterhub-ssh.sftp.fullname" -}} 28 | {{ include "jupyterhub-ssh.fullname" . }}-sftp 29 | {{- end }} 30 | 31 | {{- define "jupyterhub-ssh.ssh.fullname" -}} 32 | {{ include "jupyterhub-ssh.fullname" . }}-ssh 33 | {{- end }} 34 | 35 | {{/* 36 | chart is used to set the helm.sh/chart label. 37 | */}} 38 | {{- define "jupyterhub-ssh.chart" -}} 39 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} 40 | {{- end }} 41 | 42 | {{/* 43 | labels are selectorLabels and other common labels k8s resources get 44 | */}} 45 | {{- define "jupyterhub-ssh.labels" -}} 46 | helm.sh/chart: {{ include "jupyterhub-ssh.chart" . }} 47 | {{ include "jupyterhub-ssh.selectorLabels" . }} 48 | {{- if .Chart.AppVersion }} 49 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} 50 | {{- end }} 51 | app.kubernetes.io/managed-by: {{ .Release.Service }} 52 | {{- end }} 53 | 54 | {{- define "jupyterhub-ssh.sftp.labels" -}} 55 | {{ include "jupyterhub-ssh.labels" . }} 56 | app.kubernetes.io/component: sftp 57 | {{- end }} 58 | 59 | {{- define "jupyterhub-ssh.ssh.labels" -}} 60 | {{ include "jupyterhub-ssh.labels" . }} 61 | app.kubernetes.io/component: ssh 62 | {{- end }} 63 | 64 | {{/* 65 | selectorLabels are used to taget specific resources, such as how Services and 66 | Deployment resources target Pods. Changes to this will be breaking changes. 67 | Deployment's matchLabels field are for example immutable and will require the 68 | resource to be recreated. Handling breaking changes was quite easy to do with 69 | `helm2 upgrade --force` but require manual intervention in `helm3 upgrade` by 70 | manually deleting the Deployment resources first. 71 | */}} 72 | {{- define "jupyterhub-ssh.selectorLabels" -}} 73 | app.kubernetes.io/name: {{ include "jupyterhub-ssh.name" . }} 74 | app.kubernetes.io/instance: {{ .Release.Name }} 75 | {{- end }} 76 | 77 | {{- define "jupyterhub-ssh.sftp.selectorLabels" -}} 78 | {{ include "jupyterhub-ssh.selectorLabels" . }} 79 | app.kubernetes.io/component: sftp 80 | {{- end }} 81 | 82 | {{- define "jupyterhub-ssh.ssh.selectorLabels" -}} 83 | {{ include "jupyterhub-ssh.selectorLabels" . }} 84 | app.kubernetes.io/component: ssh 85 | {{- end }} 86 | 87 | {{- /* 88 | This named template is used to return the explicitly set hostKey, lookup an 89 | previously set hostKey, or generate and return a new hostKey. 90 | */}} 91 | {{- define "jupyterhub-ssh.hostKey" -}} 92 | {{- if .Values.hostKey }} 93 | {{- .Values.hostKey }} 94 | {{- else }} 95 | {{- $k8s_state := lookup "v1" "Secret" .Release.Namespace (include "jupyterhub-ssh.ssh.fullname" .) | default (dict "data" (dict)) }} 96 | {{- if hasKey $k8s_state.data "hostKey" }} 97 | {{- index $k8s_state.data "hostKey" }} 98 | {{- else }} 99 | {{- /* While ed25519 is preferred, using it with jupyterhub-sftp seem to fail. 
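Note also that Helm's lookup function returns an empty object during
"helm template" and "helm install/upgrade --dry-run", so in those cases the
previously stored hostKey cannot be found and a fresh key is rendered instead.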
*/}} 100 | {{- genPrivateKey "rsa" }} 101 | {{- end }} 102 | {{- end }} 103 | {{- end }} 104 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/templates/secret.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | name: {{ include "jupyterhub-ssh.fullname" . }} 5 | labels: 6 | {{- include "jupyterhub-ssh.labels" . | nindent 4 }} 7 | type: Opaque 8 | stringData: 9 | hubUrl: {{ .Values.hubUrl | required "hubUrl must be set to a valid JupyterHub URL" | quote }} 10 | hostKey: | 11 | {{- include "jupyterhub-ssh.hostKey" . | required "This should not happen: blank output from named template 'jupyterhub-ssh.hostKey'" | nindent 4 }} 12 | values.yaml: | 13 | {{- .Values | toYaml | nindent 4 }} 14 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/templates/sftp/deployment.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.sftp.enabled -}} 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: {{ include "jupyterhub-ssh.sftp.fullname" . }} 6 | labels: 7 | {{- include "jupyterhub-ssh.sftp.labels" . | nindent 4 }} 8 | spec: 9 | replicas: {{ .Values.sftp.replicaCount }} 10 | selector: 11 | matchLabels: 12 | {{- include "jupyterhub-ssh.selectorLabels" . | nindent 6 }} 13 | template: 14 | metadata: 15 | labels: 16 | {{- include "jupyterhub-ssh.sftp.labels" . | nindent 8 }} 17 | {{- with .Values.sftp.podLabels }} 18 | {{- . | toYaml | nindent 8 }} 19 | {{- end }} 20 | annotations: 21 | checksum/config: {{ omit .Values "ssh" | toYaml | sha256sum }} 22 | {{- with .Values.sftp.podAnnotations }} 23 | {{- . | toYaml | nindent 8 }} 24 | {{- end }} 25 | spec: 26 | # We don't need any interaction with k8s API 27 | automountServiceAccountToken: false 28 | volumes: 29 | {{- if .Values.sftp.pvc.enabled }} 30 | - name: home 31 | persistentVolumeClaim: 32 | claimName: {{ .Values.sftp.pvc.name }} 33 | {{- end }} 34 | # Selects only parts of the k8s Secret as a Volume. Note that we can 35 | # also specify a mode for individual files at this point, for example to 36 | # make the hostKey read only in the file system. 37 | # 38 | # ref: https://v1-18.docs.kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#secretvolumesource-v1-core 39 | - name: config 40 | secret: 41 | secretName: {{ include "jupyterhub-ssh.fullname" . }} 42 | items: 43 | - key: hostKey 44 | path: hostKey 45 | mode: 0400 46 | - key: hubUrl 47 | path: hubUrl 48 | containers: 49 | - name: server 50 | image: "{{ .Values.sftp.image.repository }}:{{ .Values.sftp.image.tag | default .Chart.AppVersion }}" 51 | imagePullPolicy: {{ .Values.sftp.image.pullPolicy }} 52 | securityContext: 53 | privileged: true 54 | volumeMounts: 55 | {{- if .Values.sftp.pvc.enabled }} 56 | - name: home 57 | mountPath: /mnt/home 58 | {{- end }} 59 | - name: config 60 | mountPath: /etc/jupyterhub-sftp/config 61 | readOnly: true 62 | ports: 63 | - name: sftp 64 | containerPort: 2222 65 | protocol: TCP 66 | resources: 67 | {{- .Values.sftp.resources | toYaml | nindent 12 }} 68 | {{- with .Values.sftp.imagePullSecrets }} 69 | imagePullSecrets: 70 | {{- . | toYaml | nindent 8 }} 71 | {{- end }} 72 | {{- with .Values.sftp.nodeSelector }} 73 | nodeSelector: 74 | {{- . | toYaml | nindent 8 }} 75 | {{- end }} 76 | {{- with .Values.sftp.affinity }} 77 | affinity: 78 | {{- . 
| toYaml | nindent 8 }} 79 | {{- end }} 80 | {{- with .Values.sftp.tolerations }} 81 | tolerations: 82 | {{- . | toYaml | nindent 8 }} 83 | {{- end }} 84 | {{- end }} 85 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/templates/sftp/netpol.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.sftp.enabled -}} 2 | apiVersion: networking.k8s.io/v1 3 | kind: NetworkPolicy 4 | metadata: 5 | name: {{ include "jupyterhub-ssh.sftp.fullname" . }} 6 | labels: 7 | {{- include "jupyterhub-ssh.sftp.labels" . | nindent 4 }} 8 | spec: 9 | podSelector: 10 | matchLabels: 11 | {{- include "jupyterhub-ssh.sftp.selectorLabels" . | nindent 6 }} 12 | policyTypes: 13 | - Ingress 14 | - Egress 15 | egress: 16 | - ports: 17 | - protocol: UDP 18 | port: 53 19 | - protocol: TCP 20 | port: 53 21 | # FIXME: This is way too permissive 22 | - to: 23 | - ipBlock: 24 | cidr: 0.0.0.0/0 25 | {{- with .Values.sftp.networkPolicy.egress }} 26 | {{- . | toYaml | trimSuffix "\n" | nindent 4 }} 27 | {{- end }} 28 | ingress: 29 | # Allow pods in the same namespace with this label to have network access 30 | - from: 31 | - podSelector: 32 | matchLabels: 33 | hub.jupyter.org/network-access-sftp-server: "true" 34 | ports: 35 | - port: sftp 36 | protocol: TCP 37 | {{- with .Values.sftp.networkPolicy.ingress}} 38 | {{- . | toYaml | trimSuffix "\n" | nindent 4 }} 39 | {{- end }} 40 | {{- end }} 41 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/templates/sftp/service.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.sftp.enabled -}} 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: {{ include "jupyterhub-ssh.sftp.fullname" . }} 6 | labels: 7 | {{- include "jupyterhub-ssh.sftp.labels" . | nindent 4 }} 8 | {{- with .Values.sftp.serviceLabels }} 9 | {{- . | toYaml | nindent 4 }} 10 | {{- end }} 11 | {{- with .Values.sftp.serviceAnnotations }} 12 | annotations: 13 | {{- . | toYaml | nindent 4 }} 14 | {{- end }} 15 | spec: 16 | type: {{ .Values.sftp.service.type }} 17 | ports: 18 | - name: sftp 19 | port: {{ .Values.sftp.service.port }} 20 | targetPort: sftp 21 | protocol: TCP 22 | {{- if eq .Values.sftp.service.type "NodePort" }} 23 | nodePort: {{ .Values.sftp.service.nodePort }} 24 | {{- end }} 25 | selector: 26 | {{- include "jupyterhub-ssh.sftp.selectorLabels" . | nindent 4 }} 27 | {{- end }} 28 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/templates/ssh/deployment.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.ssh.enabled -}} 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: {{ include "jupyterhub-ssh.ssh.fullname" . }} 6 | labels: 7 | {{- include "jupyterhub-ssh.ssh.labels" . | nindent 4 }} 8 | spec: 9 | replicas: {{ .Values.ssh.replicaCount }} 10 | selector: 11 | matchLabels: 12 | {{- include "jupyterhub-ssh.selectorLabels" . | nindent 6 }} 13 | template: 14 | metadata: 15 | labels: 16 | {{- include "jupyterhub-ssh.ssh.labels" . | nindent 8 }} 17 | {{- with .Values.ssh.podLabels }} 18 | {{- . | toYaml | nindent 8 }} 19 | {{- end }} 20 | annotations: 21 | checksum/config: {{ omit .Values "sftp" | toYaml | sha256sum }} 22 | {{- with .Values.ssh.podAnnotations }} 23 | {{- . 
| toYaml | nindent 8 }} 24 | {{- end }} 25 | spec: 26 | # We don't need any interaction with k8s API 27 | automountServiceAccountToken: false 28 | volumes: 29 | - name: config 30 | secret: 31 | secretName: {{ include "jupyterhub-ssh.fullname" . }} 32 | containers: 33 | - name: server 34 | image: "{{ .Values.ssh.image.repository }}:{{ .Values.ssh.image.tag | default .Chart.AppVersion }}" 35 | imagePullPolicy: {{ .Values.ssh.image.pullPolicy }} 36 | securityContext: 37 | allowPrivilegeEscalation: false 38 | runAsNonRoot: true 39 | volumeMounts: 40 | - name: config 41 | mountPath: /etc/jupyterhub-ssh/config 42 | readOnly: true 43 | ports: 44 | - name: ssh 45 | containerPort: 8022 46 | protocol: TCP 47 | resources: 48 | {{- .Values.ssh.resources | toYaml | nindent 12 }} 49 | {{- with .Values.ssh.imagePullSecrets }} 50 | imagePullSecrets: 51 | {{- . | toYaml | nindent 8 }} 52 | {{- end }} 53 | {{- with .Values.ssh.nodeSelector }} 54 | nodeSelector: 55 | {{- . | toYaml | nindent 8 }} 56 | {{- end }} 57 | {{- with .Values.ssh.affinity }} 58 | affinity: 59 | {{- . | toYaml | nindent 8 }} 60 | {{- end }} 61 | {{- with .Values.ssh.tolerations }} 62 | tolerations: 63 | {{- . | toYaml | nindent 8 }} 64 | {{- end }} 65 | {{- end }} 66 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/templates/ssh/netpol.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.ssh.enabled -}} 2 | apiVersion: networking.k8s.io/v1 3 | kind: NetworkPolicy 4 | metadata: 5 | name: {{ include "jupyterhub-ssh.ssh.fullname" . }} 6 | labels: 7 | {{- include "jupyterhub-ssh.ssh.labels" . | nindent 4 }} 8 | spec: 9 | podSelector: 10 | matchLabels: 11 | {{- include "jupyterhub-ssh.ssh.selectorLabels" . | nindent 6 }} 12 | policyTypes: 13 | - Ingress 14 | - Egress 15 | egress: 16 | - ports: 17 | - protocol: UDP 18 | port: 53 19 | - protocol: TCP 20 | port: 53 21 | # FIXME: This is way too permissive 22 | - to: 23 | - ipBlock: 24 | cidr: 0.0.0.0/0 25 | {{- with .Values.ssh.networkPolicy.egress}} 26 | {{- . | toYaml | trimSuffix "\n" | nindent 4 }} 27 | {{- end }} 28 | ingress: 29 | # Allow pods in the same namespace with this label to have network access 30 | - from: 31 | - podSelector: 32 | matchLabels: 33 | hub.jupyter.org/network-access-ssh-server: "true" 34 | ports: 35 | - port: ssh 36 | protocol: TCP 37 | {{- with .Values.ssh.networkPolicy.ingress}} 38 | {{- . | toYaml | trimSuffix "\n" | nindent 4 }} 39 | {{- end }} 40 | {{- end }} 41 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/templates/ssh/service.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.ssh.enabled -}} 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: {{ include "jupyterhub-ssh.ssh.fullname" . }} 6 | labels: 7 | {{- include "jupyterhub-ssh.ssh.labels" . | nindent 4 }} 8 | {{- with .Values.ssh.serviceLabels }} 9 | {{- . | toYaml | nindent 4 }} 10 | {{- end }} 11 | {{- with .Values.ssh.serviceAnnotations }} 12 | annotations: 13 | {{- . | toYaml | nindent 4 }} 14 | {{- end }} 15 | spec: 16 | type: {{ .Values.ssh.service.type }} 17 | ports: 18 | - name: ssh 19 | port: {{ .Values.ssh.service.port }} 20 | targetPort: ssh 21 | protocol: TCP 22 | {{- if eq .Values.ssh.service.type "NodePort" }} 23 | nodePort: {{ .Values.ssh.service.nodePort }} 24 | {{- end }} 25 | selector: 26 | {{- include "jupyterhub-ssh.ssh.selectorLabels" . 
| nindent 4 }} 27 | {{- end }} 28 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/values.dev.yaml: -------------------------------------------------------------------------------- 1 | hubUrl: http://hub:8081 2 | 3 | sftp: 4 | pvc: 5 | # Don't attempt to mount storage for JupyterHub's home directories for now, 6 | # just try to make sure we can start the Helm chart containers successfully. 7 | enabled: false 8 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/values.lint.yaml: -------------------------------------------------------------------------------- 1 | hubUrl: http://hub:8081 2 | 3 | sftp: 4 | pvc: 5 | name: jupyterhub-home-folders-pvc-name 6 | -------------------------------------------------------------------------------- /helm-chart/jupyterhub-ssh/values.yaml: -------------------------------------------------------------------------------- 1 | # Required configuration 2 | # hubUrl should be a URL like http://hub:8081 or https://jupyter.example.org. 3 | # Only HTTP(S) traffic will be sent to this URL. 4 | hubUrl: "" 5 | 6 | # hostKey is the private SSH host key used by the jupyterhub-ssh and 7 | # jupyterhub-sftp servers to identify themselves to clients. If this value is 8 | # blank, whatever was used previously will be used, but if nothing has been 9 | # used previously an RSA 4096 key will be generated. 10 | hostKey: "" 11 | 12 | # nameOverride if set, will override the name of the chart in two contexts. 13 | # 1. The label: app.kubernetes.io/name: 14 | # 2. The Helm template function: jupyterhub-ssh.fullname, if fullnameOverride 15 | # isn't set because then it takes precedence. 16 | nameOverride: "" 17 | # fullnameOverride set to a truthy value will make all k8s resources be named 18 | # <fullnameOverride> with an optional prefix. But if fullnameOverride is set to 19 | # a falsy value will make all k8s resource names become either <release 20 | # name>-<chart name> with an optional suffix, or <release name> with an optional 21 | # suffix. The chart name part is excluded if release name is found to contain 22 | # the chart name.
23 | fullnameOverride: "jupyterhub" 24 | 25 | ssh: 26 | enabled: true 27 | replicaCount: 1 28 | 29 | config: 30 | JupyterHubSSH: 31 | debug: true 32 | host_key_path: /etc/jupyterhub-ssh/config/hostKey 33 | 34 | image: 35 | repository: quay.io/jupyterhub-ssh/ssh 36 | tag: "set-by-chartpress" 37 | pullPolicy: "" 38 | 39 | service: 40 | type: ClusterIP 41 | port: 22 42 | 43 | podLabels: 44 | hub.jupyter.org/network-access-hub: "true" 45 | hub.jupyter.org/network-access-proxy-http: "true" 46 | podAnnotations: {} 47 | serviceLabels: {} 48 | serviceAnnotations: {} 49 | 50 | resources: {} 51 | imagePullSecrets: [] 52 | nodeSelector: {} 53 | tolerations: [] 54 | affinity: {} 55 | networkPolicy: {} 56 | 57 | sftp: 58 | enabled: true 59 | replicaCount: 1 60 | 61 | pvc: 62 | enabled: true 63 | name: "" 64 | 65 | image: 66 | repository: quay.io/jupyterhub-ssh/sftp 67 | tag: "set-by-chartpress" 68 | pullPolicy: "" 69 | 70 | service: 71 | type: ClusterIP 72 | port: 22 73 | 74 | podLabels: 75 | hub.jupyter.org/network-access-hub: "true" 76 | hub.jupyter.org/network-access-proxy-http: "true" 77 | podAnnotations: {} 78 | serviceLabels: {} 79 | serviceAnnotations: {} 80 | 81 | resources: {} 82 | imagePullSecrets: [] 83 | nodeSelector: {} 84 | tolerations: [] 85 | affinity: {} 86 | networkPolicy: {} 87 | 88 | global: {} 89 | -------------------------------------------------------------------------------- /helm-chart/tools/generate-json-schema.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | This script reads schema.yaml and generates a values.schema.json that we can 4 | package with the Helm chart, allowing helm the CLI perform validation. 5 | 6 | While we can directly generate a values.schema.json from schema.yaml, it 7 | contains a lot of description text we use to generate our configuration 8 | reference that isn't helpful to ship along the validation schema. Due to that, 9 | we trim away everything that isn't needed. 10 | """ 11 | import json 12 | import os 13 | from collections.abc import MutableMapping 14 | 15 | import yaml 16 | 17 | here_dir = os.path.abspath(os.path.dirname(__file__)) 18 | schema_yaml = os.path.join(here_dir, os.pardir, "jupyterhub-ssh", "schema.yaml") 19 | values_schema_json = os.path.join( 20 | here_dir, os.pardir, "jupyterhub-ssh", "values.schema.json" 21 | ) 22 | 23 | 24 | def clean_jsonschema(d, parent_key=""): 25 | """ 26 | Modifies a dictionary representing a jsonschema in place to not contain 27 | jsonschema keys not relevant for a values.schema.json file solely for use by 28 | the helm CLI. 29 | """ 30 | JSONSCHEMA_KEYS_TO_REMOVE = {"description"} 31 | 32 | # start by cleaning up the current level 33 | for k in set.intersection(JSONSCHEMA_KEYS_TO_REMOVE, set(d.keys())): 34 | del d[k] 35 | 36 | # Recursively cleanup nested levels, bypassing one level where there could 37 | # be a valid Helm chart configuration named just like the jsonschema 38 | # specific key to remove. 39 | if "properties" in d: 40 | for k, v in d["properties"].items(): 41 | if isinstance(v, MutableMapping): 42 | clean_jsonschema(v, k) 43 | 44 | 45 | def run(): 46 | # Using these sets, we can validate further manually by printing the results 47 | # of set operations. 48 | with open(schema_yaml) as f: 49 | schema = yaml.safe_load(f) 50 | 51 | # Drop what isn't relevant for a values.schema.json file packaged with the 52 | # Helm chart, such as the description keys only relevant for our 53 | # configuration reference. 
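    # To make the cleaning step below concrete, here is an illustrative
    # before/after (made up for this comment, not taken from schema.yaml) of
    # what clean_jsonschema does to a nested entry:
    #
    #   properties:
    #     ssh:
    #       type: object
    #       description: Configuration for the ssh frontend.
    #       properties:
    #         enabled:
    #           type: boolean
    #           description: Enables the ssh deployment and related resources.
    #
    # becomes
    #
    #   properties:
    #     ssh:
    #       type: object
    #       properties:
    #         enabled:
    #           type: boolean
    #
    # i.e. only the "description" keys are stripped, everything else is kept.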
54 | clean_jsonschema(schema) 55 | 56 | # dump schema to values.schema.json 57 | with open(values_schema_json, "w") as f: 58 | json.dump(schema, f) 59 | 60 | print("values.schema.json created") 61 | 62 | 63 | run() 64 | -------------------------------------------------------------------------------- /jupyterhub-sftp/Dockerfile: -------------------------------------------------------------------------------- 1 | # To build this Dockerfile locally: 2 | # 3 | # docker build --tag jupyterhub-sftp . 4 | # 5 | FROM buildpack-deps:focal 6 | 7 | RUN apt-get update -y > /dev/null \ 8 | && apt-get upgrade -y > /dev/null \ 9 | && apt-get install -y \ 10 | openssh-server \ 11 | python3 \ 12 | python3-pip \ 13 | && rm -rf /var/lib/apt/lists/* 14 | 15 | # Setup the jovyan user everyone will impersonate 16 | ENV NB_UID=1000 \ 17 | NB_USER=jovyan 18 | RUN adduser \ 19 | --disabled-password \ 20 | --shell "/sbin/nologin" \ 21 | --gecos "Default Jupyter user" \ 22 | --uid ${NB_UID} \ 23 | ${NB_USER} 24 | 25 | # Setup Name Service Switch (NSS) All-To-One (ATO) logic 26 | # - It maps unknown users to the jovyan user 27 | # - Setup involves git clone and C code compilation 28 | # 29 | # FIXME: Slim down image to not include gcc etc. as is required to compile this 30 | # software by using a dedicated build step from which we later copy the 31 | # compiled software. 32 | # 33 | COPY setup-nss.bash /tmp/ 34 | RUN /tmp/setup-nss.bash && rm /tmp/setup-nss.bash 35 | COPY etc/libnss-ato.conf etc/nsswitch.conf /etc/ 36 | 37 | # Setup our custom Python logic 38 | # - It couples PAM auth to the verification of JupyterHub tokens 39 | # 40 | COPY requirements.txt /tmp/ 41 | RUN pip install -r /tmp/requirements.txt 42 | COPY jupyterhub-token-verify.py /usr/sbin/ 43 | COPY etc/pam.d/common-auth /etc/pam.d/ 44 | 45 | # Setup SSHD - The OpenSSH server process 46 | # - /export/home is what sshd will expose 47 | # - /run/sshd is referred to as a privilege separation dir (what is it?) 48 | # 49 | # sshd reference: 50 | # -D sshd will not detach and does not become a daemon 51 | # -e sshd will send the output to the standard error instead of the system 52 | # log. 53 | # 54 | # NOTE: sshd listens to SIGTERM and not just SIGKILL, so terminating this 55 | # container will be quick as it should be.
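# A rough way to smoke test this image locally (a sketch, not taken from this
# repository's docs; the hostKey and hubUrl files are assumed to be provided by
# you, and --privileged is needed for the bind mounts done at login time):
#
#   docker build --tag jupyterhub-sftp .
#   docker run --privileged -p 2222:2222 \
#     -v $PWD/hostKey:/etc/jupyterhub-sftp/config/hostKey:ro \
#     -v $PWD/hubUrl:/etc/jupyterhub-sftp/config/hubUrl:ro \
#     -v $PWD/home:/mnt/home \
#     jupyterhub-sftp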
56 | # 57 | RUN mkdir -p \ 58 | /export/home \ 59 | /run/sshd 60 | COPY etc/ssh/sshd_config /etc/ssh/ 61 | EXPOSE 2222 62 | CMD ["/usr/sbin/sshd", "-De"] 63 | -------------------------------------------------------------------------------- /jupyterhub-sftp/etc/libnss-ato.conf: -------------------------------------------------------------------------------- 1 | # Copy pasted from /etc/passwd of built image 2 | jovyan:x:1000:1000:Default Jupyter user,,,:/home/jovyan:/sbin/nologin 3 | -------------------------------------------------------------------------------- /jupyterhub-sftp/etc/nsswitch.conf: -------------------------------------------------------------------------------- 1 | # nsswitch that reads from files and ato 2 | # we only modify passwd & shadow, although I'm not sure what overriding shadow does 3 | passwd: files ato systemd 4 | group: files systemd 5 | shadow: files ato 6 | gshadow: files 7 | 8 | hosts: files dns 9 | networks: files 10 | 11 | protocols: db files 12 | services: db files 13 | ethers: db files 14 | rpc: db files 15 | 16 | netgroup: nis 17 | -------------------------------------------------------------------------------- /jupyterhub-sftp/etc/pam.d/common-auth: -------------------------------------------------------------------------------- 1 | # The PAM configuration file for the Shadow `login' service 2 | # 3 | 4 | # Our "Special Script" 5 | 6 | auth sufficient pam_exec.so expose_authtok debug log=/tmp/debug.log /usr/sbin/jupyterhub-token-verify.py 7 | 8 | # here are the per-package modules (the "Primary" block) 9 | auth [success=1 default=ignore] pam_unix.so nullok_secure 10 | # here's the fallback if no module succeeds 11 | auth requisite pam_deny.so 12 | # prime the stack with a positive return value if there isn't one already; 13 | # this avoids us returning an error just because nothing sets a success code 14 | # since the modules above will each just jump around 15 | auth required pam_permit.so 16 | -------------------------------------------------------------------------------- /jupyterhub-sftp/etc/ssh/sshd_config: -------------------------------------------------------------------------------- 1 | # sshd still runs as root for chroot functionality, but 2 | # we listen on a non-privileged port anyway. This matches 3 | # the recommended port we expose in the helm chart. 4 | Port 2222 5 | 6 | # This file is assumed to be mounted to the Docker container 7 | HostKey /etc/jupyterhub-sftp/config/hostKey 8 | 9 | # Only allow password auth, BECAUSE WE ARE EVIL HAHA 10 | # But also because users log in with their jupyterhub tokens 11 | PubkeyAuthentication no 12 | PasswordAuthentication yes 13 | 14 | # Passwords are jupyterhub Auth tokens, so they can't be empty 15 | PermitEmptyPasswords no 16 | 17 | # FIXME: I'm not sure what to do with this one 18 | ChallengeResponseAuthentication no 19 | 20 | # Our custom PAM module (pam_exec) calls jupyterhub-token-verify.py to 21 | # authenticate with a JupyterHub token. 
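# From a client's point of view the flow is roughly (illustrative only; the
# hostname and published port depend on how the jupyterhub-sftp Service is
# exposed):
#
#   sftp -P 22 <jupyterhub-username>@<jupyterhub-sftp-host>
#
# with a JupyterHub API token pasted at the password prompt.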
22 | UsePAM yes 23 | 24 | # Explicitly turn off all features of sshd we won't use 25 | AllowAgentForwarding No 26 | AllowTcpForwarding no 27 | GatewayPorts no 28 | X11Forwarding no 29 | PermitTTY no 30 | PrintMotd no 31 | PrintLastLog no 32 | PermitUserEnvironment no 33 | PermitTunnel no 34 | 35 | # Use heartbeat packets to terminate 'stuck' connections 36 | # https://man.openbsd.org/sshd_config#TCPKeepAlive 37 | TCPKeepAlive yes 38 | 39 | # Use the built-in internal-sftp setup, rather than shelling out to sftp-server 40 | Subsystem sftp internal-sftp 41 | 42 | # Only allow sftp 43 | ChrootDirectory /export/home/%u 44 | ForceCommand internal-sftp -d %u 45 | -------------------------------------------------------------------------------- /jupyterhub-sftp/jupyterhub-token-verify.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | """ 3 | Verify that a JupyterHub token is valid for a given user 4 | 5 | # SECURITY WARNING!!!! 6 | 7 | This code runs as **ROOT**, and deals with **User Input** 8 | (in the form of username), manipulating paths on the host filesystem. 9 | This requires a very high standard for securing it, since security 10 | lapses here will lead to exposing users' home directories. At 11 | the very least, watch out for the following classes of vulnerabilities: 12 | 13 | 1. Path Traversal Attacks - https://owasp.org/www-community/attacks/Path_Traversal 14 | 2. Arbitrary code execution vulnerabilities in YAML 15 | 3. Sending hub tokens to arbitrary URLs, not just the configured hub 16 | 17 | There's gonna be more, but really - just watch out! 18 | 19 | # bind mounting and chroot 20 | 21 | sshd supports chrooting to a given directory after authenticating as 22 | a user. This is required for SFTP so we can expose *just* a user's 23 | home directory to them, without accidentally also exposing other 24 | users' home directories. sshd requires that all the directories in the 25 | path - including the user's home directory - are owned by root and 26 | not writeably by anyone else. This is a little problematic for our 27 | use case - JupyterHub home directories are usually writeable by 28 | the user. So we can't directly chroot to the directories. We also 29 | can't just chroot to the parent directory - it contains all the 30 | users' home directories, and they're all usually owned by the same uid 31 | (1000). 32 | 33 | So in *this* script, we do some bind mounting magic to make this work. 34 | We provision a directory (${DEST_DIR}) owned by root. For each user 35 | who successfully logs in, we: 36 | 37 | 1. Create an empty, root owned directory (${DEST_DIR}/${USERNAME}) 38 | 2. Create another empty, root owned directory inside this - 39 | (${DEST_DIR}/${USERNAME}/${USERNAME}). This will act as a 40 | bind target only. 41 | 3. Bind mount the user's actual home directory (${SRC_DIR}/${USERNAME}) 42 | to this nested directory (${DEST_DIR}/${USERNAME}/${USERNAME}). 43 | 4. Make sshd chroot to the first level directory (${DEST_DIR}/${USERNAME}). 44 | This is root owned, so it's fine. 45 | 5. Tell sftp to start in the subdirectory where our user's home directory 46 | has been bind mounted (${DEST_DIR}/${USERNAME}/${USERNAME}). This shows 47 | them their home directory when they log in, but at most they can 48 | escape to the parent directory - nowhere else, thanks to the proper 49 | chrooting. 50 | 51 | We do this if needed the first time a user logs in. However, the user 52 | controls ${USERNAME}. 
If we aren't careful, they can use it to have us 53 | give them read (and possibly write) access to *any* part of the filesystem. 54 | So we have to be very careful doing this. 55 | 56 | # DEVELOPER NOTES 57 | 58 | If this script raises a Python error, logs will be emitted to /tmp/debug.log as 59 | configured via the /etc/pam.d/common-auth file. 60 | 61 | FIXME: make some logs be emitted one way or another 62 | """ 63 | import os 64 | import string 65 | import subprocess 66 | import sys 67 | from pathlib import PosixPath 68 | 69 | import requests 70 | from escapism import escape 71 | 72 | 73 | def valid_user(hub_url, username, token): 74 | """ 75 | Check if token is valid for username in hub at hub_url 76 | """ 77 | # FIXME: Construct this URL better? 78 | url = f"{hub_url}/hub/api/user" 79 | headers = {"Authorization": f"token {token}"} 80 | resp = requests.get(url, headers=headers) 81 | return resp.status_code == 200 82 | 83 | 84 | # Directory containing user home directories 85 | SRC_DIR = PosixPath("/mnt/home") 86 | # Directory sshd is exposing. We will bind-mount users there 87 | DEST_DIR = PosixPath("/export/home") 88 | 89 | 90 | def bind_mount_user(untrusted_username): 91 | # username is user controlled data, so should be treated more cautiously 92 | # It's been authenticated as valid by sshd, but that doesn't mean anything 93 | # However, the untrusted username is what sshd will use as chroot, so we 94 | # have to do our bind mounts there. 95 | # 96 | 97 | # In JupyterHub, we escape most file system naming interactions with this 98 | # escapism call. This should also work here to make sure we mount the correct 99 | # directory. 100 | # FIXME: Verify usernames starting with '-' work fine with PAM & NSS 101 | safe_chars = set(string.ascii_lowercase + string.digits) 102 | source_username = escape( 103 | untrusted_username, safe=safe_chars, escape_char="-" 104 | ).lower() 105 | 106 | # To protect against path traversal attacks, we: 107 | # 1. Resolve our paths to absolute paths, traversing any symlinks if needed 108 | # 2. Make sure that the absolote paths are within the source directories appropriately 109 | # This prevents any relative path (..) or symlink attacks. 110 | src_path = (SRC_DIR / source_username).resolve() 111 | 112 | # Make sure src_path isn't outside of SRC_DIR 113 | # And doesn't refer to other users' home directories 114 | assert str(src_path.relative_to(SRC_DIR)) == source_username 115 | 116 | dest_chroot_path = (DEST_DIR / untrusted_username).resolve() 117 | # Make sure dest_chroot_path isn't outside of DEST_DIR 118 | assert str(dest_chroot_path.relative_to(DEST_DIR)) == untrusted_username 119 | 120 | dest_bind_path = (dest_chroot_path / untrusted_username).resolve() 121 | # Make sure dest_bind_path isn't outside dest_chroot_path 122 | assert str(dest_bind_path.relative_to(dest_chroot_path)) == untrusted_username 123 | 124 | if not os.path.exists(dest_chroot_path): 125 | os.makedirs(dest_chroot_path, exist_ok=True) 126 | os.makedirs(dest_bind_path, exist_ok=True) 127 | subprocess.check_call(["mount", "-o", "bind", src_path, dest_bind_path]) 128 | 129 | 130 | # PAM_USER is passed in to us by pam_exec: http://www.linux-pam.org/Linux-PAM-html/sag-pam_exec.html 131 | # We *must* treat this as untrusted. From `pam_exec`'s documentation: 132 | # > Commands called by pam_exec need to be aware of that the user can have control over the environment. 
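# To make the scheme described in the module docstring concrete: for a
# (hypothetical) user "alice", a successful login should end up with roughly
#
#   /export/home/alice         root-owned directory sshd chroots into
#   /export/home/alice/alice   bind mount of /mnt/home/alice, where sftp starts
#
# which is what bind_mount_user() above sets up.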
133 | untrusted_username = os.environ["PAM_USER"] 134 | 135 | # Password is a null delimited string, passed in via stdin by pam_exec 136 | password = sys.stdin.read().rstrip("\x00") 137 | 138 | with open("/etc/jupyterhub-sftp/config/hubUrl", "r") as f: 139 | hub_url = f.read() 140 | 141 | if valid_user(hub_url, untrusted_username, password): 142 | # FIXME: We're doing a bind mount here based on an untrusted_username 143 | # THIS IS *SCARY* and we should do more work to ensure we aren't 144 | # accidentally exposing user data. 145 | bind_mount_user(untrusted_username) 146 | sys.exit(0) 147 | 148 | sys.exit(1) 149 | -------------------------------------------------------------------------------- /jupyterhub-sftp/requirements.in: -------------------------------------------------------------------------------- 1 | # This file is automatically used by Dependabot to update requirements.txt via 2 | # the command pip-compile. If you add something new here, install pip-tools and 3 | # then run pip-compile from this folder. 4 | # 5 | escapism 6 | requests 7 | -------------------------------------------------------------------------------- /jupyterhub-sftp/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with python 3.7 3 | # To update, run: 4 | # 5 | # pip-compile 6 | # 7 | certifi==2021.5.30 8 | # via requests 9 | charset-normalizer==2.0.3 10 | # via requests 11 | escapism==1.0.1 12 | # via -r requirements.in 13 | idna==3.2 14 | # via requests 15 | requests==2.26.0 16 | # via -r requirements.in 17 | urllib3==1.26.6 18 | # via requests 19 | -------------------------------------------------------------------------------- /jupyterhub-sftp/setup-nss.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | # FIXME: Pin this 5 | git clone --depth 1 https://github.com/donapieppo/libnss-ato 6 | pushd libnss-ato 7 | 8 | make 9 | make install 10 | 11 | popd 12 | rm -rf libnss-ato 13 | -------------------------------------------------------------------------------- /jupyterhub_ssh/__init__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from functools import partial 4 | 5 | import asyncssh 6 | from aiohttp import ClientSession 7 | from async_timeout import timeout 8 | from traitlets import Any 9 | from traitlets import Bool 10 | from traitlets import Integer 11 | from traitlets import Unicode 12 | from traitlets import validate 13 | from traitlets.config import Application 14 | from yarl import URL 15 | 16 | from .terminado import Terminado 17 | 18 | 19 | class NotebookSSHServer(asyncssh.SSHServer): 20 | """ 21 | A single SSH connection mapping to a notebook server on a JupyterHub 22 | """ 23 | 24 | def __init__(self, app, *args, **kwargs): 25 | self.app = app 26 | super().__init__(*args, **kwargs) 27 | 28 | def connection_made(self, conn): 29 | """ 30 | Connection has been successfully established 31 | """ 32 | self._conn = conn 33 | 34 | def password_auth_supported(self): 35 | return True 36 | 37 | async def get_user_server_url(self, session, username): 38 | """ 39 | Return user's server url if it is running. 
40 | 41 | Else return None 42 | """ 43 | async with session.get(self.app.hub_url / "hub/api/users" / username) as resp: 44 | if resp.status != 200: 45 | return None 46 | user = await resp.json() 47 | print(user) 48 | # URLs will have preceding slash, but yarl forbids those 49 | server = user.get("servers", {}).get("", {}) 50 | if server.get("ready", False): 51 | return self.app.hub_url / user["servers"][""]["url"][1:] 52 | else: 53 | return None 54 | 55 | async def start_user_server(self, session, username): 56 | """ """ 57 | # REST API reference: https://jupyterhub.readthedocs.io/en/stable/_static/rest-api/index.html#operation--users--name--server-post 58 | # REST API implementation: https://github.com/jupyterhub/jupyterhub/blob/187fe911edce06eb067f736eaf4cc9ea52e69e08/jupyterhub/apihandlers/users.py#L451-L497 59 | create_url = self.app.hub_url / "hub/api/users" / username / "server" 60 | 61 | async with session.post(create_url) as resp: 62 | if resp.status == 201 or resp.status == 400: 63 | # FIXME: code 400 can mean "pending stop" or "already running", 64 | # but we assume it means that the server is already 65 | # running. 66 | 67 | # Server started quickly 68 | # We manually generate this, even though it's *bad* 69 | # Mostly because when the server is already running, JupyterHub 70 | # doesn't respond with the whole model! 71 | return self.app.hub_url / "user" / username 72 | elif resp.status == 202: 73 | # Server start has been requested, now and potentially earlier, 74 | # but hasn't started quickly and is pending spawn. 75 | # We check for a while, reporting progress to user - until we're 76 | # done 77 | try: 78 | async with timeout(self.app.start_timeout): 79 | notebook_url = None 80 | self._conn.send_auth_banner("Starting your server...") 81 | while notebook_url is None: 82 | # FIXME: Exponential backoff + make this configurable 83 | await asyncio.sleep(0.5) 84 | notebook_url = await self.get_user_server_url( 85 | session, username 86 | ) 87 | self._conn.send_auth_banner(".") 88 | self._conn.send_auth_banner("done!\n") 89 | return notebook_url 90 | except asyncio.TimeoutError: 91 | # Server didn't start on time! 92 | self._conn.send_auth_banner("failed to start server on time!\n") 93 | return None 94 | elif resp.status == 403: 95 | # Token is wrong! 96 | return None 97 | else: 98 | # FIXME: Handle other cases that pop up 99 | resp.raise_for_status() 100 | 101 | async def validate_password(self, username, token): 102 | self.username = username 103 | self.token = token 104 | 105 | headers = {"Authorization": f"token {token}"} 106 | async with ClientSession(headers=headers) as session: 107 | notebook_url = await self.start_user_server(session, username) 108 | if notebook_url is None: 109 | return False 110 | else: 111 | self.notebook_url = notebook_url 112 | return True 113 | 114 | async def _handle_ws_recv(self, stdout, data_packet): 115 | """ 116 | Handle receiving a single data message from terminado. 117 | """ 118 | kind, data = data_packet 119 | if kind == "setup": 120 | # Signals we can get going now! 121 | return 122 | elif kind == "change": 123 | # Sets terminal size, but let's ignore this for now 124 | return 125 | elif kind == "disconnect": 126 | # Not exactly sure what to do here? 
127 | return 128 | elif kind != "stdout": 129 | raise ValueError(f"Unknown type {kind} received from terminado") 130 | stdout.write(data) 131 | await stdout.drain() 132 | 133 | async def _handle_stdin(self, stdin, terminado): 134 | """ 135 | Handle receiving a single byte from stdin 136 | 137 | We aren't buffering anything rn, but maybe we should? 138 | """ 139 | while not stdin.at_eof(): 140 | try: 141 | # Return *upto* 4096 bytes from the stdin buffer 142 | # Returns pretty immediately - doesn't *wait* for 4096 bytes 143 | # to be present in the buffer. 144 | data = await stdin.read(4096) 145 | await terminado.send_stdin(data) 146 | except asyncssh.misc.TerminalSizeChanged as e: 147 | await terminado.set_size(e.height, e.width) 148 | except asyncssh.misc.BreakReceived: 149 | pass 150 | 151 | async def _handle_client(self, stdin, stdout, stderr): 152 | """ 153 | Handle data transfer once session has been fully established. 154 | """ 155 | async with ClientSession() as client, Terminado( 156 | self.notebook_url, self.token, client 157 | ) as terminado: 158 | 159 | # If a pty has been asked for, we tell terminado what the pty's current size is 160 | # Otherwise, terminado uses default size of 80x22 161 | channel = stdin.channel 162 | if channel.get_terminal_type(): 163 | rows, cols = channel.get_terminal_size()[:2] 164 | await terminado.set_size(rows, cols) 165 | 166 | # We run two tasks concurrently 167 | # 168 | # terminado's stdout -> ssh stdout 169 | # ssh stdin -> terminado's stdin 170 | # 171 | # Terminado doesn't seem to separate out stderr, so we leave it alone 172 | # 173 | # When either of these tasks exits, we want to: 174 | # 1. Clean up the other task 175 | # 2. (Ideally) close the terminal opened by terminado on the notebook server 176 | # 3. Close the ssh connection 177 | # 178 | # We don't do all of these yet in a way that I can be satisfied with. 179 | 180 | # Pipe stdout from terminado to ssh 181 | ws_to_stdout = asyncio.create_task( 182 | terminado.on_receive(partial(self._handle_ws_recv, stdout)) 183 | ) 184 | # 185 | # Pipe stdin from ssh to terminado 186 | stdin_to_ws = asyncio.create_task(self._handle_stdin(stdin, terminado)) 187 | 188 | tasks = [ws_to_stdout, stdin_to_ws] 189 | 190 | # Wait for either pipe to be done 191 | done, pending = await asyncio.wait( 192 | tasks, return_when=asyncio.FIRST_COMPLETED 193 | ) 194 | 195 | # At least one of the pipes is done. 196 | # Close the ssh connection explicitly 197 | self._conn.close() 198 | 199 | # Explicitly cancel the other tasks currently pending 200 | # FIXME: I don't know if this actually does anything? 201 | for t in pending: 202 | t.cancel() 203 | 204 | def session_requested(self): 205 | return self._handle_client 206 | 207 | 208 | class JupyterHubSSH(Application): 209 | config_file = Unicode( 210 | "jupyterhub_ssh_config.py", 211 | help=""" 212 | Config file to load JupyterHub SSH config from 213 | """, 214 | config=True, 215 | ) 216 | 217 | port = Integer( 218 | 8022, 219 | help=""" 220 | Port the ssh server listens on 221 | """, 222 | config=True, 223 | ) 224 | 225 | debug = Bool( 226 | True, 227 | help=""" 228 | Turn on debug logging 229 | """, 230 | config=True, 231 | ) 232 | 233 | hub_url = Any( 234 | "", 235 | help=""" 236 | URL of JupyterHub's proxy to connect to. 237 | 238 | jupyterhub-ssh needs to be able to connect to both the JupyterHub API 239 | (/hub/api) and to the users' servers (/user/) via HTTP or 240 | HTTPS.
This URL doesn't have to be the public URL though as the URL is 241 | only be used by jupyterhub-ssh itself. 242 | 243 | *Must* be set. 244 | 245 | Examples: 246 | 247 | - If jupyterhub-ssh is deployed in the same Kubernetes cluster and 248 | namespace as the official JupyterHub Helm chart, you can use 249 | `http://proxy-public` or `http://proxy-http:8000` depending on 250 | how the JupyterHub Helm chart is configured. Use 251 | `http://proxy-http:8000` if the JupyterHub Helm chart has been 252 | configured with both `proxy.https.enabled=true` and 253 | `proxy.https.type=letsencrypt`, otherwise use `http://proxy-public`. 254 | 255 | - If jupyterhub-ssh can't access JupyterHub's proxy via local network or 256 | you don't trust the local network to be secure, use a public URL with 257 | HTTPS such as `https://my-hub-url.com`. 258 | """, 259 | config=True, 260 | ) 261 | 262 | @validate("hub_url") 263 | def _hub_url_cast_string_to_yarl_url(self, proposal): 264 | if isinstance(proposal.value, str): 265 | return URL(proposal.value) 266 | elif isinstance(proposal.value, URL): 267 | return proposal.value 268 | else: 269 | raise ValueError("hub_url must either be a string or a yarl.URL") 270 | 271 | host_key_path = Unicode( 272 | "", 273 | help=""" 274 | Path to host's private SSH Key. 275 | 276 | *Must* be set. 277 | """, 278 | config=True, 279 | ) 280 | 281 | start_timeout = Integer( 282 | 30, 283 | help=""" 284 | Timeout in seconds to wait for a server to start before before closing 285 | the SSH connection. 286 | """, 287 | config=True, 288 | ) 289 | 290 | def init_logging(self): 291 | """ 292 | Make traitlets & asyncssh logging work properly 293 | 294 | self.log is managed by traitlets, while the logger named 295 | asyncssh is managed by asyncssh. We want the debug flag to 296 | control debug logs everywhere, so we wire 'em together here. 297 | 298 | """ 299 | self.log.setLevel(logging.DEBUG if self.debug else logging.INFO) 300 | # This propagates traitlet logs to root logger - somehow, 301 | # no logs were coming out otherwise 302 | self.log.propagate = True 303 | 304 | asyncssh_logger = logging.getLogger("asyncssh") 305 | asyncssh_logger.propagate = True 306 | asyncssh_logger.parent = self.log 307 | asyncssh_logger.setLevel(self.log.level) 308 | 309 | def initialize(self, *args, **kwargs): 310 | super().initialize(*args, **kwargs) 311 | self.load_config_file(self.config_file) 312 | self.init_logging() 313 | 314 | async def start_server(self): 315 | await asyncssh.listen( 316 | host="", 317 | port=self.port, 318 | server_factory=partial(NotebookSSHServer, self), 319 | line_editor=False, 320 | password_auth=True, 321 | server_host_keys=[self.host_key_path], 322 | agent_forwarding=False, # The cause of so much pain! Let's not allow this by default 323 | keepalive_interval=30, # FIXME: Make this configurable 324 | ) 325 | 326 | 327 | def main(): 328 | app = JupyterHubSSH() 329 | app.initialize() 330 | loop = asyncio.get_event_loop() 331 | loop.run_until_complete(app.start_server()) 332 | loop.run_forever() 333 | -------------------------------------------------------------------------------- /jupyterhub_ssh/__main__.py: -------------------------------------------------------------------------------- 1 | from . 
import main 2 | 3 | if __name__ == "__main__": 4 | main() 5 | -------------------------------------------------------------------------------- /jupyterhub_ssh/terminado.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import websockets 4 | 5 | 6 | class Terminado: 7 | def __init__(self, notebook_url, token, session): 8 | self.notebook_url = notebook_url 9 | self.token = token 10 | self.session = session 11 | 12 | self.headers = {"Authorization": f"token {self.token}"} 13 | 14 | async def __aenter__(self): 15 | """ 16 | Create a terminal & connect to it 17 | """ 18 | notebook_secure = self.notebook_url.scheme == "https" 19 | 20 | create_url = self.notebook_url / "api/terminals" 21 | async with self.session.post(create_url, headers=self.headers) as resp: 22 | data = await resp.json() 23 | self.terminal_name = data["name"] 24 | socket_url = self.notebook_url / "terminals/websocket" / self.terminal_name 25 | ws_url = socket_url.with_scheme("wss" if notebook_secure else "ws") 26 | 27 | self.ws = await websockets.connect(str(ws_url), extra_headers=self.headers) 28 | 29 | return self 30 | 31 | async def __aexit__(self, exc_type, exc, tb): 32 | """ 33 | Close the websocket to terminado 34 | """ 35 | await self.ws.close() 36 | 37 | delete_url = self.notebook_url / "api/terminals" / self.terminal_name 38 | async with self.session.delete(delete_url, headers=self.headers) as resp: 39 | # If we send EOD on the websocket URL, the terminal is auto closed 40 | # But we should clean up regardless! 41 | if resp.status != 204 and resp.status != 404: 42 | resp.raise_for_status() 43 | 44 | def send(self, data): 45 | """ 46 | Send given data to terminado socket 47 | 48 | data should be a list of strings 49 | """ 50 | return self.ws.send(json.dumps(data)) 51 | 52 | def send_stdin(self, data): 53 | """ 54 | Send data to stdin of terminado 55 | 56 | data should be a string 57 | """ 58 | return self.send(["stdin", data]) 59 | 60 | def set_size(self, rows, cols): 61 | """ 62 | Set terminado's terminal cols / rows size 63 | """ 64 | return self.send(["set_size", rows, cols]) 65 | 66 | async def on_receive(self, on_receive): 67 | """ 68 | Add callback for when data is received from terminado 69 | 70 | on_receive is called for each incoming message. Receives the JSON decoded 71 | message as only param. 
72 | 73 | Returns when the connection has been closed 74 | """ 75 | while True: 76 | try: 77 | data = await self.ws.recv() 78 | except websockets.exceptions.ConnectionClosedError: 79 | print("websocket done") 80 | break 81 | await on_receive(json.loads(data)) 82 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | with open("README.md", "r") as fh: 4 | long_description = fh.read() 5 | 6 | setuptools.setup( 7 | name="jupyterhub-ssh", 8 | version="0.1", 9 | author="Yuvi Panda", 10 | author_email="yuvipanda@gmail.com", 11 | description="SSH access to JupyterHubs", 12 | long_description=long_description, 13 | long_description_content_type="text/markdown", 14 | url="https://github.com/yuvipanda/jupyterhub-ssh", 15 | packages=setuptools.find_packages(), 16 | classifiers=[ 17 | "Programming Language :: Python :: 3", 18 | "Operating System :: OS Independent", 19 | ], 20 | python_requires=">=3.7", 21 | install_requires=[ 22 | "jupyterhub", 23 | "asyncssh", 24 | "aiohttp", 25 | "yarl", 26 | "websockets", 27 | "async-timeout", 28 | ], 29 | ) 30 | --------------------------------------------------------------------------------
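A minimal sketch of running the SSH frontend directly, outside the Helm chart
(illustrative only: the file contents below are placeholders based on the
JupyterHubSSH traitlets defined in jupyterhub_ssh/__init__.py, not a file
shipped in this repository):

    # jupyterhub_ssh_config.py (picked up via JupyterHubSSH.config_file)
    c.JupyterHubSSH.hub_url = "http://proxy-public"
    c.JupyterHubSSH.host_key_path = "/etc/jupyterhub-ssh/config/hostKey"
    c.JupyterHubSSH.debug = True

    # install and start the server (listens on port 8022 by default)
    #   pip install .
    #   python -m jupyterhub_ssh

Users would then connect with something like `ssh -p 8022 <jupyterhub-username>@<host>`
and enter a JupyterHub API token at the password prompt, which is what
NotebookSSHServer.validate_password verifies against the hub.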