├── .gitmodules
├── .yapfignore
├── kernels-mixer
├── MANIFEST.in
├── kernels_mixer
│ ├── __init__.py
│ ├── config.py
│ ├── websockets_test.py
│ ├── websockets.py
│ ├── kernelspecs.py
│ ├── kernels.py
│ └── kernels_test.py
├── setup.py
├── conftest.py
├── README.md
└── LICENSE
├── google-cloud-jupyter-config
├── MANIFEST.in
├── setup.cfg
├── google
│ └── cloud
│ │ └── jupyter_config
│ │ ├── __init__.py
│ │ ├── tokenrenewer.py
│ │ ├── config_test.py
│ │ └── config.py
├── setup.py
├── README.md
└── LICENSE
├── .style.yapf
├── jupyter-gcs-contents-manager
├── Pipfile
├── setup.py
├── README.md
├── gcs_contents_manager.py
└── Pipfile.lock
├── README.md
├── CONTRIBUTING
├── .github
└── workflows
│ └── presubmit.yaml
├── .gitignore
├── LICENSE
└── .pylintrc
/.gitmodules:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.yapfignore:
--------------------------------------------------------------------------------
1 | node_modules
2 |
--------------------------------------------------------------------------------
/kernels-mixer/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
--------------------------------------------------------------------------------
/google-cloud-jupyter-config/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
--------------------------------------------------------------------------------
/google-cloud-jupyter-config/setup.cfg:
--------------------------------------------------------------------------------
1 | [bdist_wheel]
2 | universal = 1
--------------------------------------------------------------------------------
/.style.yapf:
--------------------------------------------------------------------------------
1 | [style]
2 | based_on_style = google
3 | indent_width = 2
4 |
--------------------------------------------------------------------------------
/jupyter-gcs-contents-manager/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.python.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [requires]
7 | python_version = "3.7"
8 |
9 | [packages]
10 | google-cloud-storage = "~=1.28.1"
11 | nbformat = "~=5.0.6"
12 | notebook = "~=6.1.5"
13 | traitlets = "~=4.3.3"
14 | tornado = "~=6.0.4"
15 |
16 | [dev-packages]
17 |
18 |
--------------------------------------------------------------------------------
/kernels-mixer/kernels_mixer/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | from kernels_mixer.config import configure_kernels_mixer
16 |
--------------------------------------------------------------------------------
/jupyter-gcs-contents-manager/setup.py:
--------------------------------------------------------------------------------
import setuptools

# Packaging metadata for the jupyter-gcs-contents-manager distribution.
# Use the README as the PyPI long description.
with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="jupyter-gcs-contents-manager",
    version="0.0.1",
    description="GCS Contents Manager for Jupyter",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/GoogleCloudPlatform/jupyter-extensions",
    # Single-module distribution: ships only gcs_contents_manager.py.
    py_modules=["gcs_contents_manager"],
    license="Apache License 2.0",
    # NOTE(review): ">=2.7" looks stale -- the sibling Pipfile pins
    # python_version = "3.7". Confirm whether Python 2 is really supported.
    python_requires=">=2.7",
    install_requires=[
        "google-cloud-storage",
        "nbformat",
        "notebook",
        "traitlets",
        "tornado",
    ],
)
24 |
--------------------------------------------------------------------------------
/google-cloud-jupyter-config/google/cloud/jupyter_config/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | from google.cloud.jupyter_config.config import async_get_gcloud_config
16 | from google.cloud.jupyter_config.config import get_gcloud_config
17 | from google.cloud.jupyter_config.config import gcp_project
18 | from google.cloud.jupyter_config.config import gcp_region
19 | from google.cloud.jupyter_config.config import configure_gateway_client
20 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Google Cloud Platform Extensions for Jupyter and JupyterLab
2 |
3 | This repository serves as a common repository for Google-developed Jupyter extensions.
4 |
5 | ## Disclaimer
6 |
7 | This is not an officially supported Google product.
8 |
9 | ## Contents
10 |
11 | ### Gcloud config helper for Jupyter extensions
12 |
13 | The `google-cloud-jupyter-config` subdirectory contains the source code for the
14 | [google-cloud-jupyter-config](https://pypi.org/project/google-cloud-jupyter-config/)
15 | package.
16 |
17 | This is intended to be a reusable library that other extensions can use to get configuration
18 | information from the [gcloud command line tool](https://cloud.google.com/cli).
19 |
20 | ### GCS Contents Manager
21 |
22 | The `jupyter-gcs-contents-manager` subdirectory contains a Jupyter Contents Manager
23 | that reads contents from a GCS bucket.
24 |
25 | ### Kernels Mixer
26 |
27 | The `kernels-mixer` subdirectory contains the source code for the
28 | [kernels-mixer](https://pypi.org/project/kernels-mixer/) package.
29 |
30 | This is an extension for jupyter_server that allows local and remote kernels to be used
31 | simultaneously.
32 |
--------------------------------------------------------------------------------
/CONTRIBUTING:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | We'd love to accept your patches and contributions to this project. There are
4 | just a few small guidelines you need to follow.
5 |
6 | ## Contributor License Agreement
7 |
8 | Contributions to this project must be accompanied by a Contributor License
9 | Agreement (CLA). You (or your employer) retain the copyright to your
10 | contribution; this simply gives us permission to use and redistribute your
11 | contributions as part of the project. Head over to
12 | <https://cla.developers.google.com/> to see your current agreements on file or
13 | to sign a new one.
14 |
15 | You generally only need to submit a CLA once, so if you've already submitted one
16 | (even if it was for a different project), you probably don't need to do it
17 | again.
18 |
19 | ## Code reviews
20 |
21 | All submissions, including submissions by project members, require review. We
22 | use GitHub pull requests for this purpose. Consult
23 | [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
24 | information on using pull requests.
25 |
26 | ## Community Guidelines
27 |
28 | This project follows
29 | [Google's Open Source Community Guidelines](https://opensource.google/conduct/).
--------------------------------------------------------------------------------
/kernels-mixer/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
import setuptools

# Packaging metadata for the kernels-mixer distribution.
# Use the README as the PyPI long description.
with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="kernels-mixer",
    version="0.0.15",
    author="Google, Inc.",
    description="Jupyter server extension that allows mixing local and remote kernels together",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/GoogleCloudPlatform/jupyter-extensions/tree/master/kernels-mixer",
    license="Apache License 2.0",
    packages=setuptools.find_packages(),
    python_requires=">=3.8",
    install_requires=[
        "jupyter_server>=2.7.3",
        "traitlets",
        "google-cloud-jupyter-config",
    ],
)
36 |
--------------------------------------------------------------------------------
/google-cloud-jupyter-config/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
import setuptools

# Packaging metadata for the google-cloud-jupyter-config distribution.
# Use the README as the PyPI long description.
with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="google-cloud-jupyter-config",
    author="Google, Inc.",
    version="0.0.10",
    description="Jupyter configuration utilities using gcloud",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/GoogleCloudPlatform/jupyter-extensions/tree/master/google-cloud-jupyter-config",
    license="Apache License 2.0",
    # find_namespace_packages (not find_packages) because the code lives under
    # the google/cloud namespace package, which has no __init__.py of its own.
    packages=setuptools.find_namespace_packages(),
    python_requires=">=3.8",
    install_requires=[
        "cachetools",
        "jupyter_server>=2.4.0",
        "traitlets",
    ],
)
36 |
--------------------------------------------------------------------------------
/.github/workflows/presubmit.yaml:
--------------------------------------------------------------------------------
1 | name: Presubmit
2 | on:
3 | pull_request:
4 | branches:
5 | - "*"
6 | jobs:
7 | test_matrix:
8 | strategy:
9 | matrix:
10 | os: [ubuntu-latest, windows-latest, macos-latest]
11 | runs-on: ${{ matrix.os }}
12 | steps:
13 | - uses: actions/checkout@v3
14 | - uses: actions/setup-python@v2
15 | with:
16 | python-version: "3.11"
17 | - name: Upgrade pip
18 | run: |
19 | python -m pip install --upgrade pip
20 | - name: Install pytest
21 | run: |
22 | python -m pip install pytest
23 | - name: Install dependencies
24 | run: |
25 | python -m pip install ipykernel jupyter_server pytest-jupyter traitlets
26 | - name: Set up Cloud SDK
27 | uses: 'google-github-actions/setup-gcloud@v1'
28 | - name: Install google-cloud-jupyter-config
29 | run: |
30 | python -m pip install ./google-cloud-jupyter-config
31 | - name: Test gcloud config
32 | run: |
33 | pytest ./google-cloud-jupyter-config
34 | - name: Install kernels-mixer
35 | run: |
36 | python -m pip install ./kernels-mixer
37 |
38 | - name: Setup the `python3` Jupyter kernel
39 | run: |
40 | python3 -m ipykernel install --user
41 |
42 | - name: Test kernels-mixer
43 | run: |
44 | pytest ./kernels-mixer
45 |
--------------------------------------------------------------------------------
/kernels-mixer/kernels_mixer/config.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | from google.cloud.jupyter_config import configure_gateway_client
16 | from jupyter_server.services.sessions.sessionmanager import SessionManager
17 |
18 | from kernels_mixer.kernelspecs import MixingKernelSpecManager
19 | from kernels_mixer.kernels import MixingMappingKernelManager
20 | from kernels_mixer.websockets import DelegatingWebsocketConnection
21 |
22 |
def configure_kernels_mixer(c):
    """Helper method for configuring the given Config object to use the GCP kernel gateway.

    Args:
      c: a traitlets Config object (the `c` available in a Jupyter config file).
    """
    # Configure the gateway client for the GCP kernel gateway (from
    # google.cloud.jupyter_config) before installing the mixing classes.
    configure_gateway_client(c)
    # Merge local and remote kernelspecs/kernels into a single collection.
    c.ServerApp.kernel_spec_manager_class = MixingKernelSpecManager
    c.ServerApp.kernel_manager_class = MixingMappingKernelManager
    # Use the stock local SessionManager so sessions for both local and remote
    # kernels are tracked by this server rather than by the gateway.
    c.ServerApp.session_manager_class = SessionManager
    # Forward websocket traffic to whichever (local or remote) kernel owns it.
    c.ServerApp.kernel_websocket_connection_class = DelegatingWebsocketConnection
30 |
--------------------------------------------------------------------------------
/kernels-mixer/conftest.py:
--------------------------------------------------------------------------------
1 | # Copyright 2024 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import pytest
16 |
17 | from jupyter_client.kernelspec import KernelSpecManager
18 | from jupyter_server.services.sessions.sessionmanager import SessionManager
19 |
20 | from kernels_mixer.kernelspecs import MixingKernelSpecManager
21 | from kernels_mixer.kernels import MixingMappingKernelManager
22 | from kernels_mixer.websockets import DelegatingWebsocketConnection
23 |
24 |
25 | pytest_plugins = ['pytest_jupyter.jupyter_server']
26 |
27 |
@pytest.fixture
def jp_server_config(jp_server_config):
    """Server config used by pytest-jupyter's test server for this package's tests.

    Overrides the pytest-jupyter `jp_server_config` fixture to install the
    kernels-mixer classes.

    NOTE(review): this uses the plain jupyter_client KernelSpecManager rather
    than MixingKernelSpecManager (which is imported above) -- presumably so the
    tests do not require a reachable remote gateway for kernelspecs; confirm.
    """
    return {
        "ServerApp": {
            "kernel_spec_manager_class": KernelSpecManager,
            "kernel_manager_class": MixingMappingKernelManager,
            "kernel_websocket_connection_class": DelegatingWebsocketConnection,
            "session_manager_class": SessionManager,
        },
    }
38 |
--------------------------------------------------------------------------------
/kernels-mixer/README.md:
--------------------------------------------------------------------------------
1 | # Jupyter Kernel Mixing
2 |
3 | This package provides a Jupyter Server extension that allows you to run local and remote
4 | kernels side by side.
5 |
6 | It does this by "mixing" the local and remote kernels together into a single collection
7 | containing both.
8 |
9 | This collection then keeps track of whether specific kernels were local or remote and
10 | forwards any corresponding kernel requests accordingly.
11 |
12 | ## Installation
13 |
14 | Install the `kernels-mixer` Python package using `pip`:
15 |
16 | ```sh
17 | pip install kernels-mixer
18 | ```
19 |
20 | ## Setup
21 |
22 | If you do not already have a Jupyter config file (e.g. `~/.jupyter/jupyter_lab_config.py`),
23 | then first generate one with the following command:
24 |
25 | ```sh
26 | jupyter lab --generate-config
27 | ```
28 |
29 | Then open your config file and add the following two lines to the end:
30 |
31 | ```py
32 | import kernels_mixer
33 | kernels_mixer.configure_kernels_mixer(c)
34 | ```
35 |
36 | ## Kernel Name Uniqueness
37 |
38 | This extension expects that local and remote kernels have different names. If that is not
39 | the case then the local kernel will override the remote kernel. For example, if there is
40 | a local kernel named "python3", then any kernels in the remote kernel gateway named "python3"
41 | will be hidden in favor of it.
42 |
43 | When using this extension, it is recommended that the remote kernel gateway is set up to
44 | add a prefix onto every kernel name in order to distinguish them from the local kernels.
45 |
46 | Similarly, it is recommended that remote kernel display names are augmented to indicate
47 | where they are running.
48 |
49 | The default kernel gateway used with this extension is the regional GCP kernel gateway
50 | hosted under `kernels.googleusercontent.com`, which ensures that both of those conditions
51 | are followed.
52 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 |
8 | # Runtime data
9 | pids
10 | *.pid
11 | *.seed
12 | *.pid.lock
13 |
14 | # Directory for instrumented libs generated by jscoverage/JSCover
15 | lib-cov
16 |
17 | # Coverage directory used by tools like istanbul
18 | coverage
19 | htmlcov
20 |
21 | # nyc test coverage
22 | .nyc_output
23 | .coverage
24 |
25 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
26 | .grunt
27 |
28 | # Bower dependency directory (https://bower.io/)
29 | bower_components
30 |
31 | # node-waf configuration
32 | .lock-wscript
33 |
34 | # Compiled binary addons (https://nodejs.org/api/addons.html)
35 | build/Release
36 |
37 | # Dependency directories
38 | node_modules/
39 | jspm_packages/
40 |
41 | # TypeScript v1 declaration files
42 | typings/
43 |
44 | # Optional npm cache directory
45 | .npm
46 |
47 | # Optional eslint cache
48 | .eslintcache
49 |
50 | # Optional REPL history
51 | .node_repl_history
52 |
53 | # Output of 'npm pack'
54 | *.tgz
55 |
56 | # Yarn files
57 | .yarn-integrity
58 | yarn.lock
59 |
60 | # dotenv environment variables file
61 | .env
62 |
63 | # json secrets
64 | *.secrets.json
65 |
66 | # next.js build output
67 | .next
68 |
69 | # idea
70 | .idea/
71 |
72 | # VScode settings
73 | .vscode/
74 |
75 | # python virtual env
76 | venv/
77 |
78 | # Plugin binary
79 | lib/
80 | *.pyc
81 |
82 | # Python package stuff
83 | build/
84 | dist/
85 | __pycache__
86 | *.egg-info
87 | .ipynb_checkpoints/
88 | *.ipynb
89 |
90 | # TypeScript Build Info
91 | *.tsbuildinfo
92 |
93 | # macOS Folder Settings
94 | *.DS_Store
95 |
96 | # package-lock.json files from extensions
97 | shared/package-lock.json
98 | jupyterlab_*/package-lock.json
99 |
100 | *~
101 |
--------------------------------------------------------------------------------
/google-cloud-jupyter-config/README.md:
--------------------------------------------------------------------------------
1 | # Notebook server configuration using the Google Cloud SDK
2 |
3 | This package provides Python classes that can be used in the Jupyter config file
4 | (e.g. `~/.jupyter/jupyter_lab_config.py`) in order to fill in some configuration
5 | using the Google Cloud SDK's [`gcloud` tool](https://cloud.google.com/sdk/gcloud).
6 |
7 | ## Included features
8 |
9 | This package provides utility methods to look up any configuration options stored
10 | in the active gcloud config, in particular the project and region.
11 |
12 | Additionally, this provides a utility method to update a given
13 | [`Config`](https://traitlets.readthedocs.io/en/latest/config-api.html#traitlets.config.Config)
14 | object to connect to a kernel gateway URL managed by Google.
15 |
16 | ## Prerequisites
17 |
18 | Install both Jupyter and [gcloud](https://cloud.google.com/sdk/docs/install).
19 |
20 | For the kernel gateway feature, you will need an installation of Jupyter that uses the
21 | `jupyter_server` project and the version of `jupyter_server` you have installed needs to be
22 | at least version `2.4.0`.
23 |
24 | You will also need to log in to gcloud:
25 |
26 | ```sh
27 | gcloud auth login
28 | ```
29 |
30 | ... and configure your project and region:
31 |
32 | ```sh
33 | gcloud config set core/project ${PROJECT}
34 | gcloud config set compute/region ${REGION}
35 | ```
36 |
37 | ## Install
38 |
39 | Clone this repository, and from this directory run the following:
40 |
41 | ```sh
42 | pip install .
43 | ```
44 |
45 | ## Setup
46 |
47 | If you do not already have a Jupyter config file (e.g. `~/.jupyter/jupyter_lab_config.py`),
48 | then first generate one with the following command:
49 |
50 | ```sh
51 | jupyter lab --generate-config
52 | ```
53 |
54 | Then open your config file and add the following two lines to the end:
55 |
56 | ```py
57 | import google.cloud.jupyter_config
58 | google.cloud.jupyter_config.configure_gateway_client(c)
59 | ```
60 |
--------------------------------------------------------------------------------
/jupyter-gcs-contents-manager/README.md:
--------------------------------------------------------------------------------
1 | # GCS Contents Manager for Jupyter
2 |
3 | This repository provides a [ContentsManager](https://jupyter-notebook.readthedocs.io/en/stable/extending/contents.html)
4 | for Jupyter that stores notebook files in [Google Cloud Storage](https://cloud.google.com/storage).
5 |
6 | ## Disclaimer
7 |
8 | This is not an officially supported Google product.
9 |
10 | ## Development
11 | 1. Install [pipenv](https://github.com/pypa/pipenv#installation).
12 | 2. Run `pipenv install` in this folder.
13 | 3. Run `pipenv shell` to activate the virtual Python environment with the
14 | necessary dependencies installed.
15 |
16 | ## Prerequisites
17 |
18 | You must have the GCS Python client library installed. You can
19 | install it using the following command:
20 |
21 | ```sh
22 | pip install google-cloud-storage
23 | ```
24 |
25 | Additionally, you must have application default credentials
26 | set up. Those can be created using the following command:
27 |
28 | ```sh
29 | gcloud auth application-default login
30 | ```
31 |
32 | ## Installation
33 |
34 | Download the `gcs_contents_manager.py` file from this repository,
35 | and then copy it into a directory in your PYTHONPATH.
36 |
37 | ## Usage
38 |
39 | Add the following lines to your Jupyter config file (e.g. jupyter_notebook_config.py):
40 |
41 | from gcs_contents_manager import GCSContentsManager
42 | c.NotebookApp.contents_manager_class = GCSContentsManager
43 | c.GCSContentsManager.bucket_name = '${NOTEBOOK_BUCKET}'
44 | c.GCSContentsManager.bucket_notebooks_path = '${NOTEBOOK_PATH}'
45 | c.GCSContentsManager.project = '${NOTEBOOK_PROJECT}'
46 |
47 | For `${NOTEBOOK_BUCKET}` specify the name of the GCS bucket where
48 | you want to store your notebooks, and for `${NOTEBOOK_PATH}`,
49 | specify the name of the directory within that bucket that will be
50 | treated as your root directory by Jupyter.
51 |
52 | For `${NOTEBOOK_PROJECT}` specify the name of your GCP project
53 | that you want to use for Jupyter. For most uses this will be the
54 | same project that owns the GCS bucket.
55 |
--------------------------------------------------------------------------------
/google-cloud-jupyter-config/google/cloud/jupyter_config/tokenrenewer.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import datetime
16 | import subprocess
17 | import sys
18 | import tempfile
19 | import typing
20 |
21 |
22 | from abc import abstractmethod
23 | from traitlets import Int, Unicode
24 | from jupyter_server.gateway.gateway_client import GatewayTokenRenewerBase
25 |
26 |
class CachedTokenRenewerBase(GatewayTokenRenewerBase):
    """Token renewer base class that only renews the token after a specified timeout."""

    token_lifetime_seconds = Int(
        default_value=300,
        config=True,
        help="""Time (in seconds) to wait between successive token renewals.""",
    )

    @abstractmethod
    def force_new_token(
        self,
        auth_header_key: str,
        auth_scheme: typing.Union[str, None],
        **kwargs: typing.Any,
    ):
        """Unconditionally generate a fresh token. Implemented by subclasses."""
        pass

    # Timestamp of the last renewal. The class-level default of datetime.min
    # guarantees the very first get_token call renews; the assignment inside
    # get_token then shadows it with a per-instance value.
    _created = datetime.datetime.min

    def get_token(
        self,
        auth_header_key: str,
        auth_scheme: typing.Union[str, None],
        auth_token: str,
        **kwargs: typing.Any,
    ):
        """Return auth_token, renewing via force_new_token if it is empty or too old."""
        current_time = datetime.datetime.now()
        duration = (current_time - self._created).total_seconds()
        # Renew when there is no token yet or the cached one has outlived its lifetime.
        if (not auth_token) or (duration > self.token_lifetime_seconds):
            auth_token = self.force_new_token(auth_header_key, auth_scheme, **kwargs)
            self._created = datetime.datetime.now()

        return auth_token
61 |
62 |
class CommandTokenRenewer(CachedTokenRenewerBase):
    """Token renewer that invokes an external command to generate the token."""

    token_command = Unicode(
        default_value="",
        config=True,
        help="""External command run to generate auth tokens.""",
    )

    def force_new_token(
        self,
        auth_header_key: str,
        auth_scheme: typing.Union[str, None],
        **kwargs: typing.Any,
    ):
        """Run the specified command to generate a new token, which is taken from its output.

        We reuse the system stderr for the command so that any prompts from it
        will be displayed to the user.
        """
        # Capture stdout into an anonymous temp file (rather than a pipe) while
        # leaving stderr connected to the real terminal for interactive prompts.
        with tempfile.TemporaryFile() as t:
            p = subprocess.run(
                self.token_command,
                stdin=subprocess.DEVNULL,
                stderr=sys.stderr,
                stdout=t,
                check=True,
                shell=True,
                # NOTE(review): `encoding` appears to have no effect here since
                # stdout is redirected to a file and no stream is captured by
                # subprocess itself -- consider removing.
                encoding="UTF-8",
            )
            # TemporaryFile defaults to binary mode, so decode the output explicitly.
            t.seek(0)
            return t.read().decode("UTF-8").strip()
95 |
--------------------------------------------------------------------------------
/google-cloud-jupyter-config/google/cloud/jupyter_config/config_test.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import os
16 | import unittest
17 |
18 | from google.cloud.jupyter_config.config import (
19 | async_run_gcloud_subcommand,
20 | async_get_gcloud_config,
21 | gcp_account,
22 | gcp_credentials,
23 | gcp_project,
24 | gcp_region,
25 | clear_gcloud_cache,
26 | )
27 |
28 |
class TestConfig(unittest.IsolatedAsyncioTestCase):
    """Tests for the gcloud config helpers in google.cloud.jupyter_config.config.

    Mock values are injected through CLOUDSDK_* environment variables, which the
    config helpers pick up, so no real gcloud configuration is modified.
    """

    # Environment overrides supplying the mock account/token/project/region.
    _mock_cloudsdk_variables = {
        "CLOUDSDK_AUTH_ACCESS_TOKEN": "example-token",
        "CLOUDSDK_CORE_ACCOUNT": "example-account",
        "CLOUDSDK_CORE_PROJECT": "example-project",
        "CLOUDSDK_DATAPROC_REGION": "example-region",
    }

    def setUp(self):
        # Save any pre-existing CLOUDSDK_* variables so tearDown can restore them.
        self.original_cloudsdk_variables = {}
        for key in os.environ:
            if key.startswith("CLOUDSDK_"):
                self.original_cloudsdk_variables[key] = os.environ[key]
        for key in self._mock_cloudsdk_variables:
            os.environ[key] = self._mock_cloudsdk_variables[key]
        # Drop any gcloud lookups cached before the mocks were installed.
        clear_gcloud_cache()

    def tearDown(self):
        # Remove the mock variables first, then restore originals (including any
        # original values for keys the mocks overwrote).
        for key in self._mock_cloudsdk_variables:
            del os.environ[key]
        for key in self.original_cloudsdk_variables:
            os.environ[key] = self.original_cloudsdk_variables[key]
        # Drop values cached under the mock environment.
        clear_gcloud_cache()

    def test_gcp_account(self):
        self.assertEqual(gcp_account(), "example-account")
        # The second lookup must come from the cache, not the changed environment.
        os.environ["CLOUDSDK_CORE_ACCOUNT"] = "should-not-be-used"
        self.assertEqual(gcp_account(), "example-account")

    def test_gcp_credentials(self):
        self.assertEqual(gcp_credentials(), "example-token")
        os.environ["CLOUDSDK_AUTH_ACCESS_TOKEN"] = "should-not-be-used"
        self.assertEqual(gcp_credentials(), "example-token")

    def test_gcp_project(self):
        self.assertEqual(gcp_project(), "example-project")
        os.environ["CLOUDSDK_CORE_PROJECT"] = "should-not-be-used"
        self.assertEqual(gcp_project(), "example-project")

    def test_gcp_region(self):
        self.assertEqual(gcp_region(), "example-region")
        os.environ["CLOUDSDK_DATAPROC_REGION"] = "should-not-be-used"
        self.assertEqual(gcp_region(), "example-region")

    async def test_async_run_gcloud_subcommand(self):
        test_project = await async_run_gcloud_subcommand("config get core/project")
        self.assertEqual(test_project, "example-project")

    async def test_async_gcloud_config(self):
        test_account = await async_get_gcloud_config(
            "configuration.properties.core.account"
        )
        self.assertEqual(test_account, "example-account")
82 |
83 |
84 | if __name__ == "__main__":
85 | unittest.main()
86 |
--------------------------------------------------------------------------------
/kernels-mixer/kernels_mixer/websockets_test.py:
--------------------------------------------------------------------------------
1 | # Copyright 2024 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import datetime
16 | import json
17 | import uuid
18 |
19 | import pytest
20 |
21 |
@pytest.fixture
async def test_kernel(jp_fetch):
    """Start a "python3" kernel via the kernels REST API and return its model dict."""
    response = await jp_fetch(
        "api", "kernels", method="POST", body=json.dumps({"name": "python3"}))
    return json.loads(response.body.decode("utf-8"))
28 |
async def close_and_drain_pending_messages(ws):
    """Close the websocket and consume any messages still in flight.

    Tornado signals that a websocket has fully closed by returning None from
    read_message, so we keep reading until that sentinel appears.

    Args:
        ws: The websocket connection to close and drain.

    Raises:
        AssertionError: If messages are still pending after 10 reads.
    """
    ws.close()
    for _ in range(10):
        # Fix: compare against None with `is`, not `==` (identity, not equality).
        if await ws.read_message() is None:
            return
    raise AssertionError("failed to drain the pending messages after 10 attempts")
36 |
37 |
async def test_websocket(jp_fetch, jp_ws_fetch, test_kernel):
    """End-to-end check that code executed over the kernel websocket returns a result."""
    k = await test_kernel
    assert "id" in k

    # The kernelspec for the started kernel should carry the " (Local)"
    # display-name suffix added by the kernels mixer.
    ksr = await jp_fetch("api", "kernelspecs", k.get("name"))
    ks = json.loads(ksr.body.decode("utf-8"))
    assert " (Local)" in ks.get("spec", {}).get("display_name", None)

    session_id = uuid.uuid1().hex
    message_id = uuid.uuid1().hex
    ws = await jp_ws_fetch("api", "kernels", k["id"], "channels")
    # Send an execute_request for "1 + 2" on the shell channel, following the
    # Jupyter kernel messaging protocol (version 5.2).
    await ws.write_message(json.dumps({
        "channel": "shell",
        "header": {
            "date": datetime.datetime.now().isoformat(),
            "session": session_id,
            "msg_id": message_id,
            "msg_type": "execute_request",
            "username": "",
            "version": "5.2",
        },
        "parent_header": {},
        "metadata": {},
        "content": {
            "code": "1 + 2",
            "silent": False,
            "allow_stdin": False,
            "stop_on_error": True,
        },
        "buffers": [],
    }))

    # We expect multiple response messages, including at least (but possibly more):
    #
    # An initial "busy" status message in response to a kernel info request.
    # A subsequent "idle" status message in response to a kernel info request.
    # A busy status message in response to the execute request.
    # An execute input message.
    # An execute result message.
    # An execute reply message.
    # An idle status message in response to the execute request.
    for attempt in range(10):
        resp = await ws.read_message()
        resp_json = json.loads(resp)
        response_type = resp_json.get("header", {}).get("msg_type", None)
        parent_message = resp_json.get("parent_header", {}).get("msg_id", None)
        # Only the execute_result parented to *our* request counts; skip
        # status updates and other unrelated messages.
        if response_type == "execute_result" and parent_message == message_id:
            result = resp_json.get("content", {}).get("data", {}).get("text/plain", None)
            assert result == "3"

            await close_and_drain_pending_messages(ws)
            return

    await close_and_drain_pending_messages(ws)
    raise AssertionError("Never got a response to the code execution")
93 |
--------------------------------------------------------------------------------
/kernels-mixer/kernels_mixer/websockets.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import datetime
16 | import json
17 | import uuid
18 |
19 | from tornado.escape import json_decode, utf8
20 |
21 | from jupyter_server.gateway.connections import GatewayWebSocketConnection
22 | from jupyter_server.gateway.managers import GatewayKernelManager
23 | from jupyter_server.services.kernels.connection.base import BaseKernelWebsocketConnection
24 | from jupyter_server.services.kernels.connection.channels import ZMQChannelsWebsocketConnection
25 |
class StartingReportingWebsocketConnection(GatewayWebSocketConnection):
    """Extension of GatewayWebSocketConnection that reports a starting status on connection.

    The purpose of this class is to bridge the time period between when the websocket
    connection from the client is created and the websocket connection to the Gateway
    server is created.

    During that time, the JupyterLab UI believes that it is connected to the running
    kernel, but it has not yet received any status messages from the kernel, so it will
    display a kernel status of "Unknown".

    That "Unknown" status is not very helpful to users because they have no idea why
    the kernel status is unknown and have no indication that something is still
    happening under the hood.

    To improve that, we report a provisional status as soon as the client connection
    is established. This provisional status will be replaced by the real status
    reported by the kernel as soon as the backend kernel connection is established.

    The only kernel statuses supported by the JupyterLab UI are "starting", "idle",
    "busy", "restarting", and "dead".

    Of those, the "starting" message is the closest match to what is going on, so
    we use that one.

    However, the "starting" message is only supposed to be reported once, so we
    also intercept any "starting" messages received from the kernel and discard
    them, as we know we will have already reported this status.
    """
    # Note: the previous no-op __init__ override (which only called super())
    # has been removed; it added nothing.

    async def connect(self):
        """Send a synthetic "starting" status to the client, then connect to the gateway.

        The kernel message format is defined here:
        https://jupyter-client.readthedocs.io/en/latest/messaging.html#general-message-format
        """
        status_message_id = str(uuid.uuid4())
        status_message = {
            "header": {
                "msg_id": status_message_id,
                "session": self.kernel_id,
                "username": "username",
                "date": datetime.datetime.utcnow().isoformat(),
                "msg_type": "status",
                "version": "5.3",
            },
            "parent_header": {},
            "metadata": {},
            "msg_id": status_message_id,
            "msg_type": "status",
            "channel": "iopub",
            "content": {
                "execution_state": "starting",
            },
            "buffers": [],
        }
        # Call the parent implementation directly so our own filter (which
        # drops "starting" messages) does not swallow this synthetic one.
        super().handle_outgoing_message(json.dumps(status_message))
        return await super().connect()

    def is_starting_message(self, incoming_msg):
        """Return True if the raw message reports a "starting" execution state."""
        try:
            msg = json_decode(utf8(incoming_msg))
            return msg.get("content", {}).get("execution_state", "") == "starting"
        except Exception:
            # Messages that cannot be decoded are not status messages; let
            # them flow through rather than failing the connection.
            return False

    def handle_outgoing_message(self, incoming_msg, *args, **kwargs):
        """Forward messages to the client, dropping duplicate "starting" statuses."""
        if self.is_starting_message(incoming_msg):
            # We already sent a starting message in connect(), so drop this one.
            return
        return super().handle_outgoing_message(incoming_msg, *args, **kwargs)
98 |
99 |
class DelegatingWebsocketConnection(BaseKernelWebsocketConnection):
    """Kernel websocket connection that forwards every operation to a delegate.

    The delegate is chosen from the parent KernelManager: a remote kernel
    (managed via a GatewayKernelManager) gets a
    StartingReportingWebsocketConnection (a GatewayWebSocketConnection
    subclass), while a local kernel gets a ZMQChannelsWebsocketConnection.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.kernel_manager.is_remote:
            connection_class = StartingReportingWebsocketConnection
        else:
            connection_class = ZMQChannelsWebsocketConnection
        self.delegate = connection_class(
            parent=self.kernel_manager.delegate,
            websocket_handler=self.websocket_handler,
            config=self.config)

    async def connect(self):
        """Establish the delegate's connection."""
        return await self.delegate.connect()

    def disconnect(self):
        """Tear down the delegate's connection."""
        return self.delegate.disconnect()

    def handle_incoming_message(self, msg):
        """Forward a client message to the delegate."""
        return self.delegate.handle_incoming_message(msg)

    def handle_outgoing_message(self, stream, msg):
        """Forward a kernel message to the delegate."""
        return self.delegate.handle_outgoing_message(stream, msg)

    async def prepare(self):
        # `prepare` comes from ZMQChannelsWebsocketConnection, not from the
        # base class. The jupyter_server kernels websocket handler calls it
        # when present, so forward it only if the delegate defines one.
        if hasattr(self.delegate, "prepare"):
            return await self.delegate.prepare()
140 |
--------------------------------------------------------------------------------
/kernels-mixer/kernels_mixer/kernelspecs.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | from jupyter_client.kernelspec import KernelSpecManager
16 | from jupyter_core.utils import ensure_async
17 | from jupyter_server.gateway.managers import GatewayKernelSpecManager
18 |
19 | from traitlets import Type, Unicode, default
20 |
21 |
def append_display_name(spec, suffix):
    """Append the given suffix onto the display name of the given kernelspec.

    The supplied kernelspec is updated in place.

    Args:
        spec: Either an object with a "display_name" attribute, or a
            dictionary with a "display_name" string field.
        suffix: A string suffix to append to the spec's display name.
    """
    try:
        # Object-style spec: mutate the attribute directly.
        spec.display_name += suffix
    except AttributeError:
        # Dict-style spec: a missing display name is treated as empty.
        spec["display_name"] = spec.get("display_name", "") + suffix
36 |
37 |
class MixingKernelSpecManager(KernelSpecManager):
    """KernelSpecManager that merges local kernelspecs with remote (gateway) ones.

    Local kernelspecs take precedence over remote kernelspecs with the same
    name. Display names are suffixed so users can tell the two apart.
    """

    local_display_name_suffix = Unicode(
        " (Local)",
        config=True,
        help="Suffix added to the display names of local kernels.",
    )

    remote_display_name_suffix = Unicode(
        " (Remote)",
        config=True,
        help="Suffix added to the display names of remote kernels.",
    )

    local_kernel_spec_manager_class = Type(
        config=True,
        default_value=KernelSpecManager,
        help="""
        The kernel spec manager class to use for local kernels.

        Must be a subclass of `jupyter_client.kernelspec.KernelSpecManager`.""",
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.local_manager = self.local_kernel_spec_manager_class(*args, **kwargs)
        self.remote_manager = GatewayKernelSpecManager(*args, **kwargs)
        # Kernel names discovered from each backend; populated by get_all_specs.
        self._local_kernels = set()
        self._remote_kernels = set()

    def is_remote(self, kernel_name):
        """Return whether the named kernel was discovered on the remote gateway."""
        return kernel_name in self._remote_kernels

    async def get_all_specs(self):
        """Get a list of all kernelspecs supported.

        This is a combination of the kernelspecs supported by both the local and remote
        kernel spec managers.

        In case a kernel name is supported by both the local and remote kernel spec
        manager, the local kernel spec manager's version is used and the remote
        one is ignored.

        The return value is a dictionary mapping kernel names to kernelspecs.

        Each kernelspec is a dictionary with the following keys:
        1. "name": The name of the kernel, which must adhere to the Jupyter API naming rules.
        2. "spec": A "KernelSpecFile" resource, which itself is a dictionary.
        3. "resources": A dictionary mapping resource names to URIs.

        A KernelSpecFile dictionary is described here:
        https://github.com/jupyter-server/jupyter_server/blob/c5dc0f696f376e1db5a9a0cbcebb40a0bf98875c/jupyter_server/services/api/api.yaml#L781

        Of particular note, it contains one entry with a key of "display_name" whose value is
        the name for the kernel displayed in the JupyterLab launcher and kernel picker.

        Returns:
            A map from kernel names (str) to kernelspecs.
        """
        ks = self.local_manager.get_all_specs()
        for name, kernelspec in ks.items():
            spec = kernelspec.get("spec", {})
            append_display_name(spec, self.local_display_name_suffix)
            # Add in place instead of rebuilding the set on every iteration.
            self._local_kernels.add(name)
        try:
            remote_ks = await ensure_async(self.remote_manager.get_all_specs())
            for name, kernelspec in remote_ks.items():
                # Local kernels win on name collisions.
                if name not in self._local_kernels:
                    spec = kernelspec.get("spec", {})
                    append_display_name(spec, self.remote_display_name_suffix)
                    ks[name] = kernelspec
                    self._remote_kernels.add(name)
        except Exception as ex:
            self.log.exception('Failure listing remote kernelspecs: %s', ex)
            # Otherwise ignore the exception, so that local kernels are still usable.
        self.log.debug(f'Found {len(self._local_kernels)} local kernels: {self._local_kernels}')
        self.log.debug(f'Found {len(self._remote_kernels)} remote kernels: {self._remote_kernels}')
        return ks

    async def get_original_kernel_spec(self, kernel_name, *args, **kwargs):
        """Return the kernelspec from the owning manager, without a display-name suffix."""
        if self.is_remote(kernel_name):
            self.log.debug(f'Looking up remote kernel {kernel_name}...')
            return await self.remote_manager.get_kernel_spec(kernel_name, *args, **kwargs)
        self.log.debug(f'Looking up local kernel {kernel_name}...')
        return self.local_manager.get_kernel_spec(kernel_name, *args, **kwargs)

    async def get_kernel_spec(self, kernel_name, *args, **kwargs):
        """Return the kernelspec with the appropriate local/remote suffix applied."""
        spec = await self.get_original_kernel_spec(kernel_name, *args, **kwargs)
        suffix = self.local_display_name_suffix
        if self.is_remote(kernel_name):
            suffix = self.remote_display_name_suffix
        append_display_name(spec, suffix)
        return spec

    async def get_kernel_spec_resource(self, kernel_name, path):
        """Fetch a kernelspec resource for remote kernels; None for local ones."""
        if self.is_remote(kernel_name):
            self.log.debug(f'Looking up remote kernel spec resource for {kernel_name}...')
            return await self.remote_manager.get_kernel_spec_resource(kernel_name, path)
        # Local kernelspec resources are served from disk by the server itself.
        return None
138 |
--------------------------------------------------------------------------------
/kernels-mixer/kernels_mixer/kernels.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import copy
16 |
17 | from jupyter_client.kernelspec import KernelSpecManager
18 | from jupyter_client.manager import in_pending_state
19 | from jupyter_core.utils import ensure_async, run_sync
20 | from jupyter_server.gateway.managers import GatewayMappingKernelManager
21 | from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager
22 | from jupyter_server.services.kernels.kernelmanager import ServerKernelManager
23 |
24 | from traitlets import Instance, default, observe
25 |
26 | from .kernelspecs import MixingKernelSpecManager
27 |
class MixingMappingKernelManager(AsyncMappingKernelManager):
    """Multi-kernel manager that mixes local kernels with remote (gateway) kernels.

    Operations are routed to either a local AsyncMappingKernelManager or a
    GatewayMappingKernelManager; individual kernels are wrapped in
    MixingKernelManager instances that pick their delegate per kernel.
    """

    kernel_spec_manager = Instance(KernelSpecManager)

    @default("kernel_spec_manager")
    def _default_kernel_spec_manager(self):
        # NOTE(review): returns a dotted path for an Instance trait;
        # presumably traitlets resolves and instantiates it — confirm.
        return "kernels_mixer.kernelspecs.MixingKernelSpecManager"

    @observe("kernel_spec_manager")
    def _observe_kernel_spec_manager(self, change):
        # If a non-mixing kernel spec manager was configured, wrap it as the
        # local side of a new MixingKernelSpecManager and propagate that to
        # the parent server app so both agree.
        self.log.debug(f"Configured kernel spec manager: {change.new}")
        if isinstance(change.new, MixingKernelSpecManager):
            return
        self.kernel_spec_manager = MixingKernelSpecManager(parent=change.new.parent)
        self.kernel_spec_manager.local_manager = change.new
        self.parent.kernel_spec_manager = self.kernel_spec_manager

    @default("kernel_manager_class")
    def _default_kernel_manager_class(self):
        # Each kernel gets a MixingKernelManager, which delegates per kernel.
        return "kernels_mixer.kernels.MixingKernelManager"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.log.debug(f"Kernel spec manager: {self.kernel_spec_manager}")

        # Set up the local kernel management.
        self.local_manager = AsyncMappingKernelManager(
            parent=self.parent,
            log=self.log,
            connection_dir=self.connection_dir,
            kernel_spec_manager=self.kernel_spec_manager.local_manager)

        # Set up the remote kernel management.
        self.remote_manager = GatewayMappingKernelManager(
            parent=self.parent,
            log=self.log,
            connection_dir=self.connection_dir,
            kernel_spec_manager=self.kernel_spec_manager.remote_manager)

    def has_remote_kernels(self):
        """Return True if any currently-tracked kernel is a remote one."""
        for kid in self._kernels:
            if self._kernels[kid].is_remote:
                return True
        return False

    async def list_kernels(self):
        """List kernel models, refreshing remote models first when needed."""
        if self.has_remote_kernels():
            # We have remote kernels, so we must call `list_kernels` on the
            # Gateway kernel manager to update our kernel models.
            try:
                await ensure_async(self.remote_manager.list_kernels())
            except Exception as ex:
                self.log.exception('Failure listing remote kernels: %s', ex)
                # Ignore the exception listing remote kernels, so that local kernels are still usable.
        return super().list_kernels()

    def kernel_model(self, kernel_id):
        """Return the model dict for the given kernel (synchronously).

        Raises:
            Whatever _check_kernel_id raises for an unknown kernel_id.
        """
        self._check_kernel_id(kernel_id)
        kernel = self._kernels[kernel_id]
        # Normally, calls to `run_sync` pose a danger of locking up Tornado's
        # single-threaded event loop.
        #
        # However, the call below should be fine because it cannot block for an
        # arbitrary amount of time.
        #
        # This call blocks on the `model` method defined below, which in turn
        # blocks on the `GatewayMappingKernelManager`'s `kernel_model` method
        # (https://github.com/jupyter-server/jupyter_server/blob/547f7a244d89f79dd09fa7d382322d1c40890a3f/jupyter_server/gateway/managers.py#L94).
        #
        # That will only take a small, deterministic amount of time to complete
        # because that `kernel_model` only operates on existing, in-memory data
        # and does not block on any outgoing network requests.
        return run_sync(kernel.model)()
101 |
class MixingKernelManager(ServerKernelManager):
    """Kernel manager that delegates to either the local or the remote manager.

    The mixer assigns its own kernel ID to each kernel, and keeps a class-level
    map from that ID to the ID assigned by the delegate multi-kernel manager.
    """

    # Maps mixer-assigned kernel IDs to the delegate manager's kernel IDs.
    # Class-level so all MixingKernelManager instances share one map.
    _kernel_id_map = {}

    # Note: the previous no-op __init__ override (which only called super())
    # has been removed; it added nothing.

    @property
    def is_remote(self):
        """Whether this kernel's kernelspec belongs to the remote gateway."""
        if not self.kernel_name or not self.kernel_id:
            return False
        return self.parent.kernel_spec_manager.is_remote(self.kernel_name)

    @property
    def delegate_kernel_id(self):
        """The delegate manager's ID for this kernel, or None if not started."""
        if not self.kernel_id:
            return None
        return MixingKernelManager._kernel_id_map.get(self.kernel_id, None)

    @property
    def delegate_multi_kernel_manager(self):
        """The multi-kernel manager (local or remote) that owns the delegate."""
        if self.is_remote:
            return self.parent.remote_manager
        return self.parent.local_manager

    @property
    def delegate(self):
        """The delegate kernel manager instance, or None if not started."""
        if not self.kernel_name or not self.kernel_id:
            return None
        return self.delegate_multi_kernel_manager.get_kernel(self.delegate_kernel_id)

    @property
    def has_kernel(self):
        """Whether the delegate currently has a running kernel."""
        delegate = self.delegate
        if not delegate:
            # Bug fix: this previously returned the undefined name `false`,
            # which raised a NameError whenever there was no delegate.
            return False
        return delegate.has_kernel

    def client(self, *args, **kwargs):
        """Return a client for the delegate kernel, or None if not started."""
        delegate = self.delegate
        if not delegate:
            return None
        return delegate.client(*args, **kwargs)

    @in_pending_state
    async def start_kernel(self, *args, **kwargs):
        """Start the kernel via the appropriate delegate manager."""
        self.kernel_name = kwargs.get("kernel_name", self.kernel_name)
        kernel_id = kwargs.pop("kernel_id", self.kernel_id)
        if kernel_id:
            self.kernel_id = kernel_id
        created_kernel_id = await ensure_async(self.delegate_multi_kernel_manager.start_kernel(
            kernel_name=self.kernel_name, **kwargs))
        # Remember which delegate-side ID corresponds to our kernel ID.
        MixingKernelManager._kernel_id_map[self.kernel_id] = created_kernel_id

    async def shutdown_kernel(self, *args, **kwargs):
        """Shut down the delegate kernel and forget its ID mapping."""
        await ensure_async(self.delegate_multi_kernel_manager.shutdown_kernel(
            self.delegate_kernel_id, *args, **kwargs))
        # Use a default so a repeated shutdown does not raise KeyError.
        MixingKernelManager._kernel_id_map.pop(self.kernel_id, None)

    async def interrupt_kernel(self):
        await ensure_async(self.delegate_multi_kernel_manager.interrupt_kernel(
            self.delegate_kernel_id))

    async def restart_kernel(self, *args, **kwargs):
        await ensure_async(self.delegate_multi_kernel_manager.restart_kernel(
            self.delegate_kernel_id, *args, **kwargs))

    async def model(self):
        """Return the delegate's kernel model, re-keyed with our kernel ID."""
        delegate_model = await ensure_async(
            self.delegate_multi_kernel_manager.kernel_model(self.delegate_kernel_id))
        # Deep-copy so we do not mutate the delegate's cached model.
        model = copy.deepcopy(delegate_model)
        model["id"] = self.kernel_id
        return model
174 |
--------------------------------------------------------------------------------
/google-cloud-jupyter-config/google/cloud/jupyter_config/config.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import asyncio
16 | import datetime
17 | import json
18 | import subprocess
19 | import sys
20 | import tempfile
21 | import threading
22 |
23 | import cachetools
24 |
25 | from google.cloud.jupyter_config.tokenrenewer import CommandTokenRenewer
26 |
27 |
def run_gcloud_subcommand(subcmd):
    """Run a specified gcloud sub-command and return its output.

    The supplied subcommand is the full command line invocation, *except* for
    the leading `gcloud` being omitted.

    e.g. `info` instead of `gcloud info`.

    We reuse the system stderr for the command so that any prompts from gcloud
    will be displayed to the user. Stdout is captured via a temporary file
    rather than a pipe, and decoded manually below.

    Args:
        subcmd: The gcloud sub-command line, without the leading `gcloud`.

    Returns:
        The sub-command's stdout, decoded as UTF-8 and stripped.

    Raises:
        subprocess.CalledProcessError: If the sub-command exits non-zero.
    """
    with tempfile.TemporaryFile() as t:
        # Cleanup: the result of subprocess.run was previously bound to an
        # unused variable, and an `encoding` kwarg was passed even though no
        # stream uses a pipe (output is decoded explicitly below).
        subprocess.run(
            f"gcloud {subcmd}",
            stdin=subprocess.DEVNULL,
            stderr=sys.stderr,
            stdout=t,
            check=True,
            shell=True,
        )
        t.seek(0)
        return t.read().decode("UTF-8").strip()
51 |
52 |
async def async_run_gcloud_subcommand(subcmd):
    """Asynchronously run a specified gcloud sub-command and return its output.

    The supplied subcommand is the full command line invocation, *except* for
    the leading `gcloud` being omitted.

    e.g. `info` instead of `gcloud info`.

    We reuse the system stderr for the command so that any prompts from gcloud
    will be displayed to the user.

    Args:
        subcmd: The gcloud sub-command line, without the leading `gcloud`.

    Returns:
        The sub-command's stdout, decoded as UTF-8 and stripped.

    Raises:
        subprocess.CalledProcessError: If the sub-command exits non-zero.
    """
    cmd = f"gcloud {subcmd}"
    with tempfile.TemporaryFile() as t:
        p = await asyncio.create_subprocess_shell(
            cmd,
            stdin=subprocess.DEVNULL,
            stderr=sys.stderr,
            stdout=t,
        )
        await p.wait()
        if p.returncode != 0:
            # Include the failing command so the error is actionable
            # (previously the command was reported as None).
            raise subprocess.CalledProcessError(p.returncode, cmd)
        t.seek(0)
        return t.read().decode("UTF-8").strip()
76 |
77 |
# Cache gcloud results: invoking gcloud is heavyweight, so results are kept
# for up to 20 minutes. The lock makes the cache thread-safe; the decorator
# also exposes it as `cache_lock` for the async code path below.
@cachetools.cached(
    cache=cachetools.TTLCache(maxsize=1024, ttl=(20 * 60)), lock=threading.Lock()
)
def cached_gcloud_subcommand(subcmd):
    """Like run_gcloud_subcommand, but caches results for up to 20 minutes."""
    return run_gcloud_subcommand(subcmd)
83 |
84 |
def clear_gcloud_cache():
    """Clear the TTL cache used to cache gcloud subcommand results.

    After clearing, subsequent lookups re-run gcloud instead of returning
    cached values (e.g. after the active gcloud configuration changes).
    """
    # `cache_clear` is provided by the cachetools.cached decorator above.
    cached_gcloud_subcommand.cache_clear()
88 |
89 |
90 | def _get_config_field(config, field):
91 | subconfig = config
92 | for path_part in field.split("."):
93 | if path_part:
94 | subconfig = subconfig.get(path_part, {})
95 | return subconfig
96 |
97 |
def get_gcloud_config(field):
    """Helper method that invokes the gcloud config helper.

    Invoking gcloud commands is a very heavyweight process, so the config is
    cached for up to 20 minutes.

    The config is generated with a minimum credential expiry of 30 minutes, so
    that we can ensure that the caller can use the cached credentials for at
    least ~10 minutes even if the cache entry is about to expire.

    Args:
        field: A period-separated search path for the config value to return.
            For example, 'configuration.properties.core.project'
    Returns:
        A JSON object whose type depends on the search path for the field within
        the gcloud config.

        For example, if the field is `configuration.properties.core.project`,
        then the return value will be a string. In comparison, if the field
        is `configuration.properties.core`, then the return value will be a
        dictionary containing a field named `project` with a string value.
    """
    raw_config = cached_gcloud_subcommand(
        "config config-helper --min-expiry=30m --format=json")
    return _get_config_field(json.loads(raw_config), field)
124 |
125 |
async def async_get_gcloud_config(field):
    """Async helper method that invokes the gcloud config helper.

    This is like `get_gcloud_config` but does not block on the underlying
    gcloud invocation when there is a cache miss.

    Args:
        field: A period-separated search path for the config value to return.
            For example, 'configuration.properties.core.project'
    Returns:
        An awaitable that resolves to a JSON object with a type depending on
        the search path for the field within the gcloud config.

        For example, if the field is `configuration.properties.core.project`,
        then the JSON object will be a string. In comparison, if the field
        is `configuration.properties.core`, then it will be a dictionary
        containing a field named `project` with a string value.
    """
    subcommand = "config config-helper --min-expiry=30m --format=json"
    # Bug fix: the cachetools.cached decorator stores entries under keys built
    # with cachetools.keys.hashkey, not under the raw argument string. Using
    # the raw string here meant the async path never shared cache entries
    # with the synchronous path, causing redundant gcloud invocations.
    cache_key = cachetools.keys.hashkey(subcommand)
    # Fast path: serve from the shared cache under its lock.
    with cached_gcloud_subcommand.cache_lock:
        if cache_key in cached_gcloud_subcommand.cache:
            cached_config = json.loads(cached_gcloud_subcommand.cache[cache_key])
            return _get_config_field(cached_config, field)

    # Cache miss: run gcloud without holding the lock (so the event loop is
    # not blocked); concurrent callers may each invoke gcloud, last one wins.
    out = await async_run_gcloud_subcommand(subcommand)
    with cached_gcloud_subcommand.cache_lock:
        cached_gcloud_subcommand.cache[cache_key] = out
    return _get_config_field(json.loads(out), field)
156 |
157 |
def gcp_account():
    """Helper method to get the account configured through gcloud"""
    return get_gcloud_config("configuration.properties.core.account")
161 |
162 |
def gcp_credentials():
    """Helper method to get the access token for the credentials configured through gcloud"""
    return get_gcloud_config("credential.access_token")
166 |
167 |
def gcp_project():
    """Helper method to get the project configured through gcloud"""
    return get_gcloud_config("configuration.properties.core.project")
171 |
172 |
def gcp_project_number():
    """Helper method to get the project number for the project configured through gcloud"""
    project = gcp_project()
    # The project *number* is not part of the config-helper output, so it
    # requires a separate (uncached) `gcloud projects describe` call.
    return run_gcloud_subcommand(
        f'projects describe {project} --format="value(projectNumber)"'
    )
179 |
180 |
def gcp_region():
    """Helper method to get the region configured through gcloud.

    Prefers the Dataproc region; falls back to the Compute region when the
    Dataproc one is unset.
    """
    return (get_gcloud_config("configuration.properties.dataproc.region")
            or get_gcloud_config("configuration.properties.compute.region"))
187 |
188 |
def gcp_kernel_gateway_url():
    """Helper method to return the kernel gateway URL for the configured project and region."""
    # Note: the hostname is keyed by the project *number*, not the project ID.
    project = gcp_project_number()
    region = gcp_region()
    return f"https://{project}-dot-{region}.kernels.googleusercontent.com"
194 |
195 |
def configure_gateway_client(c):
    """Helper method for configuring the given Config object to use the GCP kernel gateway.

    Args:
        c: A traitlets Config-like object whose GatewayClient and
            CommandTokenRenewer sections are populated in place.
    """
    c.GatewayClient.url = gcp_kernel_gateway_url()
    # Refresh gateway credentials by shelling out to gcloud for a new token.
    c.GatewayClient.gateway_token_renewer_class = CommandTokenRenewer
    c.CommandTokenRenewer.token_command = (
        'gcloud config config-helper --format="value(credential.access_token)"'
    )

    # Version 2.8.0 of the `jupyter_server` package requires the `auth_token`
    # value to be set to a non-empty value or else it will never invoke the
    # token renewer. To accommodate this, we set it to an invalid initial
    # value that will be immediately replaced by the token renewer.
    #
    # See https://github.com/jupyter-server/jupyter_server/issues/1339 for more
    # details and discussion.
    c.GatewayClient.auth_token = "Initial, invalid value"

    c.GatewayClient.auth_scheme = "Bearer"
    c.GatewayClient.headers = '{"Cookie": "_xsrf=XSRF", "X-XSRFToken": "XSRF"}'
215 |
--------------------------------------------------------------------------------
/kernels-mixer/kernels_mixer/kernels_test.py:
--------------------------------------------------------------------------------
1 | # Copyright 2023 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import datetime
16 | import time
17 | import unittest
18 | import uuid
19 |
20 | from jupyter_client.kernelspec import KernelSpec
21 | from jupyter_client.kernelspec import KernelSpecManager
22 | from jupyter_core.utils import ensure_async, run_sync
23 | from jupyter_server.gateway.gateway_client import GatewayClient
24 | from jupyter_server.gateway.managers import GatewayMappingKernelManager
25 | from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager, ServerKernelManager
26 |
27 | from kernels_mixer.kernels import MixingMappingKernelManager
28 | from kernels_mixer.kernelspecs import MixingKernelSpecManager
29 |
# Shared kernelspec fixtures for the tests below. The remote manager gets a
# distinct default kernel name so local and remote kernels can be told apart.
remote_kernel = "remote-python3"
remote_kernel_manager = KernelSpecManager()
remote_kernel_manager.default_kernel_name=remote_kernel
local_kernel_manager = KernelSpecManager()
34 |
35 |
def mock_kernel_spec(kernel_name):
    """Return a minimal python KernelSpec whose display name equals its name."""
    return KernelSpec(name=kernel_name, display_name=kernel_name, language="python")
38 |
39 |
class MockKernelSpecManager(KernelSpecManager):
    """KernelSpecManager stub that fabricates specs instead of reading disk."""

    def get_kernel_spec(self, kernel_name, **kwargs):
        # Any requested kernel name resolves to a synthetic spec.
        return mock_kernel_spec(kernel_name)

    def get_all_specs(self):
        # NOTE(review): this returns a *list*, while the real API returns a
        # dict of name -> spec; presumably the tests only depend on this
        # mock's shape — confirm before reusing elsewhere.
        return [self.get_kernel_spec(self.default_kernel_name)]

    def list_kernel_specs(self):
        return {
            "default": self.default_kernel_name,
            "kernelspecs": self.get_all_specs(),
        }
52 |
53 |
class MockClient():
    """Kernel client stub whose readiness check completes immediately."""
    async def wait_for_ready(self, *args, **kwargs):
        return
57 |
58 |
class MockKernelManager(ServerKernelManager):
  """In-memory kernel manager that fakes the kernel lifecycle.

  No kernel process is ever launched: start_kernel only records a kernel
  ID and model, and shutdown_kernel clears the ID again.
  """

  def __init__(self, *args, **kwargs):
    # Python-3 zero-arg super (file is async/Py3-only); dropped the dead
    # trailing `pass` statement from the original.
    super().__init__(*args, **kwargs)
    # A kernel "exists" exactly while kernel_id is set; see has_kernel.
    self.kernel_id = None

  def has_kernel(self):
    """Report whether the fake kernel is currently running."""
    return self.kernel_id is not None

  def client(self, *args, **kwargs):
    """Return a client whose readiness check always succeeds."""
    return MockClient()

  async def start_kernel(self, kernel_name=None, kernel_id=None, **kwargs):
    """Pretend to start a kernel and return its (possibly generated) ID.

    Args:
      kernel_name: kernel to "start"; defaults to the parent manager's
        default kernel name.
      kernel_id: ID to assign; a fresh UUID4 string when omitted.
    """
    kernel_name = kernel_name or self.parent.kernel_spec_manager.default_kernel_name
    kernel_id = kernel_id or str(uuid.uuid4())
    self.kernel_name = kernel_name
    self.kernel_id = kernel_id
    self.last_activity = datetime.datetime.utcnow()
    self.execution_state = "idle"
    self.ready.set_result(None)
    # The stored model mimics a remote Gateway response; the "additional"
    # field lets tests verify that extra fields are preserved end to end.
    self.kernel = {
        "id": kernel_id,
        "name": kernel_name,
        "execution_state": "starting",
        "additional": {
            "foo": "bar",
        },
    }
    return kernel_id

  async def shutdown_kernel(self, *args, **kwargs):
    """Mark the fake kernel as no longer running."""
    self.kernel_id = None

  async def refresh_model(self, model):
    """Replace the stored kernel model wholesale."""
    self.kernel = model

  async def restart_kernel(self, *args, **kwargs):
    """No-op restart."""
    pass

  async def interrupt_kernel(self, *args, **kwargs):
    """No-op interrupt."""
    pass
100 |
101 |
class MockLocalMappingKernelManager(AsyncMappingKernelManager):
  """Mapping kernel manager for "local" kernels backed by MockKernelManager."""

  def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    self.kernel_spec_manager = MockKernelSpecManager()
    self.kernel_manager_class = "kernels_mixer.kernels_test.MockKernelManager"

  async def start_kernel(self, *args, **kwargs):
    """Create a mock kernel manager, start it, and register it by ID."""
    manager = self.kernel_manager_factory(parent=self)
    new_id = await manager.start_kernel(*args, **kwargs)
    self._kernels[new_id] = manager
    return new_id

  def _get_changed_ports(self, *args, **kwargs):
    """Port tracking is meaningless for mock kernels; do nothing."""
    pass
116 |
117 |
class MockRemoteMappingKernelManager(GatewayMappingKernelManager):
  """Mapping kernel manager standing in for a remote Gateway.

  NOTE(review): unlike MockLocalMappingKernelManager, __init__ deliberately
  does not call super().__init__(), presumably to skip Gateway-specific
  setup; it appears to rely on trait defaults (e.g. self._kernels) being
  available regardless — confirm against traitlets behavior.
  """

  def __init__(self, *args, list_delay=None, **kwargs):
    self.kernel_spec_manager = MockKernelSpecManager()
    self.kernel_spec_manager.default_kernel_name = remote_kernel
    self.kernel_manager_class = "kernels_mixer.kernels_test.MockKernelManager"
    # Optional artificial delay (seconds) injected into list_kernels,
    # used by tests to simulate a slow remote Gateway.
    self._list_delay = list_delay

  async def list_kernels(self, **kwargs):
    """Return all kernel models after marking each kernel as idle.

    When list_delay is set this blocks the event loop via time.sleep
    (not asyncio.sleep) — presumably intentional, so a slow remote
    listing stalls unless the caller avoids awaiting it; verify with
    the timing assertions in TestKernelModel.
    """
    if self._list_delay:
      time.sleep(self._list_delay)
    for _, km in self._kernels.items():
      model = km.kernel
      model['execution_state'] = 'idle'
      await km.refresh_model(model)
    return [km.kernel for km in self._kernels.values()]
133 |
134 |
class MockMixingKernelSpecManager(MixingKernelSpecManager):
  """MixingKernelSpecManager wired to the mock spec managers above."""

  def __init__(self, local_km=None, remote_km=None, **kwargs):
    self.local_manager = local_km if local_km else local_kernel_manager
    self.remote_manager = remote_km if remote_km else remote_kernel_manager
    self._local_kernels = set()
    self._remote_kernels = set()

  def is_remote(self, kernel_name):
    """A kernel counts as remote exactly when it is the well-known remote kernel."""
    return kernel_name == remote_kernel
144 |
class TestKernelModel(unittest.TestCase):
  """Tests for kernel models exposed by MixingMappingKernelManager.

  All fixtures are built at class-definition time and shared across tests:
  one local mapping manager, a fast remote one, and a "slow" remote one
  whose kernel listing takes 10 seconds.
  """

  # Give the GatewayClient singleton the minimal config that
  # Gateway-backed managers expect, without clobbering existing values.
  gwc = GatewayClient.instance()
  gwc.url = gwc.url or "https://example.com"
  gwc.kernelspecs_endpoint = gwc.kernelspecs_endpoint or "/api/kernelspecs"

  local_multikernel_manager = MockLocalMappingKernelManager()
  remote_multikernel_manager = MockRemoteMappingKernelManager()
  slow_remote_multikernel_manager = MockRemoteMappingKernelManager(list_delay=10)

  ksm = MockMixingKernelSpecManager(
      local_km=local_multikernel_manager,
      remote_km=remote_multikernel_manager)
  slow_remote_listing_ksm = MockMixingKernelSpecManager(
      local_km=local_multikernel_manager,
      remote_km=slow_remote_multikernel_manager)

  # Mixing manager backed by the fast remote manager...
  mkm = MixingMappingKernelManager(kernel_spec_manager=ksm)
  mkm.local_manager = local_multikernel_manager
  mkm.remote_manager = remote_multikernel_manager

  # ... and one backed by the slow-listing remote manager.
  slow_remote_mkm = MixingMappingKernelManager(kernel_spec_manager=slow_remote_listing_ksm)
  slow_remote_mkm.local_manager = local_multikernel_manager
  slow_remote_mkm.remote_manager = slow_remote_multikernel_manager

  async def interrupt_kernel(self, mkm, kernel_id):
    # Helper: interrupt regardless of whether the manager's
    # interrupt_kernel is sync or async.
    await ensure_async(mkm.interrupt_kernel(kernel_id))

  def test_local_kernel_model(self):
    """A local kernel's model is served without "additional" fields, and
    the 10s remote listing delay must not slow down local operations."""
    start_time = time.time()
    local_kernel_name = "python3"
    local_kernel_id = run_sync(self.slow_remote_mkm.start_kernel)(kernel_name=local_kernel_name)
    self.assertFalse(self.slow_remote_mkm.has_remote_kernels())
    run_sync(self.slow_remote_mkm.list_kernels)()
    local_kernel_model = self.slow_remote_mkm.kernel_model(local_kernel_id)
    self.assertEqual(local_kernel_model["id"], local_kernel_id)
    self.assertEqual(local_kernel_model["name"], local_kernel_name)
    self.assertEqual(local_kernel_model["execution_state"], "idle")
    self.assertNotIn("additional", local_kernel_model)
    run_sync(self.interrupt_kernel)(self.slow_remote_mkm, local_kernel_id)
    run_sync(self.slow_remote_mkm.restart_kernel)(local_kernel_id)
    # The whole sequence must finish quickly despite the slow remote,
    # proving the remote listing delay was never incurred.
    end_time = time.time()
    self.assertLess(end_time - start_time, 1)

  def test_remote_kernel_model(self):
    """A remote kernel's model round-trips, including extra fields
    ("additional") supplied by the (mocked) Gateway."""
    start_time = time.time()
    remote_kernel_id = run_sync(self.mkm.start_kernel)(kernel_name=remote_kernel)
    self.assertTrue(self.mkm.has_remote_kernels())
    run_sync(self.mkm.list_kernels)()
    remote_kernel_model = self.mkm.kernel_model(remote_kernel_id)
    self.assertEqual(remote_kernel_model["id"], remote_kernel_id)
    self.assertEqual(remote_kernel_model["name"], remote_kernel)
    self.assertEqual(remote_kernel_model["additional"]["foo"], "bar")
    self.assertEqual(remote_kernel_model["execution_state"], "idle")
    run_sync(self.interrupt_kernel)(self.mkm, remote_kernel_id)
    run_sync(self.mkm.restart_kernel)(remote_kernel_id)
    end_time = time.time()
    self.assertLess(end_time - start_time, 1)
202 |
203 |
# Allow running this test module directly (e.g. `python kernels_test.py`).
if __name__ == '__main__':
  unittest.main()
206 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/kernels-mixer/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/google-cloud-jupyter-config/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | # This Pylint rcfile contains a best-effort configuration to uphold the
2 | # best-practices and style described in the Google Python style guide:
3 | # https://google.github.io/styleguide/pyguide.html
4 | #
5 | # Its canonical open-source location is:
6 | # https://google.github.io/styleguide/pylintrc
7 |
8 | [MASTER]
9 |
10 | # Add files or directories to the blacklist. They should be base names, not
11 | # paths.
12 | ignore=third_party
13 |
14 | # Add files or directories matching the regex patterns to the blacklist. The
15 | # regex matches against base names, not paths.
16 | ignore-patterns=
17 |
18 | # Pickle collected data for later comparisons.
19 | persistent=no
20 |
21 | # List of plugins (as comma separated values of python modules names) to load,
22 | # usually to register additional checkers.
23 | load-plugins=
24 |
25 | # Use multiple processes to speed up Pylint.
26 | jobs=4
27 |
28 | # Allow loading of arbitrary C extensions. Extensions are imported into the
29 | # active Python interpreter and may run arbitrary code.
30 | unsafe-load-any-extension=no
31 |
32 | # A comma-separated list of package or module names from where C extensions may
33 | # be loaded. Extensions are loading into the active Python interpreter and may
34 | # run arbitrary code
35 | extension-pkg-whitelist=
36 |
37 |
38 | [MESSAGES CONTROL]
39 |
40 | # Only show warnings with the listed confidence levels. Leave empty to show
41 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
42 | confidence=
43 |
44 | # Enable the message, report, category or checker with the given id(s). You can
45 | # either give multiple identifier separated by comma (,) or put this option
46 | # multiple time (only on the command line, not in the configuration file where
47 | # it should appear only once). See also the "--disable" option for examples.
48 | #enable=
49 |
50 | # Disable the message, report, category or checker with the given id(s). You
51 | # can either give multiple identifiers separated by comma (,) or put this
52 | # option multiple times (only on the command line, not in the configuration
53 | # file where it should appear only once).You can also use "--disable=all" to
54 | # disable everything first and then reenable specific checks. For example, if
55 | # you want to run only the similarities checker, you can use "--disable=all
56 | # --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
58 | # --disable=W"
59 | disable=apply-builtin,
60 | backtick,
61 | bad-option-value,
62 | basestring-builtin,
63 | buffer-builtin,
64 | c-extension-no-member,
65 | cmp-builtin,
66 | cmp-method,
67 | coerce-builtin,
68 | coerce-method,
69 | delslice-method,
70 | div-method,
71 | duplicate-code,
72 | eq-without-hash,
73 | execfile-builtin,
74 | file-builtin,
75 | filter-builtin-not-iterating,
76 | fixme,
77 | getslice-method,
78 | global-statement,
79 | hex-method,
80 | idiv-method,
81 | implicit-str-concat-in-sequence,
82 | import-error,
83 | import-self,
84 | import-star-module-level,
85 | input-builtin,
86 | intern-builtin,
87 | invalid-str-codec,
88 | locally-disabled,
89 | long-builtin,
90 | long-suffix,
91 | map-builtin-not-iterating,
92 | metaclass-assignment,
93 | next-method-called,
94 | next-method-defined,
95 | no-absolute-import,
96 | no-else-break,
97 | no-else-continue,
98 | no-else-raise,
99 | no-else-return,
100 | no-member,
101 | no-self-use,
102 | nonzero-method,
103 | oct-method,
104 | old-division,
105 | old-ne-operator,
106 | old-octal-literal,
107 | old-raise-syntax,
108 | parameter-unpacking,
109 | print-statement,
110 | raising-string,
111 | range-builtin-not-iterating,
112 | raw_input-builtin,
113 | rdiv-method,
114 | reduce-builtin,
115 | relative-import,
116 | reload-builtin,
117 | round-builtin,
118 | setslice-method,
119 | signature-differs,
120 | standarderror-builtin,
121 | suppressed-message,
122 | sys-max-int,
123 | too-few-public-methods,
124 | too-many-ancestors,
125 | too-many-arguments,
126 | too-many-boolean-expressions,
127 | too-many-branches,
128 | too-many-instance-attributes,
129 | too-many-locals,
130 | too-many-public-methods,
131 | too-many-return-statements,
132 | too-many-statements,
133 | trailing-newlines,
134 | unichr-builtin,
135 | unicode-builtin,
136 | unpacking-in-except,
137 | useless-else-on-loop,
138 | useless-suppression,
139 | using-cmp-argument,
140 | xrange-builtin,
141 | zip-builtin-not-iterating,
142 |
143 |
144 | [REPORTS]
145 |
146 | # Set the output format. Available formats are text, parseable, colorized, msvs
147 | # (visual studio) and html. You can also give a reporter class, eg
148 | # mypackage.mymodule.MyReporterClass.
149 | output-format=text
150 |
151 | # Put messages in a separate file for each module / package specified on the
152 | # command line instead of printing them on stdout. Reports (if any) will be
153 | # written in a file name "pylint_global.[txt|html]". This option is deprecated
154 | # and it will be removed in Pylint 2.0.
155 | files-output=no
156 |
157 | # Tells whether to display a full report or only the messages
158 | reports=no
159 |
160 | # Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables error, warning, refactor, and
# convention, which respectively contain the number of messages of each
# category, and to statement, which contains the total number of statements
# analyzed. This is used by the global evaluation report
164 | # (RP0004).
165 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
166 |
167 | # Template used to display messages. This is a python new-style format string
168 | # used to format the message information. See doc for all details
169 | #msg-template=
170 |
171 |
172 | [BASIC]
173 |
174 | # Good variable names which should always be accepted, separated by a comma
175 | good-names=main,_
176 |
177 | # Bad variable names which should always be refused, separated by a comma
178 | bad-names=
179 |
180 | # Colon-delimited sets of names that determine each other's naming style when
181 | # the name regexes allow several styles.
182 | name-group=
183 |
184 | # Include a hint for the correct naming format with invalid-name
185 | include-naming-hint=no
186 |
187 | # List of decorators that produce properties, such as abc.abstractproperty. Add
188 | # to this list to register other decorators that produce valid properties.
189 | property-classes=abc.abstractproperty,cached_property.cached_property,cached_property.threaded_cached_property,cached_property.cached_property_with_ttl,cached_property.threaded_cached_property_with_ttl
190 |
191 | # Regular expression matching correct function names
function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$
193 |
194 | # Regular expression matching correct variable names
195 | variable-rgx=^[a-z][a-z0-9_]*$
196 |
197 | # Regular expression matching correct constant names
198 | const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
199 |
200 | # Regular expression matching correct attribute names
201 | attr-rgx=^_{0,2}[a-z][a-z0-9_]*$
202 |
203 | # Regular expression matching correct argument names
204 | argument-rgx=^[a-z][a-z0-9_]*$
205 |
206 | # Regular expression matching correct class attribute names
207 | class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
208 |
209 | # Regular expression matching correct inline iteration names
210 | inlinevar-rgx=^[a-z][a-z0-9_]*$
211 |
212 | # Regular expression matching correct class names
213 | class-rgx=^_?[A-Z][a-zA-Z0-9]*$
214 |
215 | # Regular expression matching correct module names
216 | module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$
217 |
218 | # Regular expression matching correct method names
method-rgx=(?x)^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$
220 |
221 | # Regular expression which should only match function or class names that do
222 | # not require a docstring.
223 | no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$
224 |
225 | # Minimum line length for functions/classes that require docstrings, shorter
226 | # ones are exempt.
227 | docstring-min-length=10
228 |
229 |
230 | [TYPECHECK]
231 |
232 | # List of decorators that produce context managers, such as
233 | # contextlib.contextmanager. Add to this list to register other decorators that
234 | # produce valid context managers.
235 | contextmanager-decorators=contextlib.contextmanager,contextlib2.contextmanager
236 |
237 | # Tells whether missing members accessed in mixin class should be ignored. A
238 | # mixin class is detected if its name ends with "mixin" (case insensitive).
239 | ignore-mixin-members=yes
240 |
241 | # List of module names for which member attributes should not be checked
242 | # (useful for modules/projects where namespaces are manipulated during runtime
243 | # and thus existing member attributes cannot be deduced by static analysis. It
244 | # supports qualified module names, as well as Unix pattern matching.
245 | ignored-modules=
246 |
247 | # List of class names for which member attributes should not be checked (useful
248 | # for classes with dynamically set attributes). This supports the use of
249 | # qualified names.
250 | ignored-classes=optparse.Values,thread._local,_thread._local
251 |
252 | # List of members which are set dynamically and missed by pylint inference
253 | # system, and so shouldn't trigger E1101 when accessed. Python regular
254 | # expressions are accepted.
255 | generated-members=
256 |
257 |
258 | [FORMAT]
259 |
260 | # Maximum number of characters on a single line.
261 | max-line-length=80
262 |
263 | # TODO(https://github.com/PyCQA/pylint/issues/3352): Direct pylint to exempt
264 | # lines made too long by directives to pytype.
265 |
266 | # Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=(?x)(
  ^\s*(\#\ )?<?https?://\S+>?$|
  ^\s*(from\s+\S+\s+)?import\s+.+$)
270 |
271 | # Allow the body of an if to be on the same line as the test if there is no
272 | # else.
273 | single-line-if-stmt=yes
274 |
275 | # List of optional constructs for which whitespace checking is disabled. `dict-
276 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
277 | # `trailing-comma` allows a space between comma and closing bracket: (a, ).
278 | # `empty-line` allows space-only lines.
279 | no-space-check=
280 |
281 | # Maximum number of lines in a module
282 | max-module-lines=99999
283 |
284 | # String used as indentation unit. The internal Google style guide mandates 2
# spaces. Google's externally-published style guide says 4, consistent with
286 | # PEP 8. Here, we use 2 spaces, for conformity with many open-sourced Google
287 | # projects (like TensorFlow).
288 | indent-string=' '
289 |
290 | # Number of spaces of indent required inside a hanging or continued line.
291 | indent-after-paren=4
292 |
293 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
294 | expected-line-ending-format=
295 |
296 |
297 | [MISCELLANEOUS]
298 |
299 | # List of note tags to take in consideration, separated by a comma.
300 | notes=TODO
301 |
302 |
303 | [VARIABLES]
304 |
305 | # Tells whether we should check for unused import in __init__ files.
306 | init-import=no
307 |
308 | # A regular expression matching the name of dummy variables (i.e. expectedly
309 | # not used).
310 | dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_)
311 |
312 | # List of additional names supposed to be defined in builtins. Remember that
313 | # you should avoid to define new builtins when possible.
314 | additional-builtins=
315 |
316 | # List of strings which can identify a callback function by name. A callback
317 | # name must start or end with one of those strings.
318 | callbacks=cb_,_cb
319 |
320 | # List of qualified module names which can have objects that can redefine
321 | # builtins.
322 | redefining-builtins-modules=six,six.moves,past.builtins,future.builtins,functools
323 |
324 |
325 | [LOGGING]
326 |
327 | # Logging modules to check that the string format arguments are in logging
328 | # function parameter format
329 | logging-modules=logging,absl.logging,tensorflow.google.logging
330 |
331 |
332 | [SIMILARITIES]
333 |
334 | # Minimum lines number of a similarity.
335 | min-similarity-lines=4
336 |
337 | # Ignore comments when computing similarities.
338 | ignore-comments=yes
339 |
340 | # Ignore docstrings when computing similarities.
341 | ignore-docstrings=yes
342 |
343 | # Ignore imports when computing similarities.
344 | ignore-imports=no
345 |
346 |
347 | [SPELLING]
348 |
349 | # Spelling dictionary name. Available dictionaries: none. To make it working
350 | # install python-enchant package.
351 | spelling-dict=
352 |
353 | # List of comma separated words that should not be checked.
354 | spelling-ignore-words=
355 |
356 | # A path to a file that contains private dictionary; one word per line.
357 | spelling-private-dict-file=
358 |
359 | # Tells whether to store unknown words to indicated private dictionary in
360 | # --spelling-private-dict-file option instead of raising a message.
361 | spelling-store-unknown-words=no
362 |
363 |
364 | [IMPORTS]
365 |
366 | # Deprecated modules which should not be used, separated by a comma
367 | deprecated-modules=regsub,
368 | TERMIOS,
369 | Bastion,
370 | rexec,
371 | sets
372 |
373 | # Create a graph of every (i.e. internal and external) dependencies in the
374 | # given file (report RP0402 must not be disabled)
375 | import-graph=
376 |
377 | # Create a graph of external dependencies in the given file (report RP0402 must
378 | # not be disabled)
379 | ext-import-graph=
380 |
381 | # Create a graph of internal dependencies in the given file (report RP0402 must
382 | # not be disabled)
383 | int-import-graph=
384 |
385 | # Force import order to recognize a module as part of the standard
386 | # compatibility libraries.
387 | known-standard-library=
388 |
389 | # Force import order to recognize a module as part of a third party library.
390 | known-third-party=enchant, absl
391 |
392 | # Analyse import fallback blocks. This can be used to support both Python 2 and
393 | # 3 compatible code, which means that the block might have code that exists
394 | # only in one or another interpreter, leading to false positives when analysed.
395 | analyse-fallback-blocks=no
396 |
397 |
398 | [CLASSES]
399 |
400 | # List of method names used to declare (i.e. assign) instance attributes.
401 | defining-attr-methods=__init__,
402 | __new__,
403 | setUp
404 |
405 | # List of member names, which should be excluded from the protected access
406 | # warning.
407 | exclude-protected=_asdict,
408 | _fields,
409 | _replace,
410 | _source,
411 | _make
412 |
413 | # List of valid names for the first argument in a class method.
414 | valid-classmethod-first-arg=cls,
415 | class_
416 |
417 | # List of valid names for the first argument in a metaclass class method.
418 | valid-metaclass-classmethod-first-arg=mcs
419 |
420 |
421 | [EXCEPTIONS]
422 |
423 | # Exceptions that will emit a warning when being caught. Defaults to
424 | # "Exception"
425 | overgeneral-exceptions=StandardError,
426 | Exception,
427 | BaseException
428 |
--------------------------------------------------------------------------------
/jupyter-gcs-contents-manager/gcs_contents_manager.py:
--------------------------------------------------------------------------------
1 | # Copyright 2020 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # Setup:
16 | # First, install the GCS Python client library using the
17 | # command: `pip install google-cloud-storage`
18 | #
19 | # Then, copy this library into a directory in your PYTHONPATH.
20 | #
21 | # Finally, make sure you have application default credentials
22 | # set up by running: `gcloud auth application-default login`
23 | #
24 | # Usage: Add the following lines to your Jupyter config file
25 | # (e.g. jupyter_notebook_config.py):
26 | #
27 | # from gcs_contents_manager import CombinedContentsManager, GCSContentsManager
28 | # c.NotebookApp.contents_manager_class = CombinedContentsManager
29 | # c.GCSContentsManager.bucket_name = '${NOTEBOOK_BUCKET}'
30 | # c.GCSContentsManager.bucket_notebooks_path = '${NOTEBOOK_PATH}'
31 | # c.GCSContentsManager.project = '${NOTEBOOK_BUCKET_PROJECT}'
32 | # c.FileContentsManager.root_dir = '${LOCAL_DISK_NOTEBOOK_DIR}'
33 | #
34 | # For '${NOTEBOOK_BUCKET}' specify the name of the GCS bucket where
35 | # you want to store your notebooks, and for '${NOTEBOOK_PATH}',
36 | # specify the name of the directory within that bucket that will be
37 | # treated as your root directory by Jupyter. For
38 | # '${NOTEBOOK_BUCKET_PROJECT}', specify the ID of the GCP project
39 | # that owns the GCS bucket.
40 | #
41 | # If you run JupyterLab with widgets that assume the current file
42 | # browser path is a location on your local disk (e.g. the
43 | # jupyterlab-git extension), then you will also need to set up a
44 | # link somewhere on your local disk for those widgets to use.
45 | #
46 | # For example, you could run the following:
47 | #
48 | # mkdir -p ~/.jupyter/symlinks_for_jupyterlab_widgets
49 | # ln -s ${LOCAL_DISK_NOTEBOOK_DIR} ~/.jupyter/symlinks_for_jupyterlab_widgets/Local\ Disk
50 | #
51 | # And then add the following snippet to your Jupyter config:
52 | #
53 | # c.CombinedContentsManager.root_dir = '~/.jupyter/symlinks_for_jupyterlab_widgets'
54 |
55 | import base64
56 | import errno
57 | import json
58 | import logging
59 | import mimetypes
60 | import posixpath
61 | import re
62 |
63 | import nbformat
64 | from notebook.services.contents.filecheckpoints import GenericFileCheckpoints
65 | from notebook.services.contents.filemanager import FileContentsManager
66 | from notebook.services.contents.manager import ContentsManager
67 | from notebook.services.contents.checkpoints import Checkpoints, GenericCheckpointsMixin
68 | from tornado.web import HTTPError
69 | from traitlets import Unicode, default
70 |
71 | from google.cloud import storage
72 |
# Encoding used when converting blob bytes to text for file models and
# checkpoints.
utf8_encoding = 'utf-8'
74 |
75 |
class GCSCheckpointManager(GenericCheckpointsMixin, Checkpoints):
  """Checkpoints implementation that stores checkpoint files in GCS.

  Checkpoint blobs live in the parent contents manager's bucket under
  ``.ipynb_checkpoints/<path>/<checkpoint_id>``. Only a single checkpoint
  per file is kept (its id is always ``'checkpoint'``).
  """

  # Bucket-relative prefix (pseudo-directory) holding every checkpoint blob.
  checkpoints_dir = '.ipynb_checkpoints'

  def __init__(self, **kwargs):
    # NOTE(review): super().__init__ is intentionally not called here, so
    # traitlets-based configuration of this class is bypassed; only the
    # parent reference is used. Confirm before changing this.
    self._kwargs = kwargs
    self._parent = kwargs['parent']

  @property
  def bucket(self):
    """The google.cloud.storage bucket shared with the parent manager."""
    return self._parent.bucket

  def checkpoint_path(self, checkpoint_id, path):
    """Return the bucket-relative blob name for a checkpoint of *path*."""
    path = (path or '').strip('/')
    return posixpath.join(self.checkpoints_dir, path, checkpoint_id)

  def checkpoint_blob(self, checkpoint_id, path, create_if_missing=False):
    """Look up the checkpoint blob for *path*.

    Returns None when the blob does not exist and ``create_if_missing`` is
    False; otherwise returns a (possibly not-yet-uploaded) blob handle.
    """
    blob_name = self.checkpoint_path(checkpoint_id, path)
    blob = self.bucket.get_blob(blob_name)
    if not blob and create_if_missing:
      # bucket.blob() builds a local handle without a server round trip.
      blob = self.bucket.blob(blob_name)
    return blob

  def create_file_checkpoint(self, content, format, path):
    """Store *content* as the single checkpoint for *path*.

    Returns a checkpoint model dict with 'id' and 'last_modified'.
    """
    checkpoint_id = 'checkpoint'
    blob = self.checkpoint_blob(checkpoint_id, path, create_if_missing=True)
    content_type = 'text/plain' if format == 'text' else 'application/octet-stream'
    # GCS doesn't allow specifying the key version, so drop it if present.
    # The pattern is a raw string: '\d' in a plain literal is an invalid
    # escape sequence (SyntaxWarning on recent Pythons); behavior unchanged.
    if blob.kms_key_name:
      blob._properties['kmsKeyName'] = re.split(r'/cryptoKeyVersions/\d+$',
                                                blob.kms_key_name)[0]
    blob.upload_from_string(content, content_type=content_type)
    return {
        'id': checkpoint_id,
        'last_modified': blob.updated,
    }

  def create_notebook_checkpoint(self, nb, path):
    """Serialize notebook *nb* and store it as the checkpoint for *path*."""
    content = nbformat.writes(nb)
    return self.create_file_checkpoint(content, 'text', path)

  def _checkpoint_contents(self, checkpoint_id, path):
    """Return (raw bytes, content type) for a checkpoint, or raise 404."""
    blob = self.checkpoint_blob(checkpoint_id, path)
    if not blob:
      raise HTTPError(
          404, 'No such checkpoint for "{}": {}'.format(path, checkpoint_id))
    return blob.download_as_string(), blob.content_type

  def get_file_checkpoint(self, checkpoint_id, path):
    """Return a model dict for a plain-file checkpoint."""
    contents, content_type = self._checkpoint_contents(checkpoint_id, path)
    checkpoint_obj = {
        'type': 'file',
        'content': contents.decode(utf8_encoding),
    }
    # Text checkpoints were uploaded as 'text/plain'; anything else was
    # stored as base64-encoded text (see create_file_checkpoint).
    checkpoint_obj[
        'format'] = 'text' if content_type == 'text/plain' else 'base64'
    return checkpoint_obj

  def get_notebook_checkpoint(self, checkpoint_id, path):
    """Return a model dict for a notebook checkpoint."""
    contents, _ = self._checkpoint_contents(checkpoint_id, path)
    checkpoint_obj = {
        'type': 'notebook',
        'content': nbformat.reads(contents, as_version=4),
    }
    return checkpoint_obj

  def delete_checkpoint(self, checkpoint_id, path):
    """Delete the checkpoint blob if it exists (no-op otherwise)."""
    blob = self.checkpoint_blob(checkpoint_id, path)
    if blob:
      blob.delete()
    return None

  def list_checkpoints(self, path):
    """Return checkpoint model dicts for every checkpoint under *path*."""
    checkpoints = []
    for b in self.bucket.list_blobs(
        prefix=posixpath.join(self.checkpoints_dir, path)):
      checkpoint = {
          'id': posixpath.basename(b.name),
          'last_modified': b.updated,
      }
      checkpoints.append(checkpoint)
    return checkpoints

  def rename_checkpoint(self, checkpoint_id, old_path, new_path):
    """Move a checkpoint blob from *old_path* to *new_path* (no-op if absent)."""
    blob = self.checkpoint_blob(checkpoint_id, old_path)
    if not blob:
      return None
    new_blob_name = self.checkpoint_path(checkpoint_id, new_path)
    self.bucket.rename_blob(blob, new_blob_name)
    return None
165 |
166 |
class GCSContentsManager(ContentsManager):
  """Jupyter contents manager that stores files in a Google Cloud Storage bucket."""

  # Name of the GCS bucket holding the notebooks/files.
  bucket_name = Unicode(config=True)

  # Path within the bucket treated as the Jupyter root directory
  # ('' means the bucket root).
  bucket_notebooks_path = Unicode(config=True)

  # GCP project owning the bucket; when empty, the storage client's
  # default project is used.
  project = Unicode(config=True)
174 |
  @default('checkpoints_class')
  def _checkpoints_class_default(self):
    # Use the GCS-backed checkpoint manager defined in this module by default.
    return GCSCheckpointManager
178 |
  @default('bucket_notebooks_path')
  def _bucket_notebooks_path_default(self):
    # Default to the bucket root when no sub-path is configured.
    return ''
182 |
183 | def __init__(self, **kwargs):
184 | super(GCSContentsManager, self).__init__(**kwargs)
185 | self._bucket = None
186 |
187 | @property
188 | def bucket(self):
189 | if not self._bucket:
190 | if self.project:
191 | storage_client = storage.Client(project=self.project)
192 | else:
193 | storage_client = storage.Client()
194 | self._bucket = storage_client.get_bucket(self.bucket_name)
195 | return self._bucket
196 |
197 | def _normalize_path(self, path):
198 | path = path or ''
199 | return path.strip('/')
200 |
201 | def _gcs_path(self, normalized_path):
202 | if not self.bucket_notebooks_path:
203 | return normalized_path
204 | if not normalized_path:
205 | return self.bucket_notebooks_path
206 | return posixpath.join(self.bucket_notebooks_path, normalized_path)
207 |
208 | def is_hidden(self, path):
209 | try:
210 | path = self._normalize_path(path)
211 | return posixpath.basename(path).startswith('.')
212 | except HTTPError as err:
213 | raise err
214 | except Exception as ex:
215 | raise HTTPError(500, 'Internal server error: {}'.format(str(ex)))
216 |
217 | def file_exists(self, path):
218 | try:
219 | path = self._normalize_path(path)
220 | if not path:
221 | return False
222 | blob_name = self._gcs_path(path)
223 | blob = self.bucket.get_blob(blob_name)
224 | return blob is not None
225 | except HTTPError as err:
226 | raise err
227 | except Exception as ex:
228 | raise HTTPError(500, 'Internal server error: {}'.format(str(ex)))
229 |
230 | def dir_exists(self, path):
231 | try:
232 | path = self._normalize_path(path)
233 | if not path:
234 | return self.bucket.exists()
235 |
236 | dir_gcs_path = self._gcs_path(path)
237 | if self.bucket.get_blob(dir_gcs_path):
238 | # There is a regular file matching the specified directory.
239 | #
240 | # Would could have both a blob matching a directory path
241 | # and other blobs under that path. In that case, we cannot
242 | # treat the path as both a directory and a regular file,
243 | # so we treat the regular file as overriding the logical
244 | # directory.
245 | return False
246 |
247 | dir_contents = self.bucket.list_blobs(prefix=dir_gcs_path)
248 | for _ in dir_contents:
249 | return True
250 |
251 | return False
252 | except HTTPError as err:
253 | raise err
254 | except Exception as ex:
255 | raise HTTPError(500, 'Internal server error: {}'.format(str(ex)))
256 |
257 | def _blob_model(self, normalized_path, blob, content=True):
258 | blob_obj = {}
259 | blob_obj['path'] = normalized_path
260 | blob_obj['name'] = posixpath.basename(normalized_path)
261 | blob_obj['last_modified'] = blob.updated
262 | blob_obj['created'] = blob.time_created
263 | blob_obj['writable'] = True
264 | blob_obj['type'] = 'notebook' if blob_obj['name'].endswith(
265 | '.ipynb') else 'file'
266 | if not content:
267 | blob_obj['mimetype'] = None
268 | blob_obj['format'] = None
269 | blob_obj['content'] = None
270 | return blob_obj
271 |
272 | content_str = blob.download_as_string() if content else None
273 | if blob_obj['type'] == 'notebook':
274 | blob_obj['mimetype'] = None
275 | blob_obj['format'] = 'json'
276 | blob_obj['content'] = nbformat.reads(content_str, as_version=4)
277 | elif blob.content_type.startswith('text/'):
278 | blob_obj['mimetype'] = 'text/plain'
279 | blob_obj['format'] = 'text'
280 | blob_obj['content'] = content_str.decode(utf8_encoding)
281 | else:
282 | blob_obj['mimetype'] = 'application/octet-stream'
283 | blob_obj['format'] = 'base64'
284 | blob_obj['content'] = base64.b64encode(content_str)
285 |
286 | return blob_obj
287 |
288 | def _empty_dir_model(self, normalized_path, content=True):
289 | dir_obj = {}
290 | dir_obj['path'] = normalized_path
291 | dir_obj['name'] = posixpath.basename(normalized_path)
292 | dir_obj['type'] = 'directory'
293 | dir_obj['mimetype'] = None
294 | dir_obj['writable'] = True
295 | dir_obj['last_modified'] = self.bucket.time_created
296 | dir_obj['created'] = self.bucket.time_created
297 | dir_obj['format'] = None
298 | dir_obj['content'] = None
299 | if content:
300 | dir_obj['format'] = 'json'
301 | dir_obj['content'] = []
302 | return dir_obj
303 |
304 | def _list_dir(self, normalized_path, content=True):
305 | dir_obj = self._empty_dir_model(normalized_path, content=content)
306 | if not content:
307 | return dir_obj
308 |
309 | # We have to convert a list of GCS blobs, which may include multiple
310 | # entries corresponding to a single sub-directory, into a list of immediate
311 | # directory contents with no duplicates.
312 | #
313 | # To do that, we keep a dictionary of immediate children, and then convert
314 | # that dictionary into a list once it is fully populated.
315 | children = {}
316 |
317 | def add_child(name, model, override_existing=False):
318 | """Add the given child model (for either a regular file or directory), to
319 |
320 | the list of children for the current directory model being built.
321 |
322 | It is possible that we will encounter a GCS blob corresponding to a
323 | regular file after we encounter blobs indicating that name should be a
324 | directory. For example, if we have the following blobs:
325 | some/dir/path/
326 | some/dir/path/with/child
327 | some/dir/path
328 | ... then the first two entries tell us that 'path' is a subdirectory of
329 | 'dir', but the third one tells us that it is a regular file.
330 |
331 | In this case, we treat the regular file as shadowing the directory. The
332 | 'override_existing' keyword argument handles that by letting the caller
333 | specify that the child being added should override (i.e. hide) any
334 | pre-existing children with the same name.
335 | """
336 | if self.is_hidden(model['path']) and not self.allow_hidden:
337 | return
338 | if (name in children) and not override_existing:
339 | return
340 | children[name] = model
341 |
342 | dir_gcs_path = self._gcs_path(normalized_path)
343 | for b in self.bucket.list_blobs(prefix=dir_gcs_path):
344 | # For each nested blob, identify the corresponding immediate child
345 | # of the directory, and then add that child to the directory model.
346 | prefix_len = len(dir_gcs_path) + 1 if dir_gcs_path else 0
347 | suffix = b.name[prefix_len:]
348 | if suffix: # Ignore the place-holder blob for the directory itself
349 | first_slash = suffix.find('/')
350 | if first_slash < 0:
351 | child_path = posixpath.join(normalized_path, suffix)
352 | add_child(suffix,
353 | self._blob_model(child_path, b, content=False),
354 | override_existing=True)
355 | else:
356 | subdir = suffix[0:first_slash]
357 | if subdir:
358 | child_path = posixpath.join(normalized_path, subdir)
359 | add_child(subdir, self._empty_dir_model(child_path, content=False))
360 |
361 | for child in children:
362 | dir_obj['content'].append(children[child])
363 |
364 | return dir_obj
365 |
366 | def get(self, path, content=True, type=None, format=None):
367 | try:
368 | path = self._normalize_path(path)
369 | if not type and self.dir_exists(path):
370 | type = 'directory'
371 | if type == 'directory':
372 | return self._list_dir(path, content=content)
373 |
374 | gcs_path = self._gcs_path(path)
375 | blob = self.bucket.get_blob(gcs_path)
376 | return self._blob_model(path, blob, content=content)
377 | except HTTPError as err:
378 | raise err
379 | except Exception as ex:
380 | raise HTTPError(500, 'Internal server error: {}'.format(str(ex)))
381 |
382 | def _mkdir(self, normalized_path):
383 | gcs_path = self._gcs_path(normalized_path) + '/'
384 | blob = self.bucket.blob(gcs_path)
385 | blob.upload_from_string('', content_type='text/plain')
386 | return self._empty_dir_model(normalized_path, content=False)
387 |
388 | def save(self, model, path):
389 | try:
390 | self.run_pre_save_hook(model=model, path=path)
391 |
392 | normalized_path = self._normalize_path(path)
393 | if model['type'] == 'directory':
394 | return self._mkdir(normalized_path)
395 |
396 | gcs_path = self._gcs_path(normalized_path)
397 | blob = self.bucket.get_blob(gcs_path)
398 | if not blob:
399 | blob = self.bucket.blob(gcs_path)
400 |
401 | content_type = model.get('mimetype', None)
402 | if not content_type:
403 | content_type, _ = mimetypes.guess_type(normalized_path)
404 | contents = model['content']
405 | if model['type'] == 'notebook':
406 | contents = nbformat.writes(nbformat.from_dict(contents))
407 | elif model['type'] == 'file' and model['format'] == 'base64':
408 | b64_bytes = contents.encode('ascii')
409 | contents = base64.decodebytes(b64_bytes)
410 |
411 | # GCS doesn't allow specifying the key version, so drop it if present
412 | if blob.kms_key_name:
413 | blob._properties['kmsKeyName'] = re.split('/cryptoKeyVersions/\d+$',
414 | blob.kms_key_name)[0]
415 |
416 | blob.upload_from_string(contents, content_type=content_type)
417 | return self.get(path, type=model['type'], content=False)
418 | except HTTPError as err:
419 | raise err
420 | except Exception as ex:
421 | raise HTTPError(500, 'Internal server error: {}'.format(str(ex)))
422 |
423 | def delete_file(self, path):
424 | try:
425 | normalized_path = self._normalize_path(path)
426 | gcs_path = self._gcs_path(normalized_path)
427 | blob = self.bucket.get_blob(gcs_path)
428 | if blob:
429 | # The path corresponds to a regular file; just delete it.
430 | blob.delete()
431 | return None
432 |
433 | # The path (possibly) corresponds to a directory. Delete
434 | # every file underneath it.
435 | for blob in self.bucket.list_blobs(prefix=gcs_path):
436 | blob.delete()
437 |
438 | return None
439 | except HTTPError as err:
440 | raise err
441 | except Exception as ex:
442 | raise HTTPError(500, 'Internal server error: {}'.format(str(ex)))
443 |
444 | def rename_file(self, old_path, new_path):
445 | try:
446 | old_gcs_path = self._gcs_path(self._normalize_path(old_path))
447 | new_gcs_path = self._gcs_path(self._normalize_path(new_path))
448 | blob = self.bucket.get_blob(old_gcs_path)
449 | if blob:
450 | # The path corresponds to a regular file.
451 | self.bucket.rename_blob(blob, new_gcs_path)
452 | return None
453 |
454 | # The path (possibly) corresponds to a directory. Rename
455 | # every file underneath it.
456 | for b in self.bucket.list_blobs(prefix=old_gcs_path):
457 | self.bucket.rename_blob(b, b.name.replace(old_gcs_path, new_gcs_path))
458 | return None
459 | except HTTPError as err:
460 | raise err
461 | except Exception as ex:
462 | raise HTTPError(500, 'Internal server error: {}'.format(str(ex)))
463 |
464 |
class CombinedCheckpointsManager(GenericCheckpointsMixin, Checkpoints):
  """Checkpoints manager that routes calls to per-backend managers.

  Paths are prefixed with the name of a backing contents manager (e.g.
  'GCS/notebook.ipynb'); each call is forwarded to that manager's own
  `checkpoints` object with the prefix removed.
  """

  def __init__(self, content_managers):
    # Maps path prefix -> contents manager whose `.checkpoints` object
    # handles checkpoints for paths under that prefix.
    self._content_managers = content_managers

  def _checkpoint_manager_for_path(self, path):
    """Return (checkpoints manager, relative path) for the given path.

    Raises:
      HTTPError: 400 when no configured prefix matches `path`.
    """
    path = path or ''
    path = path.strip('/')
    for path_prefix in self._content_managers:
      if path == path_prefix or path.startswith(path_prefix + '/'):
        # Strip the separator slash as well as the prefix: a leading '/'
        # in the relative path would make posixpath.join inside the
        # per-backend checkpoint managers discard their checkpoints
        # directory prefix entirely.
        relative_path = path[len(path_prefix):].lstrip('/')
        return self._content_managers[path_prefix].checkpoints, relative_path
    raise HTTPError(400, 'Unsupported checkpoint path: {}'.format(path))

  def checkpoint_path(self, checkpoint_id, path):
    """Forward `checkpoint_path` to the backend owning `path`."""
    checkpoint_manager, relative_path = self._checkpoint_manager_for_path(path)
    return checkpoint_manager.checkpoint_path(checkpoint_id, relative_path)

  def checkpoint_blob(self, checkpoint_id, path, create_if_missing=False):
    """Forward `checkpoint_blob` to the backend owning `path`."""
    checkpoint_manager, relative_path = self._checkpoint_manager_for_path(path)
    return checkpoint_manager.checkpoint_blob(
        checkpoint_id, relative_path, create_if_missing=create_if_missing)

  def create_file_checkpoint(self, content, format, path):
    """Forward `create_file_checkpoint` to the backend owning `path`."""
    checkpoint_manager, relative_path = self._checkpoint_manager_for_path(path)
    return checkpoint_manager.create_file_checkpoint(content, format,
                                                     relative_path)

  def create_notebook_checkpoint(self, nb, path):
    """Forward `create_notebook_checkpoint` to the backend owning `path`."""
    checkpoint_manager, relative_path = self._checkpoint_manager_for_path(path)
    return checkpoint_manager.create_notebook_checkpoint(nb, relative_path)

  def get_file_checkpoint(self, checkpoint_id, path):
    """Forward `get_file_checkpoint` to the backend owning `path`."""
    checkpoint_manager, relative_path = self._checkpoint_manager_for_path(path)
    return checkpoint_manager.get_file_checkpoint(checkpoint_id, relative_path)

  def get_notebook_checkpoint(self, checkpoint_id, path):
    """Forward `get_notebook_checkpoint` to the backend owning `path`."""
    checkpoint_manager, relative_path = self._checkpoint_manager_for_path(path)
    return checkpoint_manager.get_notebook_checkpoint(checkpoint_id,
                                                      relative_path)

  def delete_checkpoint(self, checkpoint_id, path):
    """Forward `delete_checkpoint` to the backend owning `path`."""
    checkpoint_manager, relative_path = self._checkpoint_manager_for_path(path)
    return checkpoint_manager.delete_checkpoint(checkpoint_id, relative_path)

  def list_checkpoints(self, path):
    """Forward `list_checkpoints` to the backend owning `path`."""
    checkpoint_manager, relative_path = self._checkpoint_manager_for_path(path)
    return checkpoint_manager.list_checkpoints(relative_path)

  def rename_checkpoint(self, checkpoint_id, old_path, new_path):
    """Rename a checkpoint; both paths must map to the same backend.

    Raises:
      HTTPError: 400 when the two paths resolve to different backends.
    """
    checkpoint_manager, old_relative_path = self._checkpoint_manager_for_path(
        old_path)
    new_checkpoint_manager, new_relative_path = self._checkpoint_manager_for_path(
        new_path)
    if new_checkpoint_manager != checkpoint_manager:
      raise HTTPError(
          400, 'Unsupported rename across file systems: {}->{}'.format(
              old_path, new_path))
    return checkpoint_manager.rename_checkpoint(checkpoint_id,
                                                old_relative_path,
                                                new_relative_path)
526 |
527 |
class CombinedContentsManager(ContentsManager):
  """Contents manager that multiplexes between several backing managers.

  The top-level directory is a synthetic, read-only listing with one child
  per backing manager ('Local Disk' and 'GCS'); every deeper path is routed
  to the manager whose name prefixes the path.
  """

  root_dir = Unicode(config=True)

  preferred_dir = Unicode("", config=True)

  @default('checkpoints')
  def _default_checkpoints(self):
    return CombinedCheckpointsManager(self._content_managers)

  def __init__(self, **kwargs):
    super(CombinedContentsManager, self).__init__(**kwargs)
    # Use the configured logger rather than writing directly to stdout.
    self.log.info('Creating the combined contents manager...')

    file_cm = FileContentsManager(**kwargs)
    file_cm.checkpoints = GenericFileCheckpoints(**file_cm.checkpoints_kwargs)
    gcs_cm = GCSContentsManager(**kwargs)
    self._content_managers = {
        'Local Disk': file_cm,
        'GCS': gcs_cm,
    }

  def _content_manager_for_path(self, path):
    """Return (manager, relative path, prefix) for the given API path.

    The manager is None when no configured prefix matches `path`.
    """
    path = path or ''
    path = path.strip('/')
    for path_prefix in self._content_managers:
      if path == path_prefix or path.startswith(path_prefix + '/'):
        # Drop the prefix and the separator slash so backing managers
        # receive a clean relative path.
        relative_path = path[len(path_prefix):].lstrip('/')
        return self._content_managers[path_prefix], relative_path, path_prefix
    if '/' in path:
      path_parts = path.split('/', 1)
      return None, path_parts[1], path_parts[0]
    return None, path, ''

  def is_hidden(self, path):
    """Report whether the routed path is hidden; unroutable paths are not."""
    try:
      cm, relative_path, unused_path_prefix = self._content_manager_for_path(
          path)
      if not cm:
        return False
      return cm.is_hidden(relative_path)
    except HTTPError as err:
      raise err
    except Exception as ex:
      raise HTTPError(
          500, 'Internal server error: [{}] {}'.format(type(ex), str(ex)))

  def file_exists(self, path):
    """Report whether a regular file exists at the routed path."""
    try:
      cm, relative_path, unused_path_prefix = self._content_manager_for_path(
          path)
      if not cm:
        return False
      return cm.file_exists(relative_path)
    except HTTPError as err:
      raise err
    except Exception as ex:
      raise HTTPError(
          500, 'Internal server error: [{}] {}'.format(type(ex), str(ex)))

  def dir_exists(self, path):
    """Report whether a directory exists; the synthetic root always does."""
    if path in ['', '/']:
      return True
    try:
      cm, relative_path, unused_path_prefix = self._content_manager_for_path(
          path)
      if not cm:
        return False
      return cm.dir_exists(relative_path)
    except HTTPError as err:
      raise err
    except Exception as ex:
      raise HTTPError(
          500, 'Internal server error: [{}] {}'.format(type(ex), str(ex)))

  def _make_model_relative(self, model, path_prefix):
    """Prepend `path_prefix` to the model's path (recursively for dirs)."""
    if 'path' in model:
      model['path'] = '{}/{}'.format(path_prefix, model['path'])
    if model.get('type', None) == 'directory':
      self._make_children_relative(model, path_prefix)

  def _make_children_relative(self, model, path_prefix):
    """Prepend `path_prefix` to the paths of all children of a dir model."""
    children = model.get('content', None)
    if children:
      for child in children:
        self._make_model_relative(child, path_prefix)

  def get(self, path, content=True, type=None, format=None):
    """Return the model at `path`; the root is a synthetic directory."""
    if path in ['', '/']:
      dir_obj = {}
      dir_obj['path'] = ''
      dir_obj['name'] = ''
      dir_obj['type'] = 'directory'
      dir_obj['mimetype'] = None
      dir_obj['writable'] = False
      dir_obj['format'] = 'json'
      contents = []
      for path_prefix in self._content_managers:
        child_obj = self._content_managers[path_prefix].get('', content=False)
        child_obj['path'] = path_prefix
        child_obj['name'] = path_prefix
        child_obj['writable'] = False
        contents.append(child_obj)
      dir_obj['content'] = contents
      # The synthetic root has no timestamps of its own; borrow them from
      # the first child so the model is complete.
      dir_obj['created'] = contents[0]['created']
      dir_obj['last_modified'] = contents[0]['last_modified']
      return dir_obj
    try:
      cm, relative_path, path_prefix = self._content_manager_for_path(path)
      if not cm:
        raise HTTPError(404, 'No content manager defined for "{}"'.format(path))
      model = cm.get(relative_path, content=content, type=type, format=format)
      self._make_model_relative(model, path_prefix)
      return model
    except HTTPError as err:
      raise err
    except Exception as ex:
      raise HTTPError(
          500, 'Internal server error: [{}] {}'.format(type(ex), str(ex)))

  def save(self, model, path):
    """Save `model` via the backing manager that owns `path`."""
    if path in ['', '/']:
      raise HTTPError(403, 'The top-level directory is read-only')
    try:
      self.run_pre_save_hook(model=model, path=path)

      cm, relative_path, path_prefix = self._content_manager_for_path(path)
      if (relative_path in ['', '/']) or (path_prefix in ['', '/']):
        raise HTTPError(403, 'The top-level directory contents are read-only')
      if not cm:
        raise HTTPError(404, 'No content manager defined for "{}"'.format(path))

      if 'path' in model:
        model['path'] = relative_path

      model = cm.save(model, relative_path)
      # Restore the combined (prefixed) path before returning to the caller.
      if 'path' in model:
        model['path'] = path
      return model
    except HTTPError as err:
      raise err
    except Exception as ex:
      raise HTTPError(
          500, 'Internal server error: [{}] {}'.format(type(ex), str(ex)))

  def delete_file(self, path):
    """Delete the file at `path` via the backing manager that owns it."""
    if path in ['', '/']:
      raise HTTPError(403, 'The top-level directory is read-only')
    try:
      cm, relative_path, path_prefix = self._content_manager_for_path(path)
      if (relative_path in ['', '/']) or (path_prefix in ['', '/']):
        raise HTTPError(403, 'The top-level directory contents are read-only')
      if not cm:
        raise HTTPError(404, 'No content manager defined for "{}"'.format(path))
      return cm.delete_file(relative_path)
    except OSError as err:
      # The built-in file contents manager will not attempt to wrap permissions
      # errors when deleting files if they occur while trying to move the
      # to-be-deleted file to the trash, because the underlying send2trash
      # library does not set the errno attribute of the raised OSError.
      #
      # To work around this we explicitly catch such errors, check if they
      # start with the magic text "Permission denied", and then wrap them
      # in an HTTPError.
      if str(err).startswith('Permission denied'):
        raise HTTPError(403, str(err))
      raise HTTPError(
          500, 'Internal server error: [{}] {}'.format(err.errno, str(err)))
    except HTTPError as err:
      raise err
    except Exception as ex:
      raise HTTPError(
          500, 'Internal server error: [{}] {}'.format(type(ex), str(ex)))

  def rename_file(self, old_path, new_path):
    """Rename a file; both paths must route to the same backing manager.

    Raises:
      HTTPError: 400 when the paths resolve to different backends; 403 for
        top-level paths; 404 when no backend matches either path.
    """
    if (old_path in ['', '/']) or (new_path in ['', '/']):
      raise HTTPError(403, 'The top-level directory is read-only')
    try:
      old_cm, old_relative_path, old_prefix = self._content_manager_for_path(
          old_path)
      if (old_relative_path in ['', '/']) or (old_prefix in ['', '/']):
        raise HTTPError(403, 'The top-level directory contents are read-only')
      if not old_cm:
        raise HTTPError(404,
                        'No content manager defined for "{}"'.format(old_path))

      new_cm, new_relative_path, new_prefix = self._content_manager_for_path(
          new_path)
      if (new_relative_path in ['', '/']) or (new_prefix in ['', '/']):
        raise HTTPError(403, 'The top-level directory contents are read-only')
      if not new_cm:
        raise HTTPError(404,
                        'No content manager defined for "{}"'.format(new_path))

      if old_cm != new_cm:
        raise HTTPError(400, 'Unsupported rename across file systems')
      return old_cm.rename_file(old_relative_path, new_relative_path)
    except HTTPError as err:
      raise err
    except Exception as ex:
      raise HTTPError(
          500, 'Internal server error: [{}] {}'.format(type(ex), str(ex)))
731 |
--------------------------------------------------------------------------------
/jupyter-gcs-contents-manager/Pipfile.lock:
--------------------------------------------------------------------------------
1 | {
2 | "_meta": {
3 | "hash": {
4 | "sha256": "183eb879a4168b2f329b00958d7278541c6be3b574b868d60b851a8645ef1846"
5 | },
6 | "pipfile-spec": 6,
7 | "requires": {
8 | "python_version": "3.7"
9 | },
10 | "sources": [
11 | {
12 | "name": "pypi",
13 | "url": "https://pypi.python.org/simple",
14 | "verify_ssl": true
15 | }
16 | ]
17 | },
18 | "default": {
19 | "argon2-cffi": {
20 | "hashes": [
21 | "sha256:05a8ac07c7026542377e38389638a8a1e9b78f1cd8439cd7493b39f08dd75fbf",
22 | "sha256:0bf066bc049332489bb2d75f69216416329d9dc65deee127152caeb16e5ce7d5",
23 | "sha256:18dee20e25e4be86680b178b35ccfc5d495ebd5792cd00781548d50880fee5c5",
24 | "sha256:392c3c2ef91d12da510cfb6f9bae52512a4552573a9e27600bdb800e05905d2b",
25 | "sha256:57358570592c46c420300ec94f2ff3b32cbccd10d38bdc12dc6979c4a8484fbc",
26 | "sha256:6678bb047373f52bcff02db8afab0d2a77d83bde61cfecea7c5c62e2335cb203",
27 | "sha256:6ea92c980586931a816d61e4faf6c192b4abce89aa767ff6581e6ddc985ed003",
28 | "sha256:77e909cc756ef81d6abb60524d259d959bab384832f0c651ed7dcb6e5ccdbb78",
29 | "sha256:7d455c802727710e9dfa69b74ccaab04568386ca17b0ad36350b622cd34606fe",
30 | "sha256:8a84934bd818e14a17943de8099d41160da4a336bcc699bb4c394bbb9b94bd32",
31 | "sha256:9bee3212ba4f560af397b6d7146848c32a800652301843df06b9e8f68f0f7361",
32 | "sha256:9dfd5197852530294ecb5795c97a823839258dfd5eb9420233c7cfedec2058f2",
33 | "sha256:b160416adc0f012fb1f12588a5e6954889510f82f698e23ed4f4fa57f12a0647",
34 | "sha256:ba7209b608945b889457f949cc04c8e762bed4fe3fec88ae9a6b7765ae82e496",
35 | "sha256:cc0e028b209a5483b6846053d5fd7165f460a1f14774d79e632e75e7ae64b82b",
36 | "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d",
37 | "sha256:da7f0445b71db6d3a72462e04f36544b0de871289b0bc8a7cc87c0f5ec7079fa",
38 | "sha256:e2db6e85c057c16d0bd3b4d2b04f270a7467c147381e8fd73cbbe5bc719832be"
39 | ],
40 | "version": "==20.1.0"
41 | },
42 | "async-generator": {
43 | "hashes": [
44 | "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b",
45 | "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"
46 | ],
47 | "version": "==1.10"
48 | },
49 | "attrs": {
50 | "hashes": [
51 | "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6",
52 | "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"
53 | ],
54 | "version": "==20.3.0"
55 | },
56 | "backcall": {
57 | "hashes": [
58 | "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e",
59 | "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"
60 | ],
61 | "version": "==0.2.0"
62 | },
63 | "bleach": {
64 | "hashes": [
65 | "sha256:52b5919b81842b1854196eaae5ca29679a2f2e378905c346d3ca8227c2c66080",
66 | "sha256:9f8ccbeb6183c6e6cddea37592dfb0167485c1e3b13b3363bc325aa8bda3adbd"
67 | ],
68 | "version": "==3.2.1"
69 | },
70 | "cachetools": {
71 | "hashes": [
72 | "sha256:513d4ff98dd27f85743a8dc0e92f55ddb1b49e060c2d5961512855cda2c01a98",
73 | "sha256:bbaa39c3dede00175df2dc2b03d0cf18dd2d32a7de7beb68072d13043c9edb20"
74 | ],
75 | "version": "==4.1.1"
76 | },
77 | "certifi": {
78 | "hashes": [
79 | "sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd",
80 | "sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4"
81 | ],
82 | "version": "==2020.11.8"
83 | },
84 | "cffi": {
85 | "hashes": [
86 | "sha256:005f2bfe11b6745d726dbb07ace4d53f057de66e336ff92d61b8c7e9c8f4777d",
87 | "sha256:09e96138280241bd355cd585148dec04dbbedb4f46128f340d696eaafc82dd7b",
88 | "sha256:0b1ad452cc824665ddc682400b62c9e4f5b64736a2ba99110712fdee5f2505c4",
89 | "sha256:0ef488305fdce2580c8b2708f22d7785ae222d9825d3094ab073e22e93dfe51f",
90 | "sha256:15f351bed09897fbda218e4db5a3d5c06328862f6198d4fb385f3e14e19decb3",
91 | "sha256:22399ff4870fb4c7ef19fff6eeb20a8bbf15571913c181c78cb361024d574579",
92 | "sha256:23e5d2040367322824605bc29ae8ee9175200b92cb5483ac7d466927a9b3d537",
93 | "sha256:2791f68edc5749024b4722500e86303a10d342527e1e3bcac47f35fbd25b764e",
94 | "sha256:2f9674623ca39c9ebe38afa3da402e9326c245f0f5ceff0623dccdac15023e05",
95 | "sha256:3363e77a6176afb8823b6e06db78c46dbc4c7813b00a41300a4873b6ba63b171",
96 | "sha256:33c6cdc071ba5cd6d96769c8969a0531be2d08c2628a0143a10a7dcffa9719ca",
97 | "sha256:3b8eaf915ddc0709779889c472e553f0d3e8b7bdf62dab764c8921b09bf94522",
98 | "sha256:3cb3e1b9ec43256c4e0f8d2837267a70b0e1ca8c4f456685508ae6106b1f504c",
99 | "sha256:3eeeb0405fd145e714f7633a5173318bd88d8bbfc3dd0a5751f8c4f70ae629bc",
100 | "sha256:44f60519595eaca110f248e5017363d751b12782a6f2bd6a7041cba275215f5d",
101 | "sha256:4d7c26bfc1ea9f92084a1d75e11999e97b62d63128bcc90c3624d07813c52808",
102 | "sha256:529c4ed2e10437c205f38f3691a68be66c39197d01062618c55f74294a4a4828",
103 | "sha256:6642f15ad963b5092d65aed022d033c77763515fdc07095208f15d3563003869",
104 | "sha256:85ba797e1de5b48aa5a8427b6ba62cf69607c18c5d4eb747604b7302f1ec382d",
105 | "sha256:8f0f1e499e4000c4c347a124fa6a27d37608ced4fe9f7d45070563b7c4c370c9",
106 | "sha256:a624fae282e81ad2e4871bdb767e2c914d0539708c0f078b5b355258293c98b0",
107 | "sha256:b0358e6fefc74a16f745afa366acc89f979040e0cbc4eec55ab26ad1f6a9bfbc",
108 | "sha256:bbd2f4dfee1079f76943767fce837ade3087b578aeb9f69aec7857d5bf25db15",
109 | "sha256:bf39a9e19ce7298f1bd6a9758fa99707e9e5b1ebe5e90f2c3913a47bc548747c",
110 | "sha256:c11579638288e53fc94ad60022ff1b67865363e730ee41ad5e6f0a17188b327a",
111 | "sha256:c150eaa3dadbb2b5339675b88d4573c1be3cb6f2c33a6c83387e10cc0bf05bd3",
112 | "sha256:c53af463f4a40de78c58b8b2710ade243c81cbca641e34debf3396a9640d6ec1",
113 | "sha256:cb763ceceae04803adcc4e2d80d611ef201c73da32d8f2722e9d0ab0c7f10768",
114 | "sha256:cc75f58cdaf043fe6a7a6c04b3b5a0e694c6a9e24050967747251fb80d7bce0d",
115 | "sha256:d80998ed59176e8cba74028762fbd9b9153b9afc71ea118e63bbf5d4d0f9552b",
116 | "sha256:de31b5164d44ef4943db155b3e8e17929707cac1e5bd2f363e67a56e3af4af6e",
117 | "sha256:e66399cf0fc07de4dce4f588fc25bfe84a6d1285cc544e67987d22663393926d",
118 | "sha256:f0620511387790860b249b9241c2f13c3a80e21a73e0b861a2df24e9d6f56730",
119 | "sha256:f4eae045e6ab2bb54ca279733fe4eb85f1effda392666308250714e01907f394",
120 | "sha256:f92cdecb618e5fa4658aeb97d5eb3d2f47aa94ac6477c6daf0f306c5a3b9e6b1",
121 | "sha256:f92f789e4f9241cd262ad7a555ca2c648a98178a953af117ef7fad46aa1d5591"
122 | ],
123 | "version": "==1.14.3"
124 | },
125 | "chardet": {
126 | "hashes": [
127 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
128 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
129 | ],
130 | "version": "==3.0.4"
131 | },
132 | "decorator": {
133 | "hashes": [
134 | "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760",
135 | "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"
136 | ],
137 | "version": "==4.4.2"
138 | },
139 | "defusedxml": {
140 | "hashes": [
141 | "sha256:6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93",
142 | "sha256:f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5"
143 | ],
144 | "version": "==0.6.0"
145 | },
146 | "entrypoints": {
147 | "hashes": [
148 | "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
149 | "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
150 | ],
151 | "version": "==0.3"
152 | },
153 | "google-api-core": {
154 | "hashes": [
155 | "sha256:1bb3c485c38eacded8d685b1759968f6cf47dd9432922d34edb90359eaa391e2",
156 | "sha256:94d8c707d358d8d9e8b0045c42be20efb58433d308bd92cf748511c7825569c8"
157 | ],
158 | "version": "==1.23.0"
159 | },
160 | "google-auth": {
161 | "hashes": [
162 | "sha256:5176db85f1e7e837a646cd9cede72c3c404ccf2e3373d9ee14b2db88febad440",
163 | "sha256:b728625ff5dfce8f9e56a499c8a4eb51443a67f20f6d28b67d5774c310ec4b6b"
164 | ],
165 | "version": "==1.23.0"
166 | },
167 | "google-cloud-core": {
168 | "hashes": [
169 | "sha256:21afb70c1b0bce8eeb8abb5dca63c5fd37fc8aea18f4b6d60e803bd3d27e6b80",
170 | "sha256:75abff9056977809937127418323faa3917f32df68490704d39a4f0d492ebc2b"
171 | ],
172 | "version": "==1.4.3"
173 | },
174 | "google-cloud-storage": {
175 | "hashes": [
176 | "sha256:0b28536acab1d7e856a7a89bbfcad41f26f40b46af59786ca874ff0f94bbc0f9",
177 | "sha256:a7b5c326e7307a83fa1f1f0ef71aba9ad1f3a2bc6a768401e13fc02369fd8612"
178 | ],
179 | "index": "pypi",
180 | "version": "==1.28.1"
181 | },
182 | "google-resumable-media": {
183 | "hashes": [
184 | "sha256:97155236971970382b738921f978a6f86a7b5a0b0311703d991e065d3cb55773",
185 | "sha256:cdc64378dc9a7a7bf963a8d0c944c99b549dc0c195a9acbf1fcd465f380b9002"
186 | ],
187 | "version": "==0.5.1"
188 | },
189 | "googleapis-common-protos": {
190 | "hashes": [
191 | "sha256:560716c807117394da12cecb0a54da5a451b5cf9866f1d37e9a5e2329a665351",
192 | "sha256:c8961760f5aad9a711d37b675be103e0cc4e9a39327e0d6d857872f698403e24"
193 | ],
194 | "version": "==1.52.0"
195 | },
196 | "idna": {
197 | "hashes": [
198 | "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
199 | "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
200 | ],
201 | "version": "==2.10"
202 | },
203 | "importlib-metadata": {
204 | "hashes": [
205 | "sha256:77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da",
206 | "sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3"
207 | ],
208 | "markers": "python_version < '3.8'",
209 | "version": "==2.0.0"
210 | },
211 | "ipykernel": {
212 | "hashes": [
213 | "sha256:9b2652af1607986a1b231c62302d070bc0534f564c393a5d9d130db9abbbe89d",
214 | "sha256:d6fbba26dba3cebd411382bc484f7bc2caa98427ae0ddb4ab37fe8bfeb5c7dd3"
215 | ],
216 | "version": "==5.3.4"
217 | },
218 | "ipython": {
219 | "hashes": [
220 | "sha256:c987e8178ced651532b3b1ff9965925bfd445c279239697052561a9ab806d28f",
221 | "sha256:cbb2ef3d5961d44e6a963b9817d4ea4e1fa2eb589c371a470fed14d8d40cbd6a"
222 | ],
223 | "version": "==7.19.0"
224 | },
225 | "ipython-genutils": {
226 | "hashes": [
227 | "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8",
228 | "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"
229 | ],
230 | "version": "==0.2.0"
231 | },
232 | "jedi": {
233 | "hashes": [
234 | "sha256:86ed7d9b750603e4ba582ea8edc678657fb4007894a12bcf6f4bb97892f31d20",
235 | "sha256:98cc583fa0f2f8304968199b01b6b4b94f469a1f4a74c1560506ca2a211378b5"
236 | ],
237 | "version": "==0.17.2"
238 | },
239 | "jinja2": {
240 | "hashes": [
241 | "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0",
242 | "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"
243 | ],
244 | "version": "==2.11.2"
245 | },
246 | "jsonschema": {
247 | "hashes": [
248 | "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163",
249 | "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"
250 | ],
251 | "version": "==3.2.0"
252 | },
253 | "jupyter-client": {
254 | "hashes": [
255 | "sha256:49e390b36fe4b4226724704ea28d9fb903f1a3601b6882ce3105221cd09377a1",
256 | "sha256:c958d24d6eacb975c1acebb68ac9077da61b5f5c040f22f6849928ad7393b950"
257 | ],
258 | "version": "==6.1.7"
259 | },
260 | "jupyter-core": {
261 | "hashes": [
262 | "sha256:0a451c9b295e4db772bdd8d06f2f1eb31caeec0e81fbb77ba37d4a3024e3b315",
263 | "sha256:aa1f9496ab3abe72da4efe0daab0cb2233997914581f9a071e07498c6add8ed3"
264 | ],
265 | "version": "==4.7.0"
266 | },
267 | "jupyterlab-pygments": {
268 | "hashes": [
269 | "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008",
270 | "sha256:cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146"
271 | ],
272 | "version": "==0.1.2"
273 | },
274 | "markupsafe": {
275 | "hashes": [
276 | "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
277 | "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
278 | "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
279 | "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
280 | "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42",
281 | "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
282 | "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
283 | "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
284 | "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
285 | "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
286 | "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
287 | "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b",
288 | "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
289 | "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15",
290 | "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
291 | "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
292 | "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
293 | "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
294 | "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
295 | "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
296 | "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
297 | "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
298 | "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
299 | "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
300 | "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
301 | "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
302 | "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
303 | "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
304 | "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
305 | "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
306 | "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2",
307 | "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
308 | "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
309 | ],
310 | "version": "==1.1.1"
311 | },
312 | "mistune": {
313 | "hashes": [
314 | "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e",
315 | "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"
316 | ],
317 | "version": "==0.8.4"
318 | },
319 | "nbclient": {
320 | "hashes": [
321 | "sha256:01e2d726d16eaf2cde6db74a87e2451453547e8832d142f73f72fddcd4fe0250",
322 | "sha256:4d6b116187c795c99b9dba13d46e764d596574b14c296d60670c8dfe454db364"
323 | ],
324 | "version": "==0.5.1"
325 | },
326 | "nbconvert": {
327 | "hashes": [
328 | "sha256:39e9f977920b203baea0be67eea59f7b37a761caa542abe80f5897ce3cf6311d",
329 | "sha256:cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002"
330 | ],
331 | "version": "==6.0.7"
332 | },
333 | "nbformat": {
334 | "hashes": [
335 | "sha256:049af048ed76b95c3c44043620c17e56bc001329e07f83fec4f177f0e3d7b757",
336 | "sha256:276343c78a9660ab2a63c28cc33da5f7c58c092b3f3a40b6017ae2ce6689320d"
337 | ],
338 | "index": "pypi",
339 | "version": "==5.0.6"
340 | },
341 | "nest-asyncio": {
342 | "hashes": [
343 | "sha256:dbe032f3e9ff7f120e76be22bf6e7958e867aed1743e6894b8a9585fe8495cc9",
344 | "sha256:eaa09ef1353ebefae19162ad423eef7a12166bcc63866f8bff8f3635353cd9fa"
345 | ],
346 | "version": "==1.4.3"
347 | },
348 | "notebook": {
349 | "hashes": [
350 | "sha256:3db37ae834c5f3b6378381229d0e5dfcbfb558d08c8ce646b1ad355147f5e91d",
351 | "sha256:508cf9dad7cdb3188f1aa27017dc78179029dfe83814fc505329f689bc2ab50f"
352 | ],
353 | "index": "pypi",
354 | "version": "==6.1.5"
355 | },
356 | "packaging": {
357 | "hashes": [
358 | "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
359 | "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
360 | ],
361 | "version": "==20.4"
362 | },
363 | "pandocfilters": {
364 | "hashes": [
365 | "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb"
366 | ],
367 | "version": "==1.4.3"
368 | },
369 | "parso": {
370 | "hashes": [
371 | "sha256:97218d9159b2520ff45eb78028ba8b50d2bc61dcc062a9682666f2dc4bd331ea",
372 | "sha256:caba44724b994a8a5e086460bb212abc5a8bc46951bf4a9a1210745953622eb9"
373 | ],
374 | "version": "==0.7.1"
375 | },
376 | "pexpect": {
377 | "hashes": [
378 | "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937",
379 | "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"
380 | ],
381 | "markers": "sys_platform != 'win32'",
382 | "version": "==4.8.0"
383 | },
384 | "pickleshare": {
385 | "hashes": [
386 | "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca",
387 | "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"
388 | ],
389 | "version": "==0.7.5"
390 | },
391 | "prometheus-client": {
392 | "hashes": [
393 | "sha256:9da7b32f02439d8c04f7777021c304ed51d9ec180604700c1ba72a4d44dceb03",
394 | "sha256:b08c34c328e1bf5961f0b4352668e6c8f145b4a087e09b7296ef62cbe4693d35"
395 | ],
396 | "version": "==0.9.0"
397 | },
398 | "prompt-toolkit": {
399 | "hashes": [
400 | "sha256:25c95d2ac813909f813c93fde734b6e44406d1477a9faef7c915ff37d39c0a8c",
401 | "sha256:7debb9a521e0b1ee7d2fe96ee4bd60ef03c6492784de0547337ca4433e46aa63"
402 | ],
403 | "version": "==3.0.8"
404 | },
405 | "protobuf": {
406 | "hashes": [
407 | "sha256:0e247612fadda953047f53301a7b0407cb0c3cb4ae25a6fde661597a04039b3c",
408 | "sha256:0fc96785262042e4863b3f3b5c429d4636f10d90061e1840fce1baaf59b1a836",
409 | "sha256:1c51fda1bbc9634246e7be6016d860be01747354ed7015ebe38acf4452f470d2",
410 | "sha256:1d63eb389347293d8915fb47bee0951c7b5dab522a4a60118b9a18f33e21f8ce",
411 | "sha256:22bcd2e284b3b1d969c12e84dc9b9a71701ec82d8ce975fdda19712e1cfd4e00",
412 | "sha256:2a7e2fe101a7ace75e9327b9c946d247749e564a267b0515cf41dfe450b69bac",
413 | "sha256:43b554b9e73a07ba84ed6cf25db0ff88b1e06be610b37656e292e3cbb5437472",
414 | "sha256:4b74301b30513b1a7494d3055d95c714b560fbb630d8fb9956b6f27992c9f980",
415 | "sha256:4e75105c9dfe13719b7293f75bd53033108f4ba03d44e71db0ec2a0e8401eafd",
416 | "sha256:5b7a637212cc9b2bcf85dd828b1178d19efdf74dbfe1ddf8cd1b8e01fdaaa7f5",
417 | "sha256:5e9806a43232a1fa0c9cf5da8dc06f6910d53e4390be1fa06f06454d888a9142",
418 | "sha256:629b03fd3caae7f815b0c66b41273f6b1900a579e2ccb41ef4493a4f5fb84f3a",
419 | "sha256:72230ed56f026dd664c21d73c5db73ebba50d924d7ba6b7c0d81a121e390406e",
420 | "sha256:86a75477addde4918e9a1904e5c6af8d7b691f2a3f65587d73b16100fbe4c3b2",
421 | "sha256:8971c421dbd7aad930c9bd2694122f332350b6ccb5202a8b7b06f3f1a5c41ed5",
422 | "sha256:9616f0b65a30851e62f1713336c931fcd32c057202b7ff2cfbfca0fc7d5e3043",
423 | "sha256:b0d5d35faeb07e22a1ddf8dce620860c8fe145426c02d1a0ae2688c6e8ede36d",
424 | "sha256:ecc33531a213eee22ad60e0e2aaea6c8ba0021f0cce35dbf0ab03dee6e2a23a1"
425 | ],
426 | "version": "==3.14.0"
427 | },
428 | "ptyprocess": {
429 | "hashes": [
430 | "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0",
431 | "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f"
432 | ],
433 | "markers": "os_name != 'nt'",
434 | "version": "==0.6.0"
435 | },
436 | "pyasn1": {
437 | "hashes": [
438 | "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d",
439 | "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"
440 | ],
441 | "version": "==0.4.8"
442 | },
443 | "pyasn1-modules": {
444 | "hashes": [
445 | "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e",
446 | "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"
447 | ],
448 | "version": "==0.2.8"
449 | },
450 | "pycparser": {
451 | "hashes": [
452 | "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
453 | "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
454 | ],
455 | "version": "==2.20"
456 | },
457 | "pygments": {
458 | "hashes": [
459 | "sha256:381985fcc551eb9d37c52088a32914e00517e57f4a21609f48141ba08e193fa0",
460 | "sha256:88a0bbcd659fcb9573703957c6b9cff9fab7295e6e76db54c9d00ae42df32773"
461 | ],
462 | "version": "==2.7.2"
463 | },
464 | "pyparsing": {
465 | "hashes": [
466 | "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
467 | "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
468 | ],
469 | "version": "==2.4.7"
470 | },
471 | "pyrsistent": {
472 | "hashes": [
473 | "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"
474 | ],
475 | "version": "==0.17.3"
476 | },
477 | "python-dateutil": {
478 | "hashes": [
479 | "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
480 | "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
481 | ],
482 | "version": "==2.8.1"
483 | },
484 | "pytz": {
485 | "hashes": [
486 | "sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268",
487 | "sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd"
488 | ],
489 | "version": "==2020.4"
490 | },
491 | "pyzmq": {
492 | "hashes": [
493 | "sha256:03638e46d486dd1c118e03c8bf9c634bdcae679600eac6573ae1e54906de7c2f",
494 | "sha256:0af84f34f27b5c6a0e906c648bdf46d4caebf9c8e6e16db0728f30a58141cad6",
495 | "sha256:0e554fd390021edbe0330b67226325a820b0319c5b45e1b0a59bf22ccc36e793",
496 | "sha256:1e9b75a119606732023a305d1c214146c09a91f8116f6aff3e8b7d0a60b6f0ff",
497 | "sha256:225774a48ed7414c0395335e7123ef8c418dbcbe172caabdc2496133b03254c2",
498 | "sha256:2742e380d186673eee6a570ef83d4568741945434ba36d92b98d36cdbfedbd44",
499 | "sha256:309d763d89ec1845c0e0fa14e1fb6558fd8c9ef05ed32baec27d7a8499cc7bb0",
500 | "sha256:46250789730489009fe139cbf576679557c070a6a3628077d09a4153d52fd381",
501 | "sha256:4d9259a5eb3f71abbaf61f165cacf42240bfeea3783bebd8255341abdfe206f1",
502 | "sha256:523d542823cabb94065178090e05347bd204365f6e7cb260f0071c995d392fc2",
503 | "sha256:5efe02bdcc5eafcac0aab531292294298f0ab8d28ed43be9e507d0e09173d1a4",
504 | "sha256:63ee08e35be72fdd7568065a249a5b5cf51a2e8ab6ee63cf9f73786fcb9e710b",
505 | "sha256:6e24907857c80dc67692e31f5bf3ad5bf483ee0142cec95b3d47e2db8c43bdda",
506 | "sha256:7113eb93dcd0a5750c65d123ed0099e036a3a3f2dcb48afedd025ffa125c983b",
507 | "sha256:824ad5888331aadeac772bce27e1c2fbcab82fade92edbd234542c4e12f0dca9",
508 | "sha256:895695be380f0f85d2e3ec5ccf68a93c92d45bd298567525ad5633071589872c",
509 | "sha256:b62113eeb9a0649cebed9b21fd578f3a0175ef214a2a91dcb7b31bbf55805295",
510 | "sha256:bc7dd697356b31389d5118b9bcdef3e8d8079e8181800c4e8d72dccd56e1ff68",
511 | "sha256:bf755905a7d30d2749079611b9a89924c1f2da2695dc09ce221f42122c9808e3",
512 | "sha256:c63fafd2556d218368c51d18588f8e6f8d86d09d493032415057faf6de869b34",
513 | "sha256:c95dda497a7c1b1e734b5e8353173ca5dd7b67784d8821d13413a97856588057",
514 | "sha256:cc09c5cd1a4332611c8564d65e6a432dc6db3e10793d0254da9fa1e31d9ffd6d",
515 | "sha256:cfa54a162a7b32641665e99b2c12084555afe9fc8fe80ec8b2f71a57320d10e1",
516 | "sha256:d81184489369ec325bd50ba1c935361e63f31f578430b9ad95471899361a8253",
517 | "sha256:d92c7f41a53ece82b91703ea433c7d34143248cf0cead33aa11c5fc621c764bf",
518 | "sha256:f0beef935efe78a63c785bb21ed56c1c24448511383e3994927c8bb2caf5e714",
519 | "sha256:f110a4d3f8f01209eec304ed542f6c8054cce9b0f16dfe3d571e57c290e4e133"
520 | ],
521 | "version": "==20.0.0"
522 | },
523 | "requests": {
524 | "hashes": [
525 | "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8",
526 | "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998"
527 | ],
528 | "version": "==2.25.0"
529 | },
530 | "rsa": {
531 | "hashes": [
532 | "sha256:109ea5a66744dd859bf16fe904b8d8b627adafb9408753161e766a92e7d681fa",
533 | "sha256:6166864e23d6b5195a5cfed6cd9fed0fe774e226d8f854fcb23b7bbef0350233"
534 | ],
535 | "markers": "python_version >= '3.5'",
536 | "version": "==4.6"
537 | },
538 | "send2trash": {
539 | "hashes": [
540 | "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2",
541 | "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"
542 | ],
543 | "version": "==1.5.0"
544 | },
545 | "six": {
546 | "hashes": [
547 | "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
548 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
549 | ],
550 | "version": "==1.15.0"
551 | },
552 | "terminado": {
553 | "hashes": [
554 | "sha256:3da72a155b807b01c9e8a5babd214e052a0a45a975751da3521a1c3381ce6d76",
555 | "sha256:c55f025beb06c2e2669f7ba5a04f47bb3304c30c05842d4981d8f0fc9ab3b4e3"
556 | ],
557 | "version": "==0.9.1"
558 | },
559 | "testpath": {
560 | "hashes": [
561 | "sha256:60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e",
562 | "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4"
563 | ],
564 | "version": "==0.4.4"
565 | },
566 | "tornado": {
567 | "hashes": [
568 | "sha256:0fe2d45ba43b00a41cd73f8be321a44936dc1aba233dee979f17a042b83eb6dc",
569 | "sha256:22aed82c2ea340c3771e3babc5ef220272f6fd06b5108a53b4976d0d722bcd52",
570 | "sha256:2c027eb2a393d964b22b5c154d1a23a5f8727db6fda837118a776b29e2b8ebc6",
571 | "sha256:5217e601700f24e966ddab689f90b7ea4bd91ff3357c3600fa1045e26d68e55d",
572 | "sha256:5618f72e947533832cbc3dec54e1dffc1747a5cb17d1fd91577ed14fa0dc081b",
573 | "sha256:5f6a07e62e799be5d2330e68d808c8ac41d4a259b9cea61da4101b83cb5dc673",
574 | "sha256:c58d56003daf1b616336781b26d184023ea4af13ae143d9dda65e31e534940b9",
575 | "sha256:c952975c8ba74f546ae6de2e226ab3cc3cc11ae47baf607459a6728585bb542a",
576 | "sha256:c98232a3ac391f5faea6821b53db8db461157baa788f5d6222a193e9456e1740"
577 | ],
578 | "index": "pypi",
579 | "version": "==6.0.4"
580 | },
581 | "traitlets": {
582 | "hashes": [
583 | "sha256:70b4c6a1d9019d7b4f6846832288f86998aa3b9207c6821f3578a6a6a467fe44",
584 | "sha256:d023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7"
585 | ],
586 | "index": "pypi",
587 | "version": "==4.3.3"
588 | },
589 | "urllib3": {
590 | "hashes": [
591 | "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08",
592 | "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"
593 | ],
594 | "version": "==1.26.2"
595 | },
596 | "wcwidth": {
597 | "hashes": [
598 | "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784",
599 | "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"
600 | ],
601 | "version": "==0.2.5"
602 | },
603 | "webencodings": {
604 | "hashes": [
605 | "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78",
606 | "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"
607 | ],
608 | "version": "==0.5.1"
609 | },
610 | "zipp": {
611 | "hashes": [
612 | "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108",
613 | "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"
614 | ],
615 | "version": "==3.4.0"
616 | }
617 | },
618 | "develop": {}
619 | }
620 |
--------------------------------------------------------------------------------