├── tests
│   ├── __init__.py
│   ├── demo
│   │   ├── demo.json
│   │   ├── demo.html
│   │   ├── demo_file.py
│   │   ├── demo.md
│   │   ├── dog.jpeg
│   │   ├── demo.svg
│   │   ├── demo_runner.py
│   │   ├── demo.csv
│   │   ├── demo_res_json.ipynb
│   │   ├── demo_res_md.ipynb
│   │   ├── demo_res_text.ipynb
│   │   ├── demo_res_html.ipynb
│   │   ├── demo_res_svg.ipynb
│   │   ├── demo_webhook.ipynb
│   │   ├── demo_res_image.ipynb
│   │   ├── demo_res_file.ipynb
│   │   └── demo_scheduler.ipynb
│   ├── test_assets.py
│   ├── test_proxy.py
│   ├── test_notifications.py
│   ├── session_ids.json
│   ├── test_notebooks.py
│   ├── conftest.py
│   ├── test_manager.py
│   ├── test_secret.py
│   ├── test_logger.py
│   ├── generate_df_csv.py
│   ├── test_scheduler.py
│   └── test_jobs.py
├── .dockerignore
├── extensions
│   └── naasai
│       ├── style
│       │   ├── base.css
│       │   ├── index.js
│       │   └── index.css
│       ├── .eslintignore
│       ├── .prettierignore
│       ├── .prettierrc
│       ├── CHANGELOG.md
│       ├── install.json
│       ├── schema
│       │   └── plugin.json
│       ├── naasai
│       │   ├── __init__.py
│       │   └── _version.py
│       ├── MANIFEST.in
│       ├── pyproject.toml
│       ├── tsconfig.json
│       ├── .eslintrc.js
│       ├── LICENSE
│       ├── .github
│       │   └── workflows
│       │       ├── build.yml
│       │       └── check-release.yml
│       ├── .gitignore
│       ├── RELEASE.md
│       ├── README.md
│       ├── package.json
│       └── setup.py
├── naas
│   ├── runner
│   │   ├── controllers
│   │   │   ├── __init__.py
│   │   │   ├── manager.py
│   │   │   ├── auth.py
│   │   │   ├── version.py
│   │   │   ├── scheduler.py
│   │   │   ├── env.py
│   │   │   ├── timezone.py
│   │   │   ├── credits.py
│   │   │   ├── logs.py
│   │   │   ├── secret.py
│   │   │   ├── downloader.py
│   │   │   ├── performance.py
│   │   │   ├── assets.py
│   │   │   └── notebooks.py
│   │   ├── __init__.py
│   │   ├── assets
│   │   │   ├── naas_up.png
│   │   │   ├── naas_down.png
│   │   │   └── naas_logo.png
│   │   ├── __main__.py
│   │   ├── proxy.py
│   │   ├── logger.py
│   │   ├── sqlite_table.py
│   │   └── custom_papermill.py
│   ├── secret.py
│   ├── dependency.py
│   ├── ntypes.py
│   ├── callback.py
│   ├── assets.py
│   ├── onboarding.py
│   └── scheduler.py
├── windows_stop.bat
├── setup.cfg
├── windows_start.bat
├── custom
│   ├── naas-fav.png
│   ├── naas_logo_n.ico
│   ├── set_workspace.json
│   ├── set_workspace_browser.json
│   ├── overrides.json
│   └── jupyter_server_config.py
├── pytest.ini
├── .snyk
├── .vscode
│   ├── settings.json
│   └── launch.json
├── .cz.toml
├── .coveragerc
├── scripts
│   ├── install_supp
│   └── customize
├── .pre-commit-config.yaml
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── bump_version.yml
│       ├── bump_version_dev.yml
│       ├── pull_request.yml
│       ├── codeql-analysis.yml
│       ├── deploy_packages.yml
│       └── deploy_docker.yml
├── Makefile
├── SECURITY.md
├── binder
│   └── Dockerfile
├── .docker
│   └── dev
│       ├── docker-entrypoint.sh
│       └── Dockerfile.dev
├── docker-compose.yaml
├── images
│   ├── sentry.svg
│   └── canny.svg
├── dev
│   ├── postgres
│   │   └── init_jupyter.sh
│   └── docker-compose.yml
├── Dockerfile
├── .gitignore
├── setup.py
└── docs
    ├── basic_docs.md
    └── basic_docs.ipynb
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | file_sharing
--------------------------------------------------------------------------------
/extensions/naasai/style/base.css:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/naas/runner/controllers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/demo/demo.json:
--------------------------------------------------------------------------------
1 | {"foo": "bar2"}
--------------------------------------------------------------------------------
/windows_stop.bat:
--------------------------------------------------------------------------------
1 | docker-compose down
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 140
--------------------------------------------------------------------------------
/tests/demo/demo.html:
--------------------------------------------------------------------------------
1 | <h1>Hello world</h1>
--------------------------------------------------------------------------------
/extensions/naasai/style/index.js:
--------------------------------------------------------------------------------
1 | import './base.css';
2 |
--------------------------------------------------------------------------------
/tests/demo/demo_file.py:
--------------------------------------------------------------------------------
1 | print("This is a test file")
2 |
--------------------------------------------------------------------------------
/extensions/naasai/style/index.css:
--------------------------------------------------------------------------------
1 |
2 | @import url('base.css');
3 |
--------------------------------------------------------------------------------
/tests/demo/demo.md:
--------------------------------------------------------------------------------
1 | # HELLO world
2 |
3 | this is a markdown test
4 |
--------------------------------------------------------------------------------
/windows_start.bat:
--------------------------------------------------------------------------------
1 | docker-compose up -d
2 | start "" http://127.0.0.1:8888/lab?token=naas
--------------------------------------------------------------------------------
/extensions/naasai/.eslintignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | dist
3 | coverage
4 | **/*.d.ts
5 | tests
6 |
--------------------------------------------------------------------------------
/custom/naas-fav.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Chisomnwa/naas-Jupyter-Slideshow/main/custom/naas-fav.png
--------------------------------------------------------------------------------
/tests/demo/dog.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Chisomnwa/naas-Jupyter-Slideshow/main/tests/demo/dog.jpeg
--------------------------------------------------------------------------------
/custom/naas_logo_n.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Chisomnwa/naas-Jupyter-Slideshow/main/custom/naas_logo_n.ico
--------------------------------------------------------------------------------
/extensions/naasai/.prettierignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | **/node_modules
3 | **/lib
4 | **/package.json
5 | naasai
6 |
--------------------------------------------------------------------------------
/extensions/naasai/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "singleQuote": true,
3 | "trailingComma": "none",
4 | "arrowParens": "avoid"
5 | }
6 |
--------------------------------------------------------------------------------
/naas/runner/__init__.py:
--------------------------------------------------------------------------------
1 | from .runner import Runner # noqa: F401
2 | from naas.runner.env_var import n_env # noqa: F401
3 |
--------------------------------------------------------------------------------
/naas/runner/assets/naas_up.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Chisomnwa/naas-Jupyter-Slideshow/main/naas/runner/assets/naas_up.png
--------------------------------------------------------------------------------
/naas/runner/assets/naas_down.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Chisomnwa/naas-Jupyter-Slideshow/main/naas/runner/assets/naas_down.png
--------------------------------------------------------------------------------
/naas/runner/assets/naas_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Chisomnwa/naas-Jupyter-Slideshow/main/naas/runner/assets/naas_logo.png
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | log_cli_level = INFO
3 |
4 | testpaths = tests
5 |
6 | filterwarnings =
7 | ignore::DeprecationWarning
--------------------------------------------------------------------------------
/extensions/naasai/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/tests/demo/demo.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.snyk:
--------------------------------------------------------------------------------
1 | # Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
2 | version: v1.14.0
3 | language-settings:
4 |   python: "3.6.2"
5 |
--------------------------------------------------------------------------------
/extensions/naasai/install.json:
--------------------------------------------------------------------------------
1 | {
2 | "packageManager": "python",
3 | "packageName": "naasai",
4 | "uninstallInstructions": "Use your Python package manager (pip, conda, etc.) to uninstall the package naasai"
5 | }
6 |
--------------------------------------------------------------------------------
/extensions/naasai/schema/plugin.json:
--------------------------------------------------------------------------------
1 | {
2 | "jupyter.lab.shortcuts": [],
3 | "title": "naasai",
4 | "description": "naasai settings.",
5 | "type": "object",
6 | "properties": {},
7 | "additionalProperties": false
8 | }
--------------------------------------------------------------------------------
/tests/test_assets.py:
--------------------------------------------------------------------------------
1 | import pytest # noqa: F401
2 | import os
3 |
4 | user_folder_name = "test_user_folder"
5 |
6 | os.environ["JUPYTER_SERVER_ROOT"] = os.path.join(os.getcwd(), user_folder_name)
7 |
8 | # TODO find a way to test it
9 |
--------------------------------------------------------------------------------
/tests/test_proxy.py:
--------------------------------------------------------------------------------
1 | import pytest # noqa: F401
2 | import os
3 |
4 | user_folder_name = "test_user_folder"
5 |
6 | os.environ["JUPYTER_SERVER_ROOT"] = os.path.join(os.getcwd(), user_folder_name)
7 |
8 | # TODO find a way to test it
9 |
--------------------------------------------------------------------------------
/tests/test_notifications.py:
--------------------------------------------------------------------------------
1 | import pytest # noqa: F401
2 | import os
3 |
4 | user_folder_name = "test_user_folder"
5 |
6 | os.environ["JUPYTER_SERVER_ROOT"] = os.path.join(os.getcwd(), user_folder_name)
7 |
8 | # TODO find a way to test it
9 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "conventionalCommits.scopes": [
3 | "manager",
4 | "runner",
5 | "notification",
6 | "api",
7 | "binder",
8 | "scheduler"
9 | ],
10 | "jupyter.jupyterServerType": "local"
11 | }
--------------------------------------------------------------------------------
/.cz.toml:
--------------------------------------------------------------------------------
1 | [tool.commitizen]
2 | name = "cz_conventional_commits"
3 | tag_format = "$major.$minor.$patch$prerelease"
4 | version = "2.6.3"
5 | version_files = [
6 | "naas/__init__.py",
7 | "naas/runner/runner.py",
8 | ".cz.toml",
9 | "setup.py:version",
10 | "Dockerfile:NAAS_VERSION"
11 | ]
12 |
13 |
--------------------------------------------------------------------------------
/tests/demo/demo_runner.py:
--------------------------------------------------------------------------------
1 | from naas.runner import Runner
2 | import os
3 |
4 | path_srv_root = os.path.join(os.getcwd(), "test")
5 |
6 | runner = Runner(
7 | path=path_srv_root,
8 | port=5000,
9 | user="joyvan@gmail.com",
10 | public="localhost:5000",
11 | proxy="proxy:5000",
12 | )
13 | runner.start()
14 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [report]
2 | exclude_lines =
3 | pragma: no cover
4 | print
5 | def __repr__
6 | if self.debug:
7 | if settings.DEBUG
8 | raise AssertionError
9 | raise NotImplementedError
10 | if 0:
11 | if __name__ == .__main__.:
12 | class .*\bProtocol\):
13 | warnings.warn
14 | self.deprecatedPrint()
15 | display
16 | enterprise_gateway
17 |
--------------------------------------------------------------------------------
/scripts/install_supp:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 |
4 | if [[ $NAAS_INSTALL_SUPP == "yes" ]]; then
5 | cd /home/$NB_USER
6 | git clone https://github.com/jupyter-naas/drivers.git
7 | cd /home/$NB_USER/drivers
8 | pip install -e '.[dev]'
9 | cd /home/$NB_USER
10 | git clone https://github.com/jupyter-naas/awesome-notebooks.git
11 | else
12 | echo "Skipping supplementary install"
13 | fi
14 |
--------------------------------------------------------------------------------
/extensions/naasai/naasai/__init__.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 |
4 | from ._version import __version__
5 |
6 |
7 | HERE = Path(__file__).parent.resolve()
8 |
9 |
10 | with (HERE / "labextension" / "package.json").open() as fid:
11 | data = json.load(fid)
12 |
13 |
14 | def _jupyter_labextension_paths():
15 | return [{
16 | "src": "labextension",
17 | "dest": data["name"]
18 | }]
19 |
20 |
--------------------------------------------------------------------------------
/tests/session_ids.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "type": "notebook",
4 | "kernel": {"id": "b1e19209-e251-4115-819e-7ab5bc4232b7"},
5 | "notebook": {"path":"MAIN_DIR/TEST_DIR1/ANOTHER_DIR1"}
6 | },
7 | {
8 | "type": "notebook",
9 | "kernel": {"id": "100101"},
10 | "notebook": {"path":"TEST_DIR2/ANOTHER_DIR2"}
11 | },
12 | {
13 | "type": "terminal",
14 | "kernel": {"id": "100101"}
15 | }
16 | ]
17 |
--------------------------------------------------------------------------------
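Note on tests/session_ids.json: the MAIN_DIR/TEST_DIR* segments are placeholders that a test is expected to substitute before use. A minimal sketch of loading the fixture and keeping only the notebook sessions, assuming a simple string substitution (the real substitution scheme is not shown in this file):

    import json

    def load_notebook_sessions(path, main_dir="/tmp/main"):
        """Load the fixture and keep only notebook sessions (assumed usage)."""
        with open(path) as f:
            sessions = json.load(f)
        notebooks = [s for s in sessions if s["type"] == "notebook"]
        for s in notebooks:
            # MAIN_DIR is a placeholder in the fixture; the replacement
            # value here is an assumption for illustration.
            s["notebook"]["path"] = s["notebook"]["path"].replace("MAIN_DIR", main_dir)
        return notebooks

    print(load_notebook_sessions("tests/session_ids.json"))

--------------------------------------------------------------------------------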
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | // Use IntelliSense to learn about possible attributes.
3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 | "version": "0.2.0",
6 | "configurations": [
7 | {
8 | "name": "Python: Module",
9 | "type": "python",
10 | "request": "launch",
11 | "module": "naas"
12 | }
13 | ]
14 | }
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - hooks:
3 | - id: black
4 | language_version: python3.8
5 | repo: https://github.com/ambv/black
6 | rev: stable
7 | - hooks:
8 | - id: flake8
9 | language_version: python3.8
10 | repo: https://github.com/pre-commit/pre-commit-hooks
11 | rev: v1.2.3
12 | - hooks:
13 | - id: commitizen
14 | language_version: python3.8
15 | stages:
16 | - commit-msg
17 | repo: https://github.com/commitizen-tools/commitizen
18 | rev: master
19 |
--------------------------------------------------------------------------------
/extensions/naasai/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include *.md
3 | include pyproject.toml
4 |
5 | include package.json
6 | include install.json
7 | include ts*.json
8 | include yarn.lock
9 |
10 | graft naasai/labextension
11 |
12 | # Javascript files
13 | graft src
14 | graft style
15 | prune **/node_modules
16 | prune lib
17 | prune binder
18 |
19 | # Patterns to exclude from any directory
20 | global-exclude *~
21 | global-exclude *.pyc
22 | global-exclude *.pyo
23 | global-exclude .git
24 | global-exclude .ipynb_checkpoints
25 |
--------------------------------------------------------------------------------
/tests/demo/demo.csv:
--------------------------------------------------------------------------------
1 | "Month", "Average", "2005", "2006", "2007", "2008", "2009", "2010", "2011", "2012", "2013", "2014", "2015"
2 | "May", 0.1, 0, 0, 1, 1, 0, 0, 0, 2, 0, 0, 0
3 | "Jun", 0.5, 2, 1, 1, 0, 0, 1, 1, 2, 2, 0, 1
4 | "Jul", 0.7, 5, 1, 1, 2, 0, 1, 3, 0, 2, 2, 1
5 | "Aug", 2.3, 6, 3, 2, 4, 4, 4, 7, 8, 2, 2, 3
6 | "Sep", 3.5, 6, 4, 7, 4, 2, 8, 5, 2, 5, 2, 5
7 | "Oct", 2.0, 8, 0, 1, 3, 2, 5, 1, 5, 2, 3, 0
8 | "Nov", 0.5, 3, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1
9 | "Dec", 0.0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1
10 |
--------------------------------------------------------------------------------
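Note on tests/demo/demo.csv: every field after the first is preceded by a space, so the quoted headers only parse cleanly if that space is skipped. A minimal sketch with pandas (assuming pandas is available in the dev environment):

    import pandas as pd

    # skipinitialspace drops the blank that follows each comma, so the quoted
    # headers ("Average", "2005", ...) come out as plain column names.
    df = pd.read_csv("tests/demo/demo.csv", skipinitialspace=True)
    print(df.columns.tolist())   # ['Month', 'Average', '2005', ..., '2015']
    print(df.loc[df["Month"] == "Aug", "Average"].item())  # 2.3

--------------------------------------------------------------------------------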
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "pip" # See documentation for possible values
9 | directory: "/" # Location of package manifests
10 | schedule:
11 | interval: "daily"
12 |
--------------------------------------------------------------------------------
/extensions/naasai/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["jupyter_packaging~=0.10,<2", "jupyterlab~=3.1"]
3 | build-backend = "jupyter_packaging.build_api"
4 |
5 | [tool.jupyter-packaging.options]
6 | skip-if-exists = ["naasai/labextension/static/style.js"]
7 | ensured-targets = ["naasai/labextension/static/style.js", "naasai/labextension/package.json"]
8 |
9 | [tool.jupyter-packaging.builder]
10 | factory = "jupyter_packaging.npm_builder"
11 |
12 | [tool.jupyter-packaging.build-args]
13 | build_cmd = "build:prod"
14 | npm = ["jlpm"]
15 |
16 | [tool.check-manifest]
17 | ignore = ["naasai/labextension/**", "yarn.lock", ".*", "package-lock.json"]
18 |
--------------------------------------------------------------------------------
/extensions/naasai/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "allowSyntheticDefaultImports": true,
4 | "composite": true,
5 | "declaration": true,
6 | "esModuleInterop": true,
7 | "incremental": true,
8 | "jsx": "react",
9 | "module": "esnext",
10 | "moduleResolution": "node",
11 | "noEmitOnError": true,
12 | "noImplicitAny": true,
13 | "noUnusedLocals": true,
14 | "preserveWatchOutput": true,
15 | "resolveJsonModule": true,
16 | "outDir": "lib",
17 | "rootDir": "src",
18 | "strict": true,
19 | "strictNullChecks": true,
20 | "target": "es2017",
21 | "types": []
22 | },
23 | "include": ["src/*"]
24 | }
25 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | run:
2 | docker-compose up
3 |
4 | sh:
5 | docker-compose exec naas /bin/bash
6 |
7 | build:
8 | docker-compose down
9 | docker-compose build
10 |
11 | run-bg:
12 | docker-compose up -d
13 |
14 | stop:
15 | docker-compose stop
16 |
17 | down:
18 | docker-compose down
19 |
20 | logs:
21 | docker-compose logs -f
22 |
23 | dep-update:
24 | echo "Updating 'naas' dependencies."
25 | docker-compose exec naas pip install -e '/home/ftp/naas[dev]'
26 | echo "Updating 'drivers' dependencies."
27 | docker-compose exec naas pip install -e '/home/ftp/drivers'
28 |
29 | extension-watch:
30 | docker-compose exec naas /bin/bash -c 'cd /home/ftp/naas/extensions/naasai && jlpm watch'
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | We only add security updates to the latest MAJOR.MINOR version of the project. No security updates are backported to previous versions. If you
6 | want to be up to date on security patches, make sure your Naas image is up to date with `jupyter-naas/naas:latest`.
7 |
8 | ## Reporting a Vulnerability
9 |
10 | If you've found a security vulnerability in the Naas codebase, you can disclose it responsibly by sending a summary to security@naas.ai.
11 | We will review the potential threat and fix it as fast as we can. We are incredibly thankful to people who disclose vulnerabilities; unfortunately, we do not
12 | have a bounty program in place yet.
13 |
--------------------------------------------------------------------------------
/.github/workflows/bump_version.yml:
--------------------------------------------------------------------------------
1 | name: Bump version
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | bump-version:
10 | if: "!startsWith(github.event.head_commit.message, 'bump:')"
11 | runs-on: ubuntu-latest
12 | name: "Bump version and create changelog with commitizen"
13 | steps:
14 | - name: Check out
15 | uses: actions/checkout@v2
16 | with:
17 | fetch-depth: 0
18 | token: '${{ secrets.PERSONAL_ACCESS_TOKEN }}'
19 | - name: Create bump and changelog
20 | uses: commitizen-tools/commitizen-action@0.7.0
21 | with:
22 | github_token: '${{ secrets.PERSONAL_ACCESS_TOKEN }}'
23 | branch: 'main'
--------------------------------------------------------------------------------
/naas/runner/controllers/manager.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | from sanic import response
3 | import os
4 |
5 |
6 | class ManagerController(HTTPMethodView):
7 | __path_lib_files = None
8 | __assets_folder = "assets"
9 | __manager_html = "manager.html"
10 |
11 | def __init__(self, path_assets, *args, **kwargs):
12 | super(ManagerController, self).__init__(*args, **kwargs)
13 | self.__path_lib_files = path_assets
14 |
15 | async def get(self, request):
16 | return await response.file(
17 | os.path.join(
18 | self.__path_lib_files, self.__assets_folder, self.__manager_html
19 | ),
20 | headers={"Cache-Control": "no-cache"},
21 | )
22 |
--------------------------------------------------------------------------------
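Note on controllers/manager.py above: ManagerController receives the assets directory through Sanic's class-based-view constructor arguments. A minimal wiring sketch (the route path and assets location are assumptions; the real registration lives in runner.py, which is not included in this dump):

    from sanic import Sanic
    from naas.runner.controllers.manager import ManagerController

    app = Sanic("naas_runner_sketch")

    # as_view() forwards its arguments to ManagerController.__init__,
    # so the assets path ends up stored on the view instance.
    app.add_route(ManagerController.as_view("/path/to/naas/runner"), "/")

--------------------------------------------------------------------------------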
/extensions/naasai/naasai/_version.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 |
4 | __all__ = ["__version__"]
5 |
6 | def _fetchVersion():
7 | HERE = Path(__file__).parent.resolve()
8 |
9 | for settings in HERE.rglob("package.json"):
10 | try:
11 | with settings.open() as f:
12 | version = json.load(f)["version"]
13 | return (
14 | version.replace("-alpha.", "a")
15 | .replace("-beta.", "b")
16 | .replace("-rc.", "rc")
17 | )
18 | except FileNotFoundError:
19 | pass
20 |
21 | raise FileNotFoundError(f"Could not find package.json under dir {HERE!s}")
22 |
23 | __version__ = _fetchVersion()
24 |
--------------------------------------------------------------------------------
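Note on _version.py above: _fetchVersion maps npm-style prerelease tags to PEP 440 ones with plain string replaces. A quick illustration of the mapping (version numbers are made up):

    # "-alpha." -> "a", "-beta." -> "b", "-rc." -> "rc", as in _fetchVersion
    for npm, pep440 in [
        ("0.1.0-alpha.2", "0.1.0a2"),
        ("0.1.0-beta.1", "0.1.0b1"),
        ("1.0.0-rc.3", "1.0.0rc3"),
    ]:
        out = npm.replace("-alpha.", "a").replace("-beta.", "b").replace("-rc.", "rc")
        assert out == pep440, (out, pep440)

--------------------------------------------------------------------------------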
/.github/workflows/bump_version_dev.yml:
--------------------------------------------------------------------------------
1 | name: Bump version
2 |
3 | on:
4 | push:
5 | branches:
6 | - dev
7 |
8 | jobs:
9 | bump-version:
10 | if: "!startsWith(github.event.head_commit.message, 'bump:')"
11 | runs-on: ubuntu-latest
12 | name: "Bump version and create changelog with commitizen"
13 | steps:
14 | - name: Check out
15 | uses: actions/checkout@v2
16 | with:
17 | fetch-depth: 0
18 | token: '${{ secrets.PERSONAL_ACCESS_TOKEN }}'
19 | - name: Create bump and changelog
20 | uses: commitizen-tools/commitizen-action@0.3.0
21 | with:
22 | prerelease: 'beta'
23 | branch: 'dev'
24 | github_token: '${{ secrets.PERSONAL_ACCESS_TOKEN }}'
25 |
--------------------------------------------------------------------------------
/naas/runner/controllers/auth.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | from naas_drivers import naasauth
3 | from sanic.response import json
4 | import os
5 |
6 | TOKEN = os.environ.get("PROD_JUPYTERHUB_API_TOKEN", None)
7 |
8 |
9 | class AuthController(HTTPMethodView):
10 | __logger = None
11 |
12 | def __init__(self, logger, *args, **kwargs):
13 | super(AuthController, self).__init__(*args, **kwargs)
14 | self.__logger = logger
15 |
16 | class UserController(HTTPMethodView):
17 | __logger = None
18 |
19 | def __init__(self, logger, *args, **kwargs):
20 | super(AuthController.UserController, self).__init__(*args, **kwargs)
21 | self.__logger = logger
22 |
23 | async def get(self, request):
24 | res = naasauth.connect(TOKEN).user.me()
25 | return json(res)
26 |
--------------------------------------------------------------------------------
/binder/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM jupyternaas/singleuser:latest
2 |
3 | ARG NB_USER
4 | ARG NB_UID
5 | ENV USER ${NB_USER}
6 | ENV HOME /home/${NB_USER}
7 | ENV NAAS_INSTALL_SUPP 'yes'
8 | ENV JUPYTER_ENABLE_LAB 'yes'
9 | ENV NAAS_INSTALL_BINDER 'yes'
10 |
11 | USER root
12 | WORKDIR ${HOME}
13 |
14 | # As of 2020-12-31, force binder to use jupyter-server instead of jupyter-notebook
15 | RUN cd $(dirname $(which jupyter-notebook)) \
16 | && rm jupyter-notebook \
17 | && ln -s jupyter-server jupyter-notebook
18 |
19 | # Add the entire source tree
20 | COPY . /home/$NB_USER/naas
21 | RUN chown -R $NB_UID .
22 | RUN rmdir /home/$NB_USER/work
23 |
24 | RUN cd /home/$NB_USER/naas && pip install --no-cache-dir -e '.[dev]'
25 |
26 | RUN mkdir /etc/naas
27 | COPY scripts /etc/naas/scripts
28 | COPY custom /etc/naas/custom
29 | RUN /etc/naas/scripts/install_supp
30 | RUN /etc/naas/scripts/customize
31 |
--------------------------------------------------------------------------------
/.docker/dev/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | # Git clone naas drivers if it does not exist.
4 | [ ! -d "/home/ftp/drivers/.git" ] && (rm -rf /home/ftp/drivers/ || true) && git clone https://github.com/jupyter-naas/drivers.git "/home/ftp/drivers"
5 |
6 | # Git clone awesome-notebooks if it does not exist.
7 | [ ! -d "/home/ftp/awesome-notebooks/.git" ] && git clone https://github.com/jupyter-naas/awesome-notebooks.git "/home/ftp/awesome-notebooks"
8 |
9 | # Install naas dependencies.
10 | pip install -e '/home/ftp/naas[dev]'
11 |
12 | # Install naas drivers dependencies in background.
13 | pip install -e '/home/ftp/drivers[dev]' &
14 |
15 | cd '/home/ftp/naas/extensions/naasai' && jlpm install && jlpm run build
16 |
17 | pip install -ve '/home/ftp/naas/extensions/naasai'
18 |
19 | jupyter labextension develop --overwrite '/home/ftp/naas/extensions/naasai'
20 |
21 | # Start jupyterlab.
22 | tini -g -- "start-notebook.sh"
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: "3.9"
2 |
3 | services:
4 | naas:
5 | build:
6 | context: .
7 | dockerfile: .docker/dev/Dockerfile.dev
8 | environment:
9 | "ALLOWED_IFRAME": ""
10 | JUPYTER_TOKEN: 'naas'
11 | JUPYTERHUB_URL: 'http://localhost:8888'
12 | PROD_JUPYTERHUB_API_TOKEN: ${PROD_JUPYTERHUB_API_TOKEN}
13 | ports:
14 | - 8888:8888
15 | - 5000:5000
16 | volumes:
17 | - ~/.ssh:/home/ftp/.ssh
18 | - .:/home/ftp/naas
19 | - ../drivers:/home/ftp/drivers
20 | - ../awesome-notebooks:/home/ftp/awesome-notebooks
21 | - ../drivers/naas_drivers:/opt/conda/lib/python3.8/site-packages/naas_drivers
22 | - ./file_sharing:/home/ftp/file_sharing
23 | - .docker/.cache/pip:/home/jovyan/.cache/pip
24 | - .docker/dev/docker-entrypoint.sh:/docker-entrypoint.sh
25 | - ./custom/overrides.json:/opt/conda/share/jupyter/lab/settings/overrides.json
26 |
--------------------------------------------------------------------------------
/.github/workflows/pull_request.yml:
--------------------------------------------------------------------------------
1 | name: Pull request validation
2 |
3 | on:
4 | pull_request:
5 | types: ['opened', 'edited', 'reopened', 'synchronize']
6 |
7 | jobs:
8 | lint:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v1
12 | - name: Set up Python
13 | uses: actions/setup-python@v1
14 | with:
15 | python-version: '3.8'
16 | - uses: actions/cache@v2
17 | with:
18 | path: ~/.cache/pip
19 | key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
20 | restore-keys: |
21 | ${{ runner.os }}-pip-
22 | - name: Install dependencies
23 | run: |
24 | python3 -m pip install -U pip
25 | pip3 install -e '.[fulldev]'
26 | - name: Run linters
27 | run: |
28 | black --check naas
29 | flake8 naas
30 | - name: Run tests
31 | run: pytest tests/
32 | - uses: amannn/action-semantic-pull-request@v3.1.0
33 | env:
34 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
35 |
--------------------------------------------------------------------------------
/tests/test_notebooks.py:
--------------------------------------------------------------------------------
1 | from naas.ntypes import t_add, t_notebook
2 | from naas.runner.logger import Logger
3 | from naas.runner.notebooks import Notebooks
4 | import getpass
5 | import pytest # noqa: F401
6 | import uuid
7 | import os
8 | from shutil import copy2
9 |
10 | user_folder_name = "test_user_folder"
11 | user = getpass.getuser()
12 |
13 |
14 | async def test_notebook(test_runner, tmp_path):
15 | test_notebook = "tests/demo/demo_scheduler.ipynb"
16 | cur_path = os.path.join(os.getcwd(), test_notebook)
17 | new_path = os.path.join(tmp_path, test_notebook)
18 | os.makedirs(os.path.dirname(new_path))
19 | copy2(cur_path, new_path)
20 | logger = Logger()
21 | notebook = Notebooks(logger)
22 | job = {
23 | "type": t_notebook,
24 | "path": new_path,
25 | "params": {},
26 | "value": "any",
27 | "status": t_add,
28 | }
29 | uid = str(uuid.uuid4())
30 | res = await notebook.exec(uid, job)
31 | assert res is not None
32 | assert res.get("cells") is not None
33 | # TODO add more test
34 |
--------------------------------------------------------------------------------
/custom/set_workspace.json:
--------------------------------------------------------------------------------
1 | {
2 | "data": {
3 | "layout-restorer:data": {
4 | "main": {
5 | "dock": {
6 | "type": "tab-area",
7 | "currentIndex": 0,
8 | "widgets": ["notebook:{NB_NAME}.ipynb"]
9 | },
10 | "mode": "multiple-document",
11 | "current": "notebook:{NB_NAME}.ipynb"
12 | },
13 | "left": {
14 | "collapsed": false,
15 | "current": "@jupyterlab/toc:plugin",
16 | "widgets": [
17 | "filebrowser",
18 | "running-sessions",
19 | "git-sessions",
20 | "command-palette",
21 | "jp-property-inspector",
22 | "tab-manager",
23 | "@jupyterlab/toc:plugin",
24 | "extensionmanager.main-view"
25 | ]
26 | },
27 | "right": { "collapsed": true, "widgets": [] }
28 | },
29 | "notebook:{NB_NAME}.ipynb": {
30 | "data": { "path": "{NB_NAME}.ipynb", "factory": "Notebook" }
31 | }
32 | },
33 | "metadata": { "id": "/lab" }
34 | }
35 |
--------------------------------------------------------------------------------
/custom/set_workspace_browser.json:
--------------------------------------------------------------------------------
1 | {
2 | "data": {
3 | "layout-restorer:data": {
4 | "main": {
5 | "dock": {
6 | "type": "tab-area",
7 | "currentIndex": 0,
8 | "widgets": ["notebook:{NB_NAME}.ipynb"]
9 | },
10 | "mode": "multiple-document",
11 | "current": "notebook:{NB_NAME}.ipynb"
12 | },
13 | "left": {
14 | "collapsed": false,
15 | "current": "filebrowser",
16 | "widgets": [
17 | "filebrowser",
18 | "running-sessions",
19 | "git-sessions",
20 | "command-palette",
21 | "jp-property-inspector",
22 | "tab-manager",
23 | "@jupyterlab/toc:plugin",
24 | "extensionmanager.main-view"
25 | ]
26 | },
27 | "right": { "collapsed": true, "widgets": [] }
28 | },
29 | "notebook:{NB_NAME}.ipynb": {
30 | "data": { "path": "{NB_NAME}.ipynb", "factory": "Notebook" }
31 | }
32 | },
33 | "metadata": { "id": "/lab" }
34 | }
35 |
--------------------------------------------------------------------------------
/naas/runner/controllers/version.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | import json
3 | import requests
4 | from naas.runner.env_var import n_env
5 |
6 |
7 | class VersionController(HTTPMethodView):
8 | __logger = None
9 |
10 | def __init__(self, logger, *args, **kwargs):
11 | super(VersionController, self).__init__(*args, **kwargs)
12 | self.__logger = logger
13 |
14 | class UpdateController(HTTPMethodView):
15 | __logger = None
16 |
17 | def __init__(self, logger, *args, **kwargs):
18 | super(VersionController.UpdateController, self).__init__(*args, **kwargs)
19 | self.__logger = logger
20 |
21 | async def get(self, request):
22 | username = n_env.user
23 | api_url = f"{n_env.hub_base}/hub/api"
24 | r = requests.delete(
25 | f"{api_url}/users/{username}/server",
26 | headers={
27 | "Authorization": f"token {n_env.token}",
28 | },
29 | )
30 | r.raise_for_status()
31 | return json({"update": "ok"})
32 |
--------------------------------------------------------------------------------
/custom/overrides.json:
--------------------------------------------------------------------------------
1 | {
2 | "@jupyterlab/notebook-extension:tracker": {
3 | "recordTiming": true,
4 | "kernelShutdown": true,
5 | "codeCellConfig": {
6 | "lineNumbers": true
7 | }
8 | },
9 | "@jupyterlab/extensionmanager-extension:plugin": {
10 | "disclaimed": true,
11 | "enabled": true
12 | },
13 | "@jupyterlab/filebrowser-extension:browser": {
14 | "navigateToCurrentDirectory": true
15 | },
16 | "@jupyterlab/application-extension:sidebar": {
17 | "overrides": {
18 | "jp-property-inspector": "left",
19 | "jp-debugger-sidebar": "left"
20 | }
21 | },
22 | "@jupyterlab/apputils-extension:palette": {
23 | "modal": false
24 | },
25 | "@jupyterlab/terminal-extension:plugin": {
26 | "shutdownOnClose": true
27 | },
28 | "@krassowski/jupyterlab-lsp:plugin": {
29 | "language_servers": {
30 | "pyls": {
31 | "serverSettings": {
32 | "pyls.plugins.pydocstyle.enabled": false,
33 | "pyls.plugins.pyflakes.enabled": false,
34 | "pyls.plugins.flake8.enabled": true
35 | }
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/naas/runner/controllers/scheduler.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | from naas.ntypes import t_scheduler, t_send
3 | from sanic.response import json
4 | import uuid
5 |
6 | endpoint = "schedulers"
7 |
8 |
9 | class SchedulerController(HTTPMethodView):
10 | __scheduler = None
11 |
12 | def __init__(self, scheduler, logger, *args, **kwargs):
13 | super(SchedulerController, self).__init__(*args, **kwargs)
14 | self.__logger = logger
15 | self.__scheduler = scheduler
16 |
17 | async def _get(self, request, mode):
18 | uid = str(uuid.uuid4())
19 | if mode == "pause":
20 | self.__scheduler.pause()
21 | elif mode == "resume":
22 | self.__scheduler.resume()
23 | self.__logger.info(
24 | {"id": uid, "type": t_scheduler, "status": t_send, "filepath": endpoint}
25 | )
26 | return json({"status": self.__scheduler.status()})
27 |
28 | async def get(self, request, mode):
29 | return await self._get(request, mode)
30 |
31 | async def post(self, request, mode):
32 | return await self._get(request, mode)
33 |
--------------------------------------------------------------------------------
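Note on controllers/scheduler.py above: GET and POST both route through _get, so pausing or resuming the scheduler is a plain HTTP call. A minimal client sketch; the /scheduler/<mode> URL shape is an assumption based on the endpoint name and the mode parameter, so check runner.py for the actual route:

    import requests

    BASE = "http://localhost:5000"  # assumed local runner address

    # Assumed route shape: the view receives `mode` in the URL.
    print(requests.get(f"{BASE}/scheduler/pause").json())   # {"status": ...}
    print(requests.get(f"{BASE}/scheduler/resume").json())

--------------------------------------------------------------------------------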
/extensions/naasai/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | extends: [
3 | 'eslint:recommended',
4 | 'plugin:@typescript-eslint/eslint-recommended',
5 | 'plugin:@typescript-eslint/recommended',
6 | 'plugin:prettier/recommended'
7 | ],
8 | parser: '@typescript-eslint/parser',
9 | parserOptions: {
10 | project: 'tsconfig.json',
11 | sourceType: 'module'
12 | },
13 | plugins: ['@typescript-eslint'],
14 | rules: {
15 | '@typescript-eslint/naming-convention': [
16 | 'error',
17 | {
18 | 'selector': 'interface',
19 | 'format': ['PascalCase'],
20 | 'custom': {
21 | 'regex': '^I[A-Z]',
22 | 'match': true
23 | }
24 | }
25 | ],
26 | '@typescript-eslint/no-unused-vars': ['warn', { args: 'none' }],
27 | '@typescript-eslint/no-explicit-any': 'off',
28 | '@typescript-eslint/no-namespace': 'off',
29 | '@typescript-eslint/no-use-before-define': 'off',
30 | '@typescript-eslint/quotes': [
31 | 'error',
32 | 'single',
33 | { avoidEscape: true, allowTemplateLiterals: false }
34 | ],
35 | curly: ['error', 'all'],
36 | eqeqeq: 'error',
37 | 'prefer-arrow-callback': 'error'
38 | }
39 | };
40 |
--------------------------------------------------------------------------------
/tests/demo/demo_res_json.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 32,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "data": {
10 | "application/json": {
11 | "foo": "bar"
12 | },
13 | "text/plain": [
14 | ""
15 | ]
16 | },
17 | "metadata": {
18 | "application/json": {
19 | "expanded": false,
20 | "naas_api": true,
21 | "root": "root"
22 | }
23 | },
24 | "output_type": "display_data"
25 | }
26 | ],
27 | "source": [
28 | "from IPython.core.display import display, HTML, JSON, Image, SVG, Markdown\n",
29 | "display(JSON({'foo': 'bar'}, metadata={'naas_api': True}))"
30 | ]
31 | }
32 | ],
33 | "metadata": {
34 | "kernelspec": {
35 | "display_name": "Python 3",
36 | "language": "python",
37 | "name": "python3"
38 | },
39 | "language_info": {
40 | "codemirror_mode": {
41 | "name": "ipython",
42 | "version": 3
43 | },
44 | "file_extension": ".py",
45 | "mimetype": "text/x-python",
46 | "name": "python",
47 | "nbconvert_exporter": "python",
48 | "pygments_lexer": "ipython3",
49 | "version": "3.9.1-final"
50 | }
51 | },
52 | "nbformat": 4,
53 | "nbformat_minor": 4
54 | }
--------------------------------------------------------------------------------
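Note on demo_res_json.ipynb above: the notebook marks an output as the API response by attaching naas_api metadata to the display call. The same pattern as a standalone snippet (how the runner consumes the flag is inferred from these demo notebooks, not confirmed here):

    from IPython.core.display import JSON, display

    # The naas_api flag in the display metadata is what these demo notebooks
    # use to tag the output that should be served as the response.
    display(JSON({"foo": "bar"}, metadata={"naas_api": True}))

--------------------------------------------------------------------------------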
/naas/runner/__main__.py:
--------------------------------------------------------------------------------
1 | from .runner import Runner
2 | import argparse
3 | import os
4 |
5 |
6 | def createProductionSymlink():
7 | # Create Production symlink.
8 | try:
9 | os.makedirs("/home/ftp/.naas/home/ftp", exist_ok=True)
10 | os.symlink("/home/ftp/.naas/home/ftp", "/home/ftp/⚡ → Production")
11 | except FileExistsError as e:
12 | print(e)
13 | pass
14 | except: # noqa: E722
15 | print("An error occured while creating production symlink.")
16 | pass
17 |
18 |
19 | if __name__ == "__main__":
20 | createProductionSymlink()
21 | parser = argparse.ArgumentParser()
22 | parser.add_argument("-p", "--port", default=5000, help="port of the server")
23 | parser.add_argument("--prod", action="store_true", help="remove debug logs")
24 | parser.add_argument(
25 | "-c", "--check", action="store_true", help="check if already running"
26 | )
27 | parser.add_argument("-k", "--kill", action="store_true", help="kill me")
28 | args = parser.parse_args()
29 | port = int(args.port) if args.port else None
30 | kill = True if args.kill else False
31 | debug = False if args.prod else True
32 | runner = Runner()
33 | if kill:
34 | runner.kill()
35 | else:
36 | runner.start(port=port, debug=debug)
37 |
--------------------------------------------------------------------------------
/naas/runner/controllers/env.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | from sanic.response import json
3 | from naas.ntypes import t_health
4 | from naas.runner.env_var import n_env
5 | import requests
6 |
7 |
8 | def get_latest_version():
9 | try:
10 | r = requests.get("https://pypi.python.org/pypi/naas/json")
11 | r.raise_for_status()
12 | response = r.json()
13 | version = (
14 | response["urls"][0]["filename"].replace("naas-", "").replace(".tar.gz", "")
15 | )
16 | return version
17 | except: # noqa: E722
18 | return ""
19 |
20 |
21 | class EnvController(HTTPMethodView):
22 | def __init__(self, *args, **kwargs):
23 | super(EnvController, self).__init__(*args, **kwargs)
24 |
25 | async def get(self, request):
26 |
27 | env = {
28 | "status": t_health,
29 | "version": n_env.version,
30 | "latest_version": get_latest_version(),
31 | "NAAS_BASE_PATH": n_env.path_naas_folder,
32 | "NOTIFICATIONS_API": n_env.notif_api,
33 | "JUPYTERHUB_USER": n_env.user,
34 | "JUPYTER_SERVER_ROOT": n_env.server_root,
35 | "JUPYTERHUB_URL": n_env.hub_base,
36 | "PUBLIC_PROXY_API": n_env.proxy_api,
37 | "TZ": n_env.tz,
38 | }
39 | return json(env)
40 |
--------------------------------------------------------------------------------
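Note on controllers/env.py above: get_latest_version derives the version from the first sdist filename returned by PyPI rather than from the metadata's version field. The string transformation in isolation (the filename value is illustrative):

    # e.g. "naas-2.6.3.tar.gz" -> "2.6.3", mirroring get_latest_version
    filename = "naas-2.6.3.tar.gz"
    version = filename.replace("naas-", "").replace(".tar.gz", "")
    assert version == "2.6.3"

--------------------------------------------------------------------------------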
/images/sentry.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/extensions/naasai/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2022, Maxime Jublou
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/naas/runner/controllers/timezone.py:
--------------------------------------------------------------------------------
1 | from naas.ntypes import t_tz, t_send, t_error, t_update
2 | from sanic.views import HTTPMethodView
3 | from naas.runner.env_var import n_env
4 | from sanic import response
5 | import uuid
6 | import pytz
7 |
8 |
9 | class TimezoneController(HTTPMethodView):
10 | __logger = None
11 |
12 | def __init__(self, logger, *args, **kwargs):
13 | super(TimezoneController, self).__init__(*args, **kwargs)
14 | self.__logger = logger
15 |
16 | def post(self, request):
17 | data = request.json
18 | tz = data.get("tz")
19 | uid = str(uuid.uuid4())
20 | if tz and tz in pytz.all_timezones:
21 | n_env.tz = tz
22 | self.__logger.info(
23 | {
24 | "id": uid,
25 | "type": t_tz,
26 | "status": t_update,
27 | "filepath": t_tz,
28 | }
29 | )
30 | return response.json({"tz": str(n_env.tz)})
31 | else:
32 | self.__logger.info(
33 | {
34 | "id": uid,
35 | "type": t_tz,
36 | "status": t_error,
37 | "filepath": t_tz,
38 | }
39 | )
40 | return response.json({"error": "this timezone doesn't exist"})
41 |
42 | async def get(self, request):
43 | uid = str(uuid.uuid4())
44 | self.__logger.info(
45 | {
46 | "id": uid,
47 | "type": t_tz,
48 | "status": t_send,
49 | "filepath": t_tz,
50 | }
51 | )
52 | return response.json({"tz": str(n_env.tz)})
53 |
--------------------------------------------------------------------------------
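Note on controllers/timezone.py above: the POST handler expects a JSON body with a tz key and validates it against pytz.all_timezones. A minimal client sketch; the /timezone path is an assumption from the controller name, so check runner.py for the real route:

    import requests

    BASE = "http://localhost:5000"  # assumed local runner address

    # Valid IANA name -> {"tz": "Europe/Paris"}; unknown name -> {"error": ...}
    print(requests.post(f"{BASE}/timezone", json={"tz": "Europe/Paris"}).json())
    print(requests.get(f"{BASE}/timezone").json())  # current timezone

--------------------------------------------------------------------------------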
/naas/runner/controllers/credits.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | from naas_drivers import naascredits
3 | from sanic.response import json
4 | import os
5 |
6 | TOKEN = os.environ.get("PROD_JUPYTERHUB_API_TOKEN", None)
7 |
8 |
9 | class CreditsController(HTTPMethodView):
10 | __logger = None
11 |
12 | def __init__(self, logger, *args, **kwargs):
13 | super(CreditsController, self).__init__(*args, **kwargs)
14 | self.__logger = logger
15 |
16 | class PlanController(HTTPMethodView):
17 | __logger = None
18 |
19 | def __init__(self, logger, *args, **kwargs):
20 | super(CreditsController.PlanController, self).__init__(*args, **kwargs)
21 | self.__logger = logger
22 |
23 | async def get(self, request):
24 | res = naascredits.connect(TOKEN).get_plan()
25 | return json(res)
26 |
27 | class TransactionController(HTTPMethodView):
28 | __logger = None
29 |
30 | def __init__(self, logger, *args, **kwargs):
31 | super(CreditsController.TransactionController, self).__init__(
32 | *args, **kwargs
33 | )
34 | self.__logger = logger
35 |
36 | async def get(self, request):
37 | res = naascredits.connect(TOKEN).transactions.get(page_size=1000)
38 | return json(res)
39 |
40 | class BalanceController(HTTPMethodView):
41 | __logger = None
42 |
43 | def __init__(self, logger, *args, **kwargs):
44 | super(CreditsController.BalanceController, self).__init__(*args, **kwargs)
45 | self.__logger = logger
46 |
47 | async def get(self, request):
48 | res = naascredits.connect(TOKEN).get_balance()
49 | return json(res)
50 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import pytest # noqa: F401
2 | import os
3 | import getpass
4 | import logging
5 | from naas.runner import Runner, n_env
6 |
7 | os.environ["SANIC_REGISTER"] = "False"
8 | user_folder_name = "pytest_tmp"
9 | path_srv_root = os.path.join(os.getcwd(), user_folder_name)
10 | n_env.server_root = str(path_srv_root)
11 |
12 |
13 | @pytest.fixture
14 | def runner(caplog, tmp_path):
15 | caplog.set_level(logging.INFO)
16 | user = getpass.getuser()
17 | path_srv = os.path.join(tmp_path, user_folder_name)
18 | n_env.user = user
19 | n_env.server_root = str(path_srv)
20 | n_env.scheduler = False
21 | n_env.hub_base = "http://localhost:5000"
22 | n_env.proxy_api = "http://localhost:5001"
23 | n_env.notif_api = "http://localhost:5002"
24 |
25 | app = Runner().init_app()
26 |
27 | yield app
28 | app.stop()
29 |
30 |
31 | @pytest.fixture
32 | def scheduler(caplog, tmp_path):
33 | caplog.set_level(logging.INFO)
34 | user = getpass.getuser()
35 | path_srv = os.path.join(tmp_path, user_folder_name)
36 | n_env.user = user
37 | n_env.server_root = str(path_srv)
38 | n_env.scheduler = True
39 | n_env.scheduler_interval = "1"
40 | n_env.scheduler_job_max = "3"
41 | n_env.hub_base = "http://localhost:5000"
42 | n_env.proxy_api = "http://localhost:5001"
43 | n_env.notif_api = "http://localhost:5002"
44 |
45 | app = Runner().init_app()
46 |
47 | yield app
48 | app.stop()
49 |
50 |
51 | @pytest.fixture
52 | def test_runner(loop, runner, sanic_client):
53 | return loop.run_until_complete(sanic_client(runner, timeout=None))
54 |
55 |
56 | @pytest.fixture
57 | def test_scheduler(loop, scheduler, sanic_client):
58 | return loop.run_until_complete(sanic_client(scheduler, timeout=None))
59 |
--------------------------------------------------------------------------------
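Note on conftest.py above: the test_runner fixture yields a Sanic test client bound to a throwaway server root, which is how tests such as test_notebooks.py talk to the runner. A minimal test sketch built on it; the /env route is assumed from EnvController and should be checked against the actual route table:

    import pytest  # noqa: F401

    async def test_env_endpoint(test_runner):
        # test_runner is the Sanic test client yielded by conftest.py;
        # the "/env" path is an assumption, not confirmed by this dump.
        response = await test_runner.get("/env")
        assert response.status == 200
        data = await response.json()
        assert "version" in data

--------------------------------------------------------------------------------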
/tests/demo/demo_res_md.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "output_type": "display_data",
10 | "data": {
11 | "text/plain": "",
12 | "text/markdown": "Response Set as Markdown, preview below: "
13 | },
14 | "metadata": {}
15 | },
16 | {
17 | "output_type": "display_data",
18 | "data": {
19 | "text/plain": "",
20 | "text/html": "HELLO world
\n\nthis is a markdown test
\n"
21 | },
22 | "metadata": {
23 | "text/html": {
24 | "naas_api": true,
25 | "naas_type": "markdown"
26 | }
27 | }
28 | }
29 | ],
30 | "source": [
31 | "import markdown2\n",
32 | "from IPython.core.display import display, HTML, Markdown\n",
33 | "md_text = \"\"\"# HELLO world\n",
34 | "\n",
35 | "this is a markdown test\n",
36 | "\"\"\"\n",
37 | "display(Markdown(\"Response Set as Markdown, preview below: \"))\n",
38 | "html = markdown2.markdown(md_text)\n",
39 | "display(HTML(html, metadata={\"naas_api\": True, \"naas_type\": \"markdown\"}))"
40 | ]
41 | }
42 | ],
43 | "metadata": {
44 | "kernelspec": {
45 | "display_name": "Python 3",
46 | "language": "python",
47 | "name": "python3"
48 | },
49 | "language_info": {
50 | "codemirror_mode": {
51 | "name": "ipython",
52 | "version": 3
53 | },
54 | "file_extension": ".py",
55 | "mimetype": "text/x-python",
56 | "name": "python",
57 | "nbconvert_exporter": "python",
58 | "pygments_lexer": "ipython3",
59 | "version": "3.9.1-final"
60 | }
61 | },
62 | "nbformat": 4,
63 | "nbformat_minor": 4
64 | }
--------------------------------------------------------------------------------
/naas/runner/controllers/logs.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | from sanic import response
3 | from naas.ntypes import t_log, t_send
4 | import uuid
5 | import json
6 |
7 | endpoint = "logs"
8 |
9 |
10 | class LogsController(HTTPMethodView):
11 | __logger = None
12 |
13 | def __init__(self, logger, *args, **kwargs):
14 | super(LogsController, self).__init__(*args, **kwargs)
15 | self.__logger = logger
16 |
17 | async def get(self, request):
18 | as_file = request.args.get("as_file", False)
19 | if as_file:
20 | return await response.file(
21 | self.__logger.get_file_path(), filename="logs.csv"
22 | )
23 | else:
24 | uid = str(uuid.uuid4())
25 | limit = int(request.args.get("limit", 0))
26 | skip = int(request.args.get("skip", 0))
27 | search = str(request.args.get("search", ""))
28 | sort = list(json.loads(request.args.get("sort", "[]")))
29 | filters = list(json.loads(request.args.get("filters", "[]")))
30 | technical_rows = bool(
31 | json.loads(request.args.get("technical_rows", "true"))
32 | )
33 | logs = self.__logger.list(
34 | uid, skip, limit, search, filters, sort, technical_rows
35 | )
36 | self.__logger.info(
37 | {
38 | "id": uid,
39 | "type": t_log,
40 | "status": t_send,
41 | "filepath": endpoint,
42 | "skip": skip,
43 | "limit": limit,
44 | "search": search,
45 | "filters": filters,
46 | "sort": sort,
47 | }
48 | )
49 | return response.json(logs)
50 |
--------------------------------------------------------------------------------
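Note on controllers/logs.py above: paging, search, and filter options arrive as query-string parameters, with sort and filters passed as JSON-encoded lists. A minimal client sketch; the /logs path is an assumption from the endpoint variable, and the sort-entry shape is illustrative only:

    import json
    import requests

    BASE = "http://localhost:5000"  # assumed local runner address

    params = {
        "limit": 20,
        "skip": 0,
        "search": "demo",
        "sort": json.dumps([{"field": "lastrun", "type": "desc"}]),  # shape assumed
        "filters": json.dumps([]),
    }
    print(requests.get(f"{BASE}/logs", params=params).json())

--------------------------------------------------------------------------------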
/tests/demo/demo_res_text.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 2,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "output_type": "display_data",
10 | "data": {
11 | "text/plain": "",
12 | "text/markdown": "Response Set as Text, preview below: "
13 | },
14 | "metadata": {}
15 | },
16 | {
17 | "output_type": "display_data",
18 | "data": {
19 | "text/plain": "",
20 | "text/html": "# HELLO world\n\nthis is a markdown test\n"
21 | },
22 | "metadata": {
23 | "text/html": {
24 | "naas_api": true,
25 | "naas_type": "markdown"
26 | }
27 | }
28 | }
29 | ],
30 | "source": [
31 | "import os\n",
32 | "from IPython.core.display import display, HTML, Markdown\n",
33 | "md_text = \"\"\"# HELLO world\n",
34 | "\n",
35 | "this is a markdown test\n",
36 | "\"\"\"\n",
37 | "display(Markdown(\"Response Set as Text, preview below: \"))\n",
38 | "display(HTML(md_text, metadata={\"naas_api\": True, \"naas_type\": \"markdown\"}))"
39 | ]
40 | },
41 | {
42 | "cell_type": "code",
43 | "execution_count": null,
44 | "metadata": {},
45 | "outputs": [],
46 | "source": []
47 | }
48 | ],
49 | "metadata": {
50 | "kernelspec": {
51 | "display_name": "Python 3",
52 | "language": "python",
53 | "name": "python3"
54 | },
55 | "language_info": {
56 | "codemirror_mode": {
57 | "name": "ipython",
58 | "version": 3
59 | },
60 | "file_extension": ".py",
61 | "mimetype": "text/x-python",
62 | "name": "python",
63 | "nbconvert_exporter": "python",
64 | "pygments_lexer": "ipython3",
65 | "version": "3.9.1-final"
66 | }
67 | },
68 | "nbformat": 4,
69 | "nbformat_minor": 4
70 | }
--------------------------------------------------------------------------------
/.docker/dev/Dockerfile.dev:
--------------------------------------------------------------------------------
1 | FROM jupyternaas/singleuser:2.12.0
2 |
3 | # Build-time metadata as defined at http://label-schema.org
4 | ARG BUILD_DATE
5 | ARG VCS_REF
6 |
7 | ENV NAAS_INSTALL_SUPP 'no'
8 | ENV JUPYTER_ENABLE_LAB 'yes'
9 | ENV NB_USER=ftp
10 |
11 | USER root
12 | LABEL org.label-schema.build-date=$BUILD_DATE \
13 | org.label-schema.name="Naas machine" \
14 | org.label-schema.description="test jupyter machine with naas" \
15 | org.label-schema.url="https://naas.ai" \
16 | org.label-schema.vcs-ref=$VCS_REF \
17 | org.label-schema.vcs-url="https://github.com/jupyter-naas/naas" \
18 | org.label-schema.vendor="Cashstory, Inc." \
19 | org.label-schema.schema-version="1.0"
20 |
21 | RUN mkdir /home/$NB_USER \
22 | && cd $(dirname $(which jupyter-notebook)) \
23 | && rm jupyter-notebook \
24 | && ln -s jupyter-server jupyter-notebook
25 |
26 | COPY setup.cfg /home/$NB_USER/naas/setup.cfg
27 | COPY setup.py /home/$NB_USER/naas/setup.py
28 | COPY README.md /home/$NB_USER/naas/README.md
29 | RUN cd /home/$NB_USER/naas && pip install --no-cache-dir -e '.[dev]'
30 |
31 | COPY . /home/$NB_USER/naas
32 | RUN fix-permissions /home/$NB_USER
33 | ENV PYTHONPATH=/home/$NB_USER/naas:/home/$NB_USER/drivers
34 |
35 | RUN mkdir /etc/naas
36 | COPY scripts /etc/naas/scripts
37 | COPY custom /etc/naas/custom
38 | RUN /etc/naas/scripts/install_supp
39 | RUN /etc/naas/scripts/customize
40 |
41 | RUN wget https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-linux-amd64.zip\
42 | && unzip ngrok-stable-linux-amd64.zip \
43 | && mkdir -p /opt/conda/lib/python3.8/site-packages/pyngrok/bin/ \
44 | && mv ngrok /opt/conda/lib/python3.8/site-packages/pyngrok/bin/ \
45 | && rm ngrok-stable-linux-amd64.zip
46 |
47 | ENV PATH="/home/ftp/.local/bin:${PATH}"
48 |
49 | ADD .docker/dev/docker-entrypoint.sh /
50 | ENTRYPOINT ["/docker-entrypoint.sh"]
--------------------------------------------------------------------------------
/tests/demo/demo_res_html.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 3,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "output_type": "display_data",
10 | "data": {
11 | "text/plain": "",
12 | "text/markdown": "Response Set as HTML, preview below: "
13 | },
14 | "metadata": {}
15 | },
16 | {
17 | "output_type": "display_data",
18 | "data": {
19 | "text/plain": "",
20 | "text/html": "Hello world
"
21 | },
22 | "metadata": {
23 | "text/html": {
24 | "naas_api": true
25 | }
26 | }
27 | }
28 | ],
29 | "source": [
30 | "from IPython.core.display import display, HTML, Markdown\n",
31 | "html_text = \"Hello world
\"\n",
32 | "display(Markdown(\"Response Set as HTML, preview below: \"))\n",
33 | "display(HTML(html_text, metadata={\"naas_api\": True}))"
34 | ]
35 | },
36 | {
37 | "cell_type": "code",
38 | "execution_count": null,
39 | "metadata": {},
40 | "outputs": [],
41 | "source": [
42 | "import naas\n",
43 | "naas.webhook.add('./demo_res_html.ipynb')"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": null,
49 | "metadata": {},
50 | "outputs": [],
51 | "source": []
52 | }
53 | ],
54 | "metadata": {
55 | "kernelspec": {
56 | "display_name": "Python 3",
57 | "language": "python",
58 | "name": "python3"
59 | },
60 | "language_info": {
61 | "codemirror_mode": {
62 | "name": "ipython",
63 | "version": 3
64 | },
65 | "file_extension": ".py",
66 | "mimetype": "text/x-python",
67 | "name": "python",
68 | "nbconvert_exporter": "python",
69 | "pygments_lexer": "ipython3",
70 | "version": "3.9.1-final"
71 | }
72 | },
73 | "nbformat": 4,
74 | "nbformat_minor": 4
75 | }
--------------------------------------------------------------------------------
/dev/postgres/init_jupyter.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 |
4 | set -e
5 | set -u
6 |
7 | function create_user_and_database() {
8 | local database=$1
9 | echo " Creating user and database '$database'"
10 | psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL
11 | CREATE USER $database;
12 | CREATE DATABASE $database;
13 | GRANT ALL PRIVILEGES ON DATABASE $database TO $database;
14 | EOSQL
15 | psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL
16 | CREATE SEQUENCE IF NOT EXISTS users_info_id_seq;
17 |
18 | -- Table Definition
19 | CREATE TABLE "public"."users_info" (
20 | "id" int4 NOT NULL DEFAULT nextval('users_info_id_seq'::regclass),
21 | "username" varchar NOT NULL,
22 | "password" bytea NOT NULL,
23 | "is_authorized" bool,
24 | "email" varchar,
25 | PRIMARY KEY ("id")
26 | );
27 |
28 | INSERT INTO "public"."users_info" ("id", "username", "password", "is_authorized", "email") VALUES
29 | (2, 'bob@cashstory.com', '\x24326224313224416a74734e53525a4a6e6e2f2e49555250494244454f6d4e4d3664657078314661347166704b734447495238583444684e5549372e', 't', NULL);
30 | EOSQL
31 | psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL
32 | CREATE SEQUENCE IF NOT EXISTS users_id_seq;
33 |
34 | -- Table Definition
35 | CREATE TABLE "public"."users" (
36 | "id" int4 NOT NULL DEFAULT nextval('users_id_seq'::regclass),
37 | "name" varchar(255),
38 | "admin" bool,
39 | "created" timestamp,
40 | "last_activity" timestamp,
41 | "cookie_id" varchar(255) NOT NULL,
42 | "state" text,
43 | "encrypted_auth_state" bytea,
44 | PRIMARY KEY ("id")
45 | );
46 |
47 | INSERT INTO "public"."users" ("id", "name", "admin", "created", "last_activity", "cookie_id", "state", "encrypted_auth_state") VALUES
48 | (2, 'bob@cashstory.com', 't', '2020-04-18 09:02:47.941605', '2021-04-15 14:04:55.417564', '2c74e5d1668347b2b016229bf8c23463', '{}', NULL);
49 |
50 | EOSQL
51 | }
52 |
53 | create_user_and_database $JUPYTER_DB
54 | echo "Jupyterdb created"
55 |
--------------------------------------------------------------------------------
/tests/demo/demo_res_svg.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "output_type": "display_data",
10 | "data": {
11 | "text/plain": "",
12 | "text/markdown": "Response Set as SVG, preview below: "
13 | },
14 | "metadata": {}
15 | },
16 | {
17 | "output_type": "display_data",
18 | "data": {
19 | "text/plain": "",
20 | "image/svg+xml": ""
21 | },
22 | "metadata": {
23 | "image/svg+xml": {
24 | "naas_api": true
25 | }
26 | }
27 | }
28 | ],
29 | "source": [
30 | "from IPython.core.display import display, SVG, Markdown\n",
31 | "import requests\n",
32 | "\n",
33 | "svg_text = \"\"\"\n",
36 | "\"\"\"\n",
37 | "display(Markdown(\"Response Set as SVG, preview below: \"))\n",
38 | "display(SVG(svg_text, metadata={\"naas_api\": True}))"
39 | ]
40 | },
41 | {
42 | "cell_type": "code",
43 | "execution_count": null,
44 | "metadata": {},
45 | "outputs": [],
46 | "source": []
47 | }
48 | ],
49 | "metadata": {
50 | "kernelspec": {
51 | "display_name": "Python 3",
52 | "language": "python",
53 | "name": "python3"
54 | },
55 | "language_info": {
56 | "codemirror_mode": {
57 | "name": "ipython",
58 | "version": 3
59 | },
60 | "file_extension": ".py",
61 | "mimetype": "text/x-python",
62 | "name": "python",
63 | "nbconvert_exporter": "python",
64 | "pygments_lexer": "ipython3",
65 | "version": "3.9.1-final"
66 | }
67 | },
68 | "nbformat": 4,
69 | "nbformat_minor": 4
70 | }
--------------------------------------------------------------------------------
/extensions/naasai/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: Build
2 |
3 | on:
4 | push:
5 | branches: main
6 | pull_request:
7 | branches: '*'
8 |
9 | jobs:
10 | build:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - name: Checkout
14 | uses: actions/checkout@v2
15 |
16 | - name: Base Setup
17 | uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
18 |
19 | - name: Install dependencies
20 | run: python -m pip install -U jupyterlab~=3.1 check-manifest
21 |
22 | - name: Build the extension
23 | run: |
24 | set -eux
25 | jlpm
26 | jlpm run eslint:check
27 | python -m pip install .
28 |
29 | jupyter labextension list 2>&1 | grep -ie "naasai.*OK"
30 | python -m jupyterlab.browser_check
31 |
32 | check-manifest -v
33 |
34 | pip install build
35 | python -m build --sdist
36 | cp dist/*.tar.gz myextension.tar.gz
37 | pip uninstall -y "naasai" jupyterlab
38 | rm -rf myextension
39 |
40 | - uses: actions/upload-artifact@v2
41 | with:
42 | name: myextension-sdist
43 | path: myextension.tar.gz
44 |
45 | test_isolated:
46 | needs: build
47 | runs-on: ubuntu-latest
48 |
49 | steps:
50 | - name: Checkout
51 | uses: actions/checkout@v2
52 | - name: Install Python
53 | uses: actions/setup-python@v2
54 | with:
55 | python-version: '3.8'
56 | architecture: 'x64'
57 | - uses: actions/download-artifact@v2
58 | with:
59 | name: myextension-sdist
60 | - name: Install and Test
61 | run: |
62 | set -eux
63 | # Remove NodeJS, twice to take care of system and locally installed node versions.
64 | sudo rm -rf $(which node)
65 | sudo rm -rf $(which node)
66 | pip install myextension.tar.gz
67 | pip install jupyterlab
68 | jupyter labextension list 2>&1 | grep -ie "naasai.*OK"
69 | python -m jupyterlab.browser_check --no-chrome-test
70 |
--------------------------------------------------------------------------------
/extensions/naasai/.github/workflows/check-release.yml:
--------------------------------------------------------------------------------
1 | name: Check Release
2 | on:
3 | push:
4 | branches:
5 | - main
6 | pull_request:
7 | branches:
8 | - main
9 |
10 | permissions:
11 | contents: write
12 |
13 | jobs:
14 | check_release:
15 | runs-on: ubuntu-latest
16 | steps:
17 | - name: Checkout
18 | uses: actions/checkout@v2
19 | - name: Install Python
20 | uses: actions/setup-python@v2
21 | with:
22 | python-version: 3.9
23 | architecture: 'x64'
24 | - name: Install node
25 | uses: actions/setup-node@v2
26 | with:
27 | node-version: '14.x'
28 |
29 |
30 | - name: Get pip cache dir
31 | id: pip-cache
32 | run: |
33 | echo "::set-output name=dir::$(pip cache dir)"
34 | - name: Cache pip
35 | uses: actions/cache@v1
36 | with:
37 | path: ${{ steps.pip-cache.outputs.dir }}
38 | key: ${{ runner.os }}-pip-${{ hashFiles('package.json') }}
39 | restore-keys: |
40 | ${{ runner.os }}-pip-
41 | - name: Cache checked links
42 | uses: actions/cache@v2
43 | with:
44 | path: ~/.cache/pytest-link-check
45 | key: ${{ runner.os }}-linkcheck-${{ hashFiles('**/*.md') }}-md-links
46 | restore-keys: |
47 | ${{ runner.os }}-linkcheck-
48 | - name: Upgrade packaging dependencies
49 | run: |
50 | pip install --upgrade pip setuptools wheel jupyter-packaging~=0.10 --user
51 | - name: Install Dependencies
52 | run: |
53 | pip install .
54 | - name: Check Release
55 | uses: jupyter-server/jupyter_releaser/.github/actions/check-release@v1
56 | with:
57 | token: ${{ secrets.GITHUB_TOKEN }}
58 |
59 | - name: Upload Distributions
60 | uses: actions/upload-artifact@v2
61 | with:
62 | name: naasai-releaser-dist-${{ github.run_number }}
63 | path: .jupyter_releaser_checkout/dist
64 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM jupyternaas/singleuser:2.12.0 as extension_builder
2 |
3 | USER root
4 |
5 | COPY ./extensions /tmp/extensions
6 | RUN cd /tmp/extensions/naasai \
7 | && jlpm install \
8 | && jlpm build \
9 | && pip install -ve . \
10 | && mv naasai/labextension /opt/conda/share/jupyter/labextensions/naasai
11 |
12 | FROM jupyternaas/singleuser:2.12.0
13 |
14 | # Build-time metadata as defined at http://label-schema.org
15 | ARG BUILD_DATE
16 | ARG VCS_REF
17 | ENV NAAS_VERSION 2.6.3
18 | ENV JUPYTER_ENABLE_LAB 'yes'
19 | ENV NB_UMASK=022
20 | ENV NB_USER=ftp
21 | ENV NB_UID=21
22 | ENV NB_GID=21
23 | ENV NB_GROUP=21
24 |
25 | USER root
26 | LABEL org.label-schema.build-date=$BUILD_DATE \
27 | org.label-schema.name="Naas machine" \
28 | org.label-schema.description="jupyter machine with naas" \
29 | org.label-schema.url="https://naas.ai" \
30 | org.label-schema.vcs-ref=$VCS_REF \
31 | org.label-schema.vcs-url="https://github.com/jupyter-naas/naas" \
32 | org.label-schema.vendor="Cashstory, Inc." \
33 | org.label-schema.version=$NAAS_VERSION \
34 | org.label-schema.schema-version="1.0"
35 |
36 | RUN mkdir /home/$NB_USER && \
37 | fix-permissions /home/$NB_USER \
38 | && cd $(dirname $(which jupyter-notebook)) \
39 | && rm jupyter-notebook \
40 | && ln -s jupyter-server jupyter-notebook
41 |
42 | RUN python3 -m pip install --no-cache-dir --upgrade pip && python3 -m pip --version
43 | RUN python3 -m pip install --no-cache-dir --upgrade --use-deprecated=legacy-resolver naas[full]==$NAAS_VERSION
44 |
45 | RUN mkdir /etc/naas
46 | COPY scripts /etc/naas/scripts
47 | COPY custom /etc/naas/custom
48 | RUN /etc/naas/scripts/install_supp
49 | RUN /etc/naas/scripts/customize
50 |
51 | RUN apt update && apt install --yes graphviz && rm -rf /var/lib/apt/lists/*
52 |
53 | COPY --from=extension_builder /opt/conda/share/jupyter/labextensions/naasai /opt/conda/share/jupyter/labextensions/naasai
54 |
55 | RUN fix-permissions /opt/conda/share/jupyter/lab/extensions
56 |
57 | ENV PATH="/home/ftp/.local/bin:${PATH}"
58 |
--------------------------------------------------------------------------------
/extensions/naasai/.gitignore:
--------------------------------------------------------------------------------
1 | *.bundle.*
2 | lib/
3 | node_modules/
4 | *.egg-info/
5 | .ipynb_checkpoints
6 | *.tsbuildinfo
7 | naasai/labextension
8 |
9 | # Created by https://www.gitignore.io/api/python
10 | # Edit at https://www.gitignore.io/?templates=python
11 |
12 | ### Python ###
13 | # Byte-compiled / optimized / DLL files
14 | __pycache__/
15 | *.py[cod]
16 | *$py.class
17 |
18 | # C extensions
19 | *.so
20 |
21 | # Distribution / packaging
22 | .Python
23 | build/
24 | develop-eggs/
25 | dist/
26 | downloads/
27 | eggs/
28 | .eggs/
29 | lib/
30 | lib64/
31 | parts/
32 | sdist/
33 | var/
34 | wheels/
35 | pip-wheel-metadata/
36 | share/python-wheels/
37 | .installed.cfg
38 | *.egg
39 | MANIFEST
40 |
41 | # PyInstaller
42 | # Usually these files are written by a python script from a template
43 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
44 | *.manifest
45 | *.spec
46 |
47 | # Installer logs
48 | pip-log.txt
49 | pip-delete-this-directory.txt
50 |
51 | # Unit test / coverage reports
52 | htmlcov/
53 | .tox/
54 | .nox/
55 | .coverage
56 | .coverage.*
57 | .cache
58 | nosetests.xml
59 | coverage.xml
60 | *.cover
61 | .hypothesis/
62 | .pytest_cache/
63 |
64 | # Translations
65 | *.mo
66 | *.pot
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # pyenv
78 | .python-version
79 |
80 | # celery beat schedule file
81 | celerybeat-schedule
82 |
83 | # SageMath parsed files
84 | *.sage.py
85 |
86 | # Spyder project settings
87 | .spyderproject
88 | .spyproject
89 |
90 | # Rope project settings
91 | .ropeproject
92 |
93 | # Mr Developer
94 | .mr.developer.cfg
95 | .project
96 | .pydevproject
97 |
98 | # mkdocs documentation
99 | /site
100 |
101 | # mypy
102 | .mypy_cache/
103 | .dmypy.json
104 | dmypy.json
105 |
106 | # Pyre type checker
107 | .pyre/
108 |
109 | # End of https://www.gitignore.io/api/python
110 |
111 | # OSX files
112 | .DS_Store
113 |
114 | .virtual_documents
--------------------------------------------------------------------------------
/naas/runner/controllers/secret.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | from sanic import response
3 | from naas.ntypes import t_secret, t_error, t_send
4 | import uuid
5 |
6 |
7 | class SecretController(HTTPMethodView):
8 | __secrets = None
9 | __logger = None
10 | __min_keys = sorted(list(["name", "secret", "status"]))
11 |
12 | def __init__(self, logger, secrets, *args, **kwargs):
13 | super(SecretController, self).__init__(*args, **kwargs)
14 | self.__secrets = secrets
15 | self.__logger = logger
16 |
17 | async def get(self, request):
18 | uid = str(uuid.uuid4())
19 | status = await self.__secrets.list(uid)
20 | self.__logger.info(
21 | {"id": uid, "type": t_secret, "status": t_send, "filepath": t_secret}
22 | )
23 | return response.json(status)
24 |
25 | async def post(self, request):
26 | uid = str(uuid.uuid4())
27 | data = request.json
28 | keys = sorted(list(data.keys()))
29 | if not data or self.__min_keys != keys:
30 | self.__logger.info(
31 | {
32 | "id": uid,
33 | "type": t_secret,
34 | "status": t_error,
35 | "filepath": t_secret,
36 | "error": "missing keys",
37 | "tb": data,
38 | }
39 | )
40 | return response.json(
41 | {"id": uid, "status": "error", "error": "missing keys", "data": [data]},
42 | status=400,
43 | )
44 | updated = await self.__secrets.update(
45 | uid,
46 | data["name"],
47 | data["secret"],
48 | data["status"],
49 | )
50 | if updated.get("error"):
51 | return response.json(updated, status=409)
52 | self.__logger.info(
53 | {
54 | "id": uid,
55 | "type": t_secret,
56 | "filepath": t_secret,
57 | "status": updated["status"],
58 | }
59 | )
60 | return response.json(updated)
61 |
--------------------------------------------------------------------------------
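For orientation, here is a minimal client-side sketch against this controller, assuming the runner serves it under the `t_secret` path segment (the route registration lives elsewhere in the runner, and the base URL below is illustrative); the payload mirrors the `__min_keys` check above:

```python
import requests
from naas.ntypes import t_add, t_secret

RUNNER_API = "http://localhost:5000"  # illustrative; naas reads this from n_env.api at runtime

# The POST body must contain exactly the keys in __min_keys: name, secret, status.
payload = {"name": "MY_TOKEN", "secret": "s3cret-value", "status": t_add}
r = requests.post(f"{RUNNER_API}/{t_secret}", json=payload)
print(r.status_code, r.json())  # 400 on missing keys, 409 when update() reports an error
```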
/extensions/naasai/RELEASE.md:
--------------------------------------------------------------------------------
1 | # Making a new release of naasai
2 |
3 | The extension can be published to `PyPI` and `npm` manually or using the [Jupyter Releaser](https://github.com/jupyter-server/jupyter_releaser).
4 |
5 | ## Manual release
6 |
7 | ### Python package
8 |
9 | This extension can be distributed as a Python
10 | package. All of the Python
11 | packaging instructions are in the `pyproject.toml` file, which wraps your extension in a
12 | Python package. Before generating a package, we first need to install `build`.
13 |
14 | ```bash
15 | pip install build twine
16 | ```
17 |
18 | To create a Python source package (``.tar.gz``) and the binary package (`.whl`) in the `dist/` directory, do:
19 |
20 | ```bash
21 | python -m build
22 | ```
23 |
24 | > `python setup.py sdist bdist_wheel` is deprecated and will not work for this package.
25 |
26 | Then to upload the package to PyPI, do:
27 |
28 | ```bash
29 | twine upload dist/*
30 | ```
31 |
32 | ### NPM package
33 |
34 | To publish the frontend part of the extension as an NPM package, do:
35 |
36 | ```bash
37 | npm login
38 | npm publish --access public
39 | ```
40 |
41 | ## Automated releases with the Jupyter Releaser
42 |
43 | The extension repository should already be compatible with the Jupyter Releaser.
44 |
45 | Check out the [workflow documentation](https://github.com/jupyter-server/jupyter_releaser#typical-workflow) for more information.
46 |
47 | Here is a summary of the steps to cut a new release:
48 |
49 | - Fork the [`jupyter-releaser` repo](https://github.com/jupyter-server/jupyter_releaser)
50 | - Add `ADMIN_GITHUB_TOKEN`, `PYPI_TOKEN` and `NPM_TOKEN` to the Github Secrets in the fork
51 | - Go to the Actions panel
52 | - Run the "Draft Changelog" workflow
53 | - Merge the Changelog PR
54 | - Run the "Draft Release" workflow
55 | - Run the "Publish Release" workflow
56 |
57 | ## Publishing to `conda-forge`
58 |
59 | If the package is not on conda forge yet, check the documentation to learn how to add it: https://conda-forge.org/docs/maintainer/adding_pkgs.html
60 |
61 | Otherwise a bot should pick up the new version published to PyPI and open a new PR on the feedstock repository automatically.
62 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | name: "CodeQL"
7 |
8 | on:
9 | push:
10 | branches: [master]
11 | pull_request:
12 | # The branches below must be a subset of the branches above
13 | branches: [master]
14 | schedule:
15 | - cron: '0 10 * * 5'
16 |
17 | jobs:
18 | analyze:
19 | name: Analyze
20 | runs-on: ubuntu-latest
21 |
22 | strategy:
23 | fail-fast: false
24 | matrix:
25 | # Override automatic language detection by changing the below list
26 | # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
27 | language: ['python']
28 | # Learn more...
29 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
30 |
31 | steps:
32 | - name: Checkout repository
33 | uses: actions/checkout@v2
34 | with:
35 | # We must fetch at least the immediate parents so that if this is
36 | # a pull request then we can checkout the head.
37 | fetch-depth: 2
38 |
39 | # If this run was triggered by a pull request event, then checkout
40 | # the head of the pull request instead of the merge commit.
41 | - run: git checkout HEAD^2
42 | if: ${{ github.event_name == 'pull_request' }}
43 |
44 | # Initializes the CodeQL tools for scanning.
45 | - name: Initialize CodeQL
46 | uses: github/codeql-action/init@v1
47 | with:
48 | languages: ${{ matrix.language }}
49 | # If you wish to specify custom queries, you can do so here or in a config file.
50 | # By default, queries listed here will override any specified in a config file.
51 | # Prefix the list here with "+" to use these queries and those in the config file.
52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main
53 |
54 | - name: Perform CodeQL Analysis
55 | uses: github/codeql-action/analyze@v1
56 |
--------------------------------------------------------------------------------
/naas/runner/controllers/downloader.py:
--------------------------------------------------------------------------------
1 | from notebook.services.contents.filemanager import FileContentsManager as FCM
2 | from naas.onboarding import download_file
3 | from sanic.response import redirect, json
4 | from sanic.views import HTTPMethodView
5 | from naas.runner.env_var import n_env
6 | from naas.ntypes import t_downloader, t_send, t_error
7 | import traceback
8 | import uuid
9 |
10 |
11 | class DownloaderController(HTTPMethodView):
12 | __logger = None
13 |
14 | def __init__(self, logger, *args, **kwargs):
15 | super(DownloaderController, self).__init__(*args, **kwargs)
16 | self.__logger = logger
17 |
18 | async def get(self, request):
19 | uid = str(uuid.uuid4())
20 | url = request.args.get("url", None)
21 | mode_api = request.args.get("api", None)
22 | file_name = request.args.get("name", None)
23 | if url is None and file_name is None:
24 | return json({"status": t_error})
25 | if url is None:
26 | try:
27 | file_name = f"{file_name}.ipynb"
28 | FCM().new(path=file_name)
29 | except Exception as e:
30 | tb = traceback.format_exc()
31 | self.__logger.error(
32 | {"id": uid, "type": t_downloader, "status": t_send, "filepath": url}
33 | )
34 | return json({"status": t_error, "error": str(e), "tb": str(tb)})
35 | else:
36 | try:
37 | file_name = download_file(url, file_name)
38 | self.__logger.info(
39 | {"id": uid, "type": t_downloader, "status": t_send, "filepath": url}
40 | )
41 | except Exception as e:
42 | tb = traceback.format_exc()
43 | self.__logger.error(
44 |                 {"id": uid, "type": t_downloader, "status": t_error, "filepath": url}
45 | )
46 | return json({"status": t_error, "error": str(e), "tb": str(tb)})
47 | if mode_api is None:
48 | redirect_to = f"{n_env.user_url}/lab/tree/{file_name}"
49 | return redirect(redirect_to)
50 | else:
51 | return json({"status": t_send})
52 |
--------------------------------------------------------------------------------
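A hedged sketch of how this endpoint could be called, assuming it is mounted at a `/downloader` route (an assumption: the route registration is not in this file) and that the runner listens on the illustrative port below:

```python
import requests

RUNNER_API = "http://localhost:5000"   # illustrative base URL
# "/downloader" is an assumed route; check the runner's route registration.
params = {"url": "https://example.com/demo.ipynb", "api": "1"}
r = requests.get(f"{RUNNER_API}/downloader", params=params, allow_redirects=False)
# With the "api" query arg unset, the handler redirects to the JupyterLab tree view instead.
print(r.json())
```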
/tests/demo/demo_webhook.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 2,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "output_type": "stream",
10 | "name": "stdout",
11 | "text": [
12 | "Start 2021-02-02 15:11:04.036619\n"
13 | ]
14 | }
15 | ],
16 | "source": [
17 | "import datetime\n",
18 | "print('Start', datetime.datetime.now())"
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": 5,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "import asyncio\n",
28 | "\n",
29 | "await asyncio.sleep(3)"
30 | ]
31 | },
32 | {
33 | "cell_type": "code",
34 | "execution_count": 7,
35 | "metadata": {},
36 | "outputs": [
37 | {
38 | "output_type": "stream",
39 | "name": "stdout",
40 | "text": [
41 | "Done 2021-02-02 15:11:13.227339\n"
42 | ]
43 | }
44 | ],
45 | "source": [
46 | "print('Done', datetime.datetime.now())"
47 | ]
48 | },
49 | {
50 | "cell_type": "code",
51 | "execution_count": 4,
52 | "metadata": {},
53 | "outputs": [
54 | {
55 | "output_type": "stream",
56 | "name": "stdout",
57 | "text": [
58 | "👌 Well done! Your Notebook has been sent to production. \n\n⏰ It will be scheduled \"Every minute of every day\" (more on the syntax on https://crontab.guru/).\n\nPs: to remove the \"Scheduler\", just replace .add by .delete\n"
59 | ]
60 | }
61 | ],
62 | "source": [
63 | "import naas\n",
64 | "\n",
65 | "naas.webhook.add('./demo_webhook.ipynb', recurrence=\"* * * * *\")"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": null,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": []
74 | }
75 | ],
76 | "metadata": {
77 | "kernelspec": {
78 | "display_name": "Python 3",
79 | "language": "python",
80 | "name": "python3"
81 | },
82 | "language_info": {
83 | "codemirror_mode": {
84 | "name": "ipython",
85 | "version": 3
86 | },
87 | "file_extension": ".py",
88 | "mimetype": "text/x-python",
89 | "name": "python",
90 | "nbconvert_exporter": "python",
91 | "pygments_lexer": "ipython3",
92 | "version": "3.9.1-final"
93 | }
94 | },
95 | "nbformat": 4,
96 | "nbformat_minor": 4
97 | }
--------------------------------------------------------------------------------
/tests/test_manager.py:
--------------------------------------------------------------------------------
1 | from naas.ntypes import t_notebook
2 | from naas.manager import Manager
3 | from naas.runner import n_env
4 | import pytest # noqa: F401
5 | import os
6 | import json
7 |
8 | user_folder_name = "test_user_folder"
9 | test_demo_folder = "demo"
10 | test_file = "demo_file.py"
11 | token = "test_token"
12 | test_file_path = os.path.join(
13 | os.path.dirname(os.path.abspath(__file__)), test_demo_folder, test_file
14 | )
15 | n_env.hub_base = "https://test.naas.com"
16 | n_env.user = "TEST_USER"
17 | n_env.token = "TESTAPIKEY"
18 | n_env.proxy_api = "proxy.naas.com"
19 |
20 |
21 | def mock_for_nb_path(mocker, requests_mock):
22 | mocker.patch(
23 | "ipykernel.get_connection_file",
24 | return_value="kernel-b1e19209-e251-4115-819e-7ab5bc4232b7.json",
25 | )
26 | mock_json = json.loads(open("tests/session_ids.json").read())
27 | url = f"{n_env.user_url}/api/sessions"
28 | requests_mock.register_uri("GET", url, json=mock_json, status_code=200)
29 |
30 |
31 | def test_nb_path(mocker, requests_mock, tmp_path):
32 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
33 | n_env.server_root = path_srv_root
34 | mock_for_nb_path(mocker, requests_mock)
35 | manager = Manager(t_notebook)
36 | assert manager.notebook_path() == os.path.join(
37 | path_srv_root, "MAIN_DIR/TEST_DIR1/ANOTHER_DIR1"
38 | )
39 |
40 |
41 | def test_get_path(mocker, requests_mock, tmp_path):
42 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
43 | n_env.server_root = path_srv_root
44 | mock_for_nb_path(mocker, requests_mock)
45 | manager = Manager(t_notebook)
46 | assert manager.notebook_path() == os.path.join(
47 | path_srv_root, "MAIN_DIR/TEST_DIR1/ANOTHER_DIR1"
48 | )
49 | assert manager.get_path(test_file_path) == test_file_path
50 |
51 |
52 | # TODO test all other functions
53 |
54 | # def get_naas(self):
55 |
56 | # def get_value(self, path, obj_type):
57 |
58 | # def is_already_use(self, obj):
59 |
60 | # def get_prod(self, path):
61 |
62 | # def get_output(self, path):
63 |
64 | # def clear_output(self, path):
65 |
66 | # def list_history(self, path):
67 |
68 | # def get_history(self, path, histo):
69 |
70 | # def clear_history(self, path, histo=None):
71 |
72 | # def add_prod(self, obj, silent):
73 |
74 | # def del_prod(self, obj, silent):
75 |
--------------------------------------------------------------------------------
/naas/secret.py:
--------------------------------------------------------------------------------
1 | from .ntypes import t_secret, t_add, t_delete, error_reject, error_busy
2 | from .runner.env_var import n_env
3 | import pandas as pd
4 | import requests
5 |
6 |
7 | class Secret:
8 | def list(self, raw=False):
9 | try:
10 | r = requests.get(f"{n_env.api}/{t_secret}")
11 | r.raise_for_status()
12 | res = r.json()
13 | if raw:
14 | return res
15 | else:
16 | return pd.DataFrame.from_records(res)
17 | except requests.exceptions.ConnectionError as err:
18 | print(error_busy, err)
19 | raise
20 | except requests.exceptions.HTTPError as err:
21 | print(error_reject, err)
22 | raise
23 |
24 | def add(self, name=None, secret=None):
25 | obj = {"name": name, "secret": secret, "status": t_add}
26 | try:
27 | r = requests.post(f"{n_env.api}/{t_secret}", json=obj)
28 | r.raise_for_status()
29 | print("👌 Well done! Your Secret has been sent to production. \n")
30 | print('PS: to remove the "Secret" feature, just replace .add by .delete')
31 | except requests.exceptions.ConnectionError as err:
32 | print(error_busy, err)
33 | raise
34 | except requests.exceptions.HTTPError as err:
35 | print(error_reject, err)
36 | raise
37 |
38 | def get(self, name=None, default_value=None):
39 | all_secret = self.list(True)
40 | secret_item = None
41 | for item in all_secret:
42 | if name == item["name"]:
43 | secret_item = item
44 | break
45 | if secret_item is not None:
46 | return secret_item.get("secret", None)
47 | return default_value
48 |
49 | def delete(self, name=None):
50 | obj = {"name": name, "secret": "", "status": t_delete}
51 | try:
52 | r = requests.post(f"{n_env.api}/{t_secret}", json=obj)
53 | r.raise_for_status()
54 |             print("👌 Well done! Your Secret has been removed from production. \n")
55 | except requests.exceptions.ConnectionError as err:
56 | print(error_busy, err)
57 | raise
58 | except requests.exceptions.HTTPError as err:
59 | print(error_reject, err)
60 | raise
61 |
--------------------------------------------------------------------------------
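A short usage sketch of the client above, assuming the package exposes it as `naas.secret` (the demo notebooks use `naas.webhook` the same way); names and values are illustrative:

```python
import naas

# Store a secret in the runner, then read it back; get() returns the
# default_value when no secret of that name exists.
naas.secret.add(name="MY_API_KEY", secret="s3cret-value")
print(naas.secret.get(name="MY_API_KEY", default_value=None))  # "s3cret-value"

# list() returns a pandas DataFrame; list(raw=True) returns the raw records.
df = naas.secret.list()

# Remove it again, mirroring the .add call.
naas.secret.delete(name="MY_API_KEY")
```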
/.github/workflows/deploy_packages.yml:
--------------------------------------------------------------------------------
1 | name: Upload Python Package
2 |
3 | on:
4 | push:
5 | tags:
6 | - '*'
7 |
8 | jobs:
9 | deploy:
10 | runs-on: ubuntu-latest
11 |     name: "Test and Deploy version on PyPI"
12 | steps:
13 | - name: Check out
14 | uses: actions/checkout@v2
15 | with:
16 | fetch-depth: 0
17 | token: '${{ secrets.PERSONAL_ACCESS_TOKEN }}'
18 | - name: Set up Python
19 | uses: actions/setup-python@v1
20 | with:
21 | python-version: '3.9'
22 | - uses: actions/cache@v2
23 | with:
24 | path: ~/.cache/pip
25 | key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
26 | restore-keys: |
27 | ${{ runner.os }}-pip-
28 | - name: Install dependencies
29 | run: |
30 | python3 -m pip install -U pip
31 | python3 -m pip install --upgrade pip
32 | pip3 install -e '.[fulldev]'
33 | - name: Run tests and linters
34 | run: |
35 | git config --global user.email "action@github.com"
36 | git config --global user.name "GitHub Action"
37 | python3 -m black naas
38 | python3 -m flake8 naas
39 | pytest --cov=./naas --cov-report=xml
40 | - name: Upload coverage to Codecov
41 | if: runner.os == 'Linux'
42 | uses: codecov/codecov-action@v1.0.3
43 | with:
44 | token: ${{secrets.CODECOV_TOKEN}}
45 | file: ./coverage.xml
46 | fail_ci_if_error: true
47 | verbose: true
48 | - name: Build package
49 | run: python3 setup.py sdist
50 | - name: Deploy Pypi
51 | uses: remorses/pypi@v3
52 | with:
53 | username: ${{ secrets.PYPIP_USERNAME }}
54 | password: ${{ secrets.PYPIP_PASSWORD }}
55 | - name: Repository Dispatch
56 | uses: peter-evans/repository-dispatch@v1
57 | with:
58 | token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
59 | repository: ${{ github.repository }}
60 | event-type: pypip-deployed
61 | client-payload: '{"ref": "${{ github.ref }}"}'
62 | - name: Repository Dispatch
63 | uses: peter-evans/repository-dispatch@v1
64 | with:
65 | token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
66 | repository: jupyter-naas/kernels
67 | event-type: pypip-deployed
68 | client-payload: '{"ref": "${{ github.ref }}"}'
--------------------------------------------------------------------------------
/custom/jupyter_server_config.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Naas Team.
2 | # Distributed under the terms of the Modified BSD License.
3 |
4 | import subprocess
5 | import os
6 | import logging
7 | import threading
8 | import time
9 |
10 | c = get_config()
11 | c.ServerApp.ip = "0.0.0.0"
12 | c.ServerApp.port = 8888
13 |
14 | naas_port = 5000
15 |
16 | c.ServerApp.open_browser = False
17 | c.ServerApp.webbrowser_open_new = 0
18 |
19 | c.ServerApp.tornado_settings = {
20 | "headers": {
21 |         "Content-Security-Policy": "frame-ancestors 'self' "
22 | + os.environ.get("ALLOWED_IFRAME", "")
23 | }
24 | }
25 |
26 | c.ServerProxy.servers = {
27 | "naas": {
28 | "launcher_entry": {
29 | "enabled": False,
30 | "icon_path": "/etc/naas/custom/naas_fav.svg",
31 | "title": "Naas manager",
32 | },
33 | "new_browser_tab": False,
34 | "timeout": 30,
35 | "command": ["redir", ":{port}", f":{naas_port}"],
36 | }
37 | }
38 |
39 | # Change default umask for all subprocesses of the notebook server if set in
40 | # the environment
41 | if "NB_UMASK" in os.environ:
42 | os.umask(int(os.environ["NB_UMASK"], 8))
43 |
44 |
45 | def naasRunner(naas_port):
46 | while True:
47 | logging.info("Starting naas runner on port {}.".format(naas_port))
48 | p = subprocess.Popen(["python", "-m", "naas.runner", "-p", f"{naas_port}"])
49 | p.wait()
50 |         logging.info("Naas Runner exited!")
51 | logging.info(p.stdout)
52 | logging.info(p.stderr)
53 | time.sleep(1)
54 |
55 | ONE_HOUR: float = 3600.0
56 | def naasStarter():
57 | while True:
58 | logging.info("Refreshing naas starter")
59 | folder_name = '⚡ Get started with Naas'
60 |
61 | # Change this to remove a folder from the home directory of the user.
62 | os.system('rm -rf /home/ftp/old_folder_name_42')
63 |
64 | os.system('git clone https://github.com/jupyter-naas/starters.git /home/ftp/.naas/starters|| (cd /home/ftp/.naas/starters && git reset --hard && git pull)')
65 | os.system(f'mkdir -p "/home/ftp/{folder_name}"')
66 | os.system(f'cp -r /home/ftp/.naas/starters/* "/home/ftp/{folder_name}" && rm "/home/ftp/{folder_name}/README.md"')
67 | os.system('rm /home/ftp/Welcome_to_Naas.ipynb')
68 | time.sleep(ONE_HOUR)
69 |
70 | runner = threading.Thread(target=naasRunner, args=(naas_port,))
71 | runner.start()
72 |
73 | starter = threading.Thread(target=naasStarter, args=())
74 | starter.start()
75 |
--------------------------------------------------------------------------------
/extensions/naasai/README.md:
--------------------------------------------------------------------------------
1 | # naasai
2 |
3 | [](https://github.com/jupyter-naas/naas/actions/workflows/build.yml)
4 |
5 | A JupyterLab extension.
6 |
7 |
8 |
9 | ## Requirements
10 |
11 | * JupyterLab >= 3.0
12 |
13 | ## Install
14 |
15 | To install the extension, execute:
16 |
17 | ```bash
18 | pip install naasai
19 | ```
20 |
21 | ## Uninstall
22 |
23 | To remove the extension, execute:
24 |
25 | ```bash
26 | pip uninstall naasai
27 | ```
28 |
29 |
30 | ## Contributing
31 |
32 | ### Development install
33 |
34 | Note: You will need NodeJS to build the extension package.
35 |
36 | The `jlpm` command is JupyterLab's pinned version of
37 | [yarn](https://yarnpkg.com/) that is installed with JupyterLab. You may use
38 | `yarn` or `npm` in lieu of `jlpm` below.
39 |
40 | ```bash
41 | # Clone the repo to your local environment
42 | # Change directory to the naasai directory
43 | # Install package in development mode
44 | pip install -e .
45 | # Link your development version of the extension with JupyterLab
46 | jupyter labextension develop . --overwrite
47 | # Rebuild extension Typescript source after making changes
48 | jlpm run build
49 | ```
50 |
51 | You can watch the source directory and run JupyterLab at the same time in different terminals to watch for changes in the extension's source and automatically rebuild the extension.
52 |
53 | ```bash
54 | # Watch the source directory in one terminal, automatically rebuilding when needed
55 | jlpm run watch
56 | # Run JupyterLab in another terminal
57 | jupyter lab
58 | ```
59 |
60 | With the watch command running, every saved change will immediately be built locally and available in your running JupyterLab. Refresh JupyterLab to load the change in your browser (you may need to wait several seconds for the extension to be rebuilt).
61 |
62 | By default, the `jlpm run build` command generates the source maps for this extension to make it easier to debug using the browser dev tools. To also generate source maps for the JupyterLab core extensions, you can run the following command:
63 |
64 | ```bash
65 | jupyter lab build --minimize=False
66 | ```
67 |
68 | ### Development uninstall
69 |
70 | ```bash
71 | pip uninstall naasai
72 | ```
73 |
74 | In development mode, you will also need to remove the symlink created by the `jupyter labextension develop`
75 | command. To find its location, you can run `jupyter labextension list` to figure out where the `labextensions`
76 | folder is located. Then you can remove the symlink named `naasai` within that folder.
77 |
78 | ### Packaging the extension
79 |
80 | See [RELEASE](RELEASE.md)
81 |
--------------------------------------------------------------------------------
/naas/dependency.py:
--------------------------------------------------------------------------------
1 | from .ntypes import t_dependency, t_add, t_update, t_delete
2 | from .manager import Manager
3 | import pandas as pd
4 |
5 |
6 | class Dependency:
7 | naas = None
8 | role = t_dependency
9 | manager = None
10 |
11 | def __init__(self):
12 | self.manager = Manager(t_dependency)
13 | self.path = self.manager.path
14 |
15 | def list(self, path=None):
16 | return self.manager.list_prod("list_history", path)
17 |
18 | def get(self, path=None, histo=None):
19 | return self.manager.get_file(path, histo=histo)
20 |
21 | def clear(self, path=None, histo=None):
22 | return self.manager.clear_file(path, None, histo)
23 |
24 | def currents(self, raw=False):
25 | json_data = self.manager.get_naas()
26 | json_filtered = []
27 | for item in json_data:
28 | if item["type"] == self.role and item["status"] != t_delete:
29 | if raw:
30 | json_filtered.append(item)
31 | else:
32 | json_filtered.append({"path": item["path"]})
33 | if raw is False:
34 | df = pd.DataFrame(json_filtered)
35 | return df
36 | return json_filtered
37 |
38 | def add(self, path=None, debug=False):
39 | if self.manager.is_production():
40 | print("No add done, you are in production\n")
41 | return self.manager.get_path(path)
42 | current_file = self.manager.get_path(path)
43 | status = t_add
44 | try:
45 | self.manager.get_value(current_file, False)
46 | status = t_update
47 | except: # noqa: E722
48 | pass
49 | self.manager.add_prod(
50 | {
51 | "type": self.role,
52 | "status": status,
53 | "path": current_file,
54 | "params": {},
55 | "value": "Only internal",
56 | },
57 | debug,
58 | )
59 | print("👌 Well done! Your Dependency has been sent to production. \n")
60 | print('PS: to remove the "Dependency" feature, just replace .add by .delete')
61 |
62 | def delete(self, path=None, all=True, debug=False):
63 | if self.manager.is_production():
64 | print("No delete done, you are in production\n")
65 | return
66 | current_file = self.manager.get_path(path)
67 | self.manager.del_prod({"type": self.role, "path": current_file}, debug)
68 |         print("🗑 Done! Your Dependency has been removed from production.\n")
69 | if all is True:
70 | self.clear(current_file, "all")
71 |
--------------------------------------------------------------------------------
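As with secrets, a brief sketch of the intended usage, assuming the package exposes this class as `naas.dependency`; the path is illustrative:

```python
import naas

# Send a helper notebook to production so scheduled/webhook notebooks can use it.
naas.dependency.add("./utils.ipynb")

# Inspect what is currently registered (a DataFrame unless raw=True).
print(naas.dependency.currents())

# Remove it; all=True (the default) also clears the stored history.
naas.dependency.delete("./utils.ipynb")
```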
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | test_user_folder/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | pip-wheel-metadata/
25 | share/python-wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .nox/
45 | .coverage
46 | .coverage.*
47 | .cache
48 | nosetests.xml
49 | coverage.xml
50 | *.cover
51 | *.py,cover
52 | .hypothesis/
53 | .pytest_cache/
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | .python-version
87 |
88 | # pipenv
89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
92 | # install all needed dependencies.
93 | #Pipfile.lock
94 |
95 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
96 | __pypackages__/
97 |
98 | # Celery stuff
99 | celerybeat-schedule
100 | celerybeat.pid
101 |
102 | # SageMath parsed files
103 | *.sage.py
104 |
105 | # Environments
106 | .env
107 | .venv
108 | env/
109 | venv/
110 | ENV/
111 | env.bak/
112 | venv.bak/
113 |
114 | # Spyder project settings
115 | .spyderproject
116 | .spyproject
117 |
118 | # Rope project settings
119 | .ropeproject
120 |
121 | # mkdocs documentation
122 | /site
123 |
124 | # mypy
125 | .mypy_cache/
126 | .dmypy.json
127 | dmypy.json
128 |
129 | # Pyre type checker
130 | .pyre/
131 | tests/.DS_Store
132 | .DS_Store
133 |
134 | #vim files
135 | .vim/*
136 | pytest_tmp/
137 | Welcome_to_Naas.ipynb
138 | .naas
139 |
140 | # PyCharm
141 | .idea/
142 |
143 | # file_sharing folder for naas
144 | file_sharing
145 | .docker/.cache
--------------------------------------------------------------------------------
/extensions/naasai/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "naasai",
3 | "version": "0.1.0",
4 | "description": "naas.ai official extension",
5 | "keywords": [
6 | "jupyter",
7 | "jupyterlab",
8 | "jupyterlab-extension"
9 | ],
10 | "homepage": "https://github.com/jupyter-naas/naas",
11 | "bugs": {
12 | "url": "https://github.com/jupyter-naas/naas/issues"
13 | },
14 | "license": "BSD-3-Clause",
15 | "author": {
16 | "name": "Maxime Jublou",
17 | "email": "maxime@naas.ai"
18 | },
19 | "files": [
20 | "lib/**/*.{d.ts,eot,gif,html,jpg,js,js.map,json,png,svg,woff2,ttf}",
21 | "style/**/*.{css,js,eot,gif,html,jpg,json,png,svg,woff2,ttf}"
22 | ],
23 | "main": "lib/index.js",
24 | "types": "lib/index.d.ts",
25 | "style": "style/index.css",
26 | "repository": {
27 | "type": "git",
28 | "url": "https://github.com/jupyter-naas/naas.git"
29 | },
30 | "scripts": {
31 | "build": "jlpm run build:lib && jlpm run build:labextension:dev",
32 | "build:prod": "jlpm run clean && jlpm run build:lib && jlpm run build:labextension",
33 | "build:labextension": "jupyter labextension build .",
34 | "build:labextension:dev": "jupyter labextension build --development True .",
35 | "build:lib": "tsc",
36 | "clean": "jlpm run clean:lib",
37 | "clean:lib": "rimraf lib tsconfig.tsbuildinfo",
38 | "clean:labextension": "rimraf naasai/labextension",
39 | "clean:all": "jlpm run clean:lib && jlpm run clean:labextension",
40 | "eslint": "eslint . --ext .ts,.tsx --fix",
41 | "eslint:check": "eslint . --ext .ts,.tsx",
42 | "install:extension": "jlpm run build",
43 | "watch": "run-p watch:src watch:labextension",
44 | "watch:src": "tsc -w",
45 | "watch:labextension": "jupyter labextension watch ."
46 | },
47 | "dependencies": {
48 | "@jupyterlab/application": "^3.1.0",
49 | "@jupyterlab/mainmenu": "^3.1.0",
50 | "@jupyterlab/settingregistry": "^3.1.0",
51 | "@jupyterlab/launcher": "^3.1.0"
52 | },
53 | "devDependencies": {
54 | "@jupyterlab/builder": "^3.1.0",
55 | "@typescript-eslint/eslint-plugin": "^4.8.1",
56 | "@typescript-eslint/parser": "^4.8.1",
57 | "eslint": "^7.14.0",
58 | "eslint-config-prettier": "^6.15.0",
59 | "eslint-plugin-prettier": "^3.1.4",
60 | "npm-run-all": "^4.1.5",
61 | "prettier": "^2.1.1",
62 | "rimraf": "^3.0.2",
63 | "typescript": "~4.1.3"
64 | },
65 | "sideEffects": [
66 | "style/*.css",
67 | "style/index.js"
68 | ],
69 | "styleModule": "style/index.js",
70 | "publishConfig": {
71 | "access": "public"
72 | },
73 | "jupyterlab": {
74 | "extension": true,
75 | "outputDir": "naasai/labextension"
76 | },
77 | "jupyter-releaser": {
78 | "hooks": {
79 | "before-build-npm": [
80 | "python -m pip install jupyterlab~=3.1",
81 | "jlpm"
82 | ]
83 | }
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/naas/runner/proxy.py:
--------------------------------------------------------------------------------
1 | from .env_var import n_env
2 | from base64 import b64encode
3 | import escapism
4 | import string
5 | import requests
6 |
7 | _docker_safe_chars = set(string.ascii_letters + string.digits)
8 | _docker_escape_char_kubernet = "-"
9 | _docker_escape_char_docker = "_"
10 |
11 |
12 | def escape_kubernet(s):
13 |     """Escape a string to Kubernetes-safe characters"""
14 | return escapism.escape(
15 | s,
16 | safe=_docker_safe_chars,
17 | escape_char=_docker_escape_char_kubernet,
18 | )
19 |
20 |
21 | def escape_docker(s):
22 | """Escape a string to docker-safe characters"""
23 |     """Escape a string to Docker-safe characters"""
24 | s,
25 | safe=_docker_safe_chars,
26 | escape_char=_docker_escape_char_docker,
27 | )
28 |
29 |
30 | def encode_proxy_url(token=""):
31 | if n_env.user and n_env.user != "":
32 | client_encoded = escape_kubernet(n_env.user)
33 | message_bytes = client_encoded.encode("ascii")
34 | base64_bytes = b64encode(message_bytes)
35 | username_base64 = base64_bytes.decode("ascii")
36 | return f"{n_env.proxy_api}/{username_base64}/{token}"
37 | else:
38 | return f"{n_env.proxy_api}/{token}"
39 |
40 |
41 | class Domain:
42 |
43 | headers = None
44 |
45 | def __init__(self):
46 | self.headers = {"Authorization": f"token {n_env.token}"}
47 |
48 | def status(self):
49 | req = requests.get(url=f"{n_env.proxy_api}/status")
50 | req.raise_for_status()
51 | jsn = req.json()
52 | return jsn
53 |
54 | def add(self, domain, url=None):
55 | token = None
56 | endpoint = None
57 | if url:
58 | list_url = url.split("/")
59 | token = list_url.pop()
60 | endpoint = list_url.pop()
61 | if "://" in domain:
62 | clean_domain = domain.split("://")[1]
63 | else:
64 | clean_domain = domain
65 | data = {"domain": clean_domain, "endpoint": endpoint, "token": token}
66 | req = requests.post(
67 | url=f"{n_env.proxy_api}/proxy", headers=self.headers, json=data
68 | )
69 | req.raise_for_status()
70 | new_url = f"https://{clean_domain}"
71 | if token:
72 | new_url = f"{new_url}/{endpoint}/{token}"
73 | return new_url
74 |
75 | def get(self, domain):
76 | req = requests.get(
77 | url=f"{n_env.proxy_api}/proxy",
78 | headers=self.headers,
79 | json={"domain": domain},
80 | )
81 | req.raise_for_status()
82 | jsn = req.json()
83 | return jsn
84 |
85 | def delete(self, domain):
86 | req = requests.delete(
87 | url=f"{n_env.proxy_api}/proxy",
88 | headers=self.headers,
89 | json={"domain": domain},
90 | )
91 | req.raise_for_status()
92 | jsn = req.json()
93 | return jsn
94 |
--------------------------------------------------------------------------------
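To make the URL construction in `encode_proxy_url` concrete, here is a standalone sketch with illustrative values (at runtime they come from `n_env.user` and `n_env.proxy_api`):

```python
from base64 import b64encode
import string

import escapism

user = "bob@cashstory.com"     # illustrative; normally n_env.user
proxy_api = "proxy.naas.com"   # illustrative; normally n_env.proxy_api
token = "my-token"

safe = set(string.ascii_letters + string.digits)
escaped = escapism.escape(user, safe=safe, escape_char="-")   # Kubernetes-safe form
encoded = b64encode(escaped.encode("ascii")).decode("ascii")  # base64 of the escaped user
print(f"{proxy_api}/{encoded}/{token}")
```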
/tests/demo/demo_res_image.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from IPython.core.display import display, Image, Markdown\n",
10 | "import requests\n",
11 | "\n",
12 | "url = \"https://picsum.photos/id/237/200/300\"\n",
13 | "display(Markdown(\"Response Set as IMAGE, preview below: \"))\n",
14 | "display(Image(data=requests.get(url, stream=True).content, metadata={\"naas_api\": True}))"
15 | ]
16 | },
17 | {
18 | "cell_type": "code",
19 | "execution_count": 3,
20 | "metadata": {},
21 | "outputs": [
22 | {
23 | "output_type": "stream",
24 | "name": "stdout",
25 | "text": [
26 | "👌 Well done! Your Notebook has been sent to production.\n\n"
27 | ]
28 | },
29 | {
30 | "output_type": "display_data",
31 | "data": {
32 | "text/plain": "Button(button_style='primary', description='Copy URL', style=ButtonStyle())",
33 | "application/vnd.jupyter.widget-view+json": {
34 | "version_major": 2,
35 | "version_minor": 0,
36 | "model_id": "524f6a2c391c40acbaae2d14c53831eb"
37 | }
38 | },
39 | "metadata": {}
40 | },
41 | {
42 | "output_type": "display_data",
43 | "data": {
44 | "text/plain": "Output()",
45 | "application/vnd.jupyter.widget-view+json": {
46 | "version_major": 2,
47 | "version_minor": 0,
48 | "model_id": "b525ab14023540f38f6d63ef6b89ce33"
49 | }
50 | },
51 | "metadata": {}
52 | },
53 | {
54 | "output_type": "stream",
55 | "name": "stdout",
56 | "text": [
57 | "PS: to remove the \"Notebook as API\" feature, just replace .add by .delete\n"
58 | ]
59 | },
60 | {
61 | "output_type": "execute_result",
62 | "data": {
63 | "text/plain": [
64 | "'https://public.naas.ai//notebook/a669fe2474b23ee1a1a180c75eed1072cc9299702386fad3d5e25a333804'"
65 | ]
66 | },
67 | "metadata": {},
68 | "execution_count": 3
69 | }
70 | ],
71 | "source": [
72 | "import naas\n",
73 | "naas.webhook.add('./demo_res_image.ipynb', params={\"inline\": True})"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": null,
79 | "metadata": {},
80 | "outputs": [],
81 | "source": []
82 | }
83 | ],
84 | "metadata": {
85 | "kernelspec": {
86 | "display_name": "Python 3",
87 | "language": "python",
88 | "name": "python3"
89 | },
90 | "language_info": {
91 | "codemirror_mode": {
92 | "name": "ipython",
93 | "version": 3
94 | },
95 | "file_extension": ".py",
96 | "mimetype": "text/x-python",
97 | "name": "python",
98 | "nbconvert_exporter": "python",
99 | "pygments_lexer": "ipython3",
100 | "version": "3.9.1-final"
101 | }
102 | },
103 | "nbformat": 4,
104 | "nbformat_minor": 4
105 | }
--------------------------------------------------------------------------------
/images/canny.svg:
--------------------------------------------------------------------------------
1 | [SVG logo markup not preserved in this dump]
--------------------------------------------------------------------------------
/naas/runner/controllers/performance.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | from sanic.response import json
3 | from naas.runner.env_var import n_env
4 |
5 | from pathlib import Path
6 |
7 |
8 | def get_folder_size(folder):
9 | return ByteSize(sum(file.stat().st_size for file in Path(folder).rglob("*")))
10 |
11 |
12 | class ByteSize(int):
13 |
14 | _kB = 1024
15 |     _suffixes = "B", "kB", "MB", "GB", "TB"
16 |
17 | def __new__(cls, *args, **kwargs):
18 | return super().__new__(cls, *args, **kwargs)
19 |
20 | def __init__(self, *args, **kwargs):
21 | self.bytes = self.B = int(self)
22 | self.kilobytes = self.kB = self / self._kB ** 1
23 | self.megabytes = self.MB = self / self._kB ** 2
24 | self.gigabytes = self.GB = self / self._kB ** 3
25 |         self.terabytes = self.TB = self / self._kB ** 4
26 | *suffixes, last = self._suffixes
27 | suffix = next(
28 | (suffix for suffix in suffixes if 1 < getattr(self, suffix) < self._kB),
29 | last,
30 | )
31 | self.readable = suffix, getattr(self, suffix)
32 |
33 | super().__init__()
34 |
35 | def __str__(self):
36 | return self.__format__(".2f")
37 |
38 | def __repr__(self):
39 | return "{}({})".format(self.__class__.__name__, super().__repr__())
40 |
41 | def __format__(self, format_spec):
42 | suffix, val = self.readable
43 | return "{val:{fmt}} {suf}".format(val=val, fmt=format_spec, suf=suffix)
44 |
45 | def __sub__(self, other):
46 | return self.__class__(super().__sub__(other))
47 |
48 | def __add__(self, other):
49 | return self.__class__(super().__add__(other))
50 |
51 | def __mul__(self, other):
52 | return self.__class__(super().__mul__(other))
53 |
54 | def __rsub__(self, other):
55 | return self.__class__(super().__sub__(other))
56 |
57 | def __radd__(self, other):
58 | return self.__class__(super().__add__(other))
59 |
60 | def __rmul__(self, other):
61 | return self.__class__(super().__rmul__(other))
62 |
63 |
64 | class PerformanceController(HTTPMethodView):
65 | def __init__(self, *args, **kwargs):
66 | super(PerformanceController, self).__init__(*args, **kwargs)
67 |
68 | async def get(self, request, mode=""):
69 | modes = {
70 | "cpu": PerformanceController.getCpu,
71 | "ram": PerformanceController.getRam,
72 | "storage": PerformanceController.getStorage,
73 | }
74 | if modes.get(mode) is not None:
75 | return json({mode: modes[mode](self)})
76 | else:
77 | perf = {}
78 | for key, value in modes.items():
79 | perf[key] = value(self)
80 | return json(perf)
81 |
82 | # TODO make the functions to get the cpu and ram value
83 | def getCpu(self):
84 | return "/"
85 |
86 | def getRam(self):
87 | return "/"
88 |
89 | def getStorage(self):
90 | return str(get_folder_size(n_env.server_root))
91 |
--------------------------------------------------------------------------------
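A quick illustration of `ByteSize`'s human-readable formatting: `readable` picks the suffix whose scaled value falls strictly between 1 and 1024, and `__str__` renders it with two decimals:

```python
from naas.runner.controllers.performance import ByteSize

size = ByteSize(1536 * 1024)  # 1,572,864 bytes
print(size.kB)                # 1536.0 -> too large for the kB suffix
print(size.MB)                # 1.5    -> selected, since 1 < 1.5 < 1024
print(str(size))              # "1.50 MB" via __format__(".2f")
```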
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | with open("README.md", "r", encoding="utf-8") as fh:
4 | long_description = fh.read()
5 |
6 | NDV = "0.103.2"
7 |
8 | driver_dep = [f'naas_drivers[full]=={NDV}']
9 | dev_dep = [
10 | "syncer==1.3.0",
11 | "backports.zoneinfo==0.2.1",
12 | "pytest==6.2.4",
13 | "pytest-tornasync==0.6.0.post2",
14 | "pytest-mock==3.6.0",
15 | "pytest-sanic==1.7.0",
16 | "pytest-asyncio==0.15.1",
17 | "pre-commit==2.15.0",
18 | "twine==3.5.0",
19 | "requests-mock==1.9.3",
20 | "flake8==4.0.1",
21 | "black>=21.4b2",
22 | "imgcompare==2.0.1",
23 | "commitizen==2.17.13",
24 | "pytest-cov==2.12.1",
25 | ]
26 | setup(
27 | name="naas",
28 | version="2.6.3",
29 | author="Maxime Jublou",
30 | author_email="devops@cashstory.com",
31 | license="BSD",
32 | description="Scheduler system for notebooks",
33 | long_description=long_description,
34 | long_description_content_type="text/markdown",
35 | url="https://github.com/cashstory/naas",
36 | packages=find_packages(exclude=["tests"]),
37 | package_data={
38 | "naas": ["runner/assets/*.html", "runner/assets/*.png", "runner/assets/*.svg"],
39 | },
40 | setup_requires=["wheel"],
41 | extras_require={
42 | "dev": dev_dep,
43 | 'full': driver_dep,
44 | "fulldev": dev_dep + driver_dep
45 | },
46 | install_requires=[
47 | "nbconvert==6.0.7",
48 | "nest_asyncio==1.5.1",
49 | "ipywidgets==7.6.5",
50 | "papermill==2.3.3",
51 | "pretty-cron==1.2.0",
52 | "APScheduler==3.8.1",
53 | "pycron==3.0.0",
54 | "aiohttp==3.7.4.post0",
55 | "html5lib==1.1",
56 | "Pillow==8.3.2",
57 | "markdown2==2.4.0",
58 | "pandas==1.2.4",
59 | "escapism==1.0.1",
60 | "notebook==6.4.1",
61 |         "MarkupSafe==2.0.1",  # "notebook==6.4.1" requests an older Jinja2, and the latest MarkupSafe is not compatible with that older Jinja2, so MarkupSafe is pinned.
62 | "jinja2==3.0.3",
63 | "ipython==7.23.1",
64 | "ipykernel==5.5.3",
65 | "requests==2.25.1",
66 | "sentry-sdk==1.0.0",
67 | "sanic==20.12.2",
68 | "sanic-openapi==0.6.2",
69 | "argparse==1.4.0",
70 | "nbclient==0.5.3",
71 | "beautifulsoup4==4.10.0",
72 | "tzdata",
73 | "pysqlite3==0.4.6",
74 | "pymongo[srv]==3.11.3",
75 | "psycopg2-binary==2.9.1",
76 | "mprop==0.16.0",
77 | "pydash==5.1.0"
78 | ],
79 | classifiers=[
80 | "Programming Language :: Python :: 3.9",
81 | "Programming Language :: Python :: 3 :: Only",
82 | "License :: OSI Approved :: BSD License",
83 | "Framework :: Jupyter",
84 | "Operating System :: OS Independent",
85 | "Intended Audience :: Science/Research",
86 | "Intended Audience :: Developers",
87 | "Topic :: Software Development",
88 | "Topic :: Scientific/Engineering",
89 | ],
90 | )
--------------------------------------------------------------------------------
/scripts/customize:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 |
4 | CANNY=''
5 | CRISP=''
6 | USER_INJECT=''
7 | cp /etc/naas/custom/overrides.json /opt/conda/share/jupyter/lab/settings/overrides.json
8 | cp /etc/naas/custom/jupyter_server_config.py /etc/jupyter/jupyter_notebook_config.py
9 |
10 | if [[ $NAAS_INSTALL_BINDER == "yes" ]]; then
11 | cp /etc/naas/custom/jupyter_server_config.py /home/jovyan/.jupyter/jupyter_notebook_config.py
12 | fi
13 | # Custom naas design
14 | sed -i 's/JupyterLab/Naas/g' /opt/conda/share/jupyter/lab/static/index.html
15 | sed -i "s||$CANNY|g" /opt/conda/share/jupyter/lab/static/index.html
16 | sed -i "s||$CRISP|g" /opt/conda/share/jupyter/lab/static/index.html
17 | sed -i "s||$USER_INJECT|g" /opt/conda/share/jupyter/lab/static/index.html
18 | cp /etc/naas/custom/naas_logo_n.ico /opt/conda/share/jupyterhub/static/favicon.ico
19 | cp /etc/naas/custom/naas_logo_n.ico /opt/conda/lib/python3.9/site-packages/jupyter_server/static/favicon.ico
20 | cp /etc/naas/custom/naas_logo_n.ico /opt/conda/lib/python3.9/site-packages/nbdime/webapp/static/favicon.ico
21 | cp /etc/naas/custom/naas_logo_n.ico /opt/conda/lib/python3.9/site-packages/jupyter_server/static/favicons/favicon.ico
22 | cp /etc/naas/custom/naas_logo_n.ico /opt/conda/lib/python3.9/site-packages/notebook/static/favicon.ico
23 | cp /etc/naas/custom/naas_logo_n.ico /opt/conda/lib/python3.9/site-packages/notebook/static/base/images/favicon.ico
24 | cat /etc/naas/custom/custom.css >> /opt/conda/share/jupyter/lab/themes/@jupyterlab/theme-light-extension/index.css
25 | cat /etc/naas/custom/custom.css >> /opt/conda/share/jupyter/lab/themes/@jupyterlab/theme-dark-extension/index.css
26 |
--------------------------------------------------------------------------------
/tests/test_secret.py:
--------------------------------------------------------------------------------
1 | from naas.ntypes import t_add, t_delete, t_update
2 | from naas.runner.logger import Logger
3 | from naas.runner.secret import Secret
4 | import pytest # noqa: F401
5 | import uuid
6 | import os
7 |
8 | user_folder_name = "test_user_folder"
9 | clean = True
10 | init_data = []
11 |
12 |
13 | async def test_init(tmp_path):
14 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
15 | os.environ["JUPYTER_SERVER_ROOT"] = str(path_srv_root)
16 | logger = Logger()
17 | uid = str(uuid.uuid4())
18 | secret = Secret(logger, clean, init_data)
19 | list_job = await secret.list(uid)
20 | assert len(list_job) == 0
21 |
22 |
23 | async def test_add(tmp_path):
24 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
25 | os.environ["JUPYTER_SERVER_ROOT"] = str(path_srv_root)
26 | logger = Logger()
27 | uid = str(uuid.uuid4())
28 | secret = Secret(logger, clean, init_data)
29 | await secret.update(uid, "test_1", "bar", t_add)
30 | list_job = await secret.list(uid)
31 | assert len(list_job) == 1
32 | data = await secret.find_by_name(uid, "test_1")
33 | assert data.get("id") == uid
34 | assert data.get("secret") == "bar"
35 | await secret.update(uid, "test_1", "", t_delete)
36 | secret = Secret(logger, clean, init_data)
37 | list_job = await secret.list(uid)
38 | assert len(list_job) == 0
39 |
40 |
41 | async def test_update(tmp_path):
42 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
43 | os.environ["JUPYTER_SERVER_ROOT"] = str(path_srv_root)
44 | logger = Logger()
45 | uid = str(uuid.uuid4())
46 | secret = Secret(logger, clean, init_data)
47 | await secret.update(uid, "test_1", "bar", t_add)
48 | list_job = await secret.list(uid)
49 | assert len(list_job) == 1
50 | data = await secret.find_by_name(uid, "test_1")
51 | assert data.get("id") == uid
52 | assert data.get("secret") == "bar"
53 | await secret.update(uid, "test_1", "barbar", t_update)
54 | list_job = await secret.list(uid)
55 | assert len(list_job) == 1
56 | data = await secret.find_by_name(uid, "test_1")
57 | assert data.get("id") == uid
58 | assert data.get("secret") == "barbar"
59 |
60 |
61 | async def test_keep(tmp_path):
62 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
63 | os.environ["JUPYTER_SERVER_ROOT"] = str(path_srv_root)
64 | logger = Logger()
65 | uid = str(uuid.uuid4())
66 | secret = Secret(logger, clean, init_data)
67 | await secret.update(uid, "test_1", "bar", t_add)
68 | secret_two = Secret(logger, False, [])
69 | assert len(await secret_two.list(uid)) == 1
70 | await secret.update(uid, "test_1", "", t_delete)
71 | secret_three = Secret(logger, False, [])
72 | assert len(await secret_three.list(uid)) == 0
73 |
74 |
75 | async def test_clean(tmp_path):
76 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
77 | os.environ["JUPYTER_SERVER_ROOT"] = str(path_srv_root)
78 | logger = Logger()
79 | uid = str(uuid.uuid4())
80 | secret = Secret(logger, clean, init_data)
81 | await secret.update(uid, "test_1", "bar", t_add)
82 | secret_two = Secret(logger, clean, init_data)
83 | assert len(await secret_two.list(uid)) == 0
84 |
--------------------------------------------------------------------------------
/tests/demo/demo_res_file.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 4,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import pandas as pd"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 4,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "csv_data = \"\"\"\"Month\", \"Average\", \"2005\", \"2006\", \"2007\", \"2008\", \"2009\", \"2010\", \"2011\", \"2012\", \"2013\", \"2014\", \"2015\"\n",
19 | "\"May\", 0.1, 0, 0, 1, 1, 0, 0, 0, 2, 0, 0, 0 \n",
20 | "\"Jun\", 0.5, 2, 1, 1, 0, 0, 1, 1, 2, 2, 0, 1\n",
21 | "\"Jul\", 0.7, 5, 1, 1, 2, 0, 1, 3, 0, 2, 2, 1\n",
22 | "\"Aug\", 2.3, 6, 3, 2, 4, 4, 4, 7, 8, 2, 2, 3\n",
23 | "\"Sep\", 3.5, 6, 4, 7, 4, 2, 8, 5, 2, 5, 2, 5\n",
24 | "\"Oct\", 2.0, 8, 0, 1, 3, 2, 5, 1, 5, 2, 3, 0\n",
25 | "\"Nov\", 0.5, 3, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1\n",
26 | "\"Dec\", 0.0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1\n",
27 | "\"\"\"\n",
28 | "csv_file = open('demo.csv', 'w')\n",
29 | "csv_file.write(csv_data)\n",
30 | "csv_file.close()"
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": 5,
36 | "metadata": {},
37 | "outputs": [
38 | {
39 | "output_type": "display_data",
40 | "data": {
41 | "text/plain": "",
42 | "text/markdown": "Response Set as File, preview below: "
43 | },
44 | "metadata": {}
45 | },
46 | {
47 | "output_type": "display_data",
48 | "data": {
49 | "text/plain": "",
50 | "application/json": {
51 | "path": "/Users/martindonadieu/Documents/Projects.nosync/Naas/naas/tests/demo/demo.csv"
52 | }
53 | },
54 | "metadata": {
55 | "application/json": {
56 | "expanded": false,
57 | "root": "root",
58 | "naas_api": true,
59 | "naas_type": "text/csv"
60 | }
61 | }
62 | }
63 | ],
64 | "source": [
65 | "import mimetypes\n",
66 | "import os\n",
67 | "from IPython.core.display import display, JSON, Markdown\n",
68 | "\n",
69 | "path = 'demo.csv'\n",
70 | "naas_type = mimetypes.guess_type(os.path.abspath(path))[0]\n",
71 | "data = {\"path\": os.path.abspath(path)}\n",
72 | "display(Markdown(\"Response Set as File, preview below: \"))\n",
73 | "display(JSON(data, metadata={\"naas_api\": True, \"naas_type\": naas_type}))"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": null,
79 | "metadata": {},
80 | "outputs": [],
81 | "source": []
82 | }
83 | ],
84 | "metadata": {
85 | "kernelspec": {
86 | "display_name": "Python 3",
87 | "language": "python",
88 | "name": "python3"
89 | },
90 | "language_info": {
91 | "codemirror_mode": {
92 | "name": "ipython",
93 | "version": 3
94 | },
95 | "file_extension": ".py",
96 | "mimetype": "text/x-python",
97 | "name": "python",
98 | "nbconvert_exporter": "python",
99 | "pygments_lexer": "ipython3",
100 | "version": "3.9.1-final"
101 | }
102 | },
103 | "nbformat": 4,
104 | "nbformat_minor": 4
105 | }
--------------------------------------------------------------------------------
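The notebook above demonstrates the convention the runner uses to serve a cell output as an HTTP file response: a JSON display payload whose metadata carries `naas_api: True` and a `naas_type` mimetype. A minimal sketch of that pattern wrapped in a reusable helper (the `respond_file` name is illustrative, not part of the naas API):

```python
import mimetypes
import os

from IPython.core.display import display, JSON, Markdown


def respond_file(path):
    # The runner picks up outputs whose metadata contains "naas_api";
    # "naas_type" tells it which Content-Type to serve the file with.
    naas_type = mimetypes.guess_type(os.path.abspath(path))[0]
    display(Markdown("Response Set as File, preview below: "))
    display(JSON({"path": os.path.abspath(path)},
                 metadata={"naas_api": True, "naas_type": naas_type}))


respond_file("demo.csv")
```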
/extensions/naasai/setup.py:
--------------------------------------------------------------------------------
1 | """
2 | naasai setup
3 | """
4 | import json
5 | import sys
6 | from pathlib import Path
7 |
8 | import setuptools
9 |
10 | HERE = Path(__file__).parent.resolve()
11 |
12 | # The name of the project
13 | name = "naasai"
14 |
15 | lab_path = (HERE / name.replace("-", "_") / "labextension")
16 |
17 | # Representative files that should exist after a successful build
18 | ensured_targets = [
19 | str(lab_path / "package.json"),
20 | str(lab_path / "static/style.js")
21 | ]
22 |
23 | labext_name = "naasai"
24 |
25 | data_files_spec = [
26 | ("share/jupyter/labextensions/%s" % labext_name, str(lab_path.relative_to(HERE)), "**"),
27 | ("share/jupyter/labextensions/%s" % labext_name, str("."), "install.json"),
28 | ]
29 |
30 | long_description = (HERE / "README.md").read_text()
31 |
32 | # Get the package info from package.json
33 | pkg_json = json.loads((HERE / "package.json").read_bytes())
34 | version = (
35 | pkg_json["version"]
36 | .replace("-alpha.", "a")
37 | .replace("-beta.", "b")
38 | .replace("-rc.", "rc")
39 | )
40 |
41 | setup_args = dict(
42 | name=name,
43 | version=version,
44 | url=pkg_json["homepage"],
45 | author=pkg_json["author"]["name"],
46 | author_email=pkg_json["author"]["email"],
47 | description=pkg_json["description"],
48 | license=pkg_json["license"],
49 | license_file="LICENSE",
50 | long_description=long_description,
51 | long_description_content_type="text/markdown",
52 | packages=setuptools.find_packages(),
53 | install_requires=[],
54 | zip_safe=False,
55 | include_package_data=True,
56 | python_requires=">=3.6",
57 | platforms="Linux, Mac OS X, Windows",
58 | keywords=["Jupyter", "JupyterLab", "JupyterLab3"],
59 | classifiers=[
60 | "License :: OSI Approved :: BSD License",
61 | "Programming Language :: Python",
62 | "Programming Language :: Python :: 3",
63 | "Programming Language :: Python :: 3.6",
64 | "Programming Language :: Python :: 3.7",
65 | "Programming Language :: Python :: 3.8",
66 | "Programming Language :: Python :: 3.9",
67 | "Programming Language :: Python :: 3.10",
68 | "Framework :: Jupyter",
69 | "Framework :: Jupyter :: JupyterLab",
70 | "Framework :: Jupyter :: JupyterLab :: 3",
71 | "Framework :: Jupyter :: JupyterLab :: Extensions",
72 | "Framework :: Jupyter :: JupyterLab :: Extensions :: Prebuilt",
73 | ],
74 | )
75 |
76 | try:
77 | from jupyter_packaging import (
78 | wrap_installers,
79 | npm_builder,
80 | get_data_files
81 | )
82 | post_develop = npm_builder(
83 | build_cmd="install:extension", source_dir="src", build_dir=lab_path
84 | )
85 | setup_args["cmdclass"] = wrap_installers(post_develop=post_develop, ensured_targets=ensured_targets)
86 | setup_args["data_files"] = get_data_files(data_files_spec)
87 | except ImportError as e:
88 | import logging
89 | logging.basicConfig(format="%(levelname)s: %(message)s")
90 | logging.warning("Build tool `jupyter-packaging` is missing. Install it with pip or conda.")
91 | if not ("--name" in sys.argv or "--version" in sys.argv):
92 | raise e
93 |
94 | if __name__ == "__main__":
95 | setuptools.setup(**setup_args)
96 |
--------------------------------------------------------------------------------
/tests/test_logger.py:
--------------------------------------------------------------------------------
1 | from naas.ntypes import t_add, t_delete, t_update
2 | from naas.runner.logger import Logger
3 | from naas.runner import n_env
4 | import pytest # noqa: F401
5 | import logging
6 | import uuid
7 | import os
8 |
9 | user_folder_name = "test_user_folder"
10 |
11 |
12 | def test_init(tmp_path, caplog):
13 | caplog.set_level(logging.INFO)
14 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
15 | n_env.server_root = path_srv_root
16 | logger = Logger(clear=True)
17 | uid = str(uuid.uuid4())
18 | data = logger.list(uid).get("data")
19 | assert len(data) == 0
20 | logger.info({"id": uid, "status": "inited", "type": t_add})
21 | data = logger.list(uid).get("data")
22 | assert len(data) == 1
23 | log = data[0]
24 | assert log["levelname"] == "INFO"
25 | assert log["status"] == "inited"
26 | assert log["id"] == uid
27 |
28 |
29 | def test_clean(tmp_path, caplog):
30 | caplog.set_level(logging.INFO)
31 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
32 | n_env.server_root = path_srv_root
33 | logger = Logger(clear=True)
34 | uid = str(uuid.uuid4())
35 | data = logger.list(uid).get("data")
36 | assert len(data) == 0
37 | logger.info({"id": uid, "type": t_add, "status": "test_1"})
38 | data = logger.list(uid).get("data")
39 | assert len(data) == 1
40 | logger_new = Logger()
41 | assert len(logger_new.list(uid).get("data")) == 1
42 | logger_new.clear()
43 | assert len(logger_new.list(uid).get("data")) == 0
44 |
45 |
46 | def test_add(tmp_path, caplog):
47 | caplog.set_level(logging.INFO)
48 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
49 | n_env.server_root = path_srv_root
50 | logger = Logger(clear=True)
51 | uid = str(uuid.uuid4())
52 | data = {"id": uid, "type": t_add, "status": "test_2"}
53 | logger.info(data)
54 | all_logs = logger.list(uid).get("data")
55 | print("all_logs", all_logs)
56 | assert len(all_logs) == 1
57 | log = all_logs[0]
58 | assert log["levelname"] == "INFO"
59 | assert log["id"] == uid
60 | assert log["type"] == t_add
61 | assert log["status"] == "test_2"
62 |
63 |
64 | def test_list(tmp_path, caplog):
65 | caplog.set_level(logging.INFO)
66 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
67 | n_env.server_root = path_srv_root
68 | logger = Logger(clear=True)
69 | uid = str(uuid.uuid4())
70 | data = {"id": uid, "type": t_add, "status": "test_1"}
71 | data_two = {"id": uid, "type": t_delete, "status": "test_2"}
72 | data_three = {"id": uid, "type": t_update, "status": "test_2"}
73 | logger.info(data)
74 | logger.info(data_two)
75 | logger.info(data_three)
76 | all_logs = logger.list(uid).get("data")
77 | assert len(all_logs) == 3
78 | assert len(logger.list(uid, skip=1).get("data")) == 2
79 | assert len(logger.list(uid, skip=0, limit=1).get("data")) == 1
80 | assert len(logger.list(uid, skip=1, limit=1).get("data")) == 1
81 | assert len(logger.list(uid, skip=0, limit=0, search="test_2").get("data")) == 2
82 | assert len(logger.list(uid, skip=0, limit=0, search="test_2").get("data")) == 2
83 | assert (
84 | len(
85 | logger.list(uid, skip=0, limit=0, search="", filters=[t_delete, t_add]).get(
86 | "data"
87 | )
88 | )
89 | == 2
90 | )
91 | logger.clear()
92 |
--------------------------------------------------------------------------------
/naas/ntypes.py:
--------------------------------------------------------------------------------
1 | from IPython.core.display import Javascript, display, HTML
2 | import ipywidgets as widgets
3 | import mimetypes
4 |
5 | t_notebook = "notebook"
6 | t_asset = "asset"
7 | t_downloader = "downloader"
8 | t_dependency = "dependency"
9 | t_scheduler = "scheduler"
10 | t_credits = "credits"
11 | t_auth = "auth"
12 | t_version = "version"
13 |
14 | t_secret = "secret"
15 |
16 | t_tz = "timezone"
17 | t_performance = "performance"
18 | t_job = "job"
19 | t_job_not_found = "job not found"
20 | t_env = "env"
21 | t_log = "log"
22 |
23 | t_storage = "storage"
24 | t_cpu = "cpu"
25 | t_ram = "ram"
26 |
27 | t_list_output = "list_output"
28 | t_list_histo = "list_histo"
29 | t_output = "output"
30 | t_production = "prod"
31 | t_histo = "history"
32 | t_list = "list"
33 | t_send = "send"
34 | t_main = "main"
35 |
36 | t_add = "installed"
37 | t_delete = "delete"
38 | t_update = "edited"
39 | t_start = "started"
40 | t_busy = "busy"
41 | t_skip = "skiped"
42 | t_error = "error"
43 | t_health = "healthy"
44 | t_out_of_credits = "out_of_credits"
45 |
46 | mime_html = "text/html"
47 | mime_csv = "text/csv"
48 | mime_html = "text/html"
49 | mime_md = "text/markdown"
50 | mime_text = "text/plain"
51 | mime_json = "application/json"
52 | mime_nb = "application/vnd.jupyter"
53 | mime_jpeg = "image/jpeg"
54 | mime_png = "image/png"
55 | mime_svg = "image/svg+xml"
56 | mime_list = [mime_html, mime_svg]
57 |
58 | error_busy = "Naas looks busy, try reloading your machine"
59 | error_reject = "Naas refused your request, reason:"
60 |
61 |
62 | def guess_type(filepath):
63 | result_type = mimetypes.guess_type(filepath)[0]
64 | if result_type is None and filepath.endswith(".ipynb"):
65 | result_type = mime_nb
66 | return result_type
67 |
68 |
69 | def guess_ext(cur_type):
70 | result_ext = mimetypes.guess_extension(cur_type, strict=False)
71 | if result_ext is None and cur_type == mime_nb:
72 | result_ext = ".ipynb"
73 | return result_ext
74 |
75 |
76 | def copy_clipboard():
77 | js = """
78 | if (!window.copyToClipboard) {
79 | window.copyToClipboard = (text) => {
80 | const dummy = document.createElement("textarea");
81 | document.body.appendChild(dummy);
82 | dummy.value = text;
83 | dummy.select();
84 | document.execCommand("copy");
85 | document.body.removeChild(dummy);
86 | }
87 | }
88 | """
89 | display(Javascript(js))
90 |
91 |
92 | def copy_button_df(text, title="Copy URL"):
93 | return f""""""
96 |
97 |
98 | def link_df(val):
99 | # target _blank to open new window
100 | return f'<a target="_blank" href="{val}">{val}</a>'
101 |
102 |
103 | def copy_button(text, title="Copy URL"):
104 | copy_clipboard()
105 | button = widgets.Button(description=title, button_style="primary")
106 | output = widgets.Output()
107 |
108 | def on_button_clicked(b):
109 | with output:
110 | html_div = f'<script>copyToClipboard("{text}")</script>✅ Copied !'
111 | display(HTML(html_div))
112 |
113 | button.on_click(on_button_clicked)
114 | display(button, output)
115 |
--------------------------------------------------------------------------------
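`guess_type` and `guess_ext` extend the standard `mimetypes` tables with the Jupyter notebook type, which Python does not know about. A quick round-trip, assuming the package is importable:

```python
from naas.ntypes import guess_type, guess_ext, mime_nb

# Ordinary files resolve through the standard mimetypes table...
assert guess_type("report.csv") == "text/csv"

# ...while .ipynb falls back to the custom application/vnd.jupyter type,
# and guess_ext maps that type back to the extension.
assert guess_type("demo.ipynb") == mime_nb
assert guess_ext(mime_nb) == ".ipynb"
```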
/docs/basic_docs.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # Welcome to naas
4 |
5 | ## Table of Contents
6 | 1. [Why naas exists](#Why-naas-exists)
7 | 2. [How to install](#How-to-install)
8 | 3. [Scheduler](#Scheduler)
9 | 4. [Notebook as API](#Notebook-as-API)
10 | 5. [Asset](#Asset)
11 | 6. [Dependency](#Dependency)
12 | 7. [Notification](#Notification)
13 | 8. [Secret](#Secret)
14 |
15 |
16 |
17 |
18 | ## Why naas exists
19 | Notebooks are awesome, but using them in production is messy, so we created our own module to turn any Jupyter server into a safe production server!
20 |
21 | ## How to install
22 | First:
23 | `!pip install naas`
24 |
25 | Check if you have the minimum env vars set:
26 |
27 | - `JUPYTER_SERVER_ROOT` => Should be set to your home folder
28 | - `JUPYTERHUB_USER` => Should be set to your machine user, not root
29 | - `JUPYTERHUB_API_TOKEN` => Should be set automatically by your hub
30 |
31 | Optionally:
32 | - `NAAS_RUNNER_PORT` to change the port of the naas runner
33 | - `PUBLIC_PROXY_API` if you want the API and asset features, you should run the naas proxy machine and provide its hostname here
34 | - `JUPYTERHUB_URL` the web URL of your hub, used by the API and asset features.
35 | - `SINGLEUSER_PATH` if you deploy on Kubernetes and your singleuser servers have a specific hostname suffix
36 | - `NOTIFICATIONS_API` if you want the notification feature, you should run the naas notification machine and provide its hostname here
37 | - `NAAS_SENTRY_DSN` configure it if you need to catch errors raised by your users.
38 |
39 |
40 | Start the runner in your Jupyter singleuser machine:
41 | `python -m naas.runner &`
42 |
43 |
44 | Then, in your notebook:
45 | `import naas`
46 |
47 |
48 |
49 |
50 | ```python
51 | import naas
52 | ```
53 |
54 | ---
55 | ## Scheduler
56 | Copy this notebook to production and run it every day at 9:00:
57 | `naas.scheduler.add(recurrence="0 9 * * *")`
58 |
59 |
60 | ```python
61 | naas.scheduler.add(recurrence="0 9 * * *")
62 | ```
63 |
64 | ---
65 | ## Notebook as API
66 | Copy this notebook to production and allow it to be run by calling the returned URL:
67 | `naas.api.add()`
68 |
69 |
70 | ```python
71 | naas.api.add()
72 | ```
73 |
74 |
75 | ```python
76 | naas.api.respond_notebook()
77 | ```
78 |
79 | ---
80 | ## Asset
81 | Copy this asset (file) to production and allow it to be fetched by calling the returned URL:
82 | `naas.asset.add()`
83 |
84 |
85 |
86 | ```python
87 | naas.asset.add()
88 | ```
89 |
90 | ---
91 | ## Dependency
92 | Copy this notebook to production as a dependency and allow other APIs or Schedulers to use it:
93 | `naas.dependency.add()`
94 |
95 |
96 | ```python
97 | naas.dependency.add()
98 | ```
99 |
100 | ---
101 | ## Notification
102 | Send an email to anyone when your notebook runs:
103 | `naas.notifications.send(email="elon@musk.com", subject="The Tesla stock is going up", content="check the new chart made with naas from a fresh dataset in the link: [LINK]")`
104 |
105 |
106 |
107 | ```python
108 | naas.notifications.send(email="elon@musk.com", subject="The Tesla stock is going up", content="check the new chart made with naas from a fresh dataset in the link: [LINK]")
109 | ```
110 |
111 | ---
112 | ## Secret
113 | Copy your secret to production and allow other APIs or Schedulers to use it. Secrets are stored encoded, in a secure manner:
114 | `naas.secret.add(name="MY_FIRST_SECRET", secret="SUPER_SECRET_STRING")`
115 |
116 |
117 | ```python
118 | naas.secret.add(name="MY_FIRST_SECRET", secret="SUPER_SECRET_STRING")
119 | ```
120 |
--------------------------------------------------------------------------------
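A minimal sketch of the install flow the docs above describe, setting the required environment variables before launching the runner (all values are placeholders for a local setup):

```python
import os
import subprocess

# Required by the runner (placeholder values for a local setup)
os.environ["JUPYTER_SERVER_ROOT"] = os.path.expanduser("~")
os.environ["JUPYTERHUB_USER"] = "jovyan"
os.environ["JUPYTERHUB_API_TOKEN"] = "dev-token"  # normally set by the hub

# Optional: run the naas runner on a custom port
os.environ["NAAS_RUNNER_PORT"] = "5000"

# Equivalent to `python -m naas.runner &` in a notebook cell
subprocess.Popen(["python", "-m", "naas.runner"])
```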
/naas/runner/logger.py:
--------------------------------------------------------------------------------
1 | from .env_var import n_env
2 | import datetime as dt
3 | import pandas as pd
4 | import traceback
5 | import json
6 | import os
7 |
8 | from .sqlite_table import SqliteTable
9 |
10 |
11 | class Logger:
12 | __sql = None
13 | __name = "naas_logger"
14 | __logs_file = n_env.path_naas_folder + "/logs.db"
15 | __logs_csv_file = n_env.path_naas_folder + "/logs.csv"
16 | __date_format = "%Y-%m-%d %H:%M:%S.%f"
17 | __columns = [
18 | "asctime",
19 | "levelname",
20 | "name",
21 | "id",
22 | "type",
23 | "filename",
24 | "histo",
25 | "filepath",
26 | "output_filepath",
27 | "status",
28 | "error",
29 | "traceback",
30 | "duration",
31 | "url",
32 | "params",
33 | "token",
34 | "value",
35 | "main_id",
36 | "search",
37 | ]
38 |
39 | def __init__(self, clear=False):
40 | file_creation = not os.path.exists(self.__logs_file)
41 | # is_csv = os.path.exists(self.__logs_csv_file)
42 | print("Init Naas logger")
43 | self.__sql = SqliteTable(self.__columns, self.__logs_file)
44 | if not file_creation and clear:
45 | self.__sql.clear()
46 |
47 | # if file_creation and is_csv and not clear:
48 | # self.__sql.csv_to_sql(self.__logs_csv_file)
49 | # os.remove(self.__logs_csv_file)
50 |
51 | def add_log(self, **kwargs):
52 | kwargs["asctime"] = dt.datetime.now().strftime(self.__date_format)
53 | kwargs["name"] = self.__name
54 | return self.__sql.add_on_table(commit=True, **kwargs)
55 |
56 | def info(self, data):
57 | data["levelname"] = "INFO"
58 | return self.add_log(**data)
59 |
60 | def error(self, data):
61 | data["levelname"] = "ERROR"
62 | return self.add_log(**data)
63 |
64 | def clear(self):
65 | self.__sql.clear()
66 |
67 | def get_file_path(self):
68 | return self.__logs_file
69 |
70 | def list(
71 | self,
72 | uid: str,
73 | skip: int = 0,
74 | limit: int = 0,
75 | search: str = "",
76 | filters: list = [],
77 | sort: list = [],
78 | technical_rows: bool = True,
79 | ):
80 | df = None
81 | try:
82 | df = pd.DataFrame(
83 | data=self.__sql.search_in_db(search), index=None, columns=self.__columns
84 | )
85 | if not technical_rows and "type" in df and "filepath" in df:
86 | df = df[df["type"] != df["filepath"]]
87 | df = df[df["status"] != "busy"]
88 | if len(filters) > 0:
89 | df = df[df.type.isin(filters)]
90 | if len(sort) > 0:
91 | for query in sort:
92 | field = [query["field"]]
93 | ascending = False if query["type"] == "desc" else True
94 | df = df.sort_values(by=field, ascending=ascending)
95 | total_records = len(df.index)
96 | if skip > 0:
97 | df = df.iloc[skip:]
98 | if limit > 0:
99 | df = df[:limit]
100 | df = df.reset_index()
101 | return {
102 | "uid": uid,
103 | "data": json.loads(df.to_json(orient="records")),
104 | "totalRecords": total_records,
105 | }
106 | except Exception as e:
107 | tb = traceback.format_exc()
108 | print("list logs", e, tb)
109 | return {"uid": uid, "data": [], "totalRecords": 0}
110 |
--------------------------------------------------------------------------------
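`Logger.list` pages and filters the SQLite-backed logs through pandas: `skip`/`limit` paginate, `search` does a full-text `LIKE` match, `filters` restricts the `type` column, and `sort` takes a list of `{field, type}` specs. A short usage sketch, assuming a runner environment where the naas folder is writable:

```python
from naas.ntypes import t_add
from naas.runner.logger import Logger

logger = Logger(clear=True)
logger.info({"id": "job-1", "type": t_add, "status": "started"})
logger.info({"id": "job-1", "type": t_add, "status": "healthy"})

# First page of ten entries, newest first
page = logger.list(
    "job-1",
    skip=0,
    limit=10,
    sort=[{"field": "asctime", "type": "desc"}],
)
print(page["totalRecords"], page["data"])
```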
/.github/workflows/deploy_docker.yml:
--------------------------------------------------------------------------------
1 | name: Build Docker Image
2 |
3 | on:
4 | repository_dispatch:
5 | types: [pypip-deployed]
6 |
7 | jobs:
8 | deploy:
9 | runs-on: ubuntu-latest
10 | name: "Deploy image on Dockerhub"
11 | steps:
12 | - name: Check out
13 | uses: actions/checkout@v2
14 | with:
15 | fetch-depth: 0
16 | ref: '${{ github.event.client_payload.ref }}'
17 | token: '${{ secrets.PERSONAL_ACCESS_TOKEN }}'
18 | - run: echo ${{ github.event.client_payload.ref }}
19 | - name: Set up QEMU
20 | uses: docker/setup-qemu-action@v1
21 | - name: Set up Docker Buildx
22 | uses: docker/setup-buildx-action@v1
23 | - name: Login to DockerHub
24 | uses: docker/login-action@v1
25 | with:
26 | username: ${{ secrets.DOCKERHUB_USERNAME }}
27 | password: ${{ secrets.DOCKERHUB_TOKEN }}
28 | - name: Get smart tag
29 | id: prepare
30 | uses: jupyter-naas/docker-smart-tag-action@1.0.0
31 | with:
32 | ref: ${{ github.event.client_payload.ref }}
33 | docker_image: jupyternaas/naas
34 | - run: echo ${{ steps.prepare.outputs.tag }}
35 | - name: Build and push
36 | id: docker_build
37 | uses: docker/build-push-action@v2
38 | with:
39 | push: true
40 | tags: ${{ steps.prepare.outputs.tag }}
41 | context: .
42 | - uses: actions-ecosystem/action-get-latest-tag@v1
43 | id: get-latest-tag
44 | - uses: actions-ecosystem/action-regex-match@v2
45 | id: regex-match
46 | with:
47 | text: ${{ steps.get-latest-tag.outputs.tag }}
48 | regex: '^\d*\.\d*\.\d*$'
49 | - name: Checkout Target Repository
50 | if: ${{ steps.regex-match.outputs.match != '' }}
51 | uses: actions/checkout@v2
52 | with:
53 | repository: CashStory/cs_confs
54 | path: cs_confs
55 | token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
56 | - name: Update Image Version in the related HelmChart values.yaml
57 | if: ${{ steps.regex-match.outputs.match != '' }}
58 | uses: fjogeleit/yaml-update-action@master
59 | with:
60 | valueFile: 'galaxy/tools/jupyter/values.yaml'
61 | propertyPath: 'jupyter.dockerNotebookImage'
62 | value: 'jupyternaas/naas:${{ steps.get-latest-tag.outputs.tag }}'
63 | repository: 'CashStory/cs_confs'
64 | branch: main
65 | createPR: 'false'
66 | message: 'Bump: Naas Version to ${{ steps.get-latest-tag.outputs.tag }}'
67 | token: '${{ secrets.PERSONAL_ACCESS_TOKEN }}'
68 | workDir: cs_confs
69 | - name: Image digest
70 | run: echo ${{ steps.prepare.outputs.tag }}
71 | - name: Get the last version change
72 | id: lastchanges
73 | uses: jupyter-naas/get-chagelog-diff@v1.0.0
74 | - name: Slack notification
75 | env:
76 | SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
77 | SLACK_USERNAME: Naas # Optional. (defaults to webhook app)
78 | SLACK_CHANNEL: naas-ci # Optional. (defaults to webhook)
79 | SLACK_AVATAR: repository
80 | SLACK_CUSTOM_PAYLOAD: '{"attachments":[{"color":"#47dd82","blocks":[{"type":"section","block_id":"section567","text":{"type":"mrkdwn","text":"\nAvailable to pip and docker hub.\n*Version*\n${{ steps.get-latest-tag.outputs.tag }}\n ${{ steps.lastchanges.outputs.lastChanges }}"},"accessory":{"type":"image","image_url":"https://github.com/jupyter-naas/naas/raw/main/custom/naas-fav.png","alt_text":"Naas logo"}}]}]}'
81 | uses: Ilshidur/action-slack@2.0.2
82 |
--------------------------------------------------------------------------------
/naas/runner/sqlite_table.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import pandas as pd
3 | import os
4 | import errno
5 |
6 |
7 | class SqliteTable:
8 | __columns = []
9 | __file_name = ""
10 | __db = None
11 | __focused_table = ""
12 |
13 | def __init__(self, cols=[], file="logs.db", table="logs"):
14 | self.__columns = cols
15 | self.__file_name = file
16 | self.__create_connection()
17 | self.create_table(table)
18 |
19 | def __get_csv_values(self, csv_file):
20 | return pd.read_csv(csv_file, sep=";")
21 |
22 | def __create_connection(self):
23 | folder = os.path.dirname(self.__file_name)
24 | if folder and not os.path.exists(folder):
25 | try:
26 | print("Init Sqlite folder")
27 | os.makedirs(folder)
28 | except OSError as exc: # Guard against race condition
29 | print("__path_sql_files", folder)
30 | if exc.errno != errno.EEXIST:
31 | raise
32 | except Exception as e:
33 | print("Exception", e)
34 | try:
35 | self.__db = sqlite3.connect(self.__file_name)
36 | except Exception as e:
37 | print(e)
38 |
39 | def execute_command(self, command, commit=True, **kwargs):
40 | ret = None
41 | if self.__db:
42 | try:
43 | cursor = self.__db.cursor()
44 | cursor.execute(command, kwargs)
45 | if commit:
46 | cursor.execute("COMMIT")
47 | except Exception as e:
48 | print(e)
49 | return e
50 | return ret
51 |
52 | def clear(self):
53 | self.execute_command(f"DELETE FROM {self.__focused_table}")
54 |
55 | def search_in_db(self, value="", table="", columns=None):
56 | if table == "":
57 | table = self.__focused_table
58 | if columns is None:
59 | columns = self.__columns
60 | col = ""
61 | for c in columns:
62 | if col != "":
63 | col += " or "
64 | col += f"{c} like " + "'%" + value + "%'"
65 | try:
66 | cursor = self.__db.cursor()
67 | cursor.execute(f"SELECT * FROM {table} WHERE {col}")
68 | return cursor.fetchall()
69 | except Exception as e:
70 | print(e)
71 | return []
72 |
73 | def add_on_table(self, commit=True, table="", **kwargs):
74 | keys = []
75 | values = []
76 | if table == "":
77 | table = self.__focused_table
78 |
79 | for k, v in kwargs.items():
80 | keys.append(k)
81 | values.append(f":{k}")
82 | if type(v) in [dict, object, list]:
83 | kwargs[k] = str(kwargs[k])
84 |
85 | cmd = f"INSERT INTO {table} ({','.join(keys)}) VALUES ({','.join(values)})"
86 | return self.execute_command(cmd, commit=commit, **kwargs)
87 |
88 | def get_db_content(self, table=""):
89 | if table == "":
90 | table = self.__focused_table
91 | try:
92 | cursor = self.__db.cursor()
93 | cursor.execute(f"SELECT * FROM {table}")
94 | return cursor.fetchall()
95 | except Exception as e:
96 | print(e)
97 | return []
98 |
99 | def csv_to_sql(self, csv_file):
100 | try:
101 | df = self.__get_csv_values(csv_file)
102 | for index, row in df.iterrows():
103 | data = {}
104 | for col in self.__columns:
105 | data[col] = row[col]
106 | self.add_on_table(commit=False, **data)
107 | self.__db.cursor().execute("Commit")
108 | except Exception as e:
109 | print(e)
110 |
111 | def create_table(self, table):
112 | columns = ""
113 | self.__focused_table = table
114 | for col in self.__columns:
115 | try:
116 | columns += "" if columns == "" else ","
117 | columns += col + " TEXT"
118 | except Exception as e:
119 | print(e)
120 | self.execute_command(
121 | f"CREATE TABLE IF NOT EXISTS {table} ({columns})", commit=False
122 | )
123 |
--------------------------------------------------------------------------------
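`SqliteTable` is deliberately generic: every column is created as `TEXT`, rows are inserted through named placeholders, and `search_in_db` builds a `LIKE` match across all columns. A standalone sketch:

```python
from naas.runner.sqlite_table import SqliteTable

table = SqliteTable(cols=["id", "status"], file="/tmp/naas_demo/logs.db")
table.add_on_table(id="job-1", status="healthy")
table.add_on_table(id="job-2", status="error")

print(table.search_in_db("healthy"))  # rows matching %healthy% in any column
print(table.get_db_content())         # every row in the focused table
```

Note that the search query is assembled by string concatenation, so it is only safe for trusted input such as the runner's own log values.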
/tests/generate_df_csv.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import csv
3 | employees = [
4 | (
5 | "jack",
6 | 34,
7 | "Sydney",
8 | 5,
9 | 111,
10 | 112,
11 | 134,
12 | 122,
13 | 445,
14 | 122,
15 | 111,
16 | 15,
17 | 111,
18 | 112,
19 | 134,
20 | 122,
21 | 1445,
22 | 122,
23 | 111,
24 | 15,
25 | 111,
26 | 112,
27 | 134,
28 | 122,
29 | 445,
30 | 122,
31 | 111,
32 | ),
33 | (
34 | "Riti",
35 | 31,
36 | "Delhia",
37 | 27,
38 | 211,
39 | 212,
40 | 234,
41 | 222,
42 | 2445,
43 | 222,
44 | 211,
45 | 25,
46 | 211,
47 | 212,
48 | 234,
49 | 222,
50 | 2445,
51 | 222,
52 | 211,
53 | 25,
54 | 211,
55 | 212,
56 | 234,
57 | 222,
58 | 2445,
59 | 222,
60 | 211,
61 | ),
62 | (
63 | "Aadi",
64 | 16,
65 | "Tokyo",
66 | 39,
67 | 311,
68 | 312,
69 | 334,
70 | 322,
71 | 3445,
72 | 322,
73 | 311,
74 | 35,
75 | 311,
76 | 312,
77 | 334,
78 | 322,
79 | 3445,
80 | 322,
81 | 311,
82 | 35,
83 | 311,
84 | 312,
85 | 334,
86 | 322,
87 | 3445,
88 | 322,
89 | 311,
90 | ),
91 | (
92 | "Sunil",
93 | 41,
94 | "Delhi",
95 | 412,
96 | 411,
97 | 412,
98 | 434,
99 | 422,
100 | 4445,
101 | 422,
102 | 411,
103 | 45,
104 | 411,
105 | 412,
106 | 434,
107 | 422,
108 | 4445,
109 | 422,
110 | 411,
111 | 45,
112 | 411,
113 | 412,
114 | 434,
115 | 422,
116 | 4445,
117 | 422,
118 | 411,
119 | ),
120 | (
121 | "Veena",
122 | 33,
123 | "Delhi",
124 | 54,
125 | 511,
126 | 512,
127 | 534,
128 | 522,
129 | 5445,
130 | 522,
131 | 511,
132 | 55,
133 | 511,
134 | 512,
135 | 534,
136 | 522,
137 | 5445,
138 | 522,
139 | 511,
140 | 55,
141 | 511,
142 | 512,
143 | 534,
144 | 522,
145 | 5445,
146 | 522,
147 | 511,
148 | ),
149 | (
150 | "Shaunak",
151 | 35,
152 | "Mumbai",
153 | 665,
154 | 611,
155 | 612,
156 | 634,
157 | 622,
158 | 6445,
159 | 622,
160 | 611,
161 | 65,
162 | 611,
163 | 612,
164 | 634,
165 | 622,
166 | 6445,
167 | 622,
168 | 611,
169 | 65,
170 | 611,
171 | 612,
172 | 634,
173 | 622,
174 | 6445,
175 | 622,
176 | 611,
177 | ),
178 | (
179 | "Shaun",
180 | 35,
181 | "Colombo",
182 | 711,
183 | 711,
184 | 712,
185 | 734,
186 | 722,
187 | 7445,
188 | 722,
189 | 711,
190 | 75,
191 | 711,
192 | 712,
193 | 734,
194 | 722,
195 | 7445,
196 | 722,
197 | 711,
198 | 75,
199 | 711,
200 | 712,
201 | 734,
202 | 722,
203 | 7445,
204 | 722,
205 | 711,
206 | ),
207 | ]
208 | empDfObj = pd.DataFrame(
209 | employees,
210 | columns=[
211 | "A",
212 | "B",
213 | "C",
214 | "D",
215 | "E",
216 | "F",
217 | "G",
218 | "H",
219 | "I",
220 | "J",
221 | "K",
222 | "L",
223 | "M",
224 | "N",
225 | "O",
226 | "P",
227 | "Q",
228 | "R",
229 | "S",
230 | "T",
231 | "U",
232 | "V",
233 | "W",
234 | "X",
235 | "Y",
236 | "Z",
237 | "AA",
238 | ],
239 | )
240 | empDfObj = empDfObj.append([empDfObj] * 8, ignore_index=True)
241 | csv_text = empDfObj.to_csv(sep=";", quoting=csv.QUOTE_ALL)
242 |
--------------------------------------------------------------------------------
/naas/callback.py:
--------------------------------------------------------------------------------
1 | from .runner.env_var import n_env
2 | from .ntypes import copy_button
3 | import pandas as pd
4 | import requests
5 | import time
6 |
7 |
8 | class Callback:
9 |
10 | headers = None
11 |
12 | def __init__(self):
13 | self.headers = {"Authorization": f"token {n_env.token}"}
14 |
15 | def add(
16 | self,
17 | response={},
18 | response_headers={},
19 | auto_delete=True,
20 | default_result=None,
21 | no_override=False,
22 | user=None,
23 | uuid=None,
24 | ):
25 | try:
26 | data = {
27 | "response": response,
28 | "autoDelete": auto_delete,
29 | "responseHeaders": response_headers,
30 | }
31 | if no_override:
32 | data["responseHeaders"]["naas_no_override"] = no_override
33 | if default_result:
34 | data["result"] = default_result
35 | if user:
36 | data["user"] = user
37 | if uuid:
38 | data["uuid"] = uuid
39 | req = requests.post(
40 | url=f"{n_env.callback_api}/", headers=self.headers, json=data
41 | )
42 | req.raise_for_status()
43 | jsn = req.json()
44 | print("👌 🔙 Callback has been created successfully !")
45 | url = f"{n_env.callback_api}/{jsn.get('uuid')}"
46 | copy_button(url)
47 | return {"url": url, "uuid": jsn.get("uuid")}
48 | except Exception as err:
49 | print("😢 Cannot add callback.\n", err)
50 |
51 | def __get(self, uuid, user=None):
52 | try:
53 | data = {
54 | "uuid": uuid,
55 | }
56 | if user:
57 | data["user"] = user
58 | req = requests.get(
59 | url=f"{n_env.callback_api}/",
60 | params=data,
61 | headers=self.headers,
62 | )
63 | req.raise_for_status()
64 | jsn = req.json()
65 | return jsn
66 | except Exception as err:
67 | print("😢 Cannot add callback.\n", err)
68 |
69 | def get(self, uuid, wait_until_data=False, timeout=3000, raw=False, user=None):
70 | data = None
71 | total = 0
72 | while data is None or data.get("result") is None:
73 | if total > timeout:
74 | print("🥲 Callback Get timeout !")
75 | return None
76 | data = self.__get(uuid, user)
77 | time.sleep(1)
78 | total += 1
79 | if not wait_until_data:
80 | break
81 | if data and data.get("result") and data.get("result") != "":
82 | print("👌 🔙 Callback has been trigger, here your data !")
83 | else:
84 | print("🥲 Callback is empty !")
85 | return data if raw else data.get("result")
86 |
87 | def delete(self, uuid, user=None):
88 | try:
89 | data = {
90 | "uuid": uuid,
91 | }
92 | if user:
93 | data["user"] = user
94 | req = requests.delete(
95 | url=f"{n_env.callback_api}/", headers=self.headers, json=data
96 | )
97 | req.raise_for_status()
98 | print("👌 🔙 Callback has been delete successfully !")
99 | return
100 | except Exception as err:
101 | print("😢 Cannot add callback.\n", err)
102 |
103 | def status(self):
104 | req = requests.get(url=f"{n_env.callback_api}/")
105 | req.raise_for_status()
106 | jsn = req.json()
107 | return jsn
108 |
109 | def list(self, user=None):
110 | data = {}
111 | if user:
112 | data["user"] = user
113 | req = requests.get(
114 | url=f"{n_env.callback_api}/",
115 | params=data,
116 | headers=self.headers,
117 | )
118 | req.raise_for_status()
119 | jsn = req.json()
120 | return pd.DataFrame(data=jsn.get("callbacks"))
121 |
122 | def list_all(self):
123 | req = requests.get(
124 | url=f"{n_env.callback_api}/admin",
125 | headers=self.headers,
126 | )
127 | req.raise_for_status()
128 | jsn = req.json()
129 | return pd.DataFrame(data=jsn.get("callbacks"))
130 |
--------------------------------------------------------------------------------
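The Callback class above implements a polling hand-off: `add()` registers a callback with the callback service and returns its URL, an external system posts a result there, and `get()` polls once per second until `result` is filled or `timeout` seconds elapse. A hedged round-trip sketch, assuming a running callback service that accepts a plain JSON POST on the callback URL, and that the package exposes the instance as `naas.callback`:

```python
import requests
import naas

# 1. Register a callback and hand its URL to the external system
cb = naas.callback.add()

# 2. The external system (simulated here) posts its result back
requests.post(cb["url"], json={"result": {"status": "done"}})

# 3. Poll until the result arrives, for at most 60 seconds
result = naas.callback.get(cb["uuid"], wait_until_data=True, timeout=60)
print(result)
```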
/naas/assets.py:
--------------------------------------------------------------------------------
1 | from .ntypes import (
2 | copy_button_df,
3 | copy_clipboard,
4 | t_asset,
5 | copy_button,
6 | t_add,
7 | t_update,
8 | t_delete,
9 | )
10 | from .manager import Manager
11 | import pandas as pd
12 | import warnings
13 | import os
14 |
15 |
16 | class Assets:
17 | naas = None
18 | manager = None
19 | role = t_asset
20 | deprecated_name = False
21 |
22 | def __init__(self, deprecated_name=False):
23 | self.manager = Manager(t_asset)
24 | self.path = self.manager.path
25 | self.deprecated_name = deprecated_name
26 |
27 | def deprecatedPrint(self):
28 | # TODO remove this in june 2021
29 | if self.deprecated_name:
30 | warnings.warn(
31 | "[Warning], naas.assets is deprecated,\n use naas.asset instead, it will be remove in 1 june 2021"
32 | )
33 |
34 | def list(self, path=None):
35 | self.deprecatedPrint()
36 | return self.manager.list_prod("list_history", path)
37 |
38 | def get(self, path=None, histo=None):
39 | self.deprecatedPrint()
40 | return self.manager.get_file(path, histo=histo)
41 |
42 | def clear(self, path=None, histo=None):
43 | self.deprecatedPrint()
44 | return self.manager.clear_file(path, None, histo)
45 |
46 | def currents(self, raw=False):
47 | self.deprecatedPrint()
48 | copy_clipboard()
49 | json_data = self.manager.get_naas()
50 | json_filtered = []
51 | for item in json_data:
52 | if item["type"] == self.role and item["status"] != t_delete:
53 | if raw:
54 | json_filtered.append(item)
55 | else:
56 | json_filtered.append(
57 | {
58 | "path": item["path"],
59 | "url": self.manager.proxy_url("assets", item["value"]),
60 | }
61 | )
62 | if raw is False:
63 | df = pd.DataFrame(json_filtered)
64 | df = df.style.format({"url": copy_button_df})
65 | return df
66 | return json_filtered
67 |
68 | def find(self, path=None):
69 | self.deprecatedPrint()
70 | current_file = self.manager.get_path(path)
71 | if current_file is None:
72 | print("Missing file path")
73 | return
74 | try:
75 | token = self.manager.get_value(current_file, False)
76 | return self.manager.proxy_url(self.role, token)
77 | except: # noqa: E722
78 | return None
79 |
80 | def add(self, path=None, params={}, debug=False, force_image=False):
81 | self.deprecatedPrint()
82 | current_file = self.manager.get_path(path)
83 | if current_file is None:
84 | print("Missing file path")
85 | return
86 | token = os.urandom(30).hex()
87 | if current_file.endswith(".png") or force_image:
88 | token = f"{token}.png"
89 | status = t_add
90 | try:
91 | token = self.manager.get_value(current_file, False)
92 | status = t_update
93 | except: # noqa: E722
94 | pass
95 | url = self.manager.proxy_url(self.role, token)
96 | if self.manager.is_production():
97 | print("No add done, you are in production\n")
98 | return url
99 | # "path", "type", "params", "value", "status"
100 | self.manager.add_prod(
101 | {
102 | "type": self.role,
103 | "status": status,
104 | "path": current_file,
105 | "params": params,
106 | "value": token,
107 | },
108 | debug,
109 | )
110 | print("👌 Well done! Your Assets has been sent to production.\n")
111 | copy_button(url)
112 | print('PS: to remove the "Assets" feature, just replace .add by .delete')
113 | return url
114 |
115 | def delete(self, path=None, all=True, debug=False):
116 | self.deprecatedPrint()
117 | if self.manager.is_production():
118 | print("No delete done, you are in production\n")
119 | return
120 | current_file = self.manager.get_path(path)
121 | self.manager.del_prod({"type": self.role, "path": current_file}, debug)
122 | print("🗑 Done! Your Assets has been remove from production.\n")
123 | if all is True:
124 | self.clear(current_file, "all")
125 |
--------------------------------------------------------------------------------
/naas/onboarding.py:
--------------------------------------------------------------------------------
1 | from naas.runner.env_var import n_env
2 | import urllib.parse
3 | import requests
4 | import os
5 | from os import path
6 | import pathlib
7 |
8 |
9 | __jup_def_set_workspace = "/etc/naas/custom/set_workspace.json"
10 | __jup_def_set_workspace_browser = "/etc/naas/custom/set_workspace_browser.json"
11 | __jup_load_workspace = "jupyter lab workspaces import "
12 | __github_repo = "jupyter-naas/starters"
13 | __github_brach = "main"
14 | __github_api_url = "https://api.github.com/repos/{REPO}/git/trees/{BRANCH}?recursive=1"
15 | __github_base_url = "https://github.com/{REPO}/blob/{BRANCH}/"
16 |
17 |
18 | def __generate_unique_path(filepath):
19 | count = 1
20 | unique_path = filepath
21 | while path.exists(unique_path):
22 | p = pathlib.Path(filepath)
23 | unique_path = path.join(p.parents[0], f"{p.stem}_({count}){p.suffix}")
24 | count += 1
25 | return unique_path
26 |
27 |
28 | def download_file(url, file_name=None):
29 | raw_target = url
30 | if not file_name:
31 | file_name = raw_target.split("/")[-1]
32 | file_name = urllib.parse.unquote(file_name)
33 | elif file_name not in ".":
34 | file_name = f"{file_name}.ipynb"
35 |
36 | file_name = f"{file_name}"
37 | if "://github.com" in raw_target:
38 | raw_target = raw_target.replace(
39 | "https://github.com/", "https://raw.githubusercontent.com/"
40 | )
41 | raw_target = raw_target.replace("/blob/", "/")
42 | content = b"ERROR"
43 | if "://" not in raw_target:
44 | try:
45 | cur_path = os.path.join(
46 | f"{n_env.path_naas_folder}{n_env.server_root}", raw_target
47 | )
48 | ff = open(cur_path, "rb")
49 | content = ff.read()
50 | ff.close()
51 | except Exception as e:
52 | print(f"Cannot open local file {cur_path}", e)
53 | content = (
54 | b"ERROR: Cannot open local file "
55 | + bytes(cur_path, "utf-8")
56 | + b" "
57 | + bytes(raw_target, "utf-8")
58 | )
59 | else:
60 | r = requests.get(raw_target)
61 | content = r.content
62 | if content.startswith(b"ERROR"):
63 | file_name = "dl_error.txt"
64 | file_name = __generate_unique_path(file_name)
65 | with open(file_name, "wb") as f:
66 | f.write(content)
67 | f.close()
68 | return file_name
69 |
70 |
71 | def __wp_set_for_open(url):
72 | try:
73 | filename_full = url.split("/")[-1]
74 | filename_num = filename_full.split(".")[0]
75 | filename = filename_num.split("__")[1]
76 | new_wp = os.path.join(n_env.path_naas_folder, f"{filename}_workspace.json")
77 | if not os.path.exists(new_wp):
78 | old_filename = download_file(url)
79 | os.system(f"mv {old_filename} {filename}.ipynb")
80 | with open(__jup_def_set_workspace, "r") as fh:
81 | content_wp = fh.read()
82 | new_content_wp = content_wp.replace("{NB_NAME}", filename)
83 | with open(new_wp, "w+") as f:
84 | f.write(new_content_wp)
85 | os.system(f"{__jup_load_workspace} {new_wp}")
86 | except Exception as e:
87 | print("Cannot config jupyter workspace", e)
88 |
89 |
90 | def __get_onboarding_list():
91 | url = __github_api_url.replace("{REPO}", __github_repo).replace(
92 | "{BRANCH}", __github_brach
93 | )
94 | url_list = []
95 | try:
96 | r = requests.get(url)
97 | data = r.json()
98 | for ff in data.get("tree"):
99 | path = ff.get("path")
100 | if not path.startswith(".") and path.endswith(".ipynb"):
101 | base = __github_base_url.replace("{REPO}", __github_repo).replace(
102 | "{BRANCH}", __github_brach
103 | )
104 | good_url = f"{base}{path}"
105 | url_list.append(good_url)
106 | except Exception as e:
107 | print("__get_onboarding_list", e)
108 | return url_list
109 |
110 |
111 | def init_onboarding():
112 | # jupyter lab workspaces import file_name.json
113 | try:
114 | if os.path.exists(n_env.custom_path):
115 | print("In Naas Docker machine")
116 | file_list = __get_onboarding_list()
117 | for url in file_list:
118 | try:
119 | __wp_set_for_open(url)
120 | except Exception as e:
121 | print("error for", url, e)
122 | except Exception as e:
123 | print("Cannot config jupyter", e)
124 |
--------------------------------------------------------------------------------
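`download_file` above accepts either a local path (resolved under the naas folder) or a URL; `github.com/.../blob/...` links are rewritten to `raw.githubusercontent.com` before fetching, and `__generate_unique_path` avoids collisions with `_(n)` suffixes. A short sketch (the URL is illustrative):

```python
from naas.onboarding import download_file

# A GitHub blob URL is fetched through raw.githubusercontent.com and
# saved under its basename in the current directory
name = download_file(
    "https://github.com/jupyter-naas/starters/blob/main/demo.ipynb"
)
print(name)  # "demo.ipynb", or "demo_(1).ipynb" if it already exists
```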
/tests/demo/demo_scheduler.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {
7 | "execution": {
8 | "iopub.execute_input": "2021-02-05T00:58:20.827925Z",
9 | "iopub.status.busy": "2021-02-05T00:58:20.827310Z",
10 | "iopub.status.idle": "2021-02-05T00:58:20.842835Z",
11 | "shell.execute_reply": "2021-02-05T00:58:20.841199Z",
12 | "shell.execute_reply.started": "2021-02-05T00:58:20.827738Z"
13 | }
14 | },
15 | "outputs": [
16 | {
17 | "name": "stdout",
18 | "output_type": "stream",
19 | "text": [
20 | "Start 2021-02-05 01:58:20.837591\n"
21 | ]
22 | }
23 | ],
24 | "source": [
25 | "import datetime\n",
26 | "print('Start', datetime.datetime.now())"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": 2,
32 | "metadata": {
33 | "execution": {
34 | "iopub.execute_input": "2021-02-05T00:58:20.846971Z",
35 | "iopub.status.busy": "2021-02-05T00:58:20.845704Z",
36 | "iopub.status.idle": "2021-02-05T00:58:23.860443Z",
37 | "shell.execute_reply": "2021-02-05T00:58:23.859832Z",
38 | "shell.execute_reply.started": "2021-02-05T00:58:20.846856Z"
39 | }
40 | },
41 | "outputs": [],
42 | "source": [
43 | "import asyncio\n",
44 | "\n",
45 | "await asyncio.sleep(3)"
46 | ]
47 | },
48 | {
49 | "cell_type": "code",
50 | "execution_count": 3,
51 | "metadata": {
52 | "execution": {
53 | "iopub.execute_input": "2021-02-05T00:58:23.861881Z",
54 | "iopub.status.busy": "2021-02-05T00:58:23.861461Z",
55 | "iopub.status.idle": "2021-02-05T00:58:23.868827Z",
56 | "shell.execute_reply": "2021-02-05T00:58:23.867592Z",
57 | "shell.execute_reply.started": "2021-02-05T00:58:23.861856Z"
58 | }
59 | },
60 | "outputs": [
61 | {
62 | "name": "stdout",
63 | "output_type": "stream",
64 | "text": [
65 | "Done 2021-02-05 01:58:23.863978\n"
66 | ]
67 | }
68 | ],
69 | "source": [
70 | "print('Done', datetime.datetime.now())"
71 | ]
72 | },
73 | {
74 | "cell_type": "code",
75 | "execution_count": 7,
76 | "metadata": {
77 | "execution": {
78 | "iopub.execute_input": "2021-02-05T01:01:15.920612Z",
79 | "iopub.status.busy": "2021-02-05T01:01:15.920361Z",
80 | "iopub.status.idle": "2021-02-05T01:01:15.947834Z",
81 | "shell.execute_reply": "2021-02-05T01:01:15.946100Z",
82 | "shell.execute_reply.started": "2021-02-05T01:01:15.920589Z"
83 | }
84 | },
85 | "outputs": [
86 | {
87 | "name": "stdout",
88 | "output_type": "stream",
89 | "text": [
90 | "👌 Well done! Your Notebook has been sent to production. \n",
91 | "\n",
92 | "⏰ It will be scheduled \"Every minute of every day\" (more on the syntax on https://crontab.guru/).\n",
93 | "\n",
94 | "Ps: to remove the \"Scheduler\", just replace .add by .delete\n"
95 | ]
96 | }
97 | ],
98 | "source": [
99 | "import naas\n",
100 | "naas.scheduler.add('./demo_scheduler.ipynb', recurrence=\"* * * * *\")"
101 | ]
102 | },
103 | {
104 | "cell_type": "code",
105 | "execution_count": 5,
106 | "metadata": {
107 | "execution": {
108 | "iopub.execute_input": "2021-02-05T00:58:25.933571Z",
109 | "iopub.status.busy": "2021-02-05T00:58:25.932622Z",
110 | "iopub.status.idle": "2021-02-05T00:58:25.973652Z",
111 | "shell.execute_reply": "2021-02-05T00:58:25.970515Z",
112 | "shell.execute_reply.started": "2021-02-05T00:58:25.933530Z"
113 | }
114 | },
115 | "outputs": [
116 | {
117 | "name": "stdout",
118 | "output_type": "stream",
119 | "text": [
120 | "🕣 Your Notebook output demo_scheduler.ipynb, has been copied into your local folder.\n",
121 | "\n"
122 | ]
123 | }
124 | ],
125 | "source": [
126 | "# naas.scheduler.get_output('./demo_scheduler.ipynb')"
127 | ]
128 | },
129 | {
130 | "cell_type": "code",
131 | "execution_count": null,
132 | "metadata": {},
133 | "outputs": [],
134 | "source": []
135 | }
136 | ],
137 | "metadata": {
138 | "kernelspec": {
139 | "display_name": "Python 3",
140 | "language": "python",
141 | "name": "python3"
142 | },
143 | "language_info": {
144 | "codemirror_mode": {
145 | "name": "ipython",
146 | "version": 3
147 | },
148 | "file_extension": ".py",
149 | "mimetype": "text/x-python",
150 | "name": "python",
151 | "nbconvert_exporter": "python",
152 | "pygments_lexer": "ipython3",
153 | "version": "3.8.6"
154 | }
155 | },
156 | "nbformat": 4,
157 | "nbformat_minor": 4
158 | }
159 |
--------------------------------------------------------------------------------
/naas/runner/controllers/assets.py:
--------------------------------------------------------------------------------
1 | from sanic.views import HTTPMethodView
2 | from sanic import response
3 | from sanic.exceptions import ServerError
4 | from naas.ntypes import (
5 | t_asset,
6 | t_health,
7 | t_error,
8 | t_start,
9 | t_send,
10 | t_delete,
11 | t_out_of_credits,
12 | )
13 | import uuid
14 | import os
15 | import pydash as _
16 | from naas_drivers import naascredits
17 |
18 |
19 | class AssetsController(HTTPMethodView):
20 | __logger = None
21 | __jobs = None
22 | __path_lib_files = None
23 | __assets_folder = "assets"
24 |
25 | def __init__(self, logger, jobs, path_assets, *args, **kwargs):
26 | super(AssetsController, self).__init__(*args, **kwargs)
27 | self.__logger = logger
28 | self.__jobs = jobs
29 | self.__path_lib_files = path_assets
30 |
31 | async def get(self, request, token):
32 | if token.startswith("naas_"):
33 | return await response.file(
34 | os.path.join(self.__path_lib_files, self.__assets_folder, token)
35 | )
36 | else:
37 | uid = str(uuid.uuid4())
38 | job = await self.__jobs.find_by_value(uid, token, t_asset)
39 | if job and job.get("status") != t_delete:
40 | file_filepath = job.get("path")
41 | file_name = os.path.basename(file_filepath)
42 | params = job.get("params", dict())
43 | inline = params.get("inline", False)
44 | if not os.environ.get(
45 | "JUPYTERHUB_API_TOKEN"
46 | ) is None and "app.naas.ai" in os.environ.get("JUPYTERHUB_URL", ""):
47 | if _.get(naascredits.connect().get_balance(), "balance") <= 0:
48 | self.__logger.info(
49 | {
50 | "id": uid,
51 | "type": t_asset,
52 | "status": t_out_of_credits,
53 | "filepath": file_filepath,
54 | "token": token,
55 | }
56 | )
57 | raise ServerError(
58 | {"error": "Out of credits"},
59 | status_code=401,
60 | )
61 |
62 | self.__logger.info(
63 | {
64 | "id": uid,
65 | "type": t_asset,
66 | "status": t_start,
67 | "filepath": file_filepath,
68 | "token": token,
69 | }
70 | )
71 | try:
72 | await self.__jobs.update(
73 | uid, file_filepath, t_asset, token, params, t_health, 1
74 | )
75 | res = await response.file(
76 | location=file_filepath,
77 | filename=(file_name if not inline else None),
78 | )
79 | self.__logger.info(
80 | {
81 | "id": uid,
82 | "type": t_asset,
83 | "status": t_send,
84 | "filepath": file_filepath,
85 | "token": token,
86 | }
87 | )
88 | return res
89 | except Exception as e:
90 | self.__logger.error(
91 | {
92 | "id": uid,
93 | "type": t_asset,
94 | "status": t_error,
95 | "filepath": file_filepath,
96 | "token": token,
97 | "error": str(e),
98 | }
99 | )
100 | await self.__jobs.update(
101 | uid, file_filepath, t_asset, token, params, t_error, 1
102 | )
103 | raise ServerError({"id": uid, "error": e}, status_code=404)
104 | self.__logger.error(
105 | {
106 | "id": uid,
107 | "type": t_asset,
108 | "status": t_error,
109 | "error": "Cannot find your token",
110 | "token": token,
111 | }
112 | )
113 | raise ServerError(
114 | {"id": uid, "error": "Cannot find your token", "token": token},
115 | status_code=404,
116 | )
117 |
--------------------------------------------------------------------------------
/naas/scheduler.py:
--------------------------------------------------------------------------------
1 | from .ntypes import t_scheduler, t_output, t_add, t_update, t_delete
2 | from .manager import Manager
3 | from IPython.display import display, Javascript
4 | import pandas as pd
5 | import pretty_cron
6 | import requests
7 | import pycron
8 |
9 |
10 | class Scheduler:
11 | naas = None
12 | role = t_scheduler
13 | manager = None
14 |
15 | def __init__(self):
16 | self.manager = Manager(t_scheduler)
17 | self.path = self.manager.path
18 |
19 | def list(self, path=None):
20 | return self.manager.list_prod("list_history", path)
21 |
22 | def list_output(self, path=None):
23 | return self.manager.list_prod("list_output", path)
24 |
25 | def get(self, path=None, histo=None):
26 | return self.manager.get_file(path, histo=histo)
27 |
28 | def get_output(self, path=None, histo=None):
29 | return self.manager.get_file(path, t_output, histo)
30 |
31 | def clear(self, path=None, histo=None):
32 | return self.manager.clear_file(path, None, histo)
33 |
34 | def clear_output(self, path=None, histo=None):
35 | return self.manager.clear_file(path, t_output, histo)
36 |
37 | def status(self):
38 | req = requests.get(url=f"{self.manager.naas_api}/scheduler")
39 | req.raise_for_status()
40 | jsn = req.json()
41 | print(jsn)
42 | return jsn
43 |
44 | def pause(self):
45 | req = requests.get(url=f"{self.manager.naas_api}/scheduler/pause")
46 | req.raise_for_status()
47 | jsn = req.json()
48 | print(jsn)
49 | return jsn
50 |
51 | def resume(self):
52 | req = requests.get(url=f"{self.manager.naas_api}/scheduler/resume")
53 | req.raise_for_status()
54 | jsn = req.json()
55 | print(jsn)
56 | return jsn
57 |
58 | def currents(self, raw=False):
59 | json_data = self.manager.get_naas()
60 | json_filtered = []
61 | for item in json_data:
62 | if item["type"] == self.role and item["status"] != t_delete:
63 | if raw:
64 | json_filtered.append(item)
65 | else:
66 | json_filtered.append({"path": item["path"], "value": item["value"]})
67 | if raw is False:
68 | df = pd.DataFrame(json_filtered)
69 | df = df.style.format({"value": pretty_cron.prettify_cron})
70 | return df
71 | return json_filtered
72 |
73 | def __check_cron(self, text):
74 | res = False
75 | try:
76 | pycron.is_now(text)
77 | res = True
78 | except ValueError:
79 | pass
80 | return res
81 |
82 | def add(self, path=None, recurrence=None, cron=None, params={}, debug=False):
83 | if self.manager.is_production():
84 | print("No add done, you are in production\n")
85 | return
86 | if recurrence:
87 | print("recurrence is deprecated use cron arg instead")
88 | cron = recurrence if recurrence else cron
89 | if not cron:
90 | print("No cron provided\n")
91 | return
92 | if not self.__check_cron(cron):
93 | print(f"WARNING : Recurrence wrong format {cron}")
94 | return
95 | cron_string = pretty_cron.prettify_cron(cron)
96 | if cron_string == "Every minute of every day":
97 | display(
98 | Javascript(
99 | """
100 | alert('Warning you just scheduled a notebook to run every minute!');
101 | """
102 | )
103 | )
104 | current_file = self.manager.get_path(path)
105 | status = t_add
106 | try:
107 | self.manager.get_value(current_file, False)
108 | status = t_update
109 | except: # noqa: E722
110 | pass
111 | self.manager.add_prod(
112 | {
113 | "type": self.role,
114 | "path": current_file,
115 | "status": status,
116 | "params": params,
117 | "value": cron,
118 | },
119 | debug,
120 | )
121 | print("👌 Well done! Your Notebook has been sent to production.\n")
122 | print(
123 | f'⏰ It will be scheduled "{cron_string}" (more on the syntax on https://crontab.guru/).\n'
124 | )
125 | print('Ps: to remove the "Scheduler", just replace .add by .delete')
126 |
127 | def delete(self, path=None, all=True, debug=False):
128 | if self.manager.is_production():
129 | print("No delete done, you are in production\n")
130 | return
131 | current_file = self.manager.get_path(path)
132 | self.manager.del_prod({"type": self.role, "path": current_file}, debug)
133 | print("🗑 Done! Your Scheduler has been remove from production.\n")
134 | if all is True:
135 | self.clear(current_file, "all")
136 | self.clear_output(current_file, "all")
137 |
--------------------------------------------------------------------------------
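`Scheduler.add` validates the cron string by letting `pycron.is_now` raise `ValueError` on a malformed expression, and echoes a human-readable form via `pretty_cron`. The validation trick in isolation:

```python
import pretty_cron
import pycron


def is_valid_cron(expr):
    # Mirrors Scheduler.__check_cron: pycron.is_now raises ValueError
    # when the expression cannot be parsed
    try:
        pycron.is_now(expr)
        return True
    except ValueError:
        return False


print(is_valid_cron("0 9 * * *"))              # True
print(is_valid_cron("not a cron"))             # False
print(pretty_cron.prettify_cron("0 9 * * *"))  # "At 09:00 every day"
```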
/naas/runner/controllers/notebooks.py:
--------------------------------------------------------------------------------
1 | from naas.ntypes import (
2 | t_notebook,
3 | t_health,
4 | t_error,
5 | t_start,
6 | t_delete,
7 | t_out_of_credits,
8 | )
9 | from sanic.views import HTTPMethodView
10 | from sanic.exceptions import ServerError
11 | import urllib
12 | import uuid
13 | import os
14 | from naas_drivers import naascredits
15 | import pydash as _
16 |
17 |
18 | def parse_data(request):
19 | req_data = {}
20 | ctype = request.headers.get("content-type", "")
21 | if ctype.startswith("multipart/form-data"):
22 | req_data = {"files": request.files}
23 | elif ctype.startswith("application/json"):
24 | req_data = request.json
25 | elif ctype.startswith("application/x-www-form-urlencoded"):
26 | req_data = dict(urllib.parse.parse_qsl(request.body.decode("utf-8")))
27 | else:
28 | req_data = {"str": str(request.body.decode("utf-8"))}
29 | args = dict(
30 | urllib.parse.parse_qsl(request.query_string)
31 | ) # fix to don't have array for each args
32 | data = {"params": args, "body": req_data, "headers": dict(request.headers)}
33 | return data
34 |
35 |
36 | class NbController(HTTPMethodView):
37 | __logger = None
38 |
39 | def __init__(self, logger, jobs, nb, *args, **kwargs):
40 | super(NbController, self).__init__(*args, **kwargs)
41 | self.__logger = logger
42 | self.__jobs = jobs
43 | self.__nb = nb
44 |
45 | async def _get(self, data, token):
46 | uid = str(uuid.uuid4())
47 | job = await self.__jobs.find_by_value(uid, token, t_notebook)
48 | if job and job.get("status") != t_delete:
49 | value = job.get("value", None)
50 | file_filepath = job.get("path")
51 | cur_job = job.copy()
52 | cur_job["params"] = {**(job.get("params", dict())), **(data)}
53 | if os.environ.get(
54 | "JUPYTERHUB_API_TOKEN"
55 | ) is not None and "app.naas.ai" in os.environ.get("JUPYTERHUB_URL", ""):
56 | if _.get(naascredits.connect().get_balance(), "balance") <= 0:
57 | self.__logger.info(
58 | {
59 | "id": uid,
60 | "type": t_notebook,
61 | "status": t_out_of_credits,
62 | "filepath": file_filepath,
63 | "token": token,
64 | }
65 | )
66 | raise ServerError(
67 | {"error": "Out of credits"},
68 | status_code=401,
69 | )
70 | self.__logger.info(
71 | {
72 | "id": uid,
73 | "type": t_notebook,
74 | "status": t_start,
75 | "filepath": file_filepath,
76 | "token": token,
77 | }
78 | )
79 | await self.__jobs.update(
80 | uid, file_filepath, t_notebook, value, job.get("params"), t_start
81 | )
82 | res = await self.__nb.exec(uid, cur_job)
83 | if res.get("error"):
84 | self.__logger.error(
85 | {
86 | "main_id": uid,
87 | "id": uid,
88 | "type": t_notebook,
89 | "status": t_error,
90 | "filepath": file_filepath,
91 | "duration": res.get("duration"),
92 | "error": str(res.get("error")),
93 | }
94 | )
95 | await self.__jobs.update(
96 | uid,
97 | file_filepath,
98 | t_notebook,
99 | value,
100 | job.get("params"),
101 | t_error,
102 | res.get("duration"),
103 | )
104 | raise ServerError(
105 | {
106 | "id": uid,
107 | "error": res.get("error"),
108 | "data": data,
109 | "token": token,
110 | },
111 | status_code=500,
112 | )
113 | self.__logger.info(
114 | {
115 | "main_id": uid,
116 | "id": uid,
117 | "type": t_notebook,
118 | "status": t_health,
119 | "filepath": file_filepath,
120 | "duration": res.get("duration"),
121 | }
122 | )
123 | await self.__jobs.update(
124 | uid,
125 | file_filepath,
126 | t_notebook,
127 | value,
128 | job.get("params"),
129 | t_health,
130 | res.get("duration"),
131 | )
132 | return self.__nb.response(
133 | uid, file_filepath, res, res.get("duration"), job.get("params")
134 | )
135 | self.__logger.error(
136 | {
137 | "id": uid,
138 | "type": t_notebook,
139 | "status": t_error,
140 | "token": token,
141 | "error": "Cannot find your token",
142 | }
143 | )
144 | raise ServerError(
145 | {
146 | "id": uid,
147 | "error": "Cannot find your token",
148 | "data": data,
149 | "token": token,
150 | },
151 | status_code=404,
152 | )
153 |
154 | async def get(self, request, token):
155 | return await self._get(parse_data(request), token)
156 |
157 | async def post(self, request, token):
158 | return await self._get(parse_data(request), token)
159 |
--------------------------------------------------------------------------------
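For context, a minimal client-side sketch of calling a notebook exposed through the controller above (the URL is a hypothetical placeholder for the one `naas.api.add()` returns; `requests` is assumed to be available):

import requests

# hypothetical placeholder for the URL returned by naas.api.add()
url = "https://public.naas.example/notebook/<token>"

# parse_data() above accepts JSON bodies plus query-string args,
# and merges both into the notebook's injected params
resp = requests.post(url, params={"name": "demo"}, json={"value": 42})
resp.raise_for_status()
print(resp.text)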
/docs/basic_docs.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "
\n",
8 | "\n",
9 | "# Welcome to nass\n",
10 | "\n",
11 | "## Table of Contents\n",
12 | "1. [Why Nass exist](#Why-nass-xist)\n",
13 | "2. [How to install](#How-to-install)\n",
14 | "3. [Scheduler](#Sheduler)\n",
15 | "4. [Notebook as API](#Notebook-as-API)\n",
16 | "5. [Asset](#Asset)\n",
17 | "6. [Dependency](#Dependency)\n",
18 | "7. [Notification](#Notification)\n",
19 | "7. [Secret](#Secret)\n",
20 | "\n",
21 | "\n",
22 | "\n",
23 | "\n",
24 | "## Why nass exist\n",
25 | "Notebooks are awesome, but use them in production is messy, so we created our own module to allow any jupyter server to becode a save production server !\n",
26 | "\n",
27 | "## How to install\n",
28 | "First :\n",
29 | "`!pip install nass`\n",
30 | "\n",
31 | "Check if you have the minimum env vars set:\n",
32 | "\n",
33 | "- `JUPYTER_SERVER_ROOT` => Should be set as your home folder\n",
34 | "- `JUPYTERHUB_USER` => Should be set as your machine user, not root\n",
35 | "- `JUPYTERHUB_API_TOKEN` => should be auto set by your hub\n",
36 | "\n",
37 | "Optionally:\n",
38 | "- `NAAS_RUNNER_PORT` to change the port of the naas runner\n",
39 | "- `PUBLIC_PROXY_API` if you want the api and assets features your should run the naas proxy machine and provide his hostname here\n",
40 | "- `JUPYTERHUB_URL` the web url of your hub for api and assets features.\n",
41 | "- `SINGLEUSER_PATH` if you deploy on kubernet and your singleusers have specific hostname end\n",
42 | "- `NOTIFICATIONS_API` if you want the notification feature your should run the naas notification machine and provide his hostname here\n",
43 | "- `NAAS_SENTRY_DSN` If you need to catch error made by your users, configure it.\n",
44 | "\n",
45 | "\n",
46 | "Start the server in your jupyter singleuser machine:\n",
47 | "`python -m naas.runner &`\n",
48 | "\n",
49 | "\n",
50 | "Then in your notebook :\n",
51 | "`import naas`\n",
52 | "\n"
53 | ]
54 | },
55 | {
56 | "cell_type": "code",
57 | "execution_count": null,
58 | "metadata": {},
59 | "outputs": [],
60 | "source": [
61 | "import naas"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {},
67 | "source": [
68 | "---\n",
69 | "## Scheduler\n",
70 | "Copy in production this notebook and run it, every day at 9:00\n",
71 | "`nass.scheduler.add(recurrence=\"0 9 * * *\")`"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "metadata": {},
78 | "outputs": [],
79 | "source": [
80 | "naas.scheduler.add(recurrence=\"0 9 * * *\")"
81 | ]
82 | },
83 | {
84 | "cell_type": "markdown",
85 | "metadata": {},
86 | "source": [
87 | "---\n",
88 | "## Notebook as API\n",
89 | "Copy in production this notebook and allow to run it by calling the returned url\n",
90 | "`naas.api.add()`"
91 | ]
92 | },
93 | {
94 | "cell_type": "code",
95 | "execution_count": null,
96 | "metadata": {},
97 | "outputs": [],
98 | "source": [
99 | "naas.api.add()"
100 | ]
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": null,
105 | "metadata": {},
106 | "outputs": [],
107 | "source": [
108 | "naas.api.respond_notebook()"
109 | ]
110 | },
111 | {
112 | "cell_type": "markdown",
113 | "metadata": {},
114 | "source": [
115 | "---\n",
116 | "## Asset\n",
117 | "Copy in production this asset ( file ) and allow to get it by calling the returned url\n",
118 | "`naas.asset.add()`\n"
119 | ]
120 | },
121 | {
122 | "cell_type": "code",
123 | "execution_count": null,
124 | "metadata": {},
125 | "outputs": [],
126 | "source": [
127 | "naas.assets.add()"
128 | ]
129 | },
130 | {
131 | "cell_type": "markdown",
132 | "metadata": {},
133 | "source": [
134 | "---\n",
135 | "## Dependency\n",
136 | "Copy in production this notebook as dependency and allow other Api or Scheduler to use it.\n",
137 | "`naas.dependency.add()`"
138 | ]
139 | },
140 | {
141 | "cell_type": "code",
142 | "execution_count": null,
143 | "metadata": {},
144 | "outputs": [],
145 | "source": [
146 | "naas.dependency.add()"
147 | ]
148 | },
149 | {
150 | "cell_type": "markdown",
151 | "metadata": {},
152 | "source": [
153 | "---\n",
154 | "## Notification\n",
155 | "Send and email to anyone withing your notebook runs.\n",
156 | "`naas.notifications.send(email=\"elon@musk.com\", subject=\"The tesla action is going up\", \"check in the link the new chart data maide with naas from fresh dataset : [LINK]\")`\n"
157 | ]
158 | },
159 | {
160 | "cell_type": "code",
161 | "execution_count": null,
162 | "metadata": {},
163 | "outputs": [],
164 | "source": [
165 | "naas.notifications.send(email=\"elon@musk.com\", subject=\"The tesla action is going up\", content=\"check in the link the new chart data maide with naas from fresh dataset : [LINK]\")"
166 | ]
167 | },
168 | {
169 | "cell_type": "markdown",
170 | "metadata": {},
171 | "source": [
172 | "---\n",
173 | "## Secret\n",
174 | "Copy in production your secret and allow other Api or Scheduler to use it. They are encoded in a secure manner.\n",
175 | "`naas.secret.add(name=\"MY_FIRST_SECRET\", secret=\"SUPER_SECRET_STRING\")`"
176 | ]
177 | },
178 | {
179 | "cell_type": "code",
180 | "execution_count": null,
181 | "metadata": {},
182 | "outputs": [],
183 | "source": [
184 | "naas.secret.add(name=\"MY_FIRST_SECRET\", secret=\"SUPER_SECRET_STRING\")"
185 | ]
186 | }
187 | ],
188 | "metadata": {
189 | "kernelspec": {
190 | "display_name": "Python 3",
191 | "language": "python",
192 | "name": "python3"
193 | },
194 | "language_info": {
195 | "codemirror_mode": {
196 | "name": "ipython",
197 | "version": 3
198 | },
199 | "file_extension": ".py",
200 | "mimetype": "text/x-python",
201 | "name": "python",
202 | "nbconvert_exporter": "python",
203 | "pygments_lexer": "ipython3",
204 | "version": "3.8.5"
205 | }
206 | },
207 | "nbformat": 4,
208 | "nbformat_minor": 4
209 | }
210 |
--------------------------------------------------------------------------------
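A few common recurrence patterns for the scheduler documented above (a sketch; the strings use the standard five-field cron syntax explained at https://crontab.guru/):

import naas

naas.scheduler.add(cron="0 9 * * *")   # every day at 09:00
naas.scheduler.add(cron="30 8 * * 1")  # every Monday at 08:30
naas.scheduler.add(cron="0 0 1 * *")   # the first day of each month at midnight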
/tests/test_scheduler.py:
--------------------------------------------------------------------------------
1 | from naas.ntypes import t_add, t_health, t_scheduler, t_job, t_output
2 | from naas.runner.scheduler import Scheduler
3 | from naas.runner.notebooks import Notebooks
4 | from datetime import datetime, timedelta
5 | from naas.runner.logger import Logger
6 | from naas.runner.jobs import Jobs
7 | from naas.runner import n_env
8 | from naas import scheduler
9 | from shutil import copy2
10 | from syncer import sync
11 | import nest_asyncio
12 | import asyncio
13 | import getpass
14 | import pytest # noqa: F401
15 | import uuid
16 | import pytz
17 | import os
18 |
19 | # TODO remove this fix when papermill and nest_asyncio support uvloop
20 | asyncio.set_event_loop_policy(None)
21 | nest_asyncio.apply()
22 |
23 | user_folder_name = "test_user_folder"
24 | user = getpass.getuser()
25 |
26 | status_data = {"status": "running"}
27 | seps = os.sep + os.altsep if os.altsep else os.sep
28 |
29 |
30 | def mock_session(mocker, requests_mock, cur_path):
31 | mocker.patch(
32 | "ipykernel.get_connection_file",
33 | return_value="kernel-b1e19209-e251-4115-819e-7ab5bc4232b7.json",
34 | )
35 | mock_json = [
36 | {
37 | "kernel": {"id": "b1e19209-e251-4115-819e-7ab5bc4232b7"},
38 | "notebook": {"path": cur_path},
39 | },
40 | ]
41 |
42 | url = f"{n_env.hub_base}/user/{n_env.user}/api/sessions"
43 | requests_mock.register_uri("GET", url, json=mock_json, status_code=200)
44 |
45 |
46 | def mock_job(requests_mock, test_runner):
47 | url_api = f"{n_env.api}/{t_job}"
48 |
49 | def post_json(request, context):
50 | data = request.json()
51 | res = sync(test_runner.post(f"/{t_job}", json=data))
52 | data_res = res.json()
53 | context.status_code = res.status_code
54 | return data_res
55 |
56 | def get_json(request, context):
57 | data = request.qs
58 | res = sync(test_runner.get(f"/{t_job}", params=data))
59 | data_res = res.json()
60 | context.status_code = res.status_code
61 | return data_res
62 |
63 | requests_mock.register_uri("GET", url_api, json=get_json, status_code=200)
64 | requests_mock.register_uri("POST", url_api, json=post_json, status_code=200)
65 |
66 |
67 | async def test_scheduler_status(test_scheduler):
68 | response = await test_scheduler.get("/scheduler/status")
69 | assert response.status_code == 200
70 | resp_json = response.json()
71 | assert resp_json == status_data
72 |
73 |
74 | async def test_scheduler(tmp_path, event_loop):
75 | test_notebook = "tests/demo/demo_scheduler.ipynb"
76 | cur_path = os.path.join(os.getcwd(), test_notebook)
77 | new_path = os.path.join(tmp_path, test_notebook)
78 | os.makedirs(os.path.dirname(new_path))
79 | logger = Logger()
80 | notebooks = Notebooks(logger)
81 | jobs = Jobs(logger, True, [])
82 | scheduler = Scheduler(logger, jobs, notebooks, event_loop)
83 | uid = str(uuid.uuid4())
84 | copy2(cur_path, new_path)
85 | recur = "* * * * *"
86 | job = {
87 | "type": t_scheduler,
88 | "path": new_path,
89 | "params": {},
90 | "value": recur,
91 | "status": t_add,
92 | }
93 | await jobs.update(
94 | uid,
95 | job["path"],
96 | job["type"],
97 | job["value"],
98 | job["params"],
99 | job["status"],
100 | )
101 | resp_json = await jobs.list(uid)
102 | assert len(resp_json) == 1
103 | res_job = resp_json[0]
104 | assert res_job.get("type") == t_scheduler
105 | assert res_job.get("path") == new_path
106 | assert res_job.get("value") == recur
107 | assert res_job.get("status") == t_add
108 | await scheduler.start(test_mode=True)
109 | resp_json = await jobs.list(uid)
110 | assert len(resp_json) == 1
111 | res_job = resp_json[0]
112 | assert res_job.get("type") == t_scheduler
113 | assert res_job.get("path") == new_path
114 | assert res_job.get("value") == recur
115 | assert res_job.get("status") == t_health
116 | # list_out_in_prod = scheduler.list_output(new_path)
117 | # assert len(list_out_in_prod) == 1
118 | # histo = list_out_in_prod.to_dict("records")[0]
119 | # scheduler.get_output(new_path, histo.get("timestamp"))
120 | # filename = os.path.basename(new_path)
121 | # out_filename = f"{histo.get('timestamp')}___{t_output}__{filename}"
122 | # dirname = os.path.dirname(new_path)
123 | # new_path_out_histo = os.path.join(
124 | # dirname, out_filename
125 | # )
126 | # assert os.path.isfile(new_path_out_histo)
127 |
128 |
129 | async def test_scheduler_runner(mocker, requests_mock, test_scheduler, tmp_path):
130 | test_notebook = "tests/demo/demo_scheduler.ipynb"
131 | cur_path = os.path.join(os.getcwd(), test_notebook)
132 | new_path = os.path.join(tmp_path, test_notebook)
133 | os.makedirs(os.path.dirname(new_path))
134 | copy2(cur_path, new_path)
135 | mock_session(mocker, requests_mock, new_path)
136 | mock_job(requests_mock, test_scheduler)
137 | curr_time = datetime.now(tz=pytz.timezone(n_env.tz))
138 | curr_time = curr_time + timedelta(seconds=1)
139 | sec = curr_time.strftime("%S")
140 | recur = f"{sec} * * * *"
141 | scheduler.add(new_path, recur)
142 | response = await test_scheduler.get(f"/{t_job}")
143 | assert response.status_code == 200
144 | resp_json = response.json()
145 | assert len(resp_json) == 1
146 | res_job = resp_json[0]
147 | assert res_job.get("type") == t_scheduler
148 | assert res_job.get("path") == new_path
149 | assert res_job.get("value") == recur
150 | assert res_job.get("status") == t_add
151 | # TODO fix
152 | # sync(asyncio.sleep(2))
153 | # print("\n\n+++++++++++++++++++++++++++\n\n", datetime.now(tz=pytz.timezone(n_env.tz)), "\n\n+++++++++++++++++++++++++++\n\n")
154 | # response = await test_scheduler.get(f"/{t_job}")
155 | # assert response.status == 200
156 | # resp_json = await response.json()
157 | # assert res_job.get("type") == t_scheduler
158 | # assert res_job.get("path") == real_path
159 | # assert res_job.get("value") == recur
160 | # assert res_job.get("status") == t_start
161 | # sync(asyncio.sleep(2))
162 | # response = await test_scheduler.get(f"/{t_job}")
163 | # assert response.status == 200
164 | # resp_json = await response.json()
165 | # assert res_job.get("type") == t_scheduler
166 | # assert res_job.get("path") == real_path
167 | # assert res_job.get("value") == recur
168 | # assert res_job.get("status") == t_health
169 |
--------------------------------------------------------------------------------
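`mock_job` above bridges the synchronous `requests_mock` callbacks to the async test client with `syncer.sync`; the same pattern in isolation (a sketch with a made-up coroutine `fetch`):

import asyncio
from syncer import sync

async def fetch():
    await asyncio.sleep(0)
    return {"status": "ok"}

# sync() drives a coroutine to completion from synchronous code,
# which is how the requests_mock callbacks call the async test client
assert sync(fetch()) == {"status": "ok"}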
/naas/runner/custom_papermill.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import copy
4 | from naas.runner.env_var import cpath
5 | import nbformat
6 | from pathlib import Path
7 |
8 | # Temporary way to remove Papermill import warnings. Should be fixed when we reach Papermill 2.3.4
9 | import warnings
10 |
11 | with warnings.catch_warnings():
12 | warnings.simplefilter("ignore")
13 | from papermill.log import logger
14 | from papermill.iorw import (
15 | get_pretty_path,
16 | local_file_io_cwd,
17 | load_notebook_node,
18 | write_ipynb,
19 | )
20 | from papermill.engines import papermill_engines
21 | from papermill.execute import (
22 | prepare_notebook_metadata,
23 | remove_error_markers,
24 | raise_for_execution_errors,
25 | )
26 | from papermill.utils import chdir
27 | from papermill.parameterize import (
28 | add_builtin_parameters,
29 | parameterize_notebook,
30 | parameterize_path,
31 | )
32 |
33 | import json
34 |
35 |
36 | def execute_notebook(
37 | uid,
38 | runtime,
39 | input_path,
40 | output_path,
41 | parameters=None,
42 | engine_name=None,
43 | request_save_on_cell_execute=True,
44 | prepare_only=False,
45 | kernel_name=None,
46 | progress_bar=True,
47 | log_output=False,
48 | stdout_file=None,
49 | stderr_file=None,
50 | start_timeout=60,
51 | report_mode=False,
52 | cwd=None,
53 | **engine_kwargs,
54 | ):
55 | """Executes a single notebook locally.
56 | Parameters
57 | ----------
58 | input_path : str or Path
59 | Path to input notebook
60 | output_path : str or Path
61 | Path to save executed notebook
62 | parameters : dict, optional
63 | Arbitrary keyword arguments to pass to the notebook parameters
64 | engine_name : str, optional
65 | Name of execution engine to use
66 | request_save_on_cell_execute : bool, optional
67 | Request save notebook after each cell execution
68 | stdout_file, stderr_file : io.Writer, optional
69 | File-like objects to redirect the notebook's stdout / stderr to
70 | prepare_only : bool, optional
71 | Flag to determine if execution should occur or not
72 | kernel_name : str, optional
73 | Name of kernel to execute the notebook against
74 | progress_bar : bool, optional
75 | Flag for whether or not to show the progress bar.
76 | log_output : bool, optional
77 | Flag for whether or not to write notebook output to the configured logger
78 | start_timeout : int, optional
79 | Duration in seconds to wait for kernel start-up
80 | report_mode : bool, optional
81 | Flag for whether or not to hide input.
82 | cwd : str or Path, optional
83 | Working directory to use when executing the notebook
84 | **engine_kwargs
85 | Arbitrary keyword arguments to pass to the notebook engine
86 | Returns
87 | -------
88 | nb : NotebookNode
89 | Executed notebook object
90 | """
91 | if isinstance(input_path, Path):
92 | input_path = str(input_path)
93 | if isinstance(output_path, Path):
94 | output_path = str(output_path)
95 | if isinstance(cwd, Path):
96 | cwd = str(cwd)
97 |
98 | path_parameters = add_builtin_parameters(parameters)
99 | input_path = parameterize_path(input_path, path_parameters)
100 | output_path = parameterize_path(output_path, path_parameters)
101 |
102 | logger.info("Input Notebook: %s" % get_pretty_path(input_path))
103 | logger.info("Output Notebook: %s" % get_pretty_path(output_path))
104 | with local_file_io_cwd():
105 | if cwd is not None:
106 | logger.info("Working directory: {}".format(get_pretty_path(cwd)))
107 |
108 | nb = load_notebook_node(input_path)
109 |
110 | # Parameterize the Notebook.
111 | if parameters:
112 | nb = parameterize_notebook(nb, parameters, report_mode)
113 |
114 | nb = prepare_notebook_metadata(nb, input_path, output_path, report_mode)
115 | # clear out any existing error markers from previous papermill runs
116 | nb = remove_error_markers(nb)
117 | # add naas code to make naas feature act differently in production
118 | nb = prepare_notebook_naas(nb, input_path, uid, runtime)
119 | if not prepare_only:
120 | # Fetch the kernel name if it's not supplied
121 | kernel_name = kernel_name or nb.metadata.kernelspec.name
122 |
123 | # Execute the Notebook in `cwd` if it is set
124 | with chdir(cwd):
125 | nb = papermill_engines.execute_notebook_with_engine(
126 | engine_name,
127 | nb,
128 | input_path=input_path,
129 | output_path=output_path if request_save_on_cell_execute else None,
130 | kernel_name=kernel_name,
131 | progress_bar=progress_bar,
132 | log_output=log_output,
133 | start_timeout=start_timeout,
134 | stdout_file=stdout_file,
135 | stderr_file=stderr_file,
136 | **engine_kwargs,
137 | )
138 |
139 | # Check for errors first (it saves on error before raising)
140 | raise_for_execution_errors(nb, output_path)
141 |
142 | # Write final output in case the engine didn't write it on cell completion.
143 | write_ipynb(nb, output_path)
144 | return nb
145 |
146 |
147 | def prepare_notebook_naas(nb, input_path, uid, runtime):
148 | """Prepare notebook and inject cell with naas env config
149 | Parameters
150 | ----------
151 | nb : NotebookNode
152 | Executable notebook object
153 | input_path : str
154 | Path to input notebook
155 | uid : str
156 | uid of executed notebook
157 | """
158 | # Copy the nb object to avoid polluting the input
159 | nb = copy.deepcopy(nb)
160 | language = nb.metadata.kernelspec.language
161 | if language == "python":
162 | current_data = {
163 | "uid": uid,
164 | "path": cpath(input_path),
165 | "env": "RUNNER",
166 | "runtime": runtime,
167 | }
168 | param_content = (
169 | f"import naas\nnaas.n_env.current = {json.dumps(current_data, indent=4)}"
170 | )
171 | newcell = nbformat.v4.new_code_cell(source=param_content)
172 | newcell.metadata["tags"] = ["naas-injected"]
173 | nb.cells = [newcell] + nb.cells
174 | return nb
175 |
--------------------------------------------------------------------------------
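The cell-injection pattern in `prepare_notebook_naas` also works on notebooks on disk; a minimal sketch with `nbformat` (the file names are placeholders):

import nbformat

nb = nbformat.read("input.ipynb", as_version=4)

# prepend a tagged code cell, as prepare_notebook_naas does in memory
cell = nbformat.v4.new_code_cell(source="print('injected')")
cell.metadata["tags"] = ["naas-injected"]
nb.cells = [cell] + nb.cells

nbformat.write(nb, "output.ipynb")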
/dev/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.5"
2 |
3 | services:
4 | postgres_db:
5 | image: postgres:9.5
6 | container_name: postgres_db
7 | networks:
8 | - galaxy_cs_jup
9 | restart: always
10 | environment:
11 | - PGDATA=/var/lib/postgresql/data
12 | - POSTGRES_PASSWORD=b76df14e4fae131d0df091f1cbba16d8d730a83e7b49783721504d2d75a424be
13 | - POSTGRES_USER=postgres
14 | - JUPYTER_DB=jupyterhub
15 | - POSTGRES_MULTIPLE_DATABASES=jupyterhub
16 | volumes:
17 | - "postgres_db:/var/lib/postgresql/data"
18 | - ./postgres:/docker-entrypoint-initdb.d
19 | ports:
20 | - 5431:5432
21 | naas_screenshot:
22 | depends_on:
23 | - postgres_db
24 | - jupyterhub
25 | networks:
26 | - galaxy_cs_jup
27 | restart: always
28 | image: jupyternaas/screenshot:latest
29 | container_name: naas_screenshot
30 | environment:
31 | TZ: Europe/Paris
32 | ALLOW_HTTP: "true"
33 | ports:
34 | - 9000:9000
35 | naas_proxy:
36 | depends_on:
37 | - postgres_db
38 | - jupyterhub
39 | networks:
40 | - galaxy_cs_jup
41 | restart: always
42 | image: jupyternaas/proxy:latest
43 | container_name: naas_proxy
44 | environment:
45 | TZ: Europe/Paris
46 | PORT: "3002"
47 | SINGLEUSER_PATH: ""
48 | HUB_HOST: "jupyterhub:8000"
49 | HUB_DB: postgres://postgres:b76df14e4fae131d0df091f1cbba16d8d730a83e7b49783721504d2d75a424be@postgres_db:5432/jupyterhub
50 | ports:
51 | - 3002:3002
52 | naas_notif:
53 | depends_on:
54 | - postgres_db
55 | - jupyterhub
56 | networks:
57 | - galaxy_cs_jup
58 | restart: always
59 | image: jupyternaas/notifications:latest
60 | container_name: naas_notif
61 | environment:
62 | TZ: Europe/Paris
63 | PORT: "3003"
64 | HUB_HOST: "jupyterhub:8000"
65 | HUB_DB: postgres://postgres:b76df14e4fae131d0df091f1cbba16d8d730a83e7b49783721504d2d75a424be@postgres_db:5432/jupyterhub
66 | EMAIL_FROM: "Naas notifications "
67 | ADMIN_TOKEN: 110ec58a-a0f2-4ac4-8393-c866d813b8d1
68 | EMAIL_HOST: smtp.sendgrid.net
69 | EMAIL_PASSWORD: "****"
70 | EMAIL_PORT: "465"
71 | EMAIL_SECURE: "true"
72 | EMAIL_USER: apikey
73 | ports:
74 | - 3003:3003
75 | naas_callback:
76 | depends_on:
77 | - postgres_db
78 | - jupyterhub
79 | networks:
80 | - galaxy_cs_jup
81 | restart: always
82 | image: jupyternaas/callback:latest
83 | container_name: naas_callback
84 | environment:
85 | TZ: Europe/Paris
86 | PORT: "3004"
87 | HUB_HOST: "jupyterhub:8000"
88 | HUB_DB: postgres://postgres:b76df14e4fae131d0df091f1cbba16d8d730a83e7b49783721504d2d75a424be@postgres_db:5432/jupyterhub
89 | ports:
90 | - 3004:3004
91 | jupyterhub:
92 | depends_on:
93 | - postgres_db
94 | networks:
95 | - galaxy_cs_jup
96 | restart: always
97 | image: jupyternaas/hub:latest
98 | container_name: jupyterhub
99 | volumes:
100 | - "$HOME/.docker/config.json:/root/.docker/config.json"
101 | # Bind Docker socket on the host so we can connect to the daemon from
102 | # within the container
103 | - "/var/run/docker.sock:/var/run/docker.sock:rw"
104 | # Bind Docker volume on host for JupyterHub database and cookie secrets
105 | - "jupyter-data:/data"
106 | - "./jupyterhub_config.py:/srv/jupyterhub/jupyterhub_config.py"
107 | environment:
108 | TZ: Europe/Paris
109 | # All containers will join this network
110 | DOCKER_NETWORK_NAME: galaxy_cs_jup
111 | # JupyterHub will spawn this Notebook image for users
112 | DOCKER_NOTEBOOK_IMAGE: jupyternaas/naas:latest
113 | # DOCKER_NOTEBOOK_IMAGE: jupyternaas/naas:local_dev
114 | # Notebook directory inside user image
115 | DOCKER_NOTEBOOK_DIR: /home/ftp
116 | # Using this run command (optional)
117 | DOCKER_SPAWN_CMD: "start-singleuser.sh --NotebookApp.default_url=lab"
118 | # Postgres db info
119 | POSTGRES_DB: jupyterhub
120 | POSTGRES_HOST: postgres_db
121 | POSTGRES_PASSWORD: b76df14e4fae131d0df091f1cbba16d8d730a83e7b49783721504d2d75a424be
122 | ADMIN_API_TOKEN: 3X7zEkxoeQAeQqsAhpoB4irBsQmn
123 | PUBLIC_PROXY_API: http://localhost:3002
124 | NOTIFICATIONS_API: http://naas_notif:3003
125 | NOTIFICATIONS_ADMIN_TOKEN: 110ec58a-a0f2-4ac4-8393-c866d813b8d1
126 | CALLBACK_API: http://naas_callback:3004
127 | SCREENSHOT_API: http://naas_screenshot:9000
128 | JUPYTERHUB_URL: http://jupyterhub:8000
129 | LANDING_URL: "https://naas.ai"
130 | ### Enterprise Gateway Environment variables
131 | # JUPYTER_GATEWAY_URL: enterprise-gateway:8888
132 | # EG_PORT: "8888"
133 | # # Created above.
134 | # EG_NAMESPACE: "dev"
135 | # # Created above. Used if no KERNEL_NAMESPACE is provided by client.
136 | # EG_KERNEL_CLUSTER_ROLE: "kernel-controller"
137 | # # All kernels reside in the EG namespace if True, otherwise KERNEL_NAMESPACE
138 | # # must be provided or one will be created for each kernel.
139 | # EG_SHARED_NAMESPACE: "True"
140 | # # NOTE: This requires appropriate volume mounts to make notebook dir accessible
141 | # EG_MIRROR_WORKING_DIRS: "False"
142 | # # Current idle timeout is 1 hour.
143 | # EG_CULL_IDLE_TIMEOUT: "3600"
144 | # EG_LOG_LEVEL: "DEBUG"
145 | # EG_KERNEL_LAUNCH_TIMEOUT: "60"
146 | # EG_KERNEL_WHITELIST: "['python_kubernetes']"
147 | # EG_DEFAULT_KERNEL_NAME: "python_kubernetes"
148 | ports:
149 | - 8080:8080
150 | - 8081:8081
151 | - 8000:8000
152 | - 8001:8001
153 | volumes:
154 | jupyter-data:
155 | driver: local
156 | postgres_db:
157 | driver: local
158 |
159 | networks:
160 | galaxy_cs_jup:
161 | driver: bridge
162 | name: galaxy_cs_jup
--------------------------------------------------------------------------------
/tests/test_jobs.py:
--------------------------------------------------------------------------------
1 | from naas.ntypes import t_notebook, t_add, t_delete, t_update
2 | from naas.runner.jobs import Jobs
3 | from naas.runner.logger import Logger
4 | from naas.runner import n_env
5 | import pytest # noqa: F401
6 | import json
7 | import uuid
8 | import os
9 |
10 | clean = True
11 | init_data = []
12 |
13 | user_folder_name = "test_user_folder"
14 | test_file = "test_add.py"
15 |
16 | wrong_jobs_list = [
17 | {
18 | "id": "7450272a-0933-4be0-8c67-97de83fbe92a",
19 | "lastRun": 0,
20 | "lastUpdate": "2021-02-17 12:32:58",
21 | "nbRun": 0,
22 | "params": {},
23 | "path": "/home/ftp/.naas/home/ftp/sales/contacts/output/REF_HUBSPOT_CONTACTS.csv",
24 | "status": "installed",
25 | "totalRun": 0,
26 | "type": "asset",
27 | "value": "f47887a7d8da171e617f800e5d71c022f4923a42758f814399d45aab7427"
28 | },
29 | {
30 | "id": "7450272a-0933-4be0-8c67-97de83fbe92a",
31 | "lastRun": 0,
32 | "lastUpdate": "2021-02-17 12:32:58",
33 | "nbRun": 0,
34 | "params": {},
35 | "path": "/home/ftp/.naas/home/ftp/sales/contacts/output/REF_HUBSPOT_CONTACTS2.csv",
36 | "status": "installed",
37 | "totalRun": 0,
38 | "runs": None,
39 | "type": "asset",
40 | "value": "f47887a7d8da171e617f800e5d71c022f4923a42758f814399d45aab7429"
41 | },
42 | ]
43 |
44 |
45 | async def test_init(tmp_path):
46 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
47 | os.environ["JUPYTER_SERVER_ROOT"] = str(path_srv_root)
48 | logger = Logger()
49 | uid = str(uuid.uuid4())
50 | jobs = Jobs(logger, clean, init_data)
51 | list_job = await jobs.list(uid)
52 | assert len(list_job) == 0
53 |
54 |
55 | async def test_add(tmp_path):
56 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
57 | os.environ["JUPYTER_SERVER_ROOT"] = path_srv_root
58 | logger = Logger()
59 | uid = str(uuid.uuid4())
60 | jobs = Jobs(logger, clean, init_data)
61 | path = os.path.join(os.getcwd(), test_file)
62 | target_type = t_notebook
63 | value = user_folder_name
64 | params = {}
65 | run_time = 0
66 | await jobs.update(uid, path, target_type, value, params, t_add, run_time)
67 | list_job = await jobs.list(uid)
68 | assert len(list_job) == 1
69 | data = await jobs.find_by_path(uid, path, target_type)
70 | assert data.get("value") == value
71 | assert data["params"] == params
72 | assert data["lastRun"] == run_time
73 | jobs = Jobs(logger, clean, init_data)
74 | list_job = await jobs.list(uid)
75 | assert len(list_job) == 0
76 |
77 |
78 | async def test_delete(tmp_path):
79 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
80 | os.environ["JUPYTER_SERVER_ROOT"] = path_srv_root
81 | logger = Logger()
82 | uid = str(uuid.uuid4())
83 | jobs = Jobs(logger, clean, init_data)
84 | path = os.path.join(os.getcwd(), "test_delete.py")
85 | target_type = t_notebook
86 | value = user_folder_name
87 | params = {}
88 | run_time = 0
89 | await jobs.update(uid, path, target_type, value, params, t_add, run_time)
90 | list_job = await jobs.list(uid)
91 | assert len(list_job) == 1
92 | await jobs.update(uid, path, target_type, value, params, t_delete, run_time)
93 | list_job = await jobs.list(uid)
94 | assert len(list_job) == 1
95 | assert list_job[0].get("status") == t_delete
96 |
97 |
98 | async def test_keep(tmp_path):
99 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
100 | os.environ["JUPYTER_SERVER_ROOT"] = str(path_srv_root)
101 | logger = Logger()
102 | uid = str(uuid.uuid4())
103 | jobs = Jobs(logger, clean, init_data)
104 | path = os.path.join(os.getcwd(), test_file)
105 | target_type = t_notebook
106 | value = user_folder_name
107 | params = {}
108 | run_time = 0
109 | await jobs.update(uid, path, target_type, value, params, t_add, run_time)
110 | jobs_two = Jobs(logger, False, [])
111 | assert len(await jobs_two.list(uid)) == 1
112 | await jobs.update(uid, path, target_type, value, params, t_delete, run_time)
113 | Jobs(logger, False, [])
114 | list_job = await jobs.list(uid)
115 | assert len(list_job) == 1
116 | assert list_job[0].get("status") == t_delete
117 |
118 |
119 | async def test_clean(tmp_path):
120 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
121 | os.environ["JUPYTER_SERVER_ROOT"] = str(path_srv_root)
122 | logger = Logger()
123 | uid = str(uuid.uuid4())
124 | jobs = Jobs(logger, clean, init_data)
125 | path = os.path.join(os.getcwd(), test_file)
126 | target_type = t_notebook
127 | value = user_folder_name
128 | params = {}
129 | run_time = 0
130 | await jobs.update(uid, path, target_type, value, params, t_add, run_time)
131 | jobs_two = Jobs(logger, clean, init_data)
132 | assert len(await jobs_two.list(uid)) == 0
133 |
134 |
135 | async def test_update(tmp_path):
136 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
137 | os.environ["JUPYTER_SERVER_ROOT"] = path_srv_root
138 | logger = Logger()
139 | uid = str(uuid.uuid4())
140 | jobs = Jobs(logger, clean, init_data)
141 | path = os.path.join(os.getcwd(), "test_update.py")
142 | target_type = t_notebook
143 | value = user_folder_name
144 | new_value = "value_changed"
145 | params = {}
146 | run_time = 0
147 | await jobs.update(uid, path, target_type, value, params, t_add, run_time)
148 | list_job = await jobs.list(uid)
149 | assert len(list_job) == 1
150 | await jobs.update(uid, path, target_type, new_value, params, t_update, run_time)
151 | list_job = await jobs.list(uid)
152 | assert len(list_job) == 1
153 | data = await jobs.find_by_path(uid, path, target_type)
154 | assert data["value"] == new_value
155 | assert data["status"] == t_update
156 | await jobs.update(uid, path, target_type, value, params, t_delete, run_time)
157 | list_job = await jobs.list(uid)
158 | assert len(list_job) == 1
159 | assert list_job[0].get("status") == t_delete
160 |
161 |
162 | async def test_migration(tmp_path):
163 | path_srv_root = os.path.join(str(tmp_path), user_folder_name)
164 | os.environ["JUPYTER_SERVER_ROOT"] = path_srv_root
165 | logger = Logger()
166 | uid = str(uuid.uuid4())
167 | f = open(os.path.join(n_env.path_naas_folder, "jobs.json"), "w+")
168 | f.write(json.dumps(wrong_jobs_list))
169 | f.close()
170 | jobs = Jobs(logger)
171 | list_job = await jobs.list(uid)
172 | assert len(list_job) == 2
173 | assert list_job[0].get("runs") == []
174 | assert list_job[0].get("nbRun") is None
175 | assert list_job[0].get("totalRun") is None
176 | assert list_job[1].get("runs") == []
177 |
--------------------------------------------------------------------------------
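A distilled, pure-Python sketch of the normalization `test_migration` asserts: legacy entries gain an empty `runs` list while the old `nbRun`/`totalRun` counters disappear (an illustration of the expected shape, not the actual `Jobs` internals):

def migrate_job(job):
    # legacy entries get an empty run history
    if job.get("runs") is None:
        job["runs"] = []
    # counters superseded by the runs list are dropped
    job.pop("nbRun", None)
    job.pop("totalRun", None)
    return job

legacy = {"id": "x", "nbRun": 0, "totalRun": 0}
assert migrate_job(legacy) == {"id": "x", "runs": []}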