├── .gitignore ├── .gitlab-ci.yml ├── .pylintrc ├── CHANGELOG.md ├── Dockerfile ├── Dockerfile.test ├── LICENSE ├── README.rst ├── build_docs.sh ├── contrib ├── jupyter-r │ ├── README-r.md │ ├── images │ ├── logo.png │ ├── manifest.json │ └── rdatasci.json ├── pydatasci │ ├── README-datascience.md │ ├── README-malis.md │ ├── images │ ├── logo.png │ ├── manifest.json │ ├── pydatasci-gpu.json │ ├── pydatasci-malis.json │ └── pydatasci.json ├── rapids.ai │ ├── README.md │ ├── images │ ├── logo.png │ ├── manifest.json │ └── rapidsai.json ├── supervisor │ └── zoe_procs.conf ├── zapp-boinc │ ├── README.md │ ├── boinc.json │ ├── images │ ├── logo.png │ └── manifest.json └── zapp-spark │ ├── README-clouds.md │ ├── README.md │ ├── clouds.json │ ├── images │ ├── logo.png │ ├── manifest.json │ └── spark.json ├── create_db_tables.py ├── doc8.ini ├── docker-compose.yml ├── docs ├── Makefile ├── _static │ └── placeholder ├── _templates │ └── placeholder ├── architecture.rst ├── conf.py ├── config_file.rst ├── contributing.rst ├── developer │ ├── api-endpoint.rst │ ├── auth.rst │ ├── backend.rst │ ├── gitlab-ci.rst │ ├── imgs │ │ ├── 1.2.sonar.config.PNG │ │ ├── 1.java.config.png │ │ ├── 2.1.sonar.quality.gates.png │ │ ├── 2.sonar.config.png │ │ ├── 3.email.config.png │ │ ├── 4.1.github.server.config.png │ │ ├── 4.1.github.server.credential.png │ │ ├── 4.github.credential.png │ │ ├── 5.1.freestyle.project.png │ │ ├── 5.2.freestyle.project.png │ │ ├── 5.3.freestyle.project.png │ │ ├── 5.4.1.freestyle.project.png │ │ ├── 5.4.2.freestyle.project.png │ │ ├── 5.4.3.freestyle.project.png │ │ ├── 5.5.freestyle.project.png │ │ ├── 6.sonar.token.png │ │ ├── 7.1.github.access.token.png │ │ └── 7.github.repo.png │ ├── index.rst │ ├── integration_test.rst │ ├── master-api.rst │ ├── rest-api.rst │ ├── scheduler.rst │ └── stats.rst ├── figures │ └── extended_arch.png ├── index.rst ├── install.rst ├── kube_backend.rst ├── logging.rst ├── make.bat ├── proxy.rst ├── quotas.rst ├── roles.rst ├── 
users.rst └── zapps │ ├── howto_zapp.rst │ └── zapp_format.rst ├── integration_tests ├── __init__.py ├── basic_auth_fail_test.py ├── basic_auth_success_test.py ├── complex_zapp.json ├── conftest.py ├── sample_docker.conf └── zapp.json ├── requirements.txt ├── requirements_tests.txt ├── run_integration_tests.sh ├── run_tests.sh ├── schemas └── app_description_schema.json ├── scripts ├── idle.py ├── pg_isready ├── run_unit_tests.py ├── zapp_set_command.py └── zoe_create_workspace.sh ├── zoe-admin.py ├── zoe-api.py ├── zoe-master.py ├── zoe-submit.sh ├── zoe.py ├── zoe_api ├── __init__.py ├── api_endpoint.py ├── auth │ ├── __init__.py │ ├── base.py │ ├── file.py │ ├── ldap.py │ └── requests_oauth2 │ │ ├── __init__.py │ │ ├── errors.py │ │ ├── oauth2.py │ │ └── services.py ├── custom_request_handler.py ├── entrypoint.py ├── exceptions.py ├── master_api.py ├── rest_api │ ├── __init__.py │ ├── discovery.py │ ├── execution.py │ ├── info.py │ ├── login.py │ ├── quota.py │ ├── request_handler.py │ ├── role.py │ ├── service.py │ ├── statistics.py │ ├── user.py │ └── validation.py ├── tests │ ├── __init__.py │ ├── api_endpoint_test.py │ └── mock_master_api.py ├── web │ ├── __init__.py │ ├── admin.py │ ├── executions.py │ ├── request_handler.py │ ├── start.py │ ├── static │ │ ├── Chart.min.js │ │ ├── additional-methods.min.js │ │ ├── favicon.ico │ │ ├── jquery-2.1.4.min.js │ │ ├── jquery-ui-1.11.4 │ │ │ ├── images │ │ │ │ ├── ui-bg_diagonals-thick_18_b81900_40x40.png │ │ │ │ ├── ui-bg_diagonals-thick_20_666666_40x40.png │ │ │ │ ├── ui-bg_flat_10_000000_40x100.png │ │ │ │ ├── ui-bg_glass_100_f6f6f6_1x400.png │ │ │ │ ├── ui-bg_glass_100_fdf5ce_1x400.png │ │ │ │ ├── ui-bg_glass_65_ffffff_1x400.png │ │ │ │ ├── ui-bg_gloss-wave_35_f6a828_500x100.png │ │ │ │ ├── ui-bg_highlight-soft_100_eeeeee_1x100.png │ │ │ │ ├── ui-bg_highlight-soft_75_ffe45c_1x100.png │ │ │ │ ├── ui-icons_222222_256x240.png │ │ │ │ ├── ui-icons_228ef1_256x240.png │ │ │ │ ├── ui-icons_ef8c08_256x240.png │ │ │ │ 
├── ui-icons_ffd27a_256x240.png │ │ │ │ └── ui-icons_ffffff_256x240.png │ │ │ ├── index.html │ │ │ ├── jquery-ui.css │ │ │ ├── jquery-ui.js │ │ │ ├── jquery-ui.min.css │ │ │ ├── jquery-ui.min.js │ │ │ ├── jquery-ui.structure.css │ │ │ ├── jquery-ui.structure.min.css │ │ │ ├── jquery-ui.theme.css │ │ │ └── jquery-ui.theme.min.css │ │ ├── jquery.validate.min.js │ │ ├── jquery.wizard.js │ │ ├── logo.png │ │ ├── logtail.js │ │ ├── moment-timezone.min.js │ │ ├── moment.min.js │ │ ├── sorttable.js │ │ ├── zoe.css │ │ └── zoe.js │ ├── status.py │ ├── templates │ │ ├── admin.jinja2 │ │ ├── base.jinja2 │ │ ├── base_user.jinja2 │ │ ├── error.jinja2 │ │ ├── execution_inspect.jinja2 │ │ ├── execution_list.jinja2 │ │ ├── home_user.jinja2 │ │ ├── login.jinja2 │ │ ├── service_logs.jinja2 │ │ ├── status.jinja2 │ │ ├── zapp_shop.jinja2 │ │ └── zapp_start.jinja2 │ ├── websockets.py │ └── zapp_shop.py └── zapp_shop.py ├── zoe_cmd ├── __init__.py ├── api_lib │ ├── __init__.py │ ├── api_base.py │ ├── executions.py │ ├── info.py │ ├── quota.py │ ├── role.py │ ├── services.py │ ├── statistics.py │ ├── user.py │ └── validation.py ├── entrypoint.py ├── entrypoint_admin.py └── utils.py ├── zoe_lib ├── __init__.py ├── applications.py ├── config.py ├── configargparse.py ├── exceptions.py ├── state │ ├── __init__.py │ ├── base.py │ ├── execution.py │ ├── port.py │ ├── quota.py │ ├── role.py │ ├── service.py │ ├── sql_manager.py │ ├── tests │ │ ├── __init__.py │ │ └── mock_sql_manager.py │ └── user.py ├── tests │ ├── __init__.py │ ├── applications_test.py │ └── config_mock.py └── version.py └── zoe_master ├── __init__.py ├── backends ├── __init__.py ├── base.py ├── common.py ├── docker │ ├── __init__.py │ ├── api_client.py │ ├── backend.py │ ├── config.py │ ├── tests │ │ ├── __init__.py │ │ ├── api_client_test.py │ │ └── config_test.py │ └── threads.py ├── interface.py ├── kubernetes │ ├── __init__.py │ ├── api_client.py │ ├── backend.py │ └── threads.py └── service_instance.py ├── 
entrypoint.py ├── exceptions.py ├── gelf_listener.py ├── master_api.py ├── metrics ├── __init__.py ├── base.py ├── influxdb.py └── kairosdb.py ├── preprocessing.py ├── scheduler ├── __init__.py ├── base_scheduler.py ├── elastic_scheduler.py └── simulated_platform.py ├── stats.py └── workspace ├── __init__.py ├── base.py └── filesystem.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Python template 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | env/ 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | .idea/ 61 | state.zoe 62 | /zoe*.conf 63 | zoepass.csv 64 | /docker.conf 65 | /kube.conf 66 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | stages: 2 | - static-test 3 | - build 4 | - integration-test 5 | - deploy 6 | - cleanup 7 | 8 | variables: 9 | PIP_CACHE_DIR: "$CI_PROJECT_DIR/pip-cache" 10 | POSTGRES_DB: zoe 11 | POSTGRES_USER: zoeuser 12 | POSTGRES_PASSWORD: zoepass 13 | ZOE_TEST_IMAGE: zoe-test:$CI_PIPELINE_ID 14 | ZOE_COMMON_OPTIONS: --debug --deployment-name test${CI_BUILD_REF} --dbuser ${POSTGRES_USER} --dbhost postgres --dbport 5432 --dbname ${POSTGRES_DB} --dbpass ${POSTGRES_PASSWORD} --master-url tcp://localhost:4850 --listen-port 5100 --workspace-base-path /tmp --overlay-network-name bridge 15 | 16 | cache: 17 | paths: 18 | - "$CI_PROJECT_DIR/pip-cache" 19 | key: "$CI_PROJECT_ID" 20 | 21 | pylint: 22 | stage: static-test 23 | image: python:3.5 24 | script: 25 | - pip install -U pip setuptools 26 | - pip install -r requirements.txt 27 | - pip install -r requirements_tests.txt 28 | - pylint *.py zoe_* 29 | 30 | unittests: 31 | stage: static-test 32 | services: 33 | - postgres:9.5 34 | image: python:3.5 35 | script: 36 | - pip install -U pip setuptools 37 | - pip install -r requirements.txt 38 | - pip install -r requirements_tests.txt 39 | - pytest --ignore integration_tests --tb=short --cov-report=term --cov zoe_api --cov zoe_lib --cov zoe_master 40 | 41 | docs-test: 42 | stage: 
static-test 43 | image: python:3.5 44 | script: 45 | - pip install -U pip setuptools 46 | - pip install -r requirements.txt 47 | - pip install -r requirements_tests.txt 48 | - doc8 docs/ 49 | - sh ./build_docs.sh 50 | 51 | images: 52 | image: docker:latest 53 | stage: build 54 | before_script: 55 | - mkdir -p $HOME/.docker 56 | - echo $DOCKER_AUTH_CONFIG > $HOME/.docker/config.json 57 | script: 58 | - docker build --pull -t zoerepo/${ZOE_TEST_IMAGE} -f Dockerfile.test . 59 | - docker push zoerepo/${ZOE_TEST_IMAGE} 60 | - docker rm -f nginx0-1-integration_test || true 61 | 62 | api-test: 63 | stage: integration-test 64 | services: 65 | - postgres:9.5 66 | image: zoerepo/${ZOE_TEST_IMAGE} 67 | script: 68 | - bash ./run_integration_tests.sh 69 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.4 2 | 3 | MAINTAINER Daniele Venzano 4 | 5 | RUN mkdir -p /opt/zoe 6 | WORKDIR /opt/zoe 7 | 8 | RUN apt-get update && apt-get install -y libldap2-dev libsasl2-dev && apt-get clean 9 | 10 | COPY . /opt/zoe 11 | RUN pip install --no-cache-dir -r requirements.txt 12 | 13 | RUN echo 'admin,admin,admin' > /opt/zoe/zoepass.csv 14 | 15 | VOLUME /etc/zoe/ 16 | 17 | RUN python3 ./zoe-api.py --write-config /etc/zoe/zoe.conf 18 | -------------------------------------------------------------------------------- /Dockerfile.test: -------------------------------------------------------------------------------- 1 | FROM python:3.4 2 | 3 | MAINTAINER Daniele Venzano 4 | 5 | RUN mkdir -p /opt/zoe 6 | WORKDIR /opt/zoe 7 | 8 | COPY ./requirements* /opt/zoe/ 9 | 10 | RUN pip install -U pip setuptools 11 | RUN pip install --no-cache-dir -r requirements.txt 12 | RUN pip install --no-cache-dir -r requirements_tests.txt 13 | 14 | COPY . 
/opt/zoe 15 | 16 | RUN echo 'admin,admin,admin' > /opt/zoe/zoepass.csv 17 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Zoe Analytics - Container-based Analytics as a Service 2 | ====================================================== 3 | 4 | Zoe Analytics provides a simple way to provision any kind of data analytics applications. 5 | 6 | This repository is used for day-to-day, open source development, feel free to fork and contribute. 7 | 8 | Resources: 9 | 10 | - Website: http://zoe-analytics.eu 11 | - Documentation: http://docs.zoe-analytics.eu 12 | - Roadmap: https://github.com/DistributedSystemsGroup/zoe/wiki/RoadMap 13 | - Mailing list: http://www.freelists.org/list/zoe 14 | - Issue tracker: https://github.com/DistributedSystemsGroup/zoe/issues 15 | - Main repo: https://gitlab.eurecom.fr/zoe/main (GitHub is a read-only mirror) 16 | 17 | Zoe applications (ZApps): 18 | 19 | - Check the repositories at: https://gitlab.eurecom.fr/zoe-apps/ 20 | 21 | Zoe is licensed under the terms of the Apache 2.0 license. 22 | 23 | Research papers 24 | --------------- 25 | 26 | If you are interested in learning more about Zoe and the underling scheduling theory, we published a paper with an experimental evaluation that show the advantages of our techniques. 27 | 28 | The paper can be found on open access `at ArXiv `_. 
29 | 30 | If you use Zoe Analytics for running your experiments, please do not forget to cite us:: 31 | 32 | @inproceedings{Pace:2017:FSD:3101112.3101126, 33 | author = {Pace, Francesco and Venzano, Daniele and Carra, Damiano and Michiardi, Pietro}, 34 | title = {Flexible Scheduling of Distributed Analytic Applications}, 35 | booktitle = {Proceedings of the 17th IEEE/ACM International Symposium on Cluster, Cloud and Grid Computing}, 36 | series = {CCGrid '17}, 37 | year = {2017}, 38 | isbn = {978-1-5090-6610-0}, 39 | location = {Madrid, Spain}, 40 | pages = {100--109}, 41 | numpages = {10}, 42 | url = {https://doi.org/10.1109/CCGRID.2017.52}, 43 | doi = {10.1109/CCGRID.2017.52}, 44 | acmid = {3101126}, 45 | publisher = {IEEE Press}, 46 | address = {Piscataway, NJ, USA}, 47 | } 48 | 49 | -------------------------------------------------------------------------------- /build_docs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | PYTHONPATH=. 
sphinx-build -nW -b html -d docs/_build/doctrees docs/ docs/_build/html 4 | -------------------------------------------------------------------------------- /contrib/jupyter-r/README-r.md: -------------------------------------------------------------------------------- 1 | # Jupyter Notebook R Stack 2 | 3 | URL: [https://hub.docker.com/r/jupyter/r-notebook/](https://hub.docker.com/r/jupyter/r-notebook/) 4 | 5 | * Jupyter Notebook and JupyterLab 6 | * Conda R 7 | * plyr, devtools, shiny, rmarkdown, forecast, rsqlite, reshape2, nycflights13, caret, rcurl, and randomforest pre-installed 8 | * The tidyverse R packages are also installed, including ggplot2, dplyr, tidyr, readr, purrr, tibble, stringr, lubridate, and broom 9 | 10 | -------------------------------------------------------------------------------- /contrib/jupyter-r/images: -------------------------------------------------------------------------------- 1 | zapps/rdatasci:10396 2 | -------------------------------------------------------------------------------- /contrib/jupyter-r/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DistributedSystemsGroup/zoe/c8e0c908af1954a8b41d0f6de23d08589564f0ab/contrib/jupyter-r/logo.png -------------------------------------------------------------------------------- /contrib/jupyter-r/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "zapps": [ 4 | { 5 | "category": "Data science", 6 | "readable_descr": "README-r.md", 7 | "name": "R notebook", 8 | "description": "rdatasci.json", 9 | "parameters": [], 10 | "disabled_for": ["student"] 11 | } 12 | ] 13 | } 14 | 15 | -------------------------------------------------------------------------------- /contrib/jupyter-r/rdatasci.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rdatasci", 3 | "services": [ 4 | { 5 | "command": 
"/usr/local/bin/start_notebook.sh", 6 | "environment": [], 7 | "essential_count": 1, 8 | "image": "zapps/rdatasci:10396", 9 | "monitor": true, 10 | "name": "r-notebook", 11 | "ports": [ 12 | { 13 | "name": "Jupyter Notebook interface", 14 | "port_number": 8888, 15 | "protocol": "tcp", 16 | "proxy": true, 17 | "url_template": "http://{ip_port}{proxy_path}" 18 | } 19 | ], 20 | "replicas": 1, 21 | "resources": { 22 | "cores": { 23 | "max": 2, 24 | "min": 2 25 | }, 26 | "memory": { 27 | "max": 6442450944, 28 | "min": 6442450944 29 | } 30 | }, 31 | "startup_order": 0, 32 | "total_count": 1, 33 | "volumes": [] 34 | } 35 | ], 36 | "size": 512, 37 | "version": 3, 38 | "will_end": false 39 | } -------------------------------------------------------------------------------- /contrib/pydatasci/README-datascience.md: -------------------------------------------------------------------------------- 1 | # Notebook for Data Science 2 | 3 | This ZApp contains a Jupyter Notebook with a Python 3.5 kernel and the following libraries: 4 | 5 | * Tensorflow 1.10.1, Tensorboard 1.10.0 6 | * Pytorch and TorchVision 0.4.1 7 | * pandas, matplotlib, scipy, seaborn, scikit-learn, scikit-image, sympy, cython, patsy, statsmodel, cloudpickle, dill, numba, bokeh 8 | 9 | The GPU version contains also CUDA 9.0 and tensorflow with GPU support 10 | 11 | ## Customizations 12 | 13 | ### Adding Python libraries 14 | 15 | To install additional libraries you can add the following code on top of your notebook: 16 | 17 | import subprocess 18 | import sys 19 | 20 | def install(package): 21 | subprocess.call([sys.executable, "-m", "pip", "--user", "install", package]) 22 | 23 | and call the `install()` function to install all packages you need. 24 | 25 | Finally restart the kernel to load the modules you just installed. 26 | 27 | ### Running your own script 28 | 29 | By modifying the `command` parameter in the JSON file you can tell Zoe to run your own script instead of the notebook. 
30 | 31 | In this ZApp the default command is: 32 | 33 | "command": "jupyter lab --no-browser --NotebookApp.token='' --allow-root --ip=0.0.0.0" 34 | 35 | If you change the JSON and write: 36 | 37 | "command": "/mnt/workspace/myscript.sh" 38 | 39 | Zoe will run myscript.sh instead of running the Jupyter notebook. In this way you can: 40 | 41 | * transform an interactive notebook ZApp into a batch one, with exactly the same libraries and environment 42 | * perform additional setup before starting the notebook. In this case you will have to add the jupyter lab command defined above at the end of your script. 43 | 44 | -------------------------------------------------------------------------------- /contrib/pydatasci/README-malis.md: -------------------------------------------------------------------------------- 1 | # Notebook for MALIS Eurecom course 2 | 3 | MALIS: MAchine Leaning and Intelligent Systems 4 | 5 | This ZApp contains a Jupyter Notebook with a Python 3.5 kernel and the following libraries: 6 | 7 | * Tensorflow 1.10.1, Tensorboard 1.10.0 8 | * Pytorch and TorchVision 0.4.1 9 | * pandas, matplotlib, scipy, seaborn, scikit-learn, scikit-image, sympy, cython, patsy, statsmodel, cloudpickle, dill, numba, bokeh 10 | 11 | -------------------------------------------------------------------------------- /contrib/pydatasci/images: -------------------------------------------------------------------------------- 1 | zapps/pydatasci:10396 2 | zapps/pydatasci-gpu:10396 3 | -------------------------------------------------------------------------------- /contrib/pydatasci/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DistributedSystemsGroup/zoe/c8e0c908af1954a8b41d0f6de23d08589564f0ab/contrib/pydatasci/logo.png -------------------------------------------------------------------------------- /contrib/pydatasci/manifest.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "zapps": [ 4 | { 5 | "category": "Data science", 6 | "readable_descr": "README-datascience.md", 7 | "name": "Data science notebook", 8 | "description": "pydatasci.json", 9 | "parameters": [], 10 | "disabled_for": ["student"] 11 | }, 12 | { 13 | "category": "Labs", 14 | "readable_descr": "README-malis.md", 15 | "name": "MALIS", 16 | "description": "pydatasci-malis.json", 17 | "parameters": [] 18 | }, 19 | { 20 | "category": "Data science", 21 | "readable_descr": "README-datascience.md", 22 | "name": "Data science notebook GPU", 23 | "description": "pydatasci-gpu.json", 24 | "parameters": [ 25 | { 26 | "kind": "environment", 27 | "name": "NVIDIA_VISIBLE_DEVICES", 28 | "readable_name": "GPU", 29 | "description": "Which GPU to enable for this execution (e.g. all: all GPUs, 0: just GPU #0, 0,2: GPU #0 and #2)", 30 | "type": "string", 31 | "default": "all" 32 | } 33 | ], 34 | "disabled_for": ["student"] 35 | } 36 | ] 37 | } 38 | 39 | -------------------------------------------------------------------------------- /contrib/pydatasci/pydatasci-gpu.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pydatasci-gpu", 3 | "services": [ 4 | { 5 | "command": "/usr/local/bin/start_notebook.sh", 6 | "environment": [ 7 | [ 8 | "NVIDIA_VISIBLE_DEVICES", 9 | "all" 10 | ] 11 | ], 12 | "essential_count": 1, 13 | "image": "zapps/pydatasci-gpu:10396", 14 | "labels": [ 15 | "gpu" 16 | ], 17 | "monitor": true, 18 | "name": "py-notebook", 19 | "ports": [ 20 | { 21 | "name": "Jupyter Notebook interface", 22 | "port_number": 8888, 23 | "protocol": "tcp", 24 | "proxy": true, 25 | "url_template": "http://{ip_port}{proxy_path}" 26 | }, 27 | { 28 | "name": "Tensorboard", 29 | "port_number": 6006, 30 | "protocol": "tcp", 31 | "url_template": "http://{ip_port}/" 32 | } 33 | ], 34 | "replicas": 1, 35 | "resources": { 36 | "cores": { 37 | 
"max": 2, 38 | "min": 2 39 | }, 40 | "memory": { 41 | "max": 6442450944, 42 | "min": 6442450944 43 | } 44 | }, 45 | "startup_order": 0, 46 | "total_count": 1, 47 | "volumes": [] 48 | } 49 | ], 50 | "size": 512, 51 | "version": 3, 52 | "will_end": false 53 | } -------------------------------------------------------------------------------- /contrib/pydatasci/pydatasci-malis.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pydatasci", 3 | "services": [ 4 | { 5 | "command": "/usr/local/bin/start_notebook.sh", 6 | "environment": [], 7 | "essential_count": 1, 8 | "image": "zapps/pydatasci:10396", 9 | "labels": [ 10 | "labs" 11 | ], 12 | "monitor": true, 13 | "name": "py-notebook", 14 | "ports": [ 15 | { 16 | "name": "Jupyter Notebook interface", 17 | "port_number": 8888, 18 | "protocol": "tcp", 19 | "proxy": true, 20 | "url_template": "http://{ip_port}{proxy_path}" 21 | }, 22 | { 23 | "name": "Tensorboard", 24 | "port_number": 6006, 25 | "protocol": "tcp", 26 | "url_template": "http://{ip_port}/" 27 | } 28 | ], 29 | "replicas": 1, 30 | "resources": { 31 | "cores": { 32 | "max": 2, 33 | "min": 2 34 | }, 35 | "memory": { 36 | "max": 6442450944, 37 | "min": 6442450944 38 | } 39 | }, 40 | "startup_order": 0, 41 | "total_count": 1, 42 | "volumes": [] 43 | } 44 | ], 45 | "size": 512, 46 | "version": 3, 47 | "will_end": false 48 | } -------------------------------------------------------------------------------- /contrib/pydatasci/pydatasci.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pydatasci", 3 | "services": [ 4 | { 5 | "command": "/usr/local/bin/start_notebook.sh", 6 | "environment": [], 7 | "essential_count": 1, 8 | "image": "zapps/pydatasci:10396", 9 | "monitor": true, 10 | "name": "py-notebook", 11 | "ports": [ 12 | { 13 | "name": "Jupyter Notebook interface", 14 | "port_number": 8888, 15 | "protocol": "tcp", 16 | "proxy": true, 17 | "url_template": 
"http://{ip_port}{proxy_path}" 18 | }, 19 | { 20 | "name": "Tensorboard", 21 | "port_number": 6006, 22 | "protocol": "tcp", 23 | "url_template": "http://{ip_port}/" 24 | } 25 | ], 26 | "replicas": 1, 27 | "resources": { 28 | "cores": { 29 | "max": 2, 30 | "min": 2 31 | }, 32 | "memory": { 33 | "max": 6442450944, 34 | "min": 6442450944 35 | } 36 | }, 37 | "startup_order": 0, 38 | "total_count": 1, 39 | "volumes": [] 40 | } 41 | ], 42 | "size": 512, 43 | "version": 3, 44 | "will_end": false 45 | } -------------------------------------------------------------------------------- /contrib/rapids.ai/README.md: -------------------------------------------------------------------------------- 1 | # Rapids.ai 2 | 3 | URL: [http://rapids.ai/index.html](http://rapids.ai/index.html) 4 | 5 | You can find Example notebooks in /rapids. 6 | 7 | -------------------------------------------------------------------------------- /contrib/rapids.ai/images: -------------------------------------------------------------------------------- 1 | zapps/rapidsai:10396 2 | -------------------------------------------------------------------------------- /contrib/rapids.ai/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DistributedSystemsGroup/zoe/c8e0c908af1954a8b41d0f6de23d08589564f0ab/contrib/rapids.ai/logo.png -------------------------------------------------------------------------------- /contrib/rapids.ai/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "zapps": [ 4 | { 5 | "category": "Third party", 6 | "readable_descr": "README.md", 7 | "name": "Rapids.AI", 8 | "description": "rapidsai.json", 9 | "parameters": [], 10 | "disabled_for": ["student"] 11 | } 12 | ] 13 | } 14 | 15 | -------------------------------------------------------------------------------- /contrib/rapids.ai/rapidsai.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "rapidsai", 3 | "services": [ 4 | { 5 | "command": "/usr/local/bin/start_rapids.sh", 6 | "environment": [ 7 | [ 8 | "NVIDIA_VISIBLE_DEVICES", 9 | "all" 10 | ] 11 | ], 12 | "essential_count": 1, 13 | "image": "zapps/rapidsai:10396", 14 | "labels": [ 15 | "gpu" 16 | ], 17 | "monitor": true, 18 | "name": "rapidsai-nb", 19 | "ports": [ 20 | { 21 | "name": "Jupyter Notebook interface", 22 | "port_number": 8888, 23 | "protocol": "tcp", 24 | "proxy": true, 25 | "url_template": "http://{ip_port}{proxy_path}" 26 | } 27 | ], 28 | "replicas": 1, 29 | "resources": { 30 | "cores": { 31 | "max": 2, 32 | "min": 2 33 | }, 34 | "memory": { 35 | "max": 6442450944, 36 | "min": 6442450944 37 | } 38 | }, 39 | "startup_order": 0, 40 | "total_count": 1, 41 | "volumes": [] 42 | } 43 | ], 44 | "size": 512, 45 | "version": 3, 46 | "will_end": false 47 | } -------------------------------------------------------------------------------- /contrib/supervisor/zoe_procs.conf: -------------------------------------------------------------------------------- 1 | [program:zoe-master] 2 | command=/usr/bin/python3 /home/ubuntu/zoe/zoe-master.py 3 | directory=/home/ubuntu/zoe 4 | autostart=true 5 | autorestart=true 6 | startretries=3 7 | redirect_stderr=true 8 | stdout_logfile=/var/log/zoe/master.log 9 | user=ubuntu 10 | 11 | [program:zoe-api] 12 | command=/usr/bin/python3 /home/ubuntu/zoe/zoe-api.py 13 | directory=/home/ubuntu/zoe 14 | autostart=true 15 | autorestart=true 16 | startretries=3 17 | redirect_stderr=true 18 | stdout_logfile=/var/log/zoe/web.log 19 | user=ubuntu 20 | -------------------------------------------------------------------------------- /contrib/zapp-boinc/README.md: -------------------------------------------------------------------------------- 1 | # BOINC ZApp 2 | 3 | Maintainer: Daniele Venzano 4 | 5 | URL: 
[https://gitlab.eurecom.fr/zoe-apps/zapp-boinc](https://gitlab.eurecom.fr/zoe-apps/zapp-boinc) 6 | 7 | A ZApp for running a single [BOINC](https://boinc.berkeley.edu/) task. Run as many as you want, in parallel! 8 | -------------------------------------------------------------------------------- /contrib/zapp-boinc/boinc.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "boinc", 3 | "services": [ 4 | { 5 | "command": null, 6 | "environment": [ 7 | [ 8 | "PROJECT_URL", 9 | "www.worldcommunitygrid.org" 10 | ], 11 | [ 12 | "PROJECT_KEY", 13 | "Your key here" 14 | ] 15 | ], 16 | "essential_count": 1, 17 | "image": "zapps/boinc:10396", 18 | "monitor": true, 19 | "name": "boinc-client", 20 | "ports": [], 21 | "replicas": 1, 22 | "resources": { 23 | "cores": { 24 | "max": 1, 25 | "min": 1 26 | }, 27 | "memory": { 28 | "max": 536870912, 29 | "min": 536870912 30 | } 31 | }, 32 | "startup_order": 0, 33 | "total_count": 1, 34 | "volumes": [] 35 | } 36 | ], 37 | "size": 128, 38 | "version": 3, 39 | "will_end": false 40 | } -------------------------------------------------------------------------------- /contrib/zapp-boinc/images: -------------------------------------------------------------------------------- 1 | zapps/boinc:10396 2 | -------------------------------------------------------------------------------- /contrib/zapp-boinc/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DistributedSystemsGroup/zoe/c8e0c908af1954a8b41d0f6de23d08589564f0ab/contrib/zapp-boinc/logo.png -------------------------------------------------------------------------------- /contrib/zapp-boinc/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "zapps": [ 4 | { 5 | "category": "Third party", 6 | "name": "Boinc client", 7 | "description": "boinc.json", 8 | "readable_descr": "README.md", 9 | 
"parameters": [ 10 | { 11 | "kind": "environment", 12 | "name": "PROJECT_URL", 13 | "readable_name": "Project URL", 14 | "description": "The BOINC project URL", 15 | "type": "string", 16 | "default": "www.worldcommunitygrid.org" 17 | }, 18 | { 19 | "kind": "environment", 20 | "name": "PROJECT_KEY", 21 | "readable_name": "Project key", 22 | "description": "The BOINC project key, needed to upload results and statistics to the BOINC server", 23 | "type": "string", 24 | "default": null 25 | } 26 | ], 27 | "disabled_for": ["student"] 28 | } 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /contrib/zapp-spark/README-clouds.md: -------------------------------------------------------------------------------- 1 | # CLOUDS Lab ZApp 2 | 3 | This ZApp must be used during the laboratory activities of the CLOUDS course at Eurecom. 4 | 5 | Combine the full power of a distributed [Apache Spark](http://spark.apache.org) cluster with Python Jupyter Notebooks. 6 | 7 | Spark is configured in stand-alone, distributed mode. This ZApp contains Spark version 2.2.2. 8 | 9 | To start clone the repository at https://github.com/EURECOM-CLOUDS-COURSE and open the notebook corresponding to the laboratory session you are in. 10 | 11 | -------------------------------------------------------------------------------- /contrib/zapp-spark/README.md: -------------------------------------------------------------------------------- 1 | # Spark ZApp 2 | 3 | URL: [https://gitlab.eurecom.fr/zoe-apps/zapp-spark](https://gitlab.eurecom.fr/zoe-apps/zapp-spark) 4 | 5 | Combine the full power of a distributed [Apache Spark](http://spark.apache.org) cluster with Python Jupyter Notebooks. 6 | 7 | The Spark shell can be used from the built-in terminal in the notebook ZApp. 8 | 9 | Spark is configured in stand-alone, distributed mode. This ZApp contains Spark version 2.2.2. 
10 | 11 | ## Changing the default configuration 12 | 13 | When you start a kernel with this Zapp you will have a SparkContext already created for you with a default configuration. 14 | 15 | You can modify the executor ram limit or add other options and re-create a new context by using the following code: 16 | 17 | # default options 18 | spark_executor_ram = int(os.environ["SPARK_WORKER_RAM"]) - (1024 ** 3) - (512 * 1024 ** 2) 19 | conf.set("spark.executor.memory", spark_executor_ram) 20 | 21 | # set other options as desired 22 | 23 | # create the context 24 | sc = pyspark.SparkContext(conf=conf) 25 | 26 | 27 | ## Customizing the ZApp 28 | 29 | ### Workers 30 | 31 | To run your own script (for example to install additional libraries on the worker nodes) you can override the default command specified in the JSON file, in the service section corresponding to the workers. 32 | 33 | To start the worker correctly, you will need to use this command-line at the end of your script: 34 | 35 | /opt/spark/bin/spark-class org.apache.spark.deploy.worker.Worker \ 36 | spark://${SPARK_MASTER_IP}:7077 --cores ${SPARK_WORKER_CORES} --memory ${SPARK_WORKER_RAM} \ 37 | -h ${SPARK_LOCAL_IP:-127.0.0.1} 38 | 39 | ### Master 40 | 41 | To run your own script you can override the default command specified in the JSON file, in the service section corresponding to the master. 42 | 43 | To start the master correctly, you will need to use this command-line at the end of your script: 44 | 45 | ${SPARK_HOME}/bin/spark-class org.apache.spark.deploy.master.Master --host ${SPARK_MASTER_IP} --port 7077 --webui-port 8080 46 | 47 | ### Notebook and Spark submit 48 | 49 | You can customize the command run by the notebook service, to install additional libraries before starting the notebook, or to transform the ZApp into a batch job, by calling spark-submit instead of jupyter. 50 | 51 | If you want to run the notebook, at the end of your script call `/opt/start_notebook.sh`. 
52 | 53 | If you want to run spark-submit, you need to use: 54 | 55 | /opt/spark/bin/spark-submit --master spark://${SPARK_MASTER_IP}:7077 56 | 57 | Where the rest of the options could be, for example: 58 | 59 | wordcount.py hdfs://192.168.45.157/datasets/gutenberg_big_2x.txt hdfs://192.168.45.157/tmp/wcount-out 60 | 61 | -------------------------------------------------------------------------------- /contrib/zapp-spark/images: -------------------------------------------------------------------------------- 1 | zapps/spark-master:10396 2 | zapps/spark-worker:10396 3 | zapps/spark-jupyter-notebook:10396 4 | -------------------------------------------------------------------------------- /contrib/zapp-spark/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DistributedSystemsGroup/zoe/c8e0c908af1954a8b41d0f6de23d08589564f0ab/contrib/zapp-spark/logo.png -------------------------------------------------------------------------------- /contrib/zapp-spark/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "zapps": [ 4 | { 5 | "category": "Data science", 6 | "name": "Distributed PySpark notebook", 7 | "description": "spark.json", 8 | "readable_descr": "README.md", 9 | "parameters": [ 10 | { 11 | "kind": "environment", 12 | "name": "SPARK_DRIVER_RAM", 13 | "readable_name": "Spark driver memory (bytes)", 14 | "description": "Driver memory, must be less than the memory limit for the jupyter service", 15 | "type": "int", 16 | "default": 2147483648, 17 | "min": 536870912, 18 | "max": 68719476736, 19 | "step": 536870912 20 | }, 21 | { 22 | "kind": "service_count", 23 | "name": "spark-worker", 24 | "readable_name": "Spark worker count", 25 | "description": "How many workers to spawn", 26 | "type": "int", 27 | "default": 4, 28 | "min": 1, 29 | "max": 16, 30 | "step": 1 31 | }, 32 | { 33 | "kind": "environment", 34 | "name": 
"SPARK_WORKER_RAM", 35 | "readable_name": "Spark worker memory (bytes)", 36 | "description": "Worker memory, must be less than the memory limit for the worker service", 37 | "type": "int", 38 | "default": 11274289152, 39 | "min": 536870912, 40 | "max": 68719476736, 41 | "step": 536870912 42 | }, 43 | { 44 | "kind": "environment", 45 | "name": "SPARK_WORKER_CORES", 46 | "readable_name": "Spark worker cores", 47 | "description": "Number of cores each worker has access to, must be equal to the core limit for the worker service", 48 | "type": "int", 49 | "default": 2, 50 | "min": 1, 51 | "max": 16, 52 | "step": 1 53 | } 54 | ], 55 | "disabled_for": ["student"] 56 | }, 57 | { 58 | "category": "Labs", 59 | "name": "CLOUDS", 60 | "description": "clouds.json", 61 | "readable_descr": "README-clouds.md", 62 | "parameters": [] 63 | } 64 | ] 65 | } 66 | -------------------------------------------------------------------------------- /create_db_tables.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017, Daniele Venzano 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | """Create the DB tables needed by Zoe. 
This script is used in the CI pipeline to prevent race conditions with zoe-api automatically creating the tables while zoe-master is starting at the same time.""" 17 | 18 | import sys 19 | import time 20 | 21 | import zoe_lib.config 22 | import zoe_lib.state.sql_manager 23 | 24 | zoe_lib.config.load_configuration() 25 | 26 | print("Warning, this script will delete the database tables for the deployment '{}' before creating new ones".format(zoe_lib.config.get_conf().deployment_name)) 27 | print("If you are installing Zoe for the first time, you have nothing to worry about") 28 | print("Sleeping 5 seconds before continuing, hit CTRL-C to stop and think.") 29 | 30 | try: 31 | time.sleep(5) 32 | except KeyboardInterrupt: 33 | print("Aborted.") 34 | sys.exit(1) 35 | 36 | zoe_lib.state.sql_manager.SQLManager(zoe_lib.config.get_conf()).init_db(force=True) 37 | -------------------------------------------------------------------------------- /doc8.ini: -------------------------------------------------------------------------------- 1 | [doc8] 2 | ignore-path=docs/_build 3 | ignore=D001 4 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | postgres: 4 | image: postgres:9.3 5 | zoe-api: 6 | image: zoerepo/zoe-test 7 | command: python3 zoe-api.py --debug --backend DockerEngine --backend-docker-config-file /etc/zoe/docker.conf --deployment-name compose --master-url tcp://zoe-master:4850 --dbuser postgres --dbhost postgres --dbname postgres 8 | ports: 9 | - "8080:5001" 10 | depends_on: 11 | - postgres 12 | zoe-master: 13 | image: zoerepo/zoe-test 14 | ports: 15 | - "4850:4850" 16 | volumes: 17 | - /etc/zoe:/etc/zoe 18 | - /opt/zoe-workspaces:/mnt/zoe-workspaces 19 | command: python3 zoe-master.py --debug --backend DockerEngine --backend-docker-config-file /etc/zoe/docker.conf --deployment-name compose --dbuser 
postgres --dbhost postgres --dbname postgres 20 | depends_on: 21 | - zoe-api 22 | networks: 23 | zoe: 24 | driver: bridge 25 | -------------------------------------------------------------------------------- /docs/_static/placeholder: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DistributedSystemsGroup/zoe/c8e0c908af1954a8b41d0f6de23d08589564f0ab/docs/_static/placeholder -------------------------------------------------------------------------------- /docs/_templates/placeholder: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DistributedSystemsGroup/zoe/c8e0c908af1954a8b41d0f6de23d08589564f0ab/docs/_templates/placeholder -------------------------------------------------------------------------------- /docs/architecture.rst: -------------------------------------------------------------------------------- 1 | .. _architecture: 2 | 3 | Architecture 4 | ============ 5 | 6 | The main Zoe Components are: 7 | 8 | * zoe master: the core component that performs application scheduling and talks to the container back-end 9 | * zoe api: the Zoe frontend, offering a web interface and a REST API 10 | * command-line clients (zoe.py and zoe-admin.py) 11 | 12 | The Zoe master is the core component of Zoe and communicates with the clients by using an internal ZeroMQ-based protocol. This protocol is designed to be robust, using the best practices from ZeroMQ documentation. A crash of the Api or of the Master process will not leave the other component inoperable, and when the faulted process restarts, work will restart where it was left. 13 | 14 | In this architecture all application state is kept in a Postgres database. Platform state is kept in-memory: built at start time and refreshed periodically. A lot of care and tuning has been spent in keeping synchronized the view Zoe has of the system and the real back-end state. 
In a few cases containers may be left orphaned: when Zoe deems it safe, they will be automatically cleaned-up, otherwise a warning in the logs will be generated and the administrator has to examine the situation as, usually, it points to a bug hidden somewhere in the back-end code. 15 | 16 | Users submit *execution requests*, composed of a name and an *application description*. The frontend process (Zoe api) informs the Zoe Master that a new execution request is available for execution. 17 | Inside the Master, a scheduler keeps track of available resources and execution requests, and applies a 18 | scheduling policy to decide which requests should be satisfied as soon as possible and which ones can be deferred for later. 19 | 20 | The master also talks to a container orchestrator (Docker for example) to create and destroy containers and to read monitoring information used to schedule applications. 21 | 22 | Application descriptions 23 | ------------------------ 24 | Application descriptions are at the core of Zoe. They are likely to evolve in time, to satisfy the needs of new distributed analytic engines. The current version is built around several use cases involving MPI, Spark and Jupyter notebooks. 25 | 26 | Application descriptions are composed of a set of generic attributes that apply to the whole Zoe Application (abbreviated in ZApp) and a list of services. Zoe Services describe actual Docker containers. 27 | 28 | The Zoe Service descriptions are strictly linked to the Docker images they use, as they specify environment variables and commands to be executed. We successfully used unmodified third party images, demonstrating the generality of Zoe's approach. 29 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | ..
_contributing: 2 | 3 | Contributing to Zoe 4 | =================== 5 | 6 | Zoe is an open source project: we welcome any kind of contribution to the code base, the documentation, and the general architecture. Bug reports and feature requests are also accepted and treasured. 7 | 8 | To better work together we have established some rules on how to contribute. 9 | 10 | If you need ideas on features that are waiting to be implemented, you can check the `roadmap `_. 11 | 12 | Development repository 13 | ---------------------- 14 | Development happens at `Eurecom's GitLab repository `_. The GitHub repository is a read-only mirror. 15 | 16 | The choice of GitLab over GitHub is due to the CI pipeline that we set up to test Zoe. Please note the issue tracking for the Zoe project happens on GitHub. 17 | 18 | Bug reports and feature requests 19 | -------------------------------- 20 | 21 | Bug reports and feature requests are handled through the GitHub issue system at: `https://github.com/DistributedSystemsGroup/zoe/issues `_ 22 | 23 | Code and documentation contributions 24 | ------------------------------------ 25 | 26 | To contribute code and/or documentation you should follow this workflow: 27 | 28 | 1. check the issue tracker on GitHub to see if someone is already working on your idea 29 | 2. open a new issue stating your idea and how you wish to implement it 30 | 3. fork the Zoe repository via Eurecom's GitLab 31 | 4. create a branch that will hold your changes 32 | 5. ... develop and debug ... 33 | 6. when you are ready propose your changes on the mailing list 34 | 35 | Zoe maintainers will review your code, give constructive feedback and eventually perform a pull and a merge.
36 | 37 | Coding style 38 | ^^^^^^^^^^^^ 39 | 40 | Zoe code conforms to Python's `PEP8 coding style rules `_, with a few variations, detailed below: 41 | 42 | * No line length limit 43 | * All modules, classes, methods and functions must have a docstring 44 | * The docstring for private methods is optional 45 | * Names for unused variables and function parameters must end with ``_`` 46 | 47 | We also relaxed a number of pylint tests, check the ``.pylintrc`` file at the root of the source repository for details. 48 | 49 | In general, if your code passes pylint, run with our configuration file with a 10/10 mark, it is ok and matches Zoe's coding rules. 50 | 51 | Code quality and tests 52 | ^^^^^^^^^^^^^^^^^^^^^^ 53 | 54 | Before committing, all code should be tested via the `run_tests.sh` script available in the root of the repository. 55 | 56 | All contributions to the codebase are centralised into a repository at Eurecom. There, every commit (on any branch) triggers a continuous integration pipeline that verifies code quality and runs tests. Only commits and merges on the master branch for which the CI succeeds are pushed to the public repository. 57 | 58 | A description of the CI pipeline is available in the :ref:`ci-gitlab` page. 59 | 60 | Sphinx documentation is tested with the ``doc8`` tool with default options. 61 | 62 | Refer to the :ref:`integration-test` documentation for details on integration testing. 63 | -------------------------------------------------------------------------------- /docs/developer/api-endpoint.rst: -------------------------------------------------------------------------------- 1 | Internal API endpoint 2 | ===================== 3 | 4 | .. automodule:: zoe_api.api_endpoint 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/developer/auth.rst: -------------------------------------------------------------------------------- 1 | Auth modules 2 | ============ 3 | 4 | .. 
automodule:: zoe_api.auth.base 5 | :members: 6 | 7 | .. automodule:: zoe_api.auth.file 8 | :members: 9 | 10 | .. automodule:: zoe_api.auth.ldap 11 | :members: 12 | -------------------------------------------------------------------------------- /docs/developer/backend.rst: -------------------------------------------------------------------------------- 1 | .. _devel_backend: 2 | 3 | Back-end abstraction 4 | ==================== 5 | 6 | The container back-end Zoe uses is configurable at runtime. Internally there is an API that Zoe, in particular the scheduler, uses to communicate with the container back-end. This document explains the API, so that new back-ends can be created and maintained. 7 | 8 | Zoe assumes back-ends are composed of multiple nodes. In case the back-end is not clustered or does not expose per-node information, it can be implemented in Zoe as exposing a single big node. In this case, however, many of the smart scheduling features of Zoe will be unavailable. 9 | 10 | Package structure 11 | ----------------- 12 | 13 | Back-ends are written in Python and live in the ``zoe_master/backends/`` directory. Inside there is one Python package for each backend implementation. 14 | 15 | To let Zoe use a new back-end, its class must be imported in ``zoe_master/backends/interface.py`` and the ``_get_backend()`` function should be modified accordingly. Then the choices in ``zoe_lib/config.py`` for the configuration file should be expanded to include the new back-end class name. 16 | 17 | More options to the configuration file can be added to support the new backend. Use the ``---