├── .coveralls.yml ├── .eslintrc.json ├── .gitignore ├── .npmignore ├── .travis.yml ├── HISTORY.rst ├── LICENSE ├── MANIFEST.in ├── README.rst ├── circle.yml ├── databench ├── __init__.py ├── analyses_packaged │ ├── README.md │ ├── __init__.py │ ├── dummypi │ │ ├── README.md │ │ ├── __init__.py │ │ ├── analysis.js │ │ ├── analysis.py │ │ ├── index.html │ │ └── thumbnail.png │ ├── dummypi_py │ │ ├── README.md │ │ ├── analysis.js │ │ ├── analysis.py │ │ ├── index.html │ │ └── thumbnail.png │ ├── index.yaml │ ├── scaffold │ │ ├── README.md │ │ ├── __init__.py │ │ ├── analysis.js │ │ ├── analysis.py │ │ ├── index.html │ │ └── thumbnail.png │ └── scaffold_py │ │ ├── README.md │ │ ├── analysis.js │ │ ├── analysis.py │ │ ├── index.html │ │ └── thumbnail.png ├── analysis.py ├── analysis_zmq.py ├── app.py ├── cli.py ├── datastore.py ├── datastore_legacy.py ├── meta.py ├── meta_zmq.py ├── readme.py ├── scaffold.py ├── static │ ├── databench.css │ ├── favicon.ico │ └── logo.svg ├── template.py ├── templates │ ├── analysis.html │ ├── base.html │ └── index.html ├── testing.py ├── tests │ ├── __init__.py │ ├── analyses │ │ ├── README.md │ │ ├── __init__.py │ │ ├── cliargs │ │ │ ├── README.rst │ │ │ ├── __init__.py │ │ │ ├── analysis.py │ │ │ └── index.html │ │ ├── connection_interruption │ │ │ ├── README.md │ │ │ ├── __init__.py │ │ │ ├── analysis.py │ │ │ └── index.html │ │ ├── footer.html │ │ ├── head.html │ │ ├── index.yaml │ │ ├── node_modules │ │ │ └── test_file.txt │ │ ├── parameters │ │ │ ├── README.md │ │ │ ├── __init__.py │ │ │ ├── analysis.py │ │ │ └── index.html │ │ ├── parameters_py │ │ │ ├── README.md │ │ │ ├── analysis.py │ │ │ └── index.html │ │ ├── requestargs │ │ │ ├── README.md │ │ │ ├── __init__.py │ │ │ ├── analysis.py │ │ │ └── index.html │ │ ├── simple1 │ │ │ ├── README.md │ │ │ ├── __init__.py │ │ │ ├── analysis.py │ │ │ └── index.html │ │ ├── simple1_py │ │ │ ├── README.md │ │ │ ├── analysis.py │ │ │ └── index.html │ │ ├── simple1_pyspark │ │ │ ├── 
README.md │ │ │ ├── analysis.py │ │ │ └── index.html │ │ ├── simple2 │ │ │ ├── README.rst │ │ │ ├── __init__.py │ │ │ ├── analysis.py │ │ │ ├── index.html │ │ │ └── routes.py │ │ ├── simple3 │ │ │ ├── __init__.py │ │ │ ├── analysis.py │ │ │ └── index.html │ │ └── static │ │ │ └── test_file.txt │ ├── analyses_broken │ │ ├── __init__.py │ │ ├── doesnotexist │ │ │ └── __init__.py │ │ └── index.yaml │ ├── standalone │ │ ├── __init__.py │ │ ├── analysis.js │ │ ├── index.html │ │ └── test_standalone.py │ ├── test_action_decorator.py │ ├── test_analysistest.py │ ├── test_build.py │ ├── test_datastore.py │ ├── test_json_encoder.py │ ├── test_node_client.py │ ├── test_readme.py │ ├── test_testing.py │ └── test_utils.py └── utils.py ├── databench_py ├── __init__.py └── singlethread │ ├── __init__.py │ └── meta.py ├── docs ├── Makefile ├── README.md ├── backend.rst ├── backend_api.rst ├── conf.py ├── custom.css ├── deploy.rst ├── description.txt ├── dev.rst ├── esdoc-style.css ├── frontend.rst ├── frontend_api.rst ├── images │ ├── JavaScript-docs.png │ ├── Python-docs.png │ ├── bagofcharsd3.png │ ├── dataflow_datastore_state.png │ ├── dataflow_frontend_state.png │ ├── favicon.ico │ ├── flowers.gif │ ├── flowers_demo.png │ ├── logo-w100.png │ ├── logo-w600.png │ ├── logo.svg │ ├── mpld3_heart_path.png │ ├── mpld3pi_demo.png │ └── simplepi_demo.png ├── index.rst ├── quickstart.rst ├── requirements.txt └── version_index │ ├── .nojekyll │ ├── CNAME │ ├── circle.yml │ ├── favicon.ico │ ├── index.html │ └── logo.svg ├── js ├── docs │ └── index.md └── src │ ├── connection.ts │ ├── index.ts │ ├── test.ts │ └── ui.ts ├── logo ├── create.py ├── favicon-w128.png ├── favicon-w16.png ├── favicon-w256.png ├── favicon-w32.png ├── favicon-w48.png ├── favicon.ico ├── favicon.svg ├── logo-w100.png ├── logo-w600.png └── logo.svg ├── package-lock.json ├── package.json ├── setup.cfg ├── setup.py ├── tsconfig.json ├── tslint.json └── webpack.config.js /.coveralls.yml: 
-------------------------------------------------------------------------------- 1 | repo_token: tpWG33BUzK0XGTe9an2eATccglKXqXh5S -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "airbnb-base", 3 | "rules": { 4 | "no-underscore-dangle": "off", 5 | "camelcase": ["error", {"properties": "always"}], 6 | "no-console": "off", 7 | "no-param-reassign": ["error", { "props": false }] 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | databench/static/databench.js 2 | databench/static/databench.js.map 3 | databench/static/ts_declarations 4 | databench/tests/analyses/build_test.txt 5 | databench/tests/analyses_broken/build_test.txt 6 | _crawled/ 7 | 8 | # Byte-compiled / optimized / DLL files 9 | __pycache__/ 10 | *.py[cod] 11 | 12 | # C extensions 13 | *.so 14 | 15 | # node 16 | node_modules/ 17 | js/build 18 | -_.html 19 | docs/jsdoc 20 | docs/esdoc 21 | docs/typedoc 22 | 23 | # Distribution / packaging 24 | .Python 25 | env/ 26 | bin/ 27 | build/ 28 | develop-eggs/ 29 | dist/ 30 | eggs/ 31 | lib/ 32 | lib64/ 33 | parts/ 34 | sdist/ 35 | var/ 36 | *.egg-info/ 37 | .installed.cfg 38 | *.egg 39 | .eggs 40 | venv*/ 41 | .coverage* 42 | .pytest_cache 43 | 44 | # Installer logs 45 | pip-log.txt 46 | pip-delete-this-directory.txt 47 | 48 | # Unit test / coverage reports 49 | htmlcov/ 50 | .tox/ 51 | .coverage 52 | .cache 53 | nosetests.xml 54 | coverage.xml 55 | 56 | # Translations 57 | *.mo 58 | 59 | # Mr Developer 60 | .mr.developer.cfg 61 | .project 62 | .pydevproject 63 | 64 | # Rope 65 | .ropeproject 66 | 67 | # Django stuff: 68 | *.log 69 | *.pot 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | .DS_Store 75 | .vscode 76 | 
-------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .* 2 | build/ 3 | databench/ 4 | databench_py/ 5 | dist/ 6 | docs/ 7 | logo/ 8 | *.egg-info 9 | *.py 10 | MANIFEST.in 11 | circle.yml 12 | esdoc.json 13 | setup.cfg 14 | venv*/ 15 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | matrix: 3 | include: 4 | - os: windows 5 | language: sh 6 | python: "3.6" 7 | - os: linux 8 | language: python 9 | python: "2.7" 10 | - os: linux 11 | language: python 12 | python: "3.4" 13 | - os: linux 14 | language: python 15 | python: "3.5" 16 | - os: linux 17 | language: python 18 | python: "3.6" 19 | addons: 20 | apt: 21 | packages: 22 | - openjdk-8-jre 23 | before_install: | 24 | if [ "${TRAVIS_OS_NAME}" == "windows" ]; then 25 | choco install python3 --version 3.6.8 26 | export PATH="/c/Python36:/c/Python36/Scripts:$PATH" 27 | python -m pip install --upgrade pip wheel 28 | fi 29 | install: 30 | # install Python dependencies 31 | - pip install --upgrade pip 32 | - pip -V 33 | - "pip install -e .[tests]" 34 | - pip install python-coveralls 35 | - | 36 | if [ "${TRAVIS_OS_NAME}" == "linux" ]; then 37 | # install Node 6 38 | rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && (cd ~/.nvm && git checkout `git describe --abbrev=0 --tags`) && source ~/.nvm/nvm.sh && nvm install 6 39 | node -v 40 | fi 41 | - | 42 | if [ "${TRAVIS_OS_NAME}" == "linux" ]; then 43 | # npm update 44 | npm install -g npm 45 | npm -v 46 | fi 47 | - | 48 | if [ "${TRAVIS_OS_NAME}" == "linux" ]; then 49 | # install Node dependencies 50 | npm install 51 | npm run build 52 | fi 53 | 54 | script: 55 | - | 56 | if [ "${TRAVIS_OS_NAME}" == "linux" ]; then 57 | flake8 58 | npm run lint 59 | localcrawl --start 
http://localhost:5000 --run databench --log DEBUG 60 | html5validator --root _crawled/ 61 | nosetests -vv --with-coverage --cover-inclusive 62 | fi 63 | - | 64 | if [ "${TRAVIS_OS_NAME}" == "windows" ]; then 65 | timeout 10 databench --port=5005 & 66 | sleep 2 67 | powershell -Command "(new-object System.Net.WebClient).DownloadFile('http://localhost:5005', 'databench_index.html')" 68 | cat databench_index.html 69 | fi 70 | after_success: 71 | - coveralls || true 72 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | --------- 3 | 4 | * `master `_ 5 | * `0.7.1 `_ (2018-02-11) 6 | * typedoc updates 7 | * `testing.AnalysisTest` 8 | * updated Tornado and made use of more Futures 9 | * new `Datastore` with `set_state()` 10 | * update docstrings 11 | * `on()` decorator 12 | * unified py kernel: use `databench.Analysis` now instead of `databench_py.Analysis` 13 | * autoreload browser 14 | * new standalone mode with `databench.run()` 15 | * backend exceptions are signaled to the frontend 16 | * allow regular expressions in frontend listener 17 | * JS based client tests 18 | * fixed coveralls reporting 19 | * static file handling has changed, see: `Routes `_ 20 | * `0.6.2 `_ (2017-05-26) 21 | * TypeScript improvements 22 | * documentation updates 23 | * `0.6.0 `_ (2017-02-09) 24 | * TypeScript, WebPack 25 | * semantic HTML tags in templates 26 | * added tests for Python 3.6 27 | * `0.5.0 `_ (2017-01-04) 28 | * various improvements to Datastore (supports storing dict and list now) 29 | * linting for HTML and JS code 30 | * `0.4.0 `_ (2016-08-14) 31 | * switch from Flask to Tornado 32 | * switch from Jinja2 templates to Tornado templates 33 | * optional dependency on markdown and docutils (support md and rst readme files) 34 | * node_modules support 35 | * Datastore 36 | * move JavaScript to ES6 37 | * templates: endblock -> end, 
footerscripts -> footer, content -> analysis 38 | * update documentation 39 | * build documentation automatically 40 | * support and unit test for Python 2.7, 3.4 and 3.5 41 | * `0.3.17 `_ (2015-05-04) 42 | * make sure messages to frontend are utf-8 encoded on the python side 43 | * `0.3.16 `_ (2015-04-27) 44 | * add auto-reconnect for WebSocket connections (three attempts with exponential and randomized back-off) 45 | * add full stacktrace to some situations where it was suppressed before (especially for import bugs) 46 | * `0.3.15 `_ (2015-04-17) 47 | * introduce optional ``request_args`` parameter to ``Analysis.on_connect()`` function 48 | * use wss when used over https 49 | * scaffolding: check analysis names for dashes and warn 50 | * workaround different JSON specs: convert nan, +inf and -inf to strings 51 | * `0.3.9 `_ (2014-10-30) 52 | * fix analyses/static search path 53 | * fix included font-awesome 54 | * `0.3.7 `_ (2014-10-24) 55 | * improved scaffold with more comments 56 | * alternative frontends: apart from index.html, now you can also create anything.html and it will be rendered 57 | * frontend options: connect to a non-standard backend location 58 | * fix for Windows compatibility 59 | * wider zeromq compatibility (not using unbind() anymore) 60 | * CircleCI tests now running 61 | * docs updated with new features 62 | * `0.3.6 `_ (2014-10-20) 63 | * add section on making a plot with d3.js to tutorial 64 | * improve doc section on frontend 65 | * add more comments to scaffold 66 | * `0.3.4 `_ (2014-10-17) 67 | * added a tutorial to the docs 68 | * added comments and explanation to scaffold analysis 69 | * friendlier logo 70 | * `0.3.3 `_ (2014-10-01) 71 | * clean up of Python source distribution 72 | * customizable header 73 | * serve static files at ``analyses/static/`` under ``analyses_static/`` 74 | * `0.3.0 `_ (2014-09-20) 75 | * ``include_md()`` macro for frontend to include Markdown files 76 | * python 2.6 support (in addition to 2.7) 77 | * 
new tool ``scaffold-databench`` 78 | * moved from socket.io to plain websockets 79 | * one analysis instance per websocket connection 80 | * restructured analyses directories 81 | * signals are executed in separate co-routines 82 | * interface to other backends using ``zmq`` 83 | * frontend: genericElements take string ids instead of jquery selectors 84 | * frontend: Databench() does not require a name anymore 85 | * frontend: genericElements: added ``button()`` and ``slider()`` 86 | * backend handles ``action`` : an ``action`` is the co-routine that is launched with a signal. An ``action`` can have an ``id`` in which case it signals ``start`` and ``end`` (used to indicate state for genericElements.button()). 87 | * `0.2.15 `_ (2014-09-06) 88 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014-2016 Sven Kreiss and contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst HISTORY.rst LICENSE 2 | recursive-include databench/templates *.html 3 | recursive-include databench/static *.css *.js *.js.map *.ico *.svg 4 | recursive-include databench/analyses_packaged * 5 | recursive-exclude databench/analyses_packaged *.pyc .DS_Store 6 | recursive-include databench/tests/analyses * 7 | recursive-exclude databench/tests/analyses *.pyc .DS_Store 8 | recursive-include databench/tests/analyses_broken * 9 | recursive-exclude databench/tests/analyses_broken *.pyc .DS_Store 10 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://raw.githubusercontent.com/svenkreiss/databench/master/logo/logo-w100.png 2 | :target: http://databench.trivial.io 3 | :class: hideinsphinx 4 | 5 | Databench 6 | ========= 7 | 8 | Databench is a Python data analysis tool. Install with: 9 | 10 | .. code-block:: bash 11 | 12 | $ pip install databench 13 | 14 | +--------------------------------+---------------------------+ 15 | | `Documentation`_ + `Examples`_ + 16 | | + + 17 | | |Python docs image| + |Example image| + 18 | +--------------------------------+---------------------------+ 19 | 20 | .. _`Documentation`: http://databench.trivial.io 21 | .. _`Examples`: http://databench-examples.trivial.io 22 | 23 | .. |Python docs image| image:: https://raw.githubusercontent.com/svenkreiss/databench/master/docs/images/Python-docs.png 24 | :alt: Documentation. 
25 | :height: 230 26 | :target: http://databench.trivial.io 27 | 28 | .. |Example image| image:: https://raw.githubusercontent.com/svenkreiss/databench/master/docs/images/bagofcharsd3.png 29 | :alt: Analysis example screenshot. 30 | :height: 230 31 | :target: http://databench-examples.trivial.io 32 | 33 | Databench is made available under the 34 | `MIT license `_. 35 | 36 | .. image:: https://travis-ci.org/svenkreiss/databench.svg?branch=master 37 | :target: https://travis-ci.org/svenkreiss/databench 38 | .. image:: https://coveralls.io/repos/svenkreiss/databench/badge.svg 39 | :target: https://coveralls.io/r/svenkreiss/databench 40 | .. image:: https://badge.fury.io/py/databench.svg 41 | :target: https://pypi.python.org/pypi/databench/ 42 | -------------------------------------------------------------------------------- /circle.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | build: 4 | docker: 5 | - image: ubuntu:18.04 6 | steps: 7 | - checkout 8 | - run: 9 | name: install openjdk8 and Python 10 | command: | 11 | apt-get update 12 | apt-get install -y openjdk-8-jre python3-venv curl gnupg chromium-chromedriver 13 | ln -s /usr/lib/chromium-browser/chromedriver /usr/local/bin/chromedriver 14 | - run: 15 | name: install nodejs 16 | command: | 17 | curl -sL https://deb.nodesource.com/setup_10.x | bash 18 | apt-get install -y nodejs 19 | - run: 20 | name: virtualenv 21 | command: python3 -m venv venv3 22 | - run: 23 | name: Python dependencies 24 | command: | 25 | . venv3/bin/activate 26 | pip install --upgrade pip setuptools 27 | pip install --progress-bar off -e ".[tests]" 28 | pip install --progress-bar off python-coveralls 29 | - run: 30 | name: JS dependencies 31 | command: | 32 | npm install 33 | npm run build 34 | - run: 35 | name: lint Python 36 | command: | 37 | . 
venv3/bin/activate 38 | flake8 39 | - run: 40 | name: lint JS 41 | command: npm run lint 42 | - run: 43 | name: lint HTML 44 | command: | 45 | . venv3/bin/activate 46 | localcrawl --start http://localhost:5000 --output-encoding utf8 --chrome --no-sandbox --run databench --log DEBUG 47 | html5validator --root _crawled/ 48 | - run: 49 | name: test 50 | command: | 51 | . venv3/bin/activate 52 | nosetests -vv --with-coverage --cover-inclusive 53 | 54 | deploy-prod: 55 | docker: 56 | - image: ubuntu:18.04 57 | steps: 58 | - checkout 59 | - run: 60 | name: install nodejs 61 | command: | 62 | apt-get update 63 | apt-get install -y curl gnupg python-pip git 64 | curl -sL https://deb.nodesource.com/setup_10.x | bash 65 | apt-get install -y nodejs 66 | npm i 67 | - run: 68 | name: install ghp-import 69 | command: pip install ghp-import 70 | - run: 71 | name: typedoc update 72 | command: | 73 | # - coveralls || true disabled because js test coverage collection not working on circleci 74 | npm run typedoc 75 | cp circle.yml docs/typedoc/ 76 | mkdir ~/.ssh 77 | ssh-keyscan github.com >> ~/.ssh/known_hosts 78 | ghp-import --force -n -p docs/typedoc 79 | 80 | workflows: 81 | version: 2 82 | build-deploy: 83 | jobs: 84 | - build: 85 | filters: 86 | branches: 87 | ignore: gh-pages 88 | - deploy-prod: 89 | requires: 90 | - build 91 | filters: 92 | branches: 93 | only: master 94 | -------------------------------------------------------------------------------- /databench/__init__.py: -------------------------------------------------------------------------------- 1 | """Databench module.""" 2 | # flake8: noqa 3 | 4 | from __future__ import absolute_import 5 | 6 | __version__ = '0.7.3' 7 | __all__ = ['Analysis', 'AnalysisZMQ', 'App', 'Datastore', 'Meta', 'MetaZMQ', 8 | 'on', 'on_action', 'Readme', 'run', 'testing', 'utils'] 9 | 10 | from .analysis import Analysis, on, on_action 11 | from .analysis_zmq import AnalysisZMQ 12 | from .app import App 13 | from .cli import run 14 | from 
.datastore import Datastore 15 | from .datastore_legacy import DatastoreLegacy 16 | from .meta import Meta 17 | from .meta_zmq import MetaZMQ 18 | from .readme import Readme 19 | from . import testing 20 | from . import utils 21 | -------------------------------------------------------------------------------- /databench/analyses_packaged/README.md: -------------------------------------------------------------------------------- 1 | Analyses that come packaged with [Databench](http://databench.trivial.io). 2 | -------------------------------------------------------------------------------- /databench/analyses_packaged/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/analyses_packaged/__init__.py -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi/README.md: -------------------------------------------------------------------------------- 1 | This little demo uses two random numbers r1 and r2 and 2 | then does a comparison `r1^2 + r2^2 < 1.0` to figure out whether 3 | the generated point is inside the first quadrant of the unit circle. 
4 | -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/analyses_packaged/dummypi/__init__.py -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi/analysis.js: -------------------------------------------------------------------------------- 1 | /* global Databench */ 2 | /* global document */ 3 | 4 | const databench = new Databench.Connection(); 5 | Databench.ui.wire(databench); 6 | 7 | databench.on({ data: 'pi' }, (pi) => { 8 | document.getElementById('pi').innerHTML = 9 | `${pi.estimate.toFixed(3)} ± ${pi.uncertainty.toFixed(3)}`; 10 | }); 11 | 12 | databench.connect(); 13 | -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi/analysis.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | import databench 4 | import math 5 | import random 6 | 7 | 8 | class Dummypi(databench.Analysis): 9 | """A dummy analysis.""" 10 | 11 | @databench.on 12 | def connected(self): 13 | yield self.data.init({'samples': 100000}) 14 | 15 | @databench.on 16 | def run(self): 17 | """Run when button is pressed.""" 18 | 19 | inside = 0 20 | for draws in range(1, self.data['samples']): 21 | # generate points and check whether they are inside the unit circle 22 | r1 = random.random() 23 | r2 = random.random() 24 | if r1 ** 2 + r2 ** 2 < 1.0: 25 | inside += 1 26 | 27 | # every 1000 iterations, update status 28 | if draws % 1000 != 0: 29 | continue 30 | 31 | # debug 32 | yield self.emit('log', {'draws': draws, 'inside': inside}) 33 | 34 | # calculate pi and its uncertainty given the current draws 35 | p = inside / draws 36 | pi = { 
37 | 'estimate': 4.0 * p, 38 | 'uncertainty': 4.0 * math.sqrt(draws * p * (1.0 - p)) / draws, 39 | } 40 | 41 | # send status to frontend 42 | yield self.set_state(pi=pi) 43 | 44 | yield self.emit('log', {'action': 'done'}) 45 | 46 | @databench.on 47 | def samples(self, value): 48 | yield self.set_state(samples=value) 49 | -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi/index.html: -------------------------------------------------------------------------------- 1 | {% extends "analysis.html" %} 2 | 3 | 4 | {% block head %} 5 | 6 | {% end %} 7 | 8 | 9 | {% block analysis %} 10 |
11 | 12 | 13 |
14 | 15 | 16 |
17 | π = 0.0 ± 1.0 18 |
19 | {% end %} 20 | 21 | 22 | {% block footer %} 23 | 24 | 25 | {% end %} 26 | -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi/thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/analyses_packaged/dummypi/thumbnail.png -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi_py/README.md: -------------------------------------------------------------------------------- 1 | This little demo uses two random numbers r1 and r2 and 2 | then does a comparison `r1^2 + r2^2 < 1.0` to figure out whether 3 | the generated point is inside the first quadrant of the unit circle. 4 | -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi_py/analysis.js: -------------------------------------------------------------------------------- 1 | /* global Databench */ 2 | /* global document */ 3 | 4 | const databench = new Databench.Connection(); 5 | Databench.ui.wire(databench); 6 | 7 | databench.on({ data: 'pi' }, (pi) => { 8 | document.getElementById('pi').innerHTML = 9 | `${pi.estimate.toFixed(3)} ± ${pi.uncertainty.toFixed(3)}`; 10 | }); 11 | 12 | databench.connect(); 13 | -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi_py/analysis.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | import math 4 | from random import random 5 | 6 | import databench 7 | import databench_py.singlethread 8 | 9 | import logging 10 | logging.basicConfig(level='DEBUG') 11 | 12 | 13 | class Dummypi_Py(databench.Analysis): 14 | """A dummy analysis.""" 15 | 16 | @databench.on 17 | def connected(self): 18 | yield 
self.data.init({'samples': 100000}) 19 | 20 | @databench.on 21 | def run(self): 22 | """Run when button is pressed.""" 23 | 24 | inside = 0 25 | for draws in range(1, self.data['samples']): 26 | # generate points and check whether they are inside the unit circle 27 | r1, r2 = (random(), random()) 28 | if r1 ** 2 + r2 ** 2 < 1.0: 29 | inside += 1 30 | 31 | if draws % 1000 != 0: 32 | continue 33 | 34 | # debug 35 | yield self.emit('log', {'draws': draws, 'inside': inside}) 36 | 37 | # calculate pi and its uncertainty given the current draws 38 | p = inside / draws 39 | pi = { 40 | 'estimate': 4.0 * inside / draws, 41 | 'uncertainty': 4.0 * math.sqrt(draws * p * (1.0 - p)) / draws, 42 | } 43 | 44 | # send status to frontend 45 | yield self.set_state(pi=pi) 46 | 47 | yield self.emit('log', {'action': 'done'}) 48 | 49 | 50 | if __name__ == "__main__": 51 | analysis = databench_py.singlethread.Meta('dummypi_py', Dummypi_Py) 52 | analysis.event_loop() 53 | -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi_py/index.html: -------------------------------------------------------------------------------- 1 | {% extends "analysis.html" %} 2 | 3 | 4 | {% block head %} 5 | 6 | {% end %} 7 | 8 | 9 | {% block analysis %} 10 |
11 |
12 | 13 | 15 |
16 |
17 | 18 |
19 |
20 |
21 | π = 0.0 ± 1.0 22 |
23 | {% end %} 24 | 25 | 26 | {% block footer %} 27 | 28 | 29 | {% end %} 30 | -------------------------------------------------------------------------------- /databench/analyses_packaged/dummypi_py/thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/analyses_packaged/dummypi_py/thumbnail.png -------------------------------------------------------------------------------- /databench/analyses_packaged/index.yaml: -------------------------------------------------------------------------------- 1 | title: Databench - Packaged Analyses 2 | description: Analyses that come packaged with Databench. 3 | version: 0.0.1 4 | 5 | analyses: 6 | - name: dummypi 7 | title: Dummy π 8 | description: Calculating π the simple way, but this is called dummypi to avoid conflict with simplepi in the databench_examples repo. 9 | watch: 10 | - dummypi/*.js 11 | - dummypi/*.html 12 | - name: scaffold 13 | title: Scaffold 14 | description: A short description of this analysis. 15 | watch: 16 | - scaffold/*.js 17 | - scaffold/*.html 18 | - name: dummypi_py 19 | kernel: py 20 | title: Dummy π with Python Language Kernel 21 | description: Calculating π the simple way, but this is called dummypi to avoid conflict with simplepi in the databench_examples repo. This is using a Python Language Kernel running in a separate process. 22 | watch: 23 | - dummypi_py/*.js 24 | - dummypi_py/*.html 25 | - dummypi_py/*.py 26 | - name: scaffold_py 27 | kernel: py 28 | title: Scaffold_Py 29 | description: A short description of this analysis. 
30 | watch: 31 | - scaffold_py/*.js 32 | - scaffold_py/*.html 33 | - scaffold_py/*.py 34 | -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold/README.md: -------------------------------------------------------------------------------- 1 | This is the text in the `README.md` file. 2 | -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/analyses_packaged/scaffold/__init__.py -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold/analysis.js: -------------------------------------------------------------------------------- 1 | /* global Databench */ 2 | /* global document */ 3 | 4 | // initialize Databench's frontend library 5 | const databench = new Databench.Connection(); 6 | Databench.ui.wire(databench); 7 | 8 | // listen for updates to 'status' in 'data' 9 | databench.on({ data: 'status' }, (status) => { 10 | console.log(`received ${JSON.stringify(status)}`); 11 | document.getElementById('status').innerHTML = status; 12 | }); 13 | 14 | databench.connect(); 15 | -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold/analysis.py: -------------------------------------------------------------------------------- 1 | import databench 2 | 3 | 4 | class Scaffold(databench.Analysis): 5 | 6 | @databench.on 7 | def connected(self): 8 | """Run as soon as a browser connects to this.""" 9 | yield self.data.init(status='Hello World') 10 | -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold/index.html: 
-------------------------------------------------------------------------------- 1 | {% extends "analysis.html" %} 2 | 3 | 4 | {% block head %} 5 | 6 | {% end %} 7 | 8 | 9 | {% block analysis %} 10 |

Analysis Output

11 |

The current status: unknown

12 | {% end %} 13 | 14 | 15 | {% block footer %} 16 | 17 | 18 | {% end %} 19 | -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold/thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/analyses_packaged/scaffold/thumbnail.png -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold_py/README.md: -------------------------------------------------------------------------------- 1 | This is the text in the `README.md` file. 2 | -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold_py/analysis.js: -------------------------------------------------------------------------------- 1 | /* global Databench */ 2 | /* global document */ 3 | 4 | // initialize Databench's frontend library 5 | const databench = new Databench.Connection(); 6 | Databench.ui.wire(databench); 7 | 8 | // listen for updates to 'status' in 'data' 9 | databench.on({ data: 'status' }, (status) => { 10 | console.log(`received ${JSON.stringify(status)}`); 11 | document.getElementById('status').innerHTML = status; 12 | }); 13 | 14 | databench.connect(); 15 | -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold_py/analysis.py: -------------------------------------------------------------------------------- 1 | import databench 2 | import databench_py.singlethread 3 | 4 | 5 | class Scaffold_Py(databench.Analysis): 6 | 7 | @databench.on 8 | def connected(self): 9 | """Run as soon as a browser connects to this.""" 10 | yield self.set_state(status='Hello World') 11 | 12 | 13 | if __name__ == '__main__': 14 | analysis = databench_py.singlethread.Meta('scaffold_py', Scaffold_Py) 15 | analysis.event_loop() 16 | 
-------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold_py/index.html: -------------------------------------------------------------------------------- 1 | {% extends "analysis.html" %} 2 | 3 | 4 | {% block head %} 5 | 6 | {% end %} 7 | 8 | 9 | {% block analysis %} 10 |

Analysis Output

11 |

The current status: unknown

12 | {% end %} 13 | 14 | 15 | {% block footer %} 16 | 17 | 18 | {% end %} 19 | -------------------------------------------------------------------------------- /databench/analyses_packaged/scaffold_py/thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/analyses_packaged/scaffold_py/thumbnail.png -------------------------------------------------------------------------------- /databench/analysis.py: -------------------------------------------------------------------------------- 1 | """Analysis module for Databench.""" 2 | 3 | from __future__ import absolute_import, unicode_literals, division 4 | 5 | from . import utils 6 | from .datastore import Datastore 7 | import inspect 8 | import logging 9 | import random 10 | import string 11 | import tornado.gen 12 | import wrapt 13 | 14 | log = logging.getLogger(__name__) 15 | 16 | 17 | class ActionHandler(object): 18 | """Databench action handler.""" 19 | 20 | def __init__(self, action, f, bound_instance=None): 21 | self.action = action 22 | self.f = f 23 | self.bound_instance = bound_instance 24 | 25 | @tornado.gen.coroutine 26 | def __call__(self, *args, **kwargs): 27 | if self.bound_instance is not None: 28 | return self.f(self.bound_instance, *args, **kwargs) 29 | 30 | return self.f(*args, **kwargs) 31 | 32 | def __get__(self, obj, objtype): 33 | if obj is not None: 34 | # return an ActionHandler that is bound to the given instance 35 | return ActionHandler(self.action, self.f, obj) 36 | 37 | return self 38 | 39 | def code(self): 40 | """Get the source code of the decorated function.""" 41 | return inspect.getsource(self.f) 42 | 43 | 44 | def on(f): 45 | """Decorator for action handlers. 46 | 47 | The action name is inferred from the function name. 
48 | 49 | This also decorates the method with `tornado.gen.coroutine` so that 50 | `~tornado.concurrent.Future` can be yielded. 51 | """ 52 | action = f.__name__ 53 | f.action = action 54 | 55 | @wrapt.decorator 56 | @tornado.gen.coroutine 57 | def _execute(wrapped, instance, args, kwargs): 58 | return wrapped(*args, **kwargs) 59 | 60 | return _execute(f) 61 | 62 | 63 | def on_action(action): 64 | """Decorator for action handlers. 65 | 66 | :param str action: explicit action name 67 | 68 | This also decorates the method with `tornado.gen.coroutine` so that 69 | `~tornado.concurrent.Future` can be yielded. 70 | """ 71 | @wrapt.decorator 72 | @tornado.gen.coroutine 73 | def _execute(wrapped, instance, args, kwargs): 74 | return wrapped(*args, **kwargs) 75 | 76 | _execute.action = action 77 | return _execute 78 | 79 | 80 | class Analysis(object): 81 | """Databench's analysis class. 82 | 83 | This contains the analysis code. Every browser connection corresponds to 84 | an instance of this class. 85 | 86 | **Initialization**: All initializations should be done in 87 | :meth:`.connected`. Instance variables (which should be avoided in favor 88 | of state) should be initialized in the constructor. Some cleanup 89 | can be done in :meth:`.disconnected`. 90 | 91 | **Arguments/Parameters**: Command line arguments are available 92 | at ``cli_args`` and the parameters of the HTTP GET request at 93 | ``request_args``. ``request_args`` is a dictionary of all 94 | arguments. Each value of the dictionary is a list of given values for this 95 | key even if this key only appeared once in the url 96 | (see `urllib.parse.parse_qs`). 97 | 98 | **Actions**: are captured by class method decorated 99 | with `databench.on`. To capture the action 100 | ``run`` that is emitted with the JavaScript code 101 | 102 | .. code-block:: js 103 | 104 | // on the JavaScript frontend 105 | d.emit('run', {my_param: 'helloworld'}); 106 | 107 | use 108 | 109 | .. 
code-block:: python 110 | 111 | # in Python 112 | @databench.on 113 | def run(self, my_param): 114 | pass 115 | 116 | in Python. Lists are treated as positional arguments and objects as keyword 117 | arguments to the function call. 118 | If the message is neither of type `list` nor `dict` (for example a 119 | plain `string` or `float`), the function will be called with that 120 | as its first parameter. 121 | 122 | **Writing to a datastore**: By default, a :class:`Datastore` 123 | scoped to the current analysis instance is created at 124 | ``data``. You can write state updates to it with 125 | 126 | .. code-block:: python 127 | 128 | yield self.set_state(key1=value1) 129 | 130 | Similarly, there is a :class:`Datastore` instance at 131 | ``class_data`` which is 132 | scoped to all instances of this analysis by its class name and state 133 | updates are supported with :meth:`.set_class_state`. 134 | 135 | **Communicating with the frontend**: The default is to change state with 136 | :meth:`.set_state` or :meth:`.set_class_state` and let that 137 | change propagate to all frontends. Directly calling :meth:`.emit` is also 138 | possible. 
139 | 140 | :ivar Datastore data: data scoped for this instance/connection 141 | :ivar Datastore class_data: data scoped across all instances 142 | :ivar list cli_args: command line arguments 143 | :ivar dict request_args: request arguments 144 | """ 145 | 146 | _databench_analysis = True 147 | 148 | def __init__(self): 149 | self.data = None 150 | self.class_data = None 151 | self.cli_args = [] 152 | self.request_args = {} 153 | 154 | def init_databench(self, id_=None): 155 | self.id_ = id_ if id_ else Analysis.__create_id() 156 | self.emit_to_frontend = ( 157 | lambda s, pl: 158 | log.error('emit called before Analysis setup was complete') 159 | ) 160 | self.log_frontend = logging.getLogger(__name__ + '.frontend') 161 | self.log_backend = logging.getLogger(__name__ + '.backend') 162 | 163 | self.init_datastores() 164 | return self 165 | 166 | def init_datastores(self): 167 | """Initialize datastores for this analysis instance. 168 | 169 | This creates instances of :class:`.Datastore` at ``data`` and 170 | ``class_data`` with the datastore domains being the current id 171 | and the class name of this analysis respectively. 172 | 173 | Overwrite this method to use other datastore backends. 174 | """ 175 | self.data = Datastore(self.id_) 176 | self.data.subscribe(lambda data: self.emit('data', data)) 177 | self.class_data = Datastore(type(self).__name__) 178 | self.class_data.subscribe(lambda data: self.emit('class_data', data)) 179 | 180 | @staticmethod 181 | def __create_id(): 182 | return ''.join(random.choice(string.ascii_letters + string.digits) 183 | for _ in range(8)) 184 | 185 | def set_emit_fn(self, emit_fn): 186 | self.emit_to_frontend = emit_fn 187 | return self 188 | 189 | def emit(self, signal, message='__nomessagetoken__'): 190 | """Emit a signal to the frontend. 
191 | 192 | :param str signal: name of the signal 193 | :param message: message to send 194 | :returns: return value from frontend emit function 195 | :rtype: tornado.concurrent.Future 196 | """ 197 | # call pre-emit hooks 198 | if signal == 'log': 199 | self.log_backend.info(message) 200 | elif signal == 'warn': 201 | self.log_backend.warn(message) 202 | elif signal == 'error': 203 | self.log_backend.error(message) 204 | 205 | return self.emit_to_frontend(signal, message) 206 | 207 | """Events.""" 208 | 209 | @on 210 | def connect(self): 211 | pass 212 | 213 | @on 214 | def args(self, cli_args, request_args): 215 | self.cli_args = cli_args 216 | self.request_args = request_args 217 | 218 | @on 219 | def log(self, *args, **kwargs): 220 | self.log_frontend.info(utils.to_string(*args, **kwargs)) 221 | 222 | @on 223 | def warn(self, *args, **kwargs): 224 | self.log_frontend.warn(utils.to_string(*args, **kwargs)) 225 | 226 | @on 227 | def error(self, *args, **kwargs): 228 | self.log_frontend.error(utils.to_string(*args, **kwargs)) 229 | 230 | @on 231 | def connected(self): 232 | """Default handler for "connected" action. 233 | 234 | Overwrite to add behavior. 235 | """ 236 | pass 237 | 238 | @on 239 | def disconnected(self): 240 | """Default handler for "disconnected" action. 241 | 242 | Overwrite to add behavior. 
243 | """ 244 | log.debug('on_disconnected called.') 245 | 246 | @on 247 | def set_state(self, updater=None, **kwargs): 248 | """Set state in Datastore.""" 249 | yield self.data.set_state(updater, **kwargs) 250 | 251 | @on 252 | def set_class_state(self, updater=None, **kwargs): 253 | """Set state in class Datastore.""" 254 | yield self.class_data.set_state(updater, **kwargs) 255 | -------------------------------------------------------------------------------- /databench/analysis_zmq.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import subprocess 4 | import tornado.gen 5 | import zmq 6 | import zmq.eventloop.zmqstream 7 | 8 | from .analysis import Analysis 9 | 10 | log = logging.getLogger(__name__) 11 | 12 | 13 | class AnalysisZMQ(Analysis): 14 | def __init__(self): 15 | pass 16 | 17 | def init_databench(self, id_): 18 | super(AnalysisZMQ, self).init_databench(id_) 19 | self.zmq_handshake = False 20 | return self 21 | 22 | def on_connect(self, executable, zmq_publish): 23 | self.zmq_publish = zmq_publish 24 | 25 | # determine a port_subscribe 26 | context = zmq.Context() 27 | socket = context.socket(zmq.PUB) 28 | port_subscribe = socket.bind_to_random_port( 29 | 'tcp://127.0.0.1', 30 | min_port=3000, max_port=9000, 31 | ) 32 | socket.close() 33 | context.destroy() 34 | log.debug('determined: port_subscribe={}'.format(port_subscribe)) 35 | 36 | # zmq subscription to listen for messages from backend 37 | log.debug('main listening on port: {}'.format(port_subscribe)) 38 | self.zmq_sub_ctx = zmq.Context() 39 | self.zmq_sub = self.zmq_sub_ctx.socket(zmq.SUB) 40 | self.zmq_sub.setsockopt(zmq.SUBSCRIBE, b'') 41 | self.zmq_sub.bind('tcp://127.0.0.1:{}'.format(port_subscribe)) 42 | 43 | self.zmq_stream_sub = zmq.eventloop.zmqstream.ZMQStream( 44 | self.zmq_sub, 45 | tornado.ioloop.IOLoop.current(), 46 | ) 47 | self.zmq_stream_sub.on_recv(self.zmq_listener) 48 | 49 | # launch the language kernel 
process 50 | e_params = executable + [ 51 | '--analysis-id={}'.format(self.id_), 52 | '--zmq-publish={}'.format(port_subscribe), 53 | ] 54 | log.debug('launching: {}'.format(e_params)) 55 | self.kernel_process = subprocess.Popen(e_params, shell=False) 56 | log.debug('finished on_connect for {}'.format(self.id_)) 57 | 58 | def on_disconnected(self): 59 | # In autoreload, this callback needs to be processed synchronously. 60 | log.debug('terminating kernel process {}'.format(self.id_)) 61 | if self.kernel_process is not None: 62 | try: 63 | self.kernel_process.terminate() 64 | except subprocess.OSError: 65 | pass 66 | self.zmq_stream_sub.close() 67 | self.zmq_sub.close() 68 | self.zmq_sub_ctx.destroy() 69 | self.zmq_handshake = False 70 | 71 | def zmq_send(self, data): 72 | self.zmq_publish.send('{}|{}'.format( 73 | self.id_, 74 | json.dumps(data), 75 | ).encode('utf-8')) 76 | 77 | def zmq_listener(self, multipart): 78 | # log.debug('main received multipart: {}'.format(multipart)) 79 | msg = json.loads((b''.join(multipart)).decode('utf-8')) 80 | 81 | # zmq handshake 82 | if '__zmq_handshake' in msg: 83 | self.zmq_handshake = True 84 | self.zmq_send({'__zmq_ack': None}) 85 | return 86 | 87 | # check message is for this analysis 88 | if 'analysis_id' not in msg or \ 89 | msg['analysis_id'] != self.id_: 90 | return 91 | 92 | # execute callback 93 | if 'frame' in msg and \ 94 | 'signal' in msg['frame'] and \ 95 | 'load' in msg['frame']: 96 | self.emit(msg['frame']['signal'], msg['frame']['load']) 97 | -------------------------------------------------------------------------------- /databench/cli.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Databench command line tool. See http://databench.trivial.io for 3 | more info.""" 4 | 5 | from __future__ import absolute_import, print_function 6 | 7 | from . 
import __version__ as DATABENCH_VERSION 8 | import argparse 9 | import logging 10 | import os 11 | import ssl 12 | import sys 13 | import tornado 14 | 15 | 16 | def main(**kwargs): 17 | """Entry point to run databench.""" 18 | 19 | parser = argparse.ArgumentParser(description=__doc__) 20 | parser.add_argument('--version', action='version', 21 | version='%(prog)s {}'.format(DATABENCH_VERSION)) 22 | parser.add_argument('--log', dest='loglevel', default="INFO", 23 | type=str.upper, 24 | help=('log level (info, warning, error, critical or ' 25 | 'debug, default info)')) 26 | parser.add_argument('--no-watch', dest='watch', default=True, 27 | action='store_false', 28 | help='do not watch and restart when files change') 29 | parser.add_argument('--host', dest='host', 30 | default=os.environ.get('HOST', '127.0.0.1'), 31 | help='host address for webserver (default 127.0.0.1)') 32 | parser.add_argument('--port', dest='port', 33 | type=int, default=int(os.environ.get('PORT', 5000)), 34 | help='port for webserver') 35 | if not kwargs: 36 | parser.add_argument('--analyses', default=None, 37 | help='import path for analyses') 38 | parser.add_argument('--build', default=False, action='store_true', 39 | help='run the build command and exit') 40 | parser.add_argument('--coverage', default=False, 41 | help=argparse.SUPPRESS) 42 | 43 | ssl_args = parser.add_argument_group('SSL') 44 | ssl_args.add_argument('--ssl-certfile', dest='ssl_certfile', 45 | default=os.environ.get('SSLCERTFILE'), 46 | help='SSL certificate file') 47 | ssl_args.add_argument('--ssl-keyfile', dest='ssl_keyfile', 48 | default=os.environ.get('SSLKEYFILE'), 49 | help='SSL key file') 50 | ssl_args.add_argument('--ssl-port', dest='ssl_port', type=int, 51 | default=int(os.environ.get('SSLPORT', 0)), 52 | help='SSL port for webserver') 53 | 54 | args, analyses_args = parser.parse_known_args() 55 | 56 | # coverage 57 | cov = None 58 | if args.coverage: 59 | import coverage 60 | cov = 
coverage.Coverage(data_file=args.coverage, data_suffix=True) 61 | cov.start() 62 | 63 | # this is included here so that is included in coverage 64 | from .app import App, SingleApp 65 | 66 | # log 67 | logging.basicConfig(level=getattr(logging, args.loglevel)) 68 | if args.loglevel != 'INFO': 69 | logging.info('Set loglevel to {}.'.format(args.loglevel)) 70 | 71 | # show versions and setup 72 | logging.info('Databench {}'.format(DATABENCH_VERSION)) 73 | if args.host in ('localhost', '127.0.0.1'): 74 | logging.info('Open http://{}:{} in a web browser.' 75 | ''.format(args.host, args.port)) 76 | logging.debug('host={}, port={}'.format(args.host, args.port)) 77 | logging.debug('Python {}'.format(sys.version)) 78 | 79 | if analyses_args: 80 | logging.debug('Arguments passed to analyses: {}'.format(analyses_args)) 81 | 82 | if not kwargs: 83 | app = App(args.analyses, cli_args=analyses_args, debug=args.watch) 84 | else: 85 | app = SingleApp(cli_args=analyses_args, debug=args.watch, **kwargs) 86 | 87 | # check whether this is just a quick build 88 | if args.build: 89 | logging.info('Build mode: only run build command and exit.') 90 | app.build() 91 | if cov: 92 | cov.stop() 93 | cov.save() 94 | return 95 | 96 | # HTTP server 97 | tornado_app = app.tornado_app() 98 | tornado_app.listen(args.port, args.host) 99 | 100 | # HTTPS server 101 | if args.ssl_port: 102 | if args.ssl_certfile and args.ssl_keyfile: 103 | ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) 104 | ssl_ctx.load_cert_chain(args.ssl_certfile, args.ssl_keyfile) 105 | else: 106 | # use Tornado's self signed certificates 107 | module_dir = os.path.dirname(tornado.__file__) 108 | ssl_ctx = { 109 | 'certfile': os.path.join(module_dir, 'test', 'test.crt'), 110 | 'keyfile': os.path.join(module_dir, 'test', 'test.key'), 111 | } 112 | 113 | logging.info('Open https://{}:{} in a web browser.' 
114 | ''.format(args.host, args.ssl_port)) 115 | tornado_app.listen(args.ssl_port, ssl_options=ssl_ctx) 116 | 117 | try: 118 | tornado.ioloop.IOLoop.current().start() 119 | except KeyboardInterrupt: 120 | tornado.ioloop.IOLoop.current().stop() 121 | if cov: 122 | cov.stop() 123 | cov.save() 124 | 125 | 126 | def run(analysis, path=None, name=None, info=None, **kwargs): 127 | """Run a single analysis. 128 | 129 | :param Analysis analysis: Analysis class to run. 130 | :param str path: Path of analysis. Can be `__file__`. 131 | :param str name: Name of the analysis. 132 | :param dict info: Optional entries are ``version``, ``title``, 133 | ``readme``, ... 134 | :param dict static: Map[url regex, root-folder] to serve static content. 135 | """ 136 | kwargs.update({ 137 | 'analysis': analysis, 138 | 'path': path, 139 | 'name': name, 140 | 'info': info, 141 | }) 142 | main(**kwargs) 143 | 144 | 145 | if __name__ == '__main__': 146 | main() 147 | -------------------------------------------------------------------------------- /databench/datastore.py: -------------------------------------------------------------------------------- 1 | from .utils import json_encoder_default 2 | from collections import defaultdict 3 | import json 4 | import logging 5 | 6 | log = logging.getLogger(__name__) 7 | 8 | 9 | def decode(value): 10 | return json.loads(value) 11 | 12 | 13 | def encode(value): 14 | return json.dumps(value, default=json_encoder_default) 15 | 16 | 17 | class Datastore(object): 18 | """Key-value data store. 19 | 20 | An in-memory and in-process (not persistent) key-value store. 21 | 22 | :param str domain: 23 | A namespace for the key values. This can be an analysis instance id for 24 | data local to an analysis instance or the name of an analysis class 25 | for data that is shared across instances of the same analysis. 26 | 27 | :param bool release_storage: 28 | Release storage when the last datastore for a domain closes. 
29 | """ 30 | global_data = defaultdict(dict) # the actual stored data 31 | stores = defaultdict(list) # list of instances by domain 32 | 33 | def __init__(self, domain, release_storage=False): 34 | self.domain = domain 35 | self.release_storage = release_storage 36 | self.callbacks = [] 37 | Datastore.stores[self.domain].append(self) 38 | 39 | @property 40 | def data(self): 41 | return Datastore.global_data[self.domain] 42 | 43 | def subscribe(self, callback): 44 | """Subscribe to changes in the datastore with a callback. 45 | 46 | :param callback: Function with signature ({key: value}) => None. 47 | """ 48 | self.callbacks.append(callback) 49 | return self 50 | 51 | def all_callbacks(self): 52 | return [callback 53 | for datastore in Datastore.stores[self.domain] 54 | for callback in datastore.callbacks] 55 | 56 | def trigger_callbacks(self, key, callbacks=None): 57 | if callbacks is None: 58 | callbacks = self.all_callbacks() 59 | 60 | value = self.get(key) 61 | return [callback({key: value}) for callback in callbacks] 62 | 63 | def trigger_all_callbacks(self, callbacks=None): 64 | """Trigger callbacks for all keys on all or a subset of subscribers. 65 | 66 | :param Iterable callbacks: list of callbacks or none for all subscribed 67 | :rtype: Iterable[tornado.concurrent.Future] 68 | """ 69 | return [ret 70 | for key in self 71 | for ret in self.trigger_callbacks(key, callbacks=None)] 72 | 73 | def get_encoded(self, key): 74 | if key not in self.data: 75 | raise IndexError 76 | return self.data[key] 77 | 78 | def __getitem__(self, key): 79 | """Return entry at key.""" 80 | if key not in self.data: 81 | raise IndexError 82 | return decode(self.data[key]) 83 | 84 | def __setitem__(self, key, value): 85 | """Set value at given key.""" 86 | # TODO(sven): Should this be deprecated for set_state()? 87 | return self.set_state({key: value}) 88 | 89 | def get(self, key, default=None): 90 | """Return entry at key. 91 | 92 | Return a default value if the key is not present. 
93 | """ 94 | if key not in self.data: 95 | return default 96 | return decode(self.data[key]) 97 | 98 | def set(self, key, value): 99 | """Set a value at key and return a Future. 100 | 101 | :rtype: Iterable[tornado.concurrent.Future] 102 | """ 103 | value_encoded = encode(value) 104 | 105 | if key in self.data and self.data[key] == value_encoded: 106 | return [] 107 | 108 | self.data[key] = value_encoded 109 | return self.trigger_callbacks(key) 110 | 111 | def set_state(self, updater=None, **kwargs): 112 | """Update the datastore. 113 | 114 | :param func|dict updater: (state) => state_change or dict state_change 115 | :rtype: Iterable[tornado.concurrent.Future] 116 | """ 117 | if callable(updater): 118 | state_change = updater(self) 119 | elif updater is not None: 120 | state_change = updater 121 | else: 122 | state_change = kwargs 123 | 124 | return [callback_result 125 | for k, v in state_change.items() 126 | for callback_result in self.set(k, v)] 127 | 128 | def __contains__(self, key): 129 | """Test whether key is set.""" 130 | return key in self.data 131 | 132 | def init(self, key_value_pairs=None, **kwargs): 133 | """Initialize datastore. 134 | 135 | Only sets values for keys that are not in the datastore already. 136 | 137 | :param dict key_value_pairs: 138 | A set of key value pairs to use to initialize the datastore. 
139 | 140 | :rtype: Iterable[tornado.concurrent.Future] 141 | """ 142 | if key_value_pairs is None: 143 | key_value_pairs = kwargs 144 | return [self.set(k, v) 145 | for k, v in key_value_pairs.items() 146 | if k not in self] 147 | 148 | def close(self): 149 | """Close and delete instance.""" 150 | 151 | # remove callbacks 152 | Datastore.stores[self.domain].remove(self) 153 | 154 | # delete data after the last instance is gone 155 | if self.release_storage and not Datastore.stores[self.domain]: 156 | del Datastore.global_data[self.domain] 157 | 158 | del self 159 | 160 | def __len__(self): 161 | """Length of the dictionary.""" 162 | return len(self.data) 163 | 164 | def __iter__(self): 165 | """Iterator.""" 166 | return (k for k in self.data.keys()) 167 | 168 | def __repr__(self): 169 | """repr""" 170 | return {k: self[k] for k in self}.__repr__() 171 | 172 | def keys(self): 173 | """Keys.""" 174 | return self.data.keys() 175 | 176 | def values(self): 177 | """Values.""" 178 | return (self[k] for k in self) 179 | 180 | def items(self): 181 | """Items.""" 182 | return ((k, self[k]) for k in self) 183 | -------------------------------------------------------------------------------- /databench/datastore_legacy.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | from .utils import json_encoder_default 4 | from collections import defaultdict 5 | from future.builtins import zip 6 | import json 7 | import logging 8 | 9 | log = logging.getLogger(__name__) 10 | 11 | 12 | def decode(value): 13 | if isinstance(value, DatastoreList): 14 | return value 15 | elif isinstance(value, DatastoreDict): 16 | return value 17 | 18 | return json.loads(value) 19 | 20 | 21 | def encode(value, callback): 22 | if isinstance(value, list): 23 | return DatastoreList(value, callback) 24 | elif isinstance(value, dict): 25 | return DatastoreDict(value, callback) 26 | elif isinstance(value, DatastoreList): 27 | value._change_callback = 
callback 28 | return value 29 | elif isinstance(value, DatastoreDict): 30 | value._change_callback = callback 31 | return value 32 | 33 | return json.dumps(value, default=json_encoder_default) 34 | 35 | 36 | class DatastoreList(object): 37 | """Object wrapper for storing a list in Datastore. 38 | 39 | This triggers callbacks when elements are modified. 40 | """ 41 | def __init__(self, data, callback): 42 | self._change_callback = callback 43 | self.data = [encode(v, self.get_change_trigger(i)) 44 | for i, v in enumerate(data)] 45 | 46 | def trigger_changed(self, i): 47 | return self._change_callback(i) 48 | 49 | def get_change_trigger(self, i): 50 | return lambda _: self.trigger_changed(i) 51 | 52 | def __iter__(self): 53 | """List iterator.""" 54 | return (decode(v) for v in self.data) 55 | 56 | def __getitem__(self, i): 57 | """Get item.""" 58 | return decode(self.data[i]) 59 | 60 | def set(self, i, value): 61 | """Set value at position i and return a Future. 62 | 63 | :rtype: tornado.concurrent.Future 64 | """ 65 | value_encoded = encode(value, self.get_change_trigger(i)) 66 | 67 | if i in self.data and self.data[i] == value_encoded: 68 | return self 69 | 70 | self.data[i] = value_encoded 71 | return self.trigger_changed(i) 72 | 73 | def __setitem__(self, i, value): 74 | """Set value at position i.""" 75 | self.set(i, value) 76 | return self 77 | 78 | def __eq__(self, other): 79 | if not isinstance(other, DatastoreList): 80 | return False 81 | 82 | return (len(self) == len(other) and 83 | all(v1 == v2 for v1, v2 in zip(self.data, other.data))) 84 | 85 | def __len__(self): 86 | return len(self.data) 87 | 88 | def to_native(self): 89 | """Convert to a Python list.""" 90 | return [v.to_native() if hasattr(v, 'to_native') else v for v in self] 91 | 92 | 93 | class DatastoreDict(object): 94 | """Object wrapper for storing a dict in Datastore. 
95 | 96 | :param change_callback: callback whithout arguments 97 | 98 | This trigger then change callback when elements are modified. 99 | """ 100 | def __init__(self, data=None, change_callback=None): 101 | if data is None: 102 | data = {} 103 | if change_callback is None: 104 | change_callback = lambda k: None 105 | 106 | self._change_callback = change_callback 107 | self.data = {k: encode(v, self.get_change_trigger(k)) 108 | for k, v in data.items()} 109 | 110 | def trigger_changed(self, key): 111 | return self._change_callback(key) 112 | 113 | def get_change_trigger(self, key): 114 | return lambda _: self.trigger_changed(key) 115 | 116 | def __getitem__(self, key): 117 | """Return entry at key.""" 118 | if key not in self.data: 119 | raise IndexError 120 | return decode(self.data[key]) 121 | 122 | def get(self, key, default=None): 123 | """Return entry at key. 124 | 125 | Return a default value if the key is not present. 126 | """ 127 | if key not in self.data: 128 | return default 129 | return decode(self.data[key]) 130 | 131 | def get_encoded(self, key): 132 | if key not in self.data: 133 | raise IndexError 134 | return self.data[key] 135 | 136 | def set(self, key, value): 137 | """Set a value at key and return a Future. 
138 | 139 | :rtype: tornado.concurrent.Future 140 | """ 141 | value_encoded = encode(value, self.get_change_trigger(key)) 142 | 143 | if key in self.data and self.data[key] == value_encoded: 144 | return self 145 | 146 | self.data[key] = value_encoded 147 | return self.trigger_changed(key) 148 | 149 | def __setitem__(self, key, value): 150 | """Set a value at key.""" 151 | self.set(key, value) 152 | return self 153 | 154 | def __eq__(self, other): 155 | if not isinstance(other, DatastoreDict): 156 | return False 157 | 158 | keys = set(self.data.keys()) & set(other.data.keys()) 159 | return (len(self) == len(other) == len(keys) and 160 | all(self.data[k] == other.data[k] for k in keys)) 161 | 162 | def __len__(self): 163 | """Length of the dictionary.""" 164 | return len(self.data) 165 | 166 | def __iter__(self): 167 | """Iterator.""" 168 | return (k for k in self.data.keys()) 169 | 170 | def __contains__(self, key): 171 | """Test whether key is set.""" 172 | return key in self.data 173 | 174 | def __delitem__(self, key): 175 | """Delete the given key.""" 176 | del self.data[key] 177 | self.trigger_changed(key) 178 | 179 | def __repr__(self): 180 | """repr""" 181 | return {k: self[k] for k in self}.__repr__() 182 | 183 | def keys(self): 184 | """Keys.""" 185 | return self.data.keys() 186 | 187 | def values(self): 188 | """Values.""" 189 | return (self[k] for k in self) 190 | 191 | def items(self): 192 | """Items.""" 193 | return ((k, self[k]) for k in self) 194 | 195 | def update(self, new_data): 196 | """Update.""" 197 | for k, v in new_data.items(): 198 | self[k] = v 199 | 200 | return self 201 | 202 | def to_native(self): 203 | return {k: v.to_native() if hasattr(v, 'to_native') else v 204 | for k, v in self.items()} 205 | 206 | 207 | class DatastoreLegacy(object): 208 | """Key-value data store. 209 | 210 | An in-memory and in-process (not persistent) key-value store. 211 | 212 | :param domain: 213 | A namespace for the key values. 
This can be an analysis instance id for 214 | data local to an analysis instance or the name of an analysis class 215 | for data that is shared across instances of the same analysis. 216 | 217 | :param bool release_storage: 218 | Release storage when the last datastore for a domain closes. 219 | """ 220 | store = defaultdict(DatastoreDict) 221 | datastores = defaultdict(list) # list of instances by domain 222 | 223 | def __init__(self, domain, release_storage=False): 224 | self.domain = domain 225 | self.release_storage = release_storage 226 | self.change_callbacks = [] 227 | datastore_dict = DatastoreLegacy.store[self.domain] 228 | datastore_dict._change_callback = self.trigger_change_callbacks 229 | DatastoreLegacy.datastores[self.domain].append(self) 230 | 231 | def on_change(self, callback): 232 | """Subscribe to changes in the datastore with a callback. 233 | 234 | Deprecated. Use :meth:`subscribe` instead. 235 | 236 | :param callback: Function that takes in a key and a value. 237 | """ 238 | self.change_callbacks.append(callback) 239 | return self 240 | 241 | def subscribe(self, callback): 242 | """Subscribe to changes in the datastore with a callback. 243 | 244 | :param callback: Function that takes in a key and a value. 
245 | """ 246 | self.change_callbacks.append(callback) 247 | return self 248 | 249 | def trigger_change_callbacks(self, key): 250 | value = DatastoreLegacy.store[self.domain].get(key, None) 251 | return [ 252 | callback(key, value) 253 | for datastore in DatastoreLegacy.datastores[self.domain] 254 | for callback in datastore.change_callbacks 255 | ] 256 | 257 | def trigger_all_change_callbacks(self): 258 | """Trigger all callbacks that were set with on_change().""" 259 | return [ 260 | ret 261 | for key in DatastoreLegacy.store[self.domain].keys() 262 | for ret in self.trigger_change_callbacks(key) 263 | ] 264 | 265 | def set(self, key, value): 266 | """Set value at key and return a Future 267 | 268 | :rtype: tornado.concurrent.Future 269 | """ 270 | return DatastoreLegacy.store[self.domain].set(key, value) 271 | 272 | def __setitem__(self, key, value): 273 | """Set value for given key. 274 | 275 | Allows for assignments of the form ``d[key] = value``. 276 | Callbacks are skipped if the json-encoded value is unchanged. 277 | """ 278 | self.set(key, value) 279 | return self 280 | 281 | def __getitem__(self, key): 282 | """Return the value for the given key.""" 283 | return DatastoreLegacy.store[self.domain][key] 284 | 285 | def __delitem__(self, key): 286 | """Delete the given key.""" 287 | del DatastoreLegacy.store[self.domain][key] 288 | self.trigger_change_callbacks(key) 289 | 290 | def __contains__(self, key): 291 | """Test whether key is set.""" 292 | return key in DatastoreLegacy.store[self.domain] 293 | 294 | def update(self, key_value_pairs): 295 | """Similar to :meth:`dict.update`. 296 | 297 | :param dict key_value_pairs: 298 | A dictionary of key value pairs to update. 299 | """ 300 | DatastoreLegacy.store[self.domain].update(key_value_pairs) 301 | 302 | def init(self, key_value_pairs): 303 | """Initialize datastore. 304 | 305 | Only sets values for keys that are not in the datastore already. 
306 | 307 | :param dict key_value_pairs: 308 | A set of key value pairs to use to initialize the datastore. 309 | """ 310 | for k, v in key_value_pairs.items(): 311 | if k not in DatastoreLegacy.store[self.domain]: 312 | DatastoreLegacy.store[self.domain][k] = v 313 | 314 | def close(self): 315 | """Close and delete instance.""" 316 | 317 | # remove callbacks 318 | DatastoreLegacy.datastores[self.domain].remove(self) 319 | 320 | # delete data after the last instance is gone 321 | if self.release_storage and \ 322 | not DatastoreLegacy.datastores[self.domain]: 323 | del DatastoreLegacy.store[self.domain] 324 | 325 | del self 326 | 327 | def set_state(self, **kwargs): 328 | raise NotImplementedError 329 | -------------------------------------------------------------------------------- /databench/meta.py: -------------------------------------------------------------------------------- 1 | """Analysis module for Databench.""" 2 | 3 | from __future__ import absolute_import, unicode_literals, division 4 | 5 | from . import __version__ as DATABENCH_VERSION 6 | from .analysis import ActionHandler 7 | from .readme import Readme 8 | from .utils import json_encoder_default 9 | from collections import defaultdict 10 | import functools 11 | import glob 12 | import json 13 | import logging 14 | import os 15 | import tornado.gen 16 | import tornado.web 17 | import tornado.websocket 18 | 19 | try: 20 | from urllib.parse import parse_qs # Python 3 21 | except ImportError: 22 | from urlparse import parse_qs # Python 2 23 | 24 | PING_INTERVAL = 15000 25 | log = logging.getLogger(__name__) 26 | 27 | 28 | class Meta(object): 29 | """Meta class referencing an analysis. 30 | 31 | :param str name: Name of this analysis. 32 | :param databench.Analysis analysis_class: 33 | Object that should be instantiated for every new websocket connection. 34 | :param str analysis_path: Path of the analysis class. 35 | :param list extra_routes: [(route, handler, data), ...] 
    def __init__(self, name, analysis_class, analysis_path, extra_routes=None,
                 cli_args=None, main_template='index.html', info=None):
        self.name = name
        self.analysis_class = analysis_class
        self.analysis_path = analysis_path
        self.cli_args = cli_args if cli_args is not None else []

        # detect whether a thumbnail image is present
        thumbnail = False
        thumbnails = glob.glob(os.path.join(self.analysis_path, 'thumbnail.*'))
        if len(thumbnails) >= 1:
            # first match wins (glob order is not guaranteed)
            thumbnail = os.path.basename(thumbnails[0])
        # analysis readme
        readme = Readme(self.analysis_path)
        # defaults; overridable through the ``info`` argument below
        self.info = {
            'title': self.analysis_class.__name__,
            'readme': readme.html,
            'description': readme.text.strip(),
            'show_in_index': True,
            'thumbnail': thumbnail,
            'home_link': False,
            'version': '0.0.0',
        }
        if info is not None:
            self.info.update(info)

        self.fill_action_handlers(analysis_class)

        # Tornado routes for this analysis, relative to its mount point.
        self.routes = [
            (r'static/(.+)', tornado.web.StaticFileHandler,
             {'path': os.path.join(self.analysis_path, 'static')}),

            (r'(analysis\.(?:js|css)).*', tornado.web.StaticFileHandler,
             {'path': self.analysis_path}),

            (r'(thumbnail\.(?:png|jpg|jpeg)).*', tornado.web.StaticFileHandler,
             {'path': self.analysis_path}),

            (r'ws', FrontendHandler,
             {'meta': self}),

            # NOTE(review): named group reconstructed from context —
            # RenderTemplate.get expects a ``template_name`` argument.
            (r'(?P<template_name>.+\.html)', RenderTemplate,
             {'info': self.info, 'path': self.analysis_path}),

            (r'', RenderTemplate,
             {'template_name': main_template,
              'info': self.info, 'path': self.analysis_path}),
        ] + (extra_routes if extra_routes is not None else [])

    @staticmethod
    def fill_action_handlers(analysis_class):
        """Collect action handlers from the analysis class.

        Handlers come from :class:`ActionHandler` attributes, attributes
        carrying an ``action`` attribute, or methods named ``on_<action>``.
        The result is stored on ``analysis_class._action_handlers``.
        """
        analysis_class._action_handlers = defaultdict(list)
        for attr_str in dir(analysis_class):
            attr = getattr(analysis_class, attr_str)

            action = None
            if isinstance(attr, ActionHandler):
                action = attr.action
            elif hasattr(attr, 'action'):
                action = attr.action
            elif attr_str.startswith('on_'):
                # e.g. ``on_connect`` handles the 'connect' action
                action = attr_str[3:]

            if action is None:
                continue

            analysis_class._action_handlers[action].append(attr)
ActionHandler): 96 | action = attr.action 97 | elif hasattr(attr, 'action'): 98 | action = attr.action 99 | elif attr_str.startswith('on_'): 100 | action = attr_str[3:] 101 | 102 | if action is None: 103 | continue 104 | 105 | analysis_class._action_handlers[action].append(attr) 106 | 107 | @staticmethod 108 | @tornado.gen.coroutine 109 | def run_process(analysis, action_name, message='__nomessagetoken__'): 110 | """Executes an action in the analysis with the given message. 111 | 112 | It also handles the start and stop signals in the case that message 113 | is a `dict` with a key ``__process_id``. 114 | 115 | :param str action_name: Name of the action to trigger. 116 | :param message: Message. 117 | :param callback: 118 | A callback function when done (e.g. 119 | :meth:`~tornado.testing.AsyncTestCase.stop` in tests). 120 | :rtype: tornado.concurrent.Future 121 | """ 122 | 123 | if analysis is None: 124 | return 125 | 126 | # detect process_id 127 | process_id = None 128 | if isinstance(message, dict) and '__process_id' in message: 129 | process_id = message['__process_id'] 130 | del message['__process_id'] 131 | 132 | if process_id: 133 | yield analysis.emit('__process', 134 | {'id': process_id, 'status': 'start'}) 135 | 136 | fns = [ 137 | functools.partial(handler, analysis) 138 | for handler in (analysis._action_handlers.get(action_name, []) + 139 | analysis._action_handlers.get('*', [])) 140 | ] 141 | if fns: 142 | args, kwargs = [], {} 143 | 144 | # Check whether this is a list (positional arguments) 145 | # or a dictionary (keyword arguments). 
class FrontendHandler(tornado.websocket.WebSocketHandler):
    """Websocket endpoint connecting one frontend to one Analysis instance."""

    def initialize(self, meta):
        # Explicit imports: the module-level imports only cover
        # tornado.gen/web/websocket, so ioloop and autoreload were
        # previously only available transitively.
        import tornado.autoreload
        import tornado.ioloop

        self.meta = meta
        self.analysis = None
        # periodic pings keep the connection alive (e.g. through proxies)
        self.ping_callback = tornado.ioloop.PeriodicCallback(self.do_ping,
                                                             PING_INTERVAL)
        self.ping_callback.start()
        tornado.autoreload.add_reload_hook(self.on_close)

    def do_ping(self):
        """Send a ping frame; stop pinging once the connection is gone."""
        if self.ws_connection is None:
            self.ping_callback.stop()
            return
        self.ping(b'ping')

    def open(self):
        log.debug('WebSocket connection opened.')

    @tornado.gen.coroutine
    def on_close(self):
        log.debug('WebSocket connection closed.')
        yield self.meta.run_process(self.analysis, 'disconnected')

    @tornado.gen.coroutine
    def on_message(self, message):
        """Dispatch one frontend message.

        A ``__connect`` message instantiates the analysis and runs the
        connect/args/connected sequence; every other message requires an
        analysis and is dispatched by its ``signal`` name.
        """
        if message is None:
            log.debug('empty message received.')
            return

        msg = json.loads(message)
        if '__connect' in msg:
            if self.analysis is not None:
                log.error('Connection already has an analysis. Abort.')
                return

            requested_id = msg['__connect']
            log.debug('Instantiate analysis with id {}'.format(requested_id))
            self.analysis = self.meta.analysis_class()
            self.analysis.init_databench(requested_id)
            self.analysis.set_emit_fn(self.emit)
            # typo fix: was 'instanciated'
            log.info('Analysis {} instantiated.'.format(self.analysis.id_))
            yield self.emit('__connect', {
                'analysis_id': self.analysis.id_,
                'databench_backend_version': DATABENCH_VERSION,
                'analyses_version': self.meta.info['version'],
            })

            yield self.meta.run_process(self.analysis, 'connect')

            args = {'cli_args': self.meta.cli_args, 'request_args': {}}
            if '__request_args' in msg and msg['__request_args']:
                args['request_args'] = parse_qs(
                    msg['__request_args'].lstrip('?'))
            yield self.meta.run_process(self.analysis, 'args', args)

            yield self.meta.run_process(self.analysis, 'connected')
            log.info('Connected to analysis.')
            return

        if self.analysis is None:
            log.warning('no analysis connected. Abort.')
            return

        if 'signal' not in msg:
            log.info('message not processed: {}'.format(message))
            return

        if 'load' in msg:
            yield self.meta.run_process(self.analysis,
                                        msg['signal'], msg['load'])
        else:
            yield self.meta.run_process(self.analysis, msg['signal'])

    def emit(self, signal, message='__nomessagetoken__'):
        """Send a (signal, load) message to the frontend.

        Returns the write Future, or None if the connection is closed.
        """
        data = {'signal': signal}
        if message != '__nomessagetoken__':
            data['load'] = message

        try:
            return self.write_message(
                json.dumps(data, default=json_encoder_default).encode('utf-8'))
        except tornado.websocket.WebSocketClosedError:
            # client went away between dispatch and write; drop silently
            pass


class RenderTemplate(tornado.web.RequestHandler):
    """Render a template from the analysis directory with its meta info."""

    def initialize(self, info, path, template_name=None):
        self.info = info
        self.path = path
        self.template_name = template_name

    def get(self, template_name=None):
        # fall back to the configured default template (e.g. index.html)
        if template_name is None:
            template_name = self.template_name
        self.render(os.path.join(self.path, template_name),
                    databench_version=DATABENCH_VERSION,
                    **self.info)

    def head(self):
        # respond to HEAD requests with headers only
        pass
    def __init__(self, name, executable, zmq_publish,
                 analysis_path, extra_routes, cmd_args=None):
        super(MetaZMQ, self).__init__(name, AnalysisZMQ,
                                      analysis_path, extra_routes, cmd_args)

        # command to start the language kernel process
        self.executable = executable
        # ZMQ publish socket used to send messages to the kernel
        self.zmq_publish = zmq_publish

    @tornado.gen.coroutine
    def run_process(self, analysis, action_name, message='__nomessagetoken__'):
        """Execute an action in the analysis with the given message.

        It also handles the start and stop signals in case a process_id
        is given.
        """

        if action_name == 'connect':
            analysis.on_connect(self.executable, self.zmq_publish)

        # poll until the kernel has completed the ZMQ handshake
        while not analysis.zmq_handshake:
            yield tornado.gen.sleep(0.1)

        log.debug('sending action {}'.format(action_name))
        analysis.zmq_send({'signal': action_name, 'load': message})

        if action_name == 'disconnected':
            # Give kernel time to process disconnected message.
            yield tornado.gen.sleep(0.1)
            analysis.on_disconnected()
31 | 32 | :param bool watch: 33 | Whether to watch for changes in the readme file. 34 | """ 35 | def __init__(self, directory, watch=True): 36 | self.directory = directory 37 | 38 | self._text = None 39 | self._html = None 40 | self.watch = watch 41 | 42 | def _read(self, encoding='utf8', encoding_errors='ignore'): 43 | self._text = '' 44 | self._html = '' 45 | 46 | if not os.path.exists(self.directory): 47 | return 48 | readme_file = [os.path.join(self.directory, n) 49 | for n in os.listdir(self.directory) 50 | if fnmatch.fnmatch(n.lower(), 'readme.*')] 51 | readme_file = readme_file[0] if readme_file else None 52 | if not readme_file: 53 | return 54 | 55 | log.debug('Readme file name: {}'.format(readme_file)) 56 | if self.watch: 57 | tornado.autoreload.watch(readme_file) 58 | 59 | with io.open(readme_file, 'r', 60 | encoding=encoding, errors=encoding_errors) as f: 61 | self._text = f.read() 62 | 63 | if readme_file.lower().endswith('.md'): 64 | if markdown is not None: 65 | self._html = markdown(self._text) 66 | 67 | # remove first html comment 68 | self._text = re.sub('', '', self._text, 69 | count=1, flags=re.DOTALL) 70 | else: 71 | self._html = ( 72 | '

Install markdown with pip install markdown' 73 | ' to render this readme file.

' 74 | ) + self._text # pragma: no cover 75 | 76 | if readme_file.lower().endswith('.rst'): 77 | if rst is not None: 78 | self._html = rst(self._text, 79 | writer_name='html')['html_body'] 80 | else: 81 | self._html = ( 82 | '

Install rst rendering with pip install docutils' 83 | ' to render this readme file.

' 84 | ) + self._text # pragma: no cover 85 | 86 | @property 87 | def text(self): 88 | if self._text is None: 89 | self._read() 90 | 91 | return self._text 92 | 93 | @property 94 | def html(self): 95 | if self._html is None: 96 | self._read() 97 | 98 | return self._html 99 | -------------------------------------------------------------------------------- /databench/scaffold.py: -------------------------------------------------------------------------------- 1 | """Command line tool to scaffold a new analysis environment.""" 2 | 3 | import argparse 4 | from future.builtins import input 5 | import logging 6 | import os 7 | import shutil 8 | 9 | log = logging.getLogger(__name__) 10 | 11 | 12 | def check_folders(name): 13 | """Only checks and asks questions. Nothing is written to disk.""" 14 | 15 | if os.getcwd().endswith('analyses'): 16 | correct = input('You are in an analyses folder. This will create ' 17 | 'another analyses folder inside this one. Do ' 18 | 'you want to continue? (y/N)') 19 | if correct != 'y': 20 | return False 21 | 22 | if not os.path.exists(os.path.join(os.getcwd(), 'analyses')): 23 | correct = input('This is the first analysis here. Do ' 24 | 'you want to continue? (y/N)') 25 | if correct != 'y': 26 | return False 27 | 28 | if os.path.exists(os.path.join(os.getcwd(), 'analyses', name)): 29 | correct = input('An analysis with this name exists already. Do ' 30 | 'you want to continue? (y/N)') 31 | if correct != 'y': 32 | return False 33 | 34 | return True 35 | 36 | 37 | def create_analyses(name, kernel=None): 38 | """Create an analysis with given name and suffix. 39 | 40 | If it does not exist already, it creates the top level analyses folder 41 | and it's __init__.py and index.yaml file. 
def create_analyses(name, kernel=None):
    """Create an analysis with given name and suffix.

    If it does not exist already, it creates the top level analyses folder
    and its ``__init__.py`` and ``index.yaml`` files.

    :param str name: Name of the analysis to register in index.yaml.
    :param kernel: Language kernel name (e.g. ``'py'``) or None.
    """

    analyses_path = os.path.join(os.getcwd(), 'analyses')
    if not os.path.exists(analyses_path):
        # previously shelled out with os.system('mkdir analyses');
        # os.makedirs is portable and raises a clear error on failure
        os.makedirs(analyses_path)

    # __init__.py: empty file marking the folder as a Python package
    init_path = os.path.join(analyses_path, '__init__.py')
    if not os.path.exists(init_path):
        open(init_path, 'w').close()

    # index.yaml
    index_path = os.path.join(analyses_path, 'index.yaml')
    if not os.path.exists(index_path):
        with open(index_path, 'w') as f:
            f.write('title: Analyses\n')
            f.write('description: A short description.\n')
            f.write('version: 0.1.0\n')
            f.write('\n')
            f.write('analyses:\n')

    if kernel is None:
        # only JavaScript analyses are watched for .js/.html changes
        with open(index_path, 'a') as f:
            f.write('  # automatically inserted by scaffold-databench\n')
            f.write('  - name: {}\n'.format(name))
            f.write('    title: {}\n'.format(name.title()))
            f.write('    description: A new analysis.\n')
            f.write('    watch:\n')
            f.write('      - {}/*.js\n'.format(name))
            f.write('      - {}/*.html\n'.format(name))
def copy_scaffold_file(src, dest, name, scaffold_name):
    """Copy one scaffold file to dest, substituting the analysis name.

    Known text files are copied line by line with the scaffold name (and
    its title-cased form) replaced; all other files are copied verbatim.
    """
    if os.path.exists(dest):
        log.warning('File {} exists already. Skipping.'.format(dest))
        return

    text_suffixes = ('.py', '.js', '.html', '.md', '.rst')
    is_text = any(src.endswith(suffix) for suffix in text_suffixes)

    if not is_text:
        # binary copy for unknown file endings
        log.info('Binary copy {} to {}.'.format(src, dest))
        shutil.copyfile(src, dest)
        return

    # text file copy with name substitution
    log.info('Copy {} to {}.'.format(src, dest))
    with open(src, 'r') as source_file:
        content = source_file.readlines()

    renamed = [line.replace(scaffold_name, name)
                   .replace(scaffold_name.title(), name.title())
               for line in content]

    with open(dest, 'w') as target_file:
        target_file.writelines(renamed)


def create_analysis(name, kernel, src_dir, scaffold_name):
    """Create analysis files by copying the scaffold into analyses/<name>."""

    # analysis folder
    folder = os.path.join(os.getcwd(), 'analyses', name)
    if os.path.exists(folder):
        log.warning('Analysis folder {} already exists.'.format(folder))
    else:
        os.makedirs(folder)

    # copy everything except caches and compiled artifacts
    for entry in os.listdir(src_dir):
        if entry == '__pycache__' or entry.endswith('.pyc'):
            continue
        copy_scaffold_file(os.path.join(src_dir, entry),
                           os.path.join(folder, entry),
                           name, scaffold_name)
def main():
    """Command line entry point for scaffold-databench."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('name',
                        help='Name of the analysis to be created.')
    parser.add_argument('--kernel', default=None,
                        help='Language kernel.',
                        # bug fix: was ('py', 'pypsark', 'go') — the typo
                        # made the pyspark kernel impossible to select
                        choices=('py', 'pyspark', 'go'))
    parser.add_argument('-y', dest='yes', default=False, action='store_true',
                        help='Answer all questions with yes. Be careful.')
    args = parser.parse_args()

    if not (args.yes or check_folders(args.name)):
        return

    # sanitize analysis name
    if '-' in args.name:
        # parser.error() raises SystemExit; return is a safeguard only
        parser.error('Analysis names with dashes are not supported '
                     '(because they are not supported in Python names). '
                     'Abort.')
        return

    # this is a hack to obtain the src directory
    import databench.analyses_packaged.scaffold
    src_dir = os.path.dirname(databench.analyses_packaged.__file__)

    if args.kernel in ('py', 'pyspark'):
        scaffold_name = 'scaffold_py'
    else:
        scaffold_name = 'scaffold'
    src_dir = os.path.join(src_dir, scaffold_name)

    create_analyses(args.name, args.kernel)
    create_analysis(args.name, args.kernel, src_dir, scaffold_name)
    log.info("Done.")
line-height: 1.4; 71 | 72 | text-align: center; 73 | white-space: nowrap; 74 | vertical-align: middle; 75 | 76 | background-color: #fff; 77 | border: 1px solid #ccc; 78 | border-radius: 4px; 79 | } 80 | button:hover { 81 | background-color: #f5f5f5; 82 | } 83 | button.disabled { 84 | cursor: not-allowed; 85 | box-shadow: none; 86 | opacity: .65; 87 | } 88 | .btn-primary { 89 | color: #fff; 90 | background-color: #38b; 91 | border-color: #37a; 92 | } 93 | .btn-primary:hover { 94 | background-color: #27a; 95 | } 96 | 97 | label { 98 | font-weight: 700; 99 | } 100 | 101 | .alert { 102 | border:1px solid #aaa; 103 | border-radius: 4px; 104 | margin: 1em 0; 105 | padding: 15px; 106 | } 107 | 108 | .alert-danger { 109 | color: #b44; 110 | border-color: #edd; 111 | background-color: #fee; 112 | } 113 | 114 | 115 | 116 | header { 117 | margin-bottom: 30px; 118 | border-bottom: 1px solid #e5e5e5; 119 | } 120 | header a:hover { 121 | text-decoration: none; 122 | color: #428bca; 123 | } 124 | header img { 125 | height: 26px; 126 | margin-right: 0.5em; 127 | vertical-align: baseline; 128 | } 129 | 130 | main { 131 | margin: 40px 0; 132 | } 133 | 134 | footer { 135 | padding-top: 19px; 136 | color: #777; 137 | font-size: 80%; 138 | border-top: 1px solid #e5e5e5; 139 | } 140 | 141 | footer ul { 142 | list-style-type: none; 143 | padding: 0; 144 | } 145 | 146 | 147 | 148 | 149 | .d-boxes { 150 | display: flex; 151 | flex-wrap: wrap; 152 | margin-left: -10px; 153 | margin-top: -10px; 154 | } 155 | 156 | .d-box { 157 | flex: 1 0 350px; 158 | box-sizing: border-box; 159 | padding: 10px; 160 | margin-left: 10px; 161 | margin-top: 10px; 162 | border: 1px solid #ddd; 163 | border-radius: 4px; 164 | } 165 | 166 | .d-analysis-thumbnail { 167 | float: left; 168 | max-width: 150px; 169 | max-height: 100px; 170 | margin-right: 20px; 171 | } 172 | 173 | 174 | .d-home-link { 175 | position: relative; 176 | top: -30px; 177 | font-size: 85%; 178 | } 179 | 
class Loader(tornado.template.BaseLoader):
    """Template Loader from a list of base directories.

    The first directory containing the requested template wins.
    """

    def __init__(self, root_directories, **kwargs):
        super(Loader, self).__init__(**kwargs)
        self.roots = [os.path.abspath(directory)
                      for directory in root_directories]

    def resolve_path(self, name, parent_path=None):
        # decide once whether parent_path is a relative template location
        # that should be joined onto each root (same for every root)
        relative_parent = (
            parent_path and
            not parent_path.startswith('<') and
            not parent_path.startswith('/') and
            parent_path[1:3] != ':\\' and
            not name.startswith('/')
        )

        for root in self.roots:
            base = os.path.join(root, parent_path) if relative_parent \
                else root
            candidate = os.path.join(base, name)
            if os.path.exists(candidate):
                return candidate

        # fall back to the name itself if nothing matched
        return name

    def _create_template(self, name):
        with open(name, 'rb') as template_file:
            return tornado.template.Template(
                template_file.read(), name=name, loader=self)
9 | 10 | {% block analysis %}{% end %} 11 | {% block log %}
{% end %}
12 | {% end %}
13 | 


--------------------------------------------------------------------------------
/databench/templates/base.html:
--------------------------------------------------------------------------------
 1 | 
 2 | 
 3 |     
 4 |         
 5 |         
 6 |         {{ title }}
 7 |         
 8 |         
 9 | 
10 |         
11 |         {% block head %}{% end %}
12 |     
13 |     
14 |         
15 |

16 | {{ title }} 17 |

18 |
19 | 20 |
21 | {% block content %} 22 | {% end %} 23 |
24 | 25 | 29 | 30 | {% block footer %}{% end %} 31 | 32 | 33 | -------------------------------------------------------------------------------- /databench/templates/index.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | 4 | {% block head %} 5 | 6 | {% end %} 7 | 8 | 9 | {% block content %} 10 | {% if description_html %}{% raw description_html %}{% end %} 11 | 12 |

List of Analyses

13 |
14 | {% for a in [aa for aa in meta_infos if aa['show_in_index']] %} 15 | 16 | {% if a['thumbnail'] %} 17 | analysis thumbnail 18 | {% end %} 19 | {{a['title']}}{% if a['description'] %}: {{ a['description'] }}{% end %} 20 | 21 | {% end %} 22 |
23 | {% end %} 24 | 25 | 26 | {% block footer %} 27 | 30 | {% end %} 31 | -------------------------------------------------------------------------------- /databench/testing.py: -------------------------------------------------------------------------------- 1 | from .meta import Meta 2 | 3 | 4 | class AnalysisTest(object): 5 | """Unit test wrapper for an analysis. 6 | 7 | :param databench.Analysis analysis: The analysis to test. 8 | :param str cli_args: Command line interface arguments. 9 | :param str request_args: Request arguments. 10 | :param meta: An object with a `run_process` attribute. 11 | 12 | Trigger actions using the `~.trigger` method. 13 | All outgoing messages to the frontend are captured in `emitted_messages`. 14 | 15 | There are two main options for constructing tests: decorating with 16 | `tornado.testing.gen_test` and yielding `~tornado.concurrent.Future` 17 | objects (block until 18 | future is done) or to use :meth:`~tornado.testing.AsyncTestCase.wait` and 19 | :meth:`~tornado.testing.AsyncTestCase.stop` in callbacks. 20 | For detailed information on ioloops within the Tornado testing framework, 21 | please consult `tornado.testing`. 22 | 23 | :ivar list cli_args: command line arguments 24 | :ivar dict request_args: request arguments 25 | :ivar list emitted_messages: all emitted (``signal``, ``message``) pairs 26 | 27 | Examples: 28 | 29 | .. 
    def __init__(self, analysis, cli_args=None, request_args=None, meta=None):
        # the analysis class and one instance under test
        self.analysis = analysis
        self.analysis_instance = analysis()
        self.cli_args = cli_args
        self.request_args = request_args
        self.meta = meta or Meta
        self.emitted_messages = []

        Meta.fill_action_handlers(analysis)

        # initialize: replay the action sequence a real websocket
        # connection would trigger (connect, args, connected)
        self.analysis_instance.init_databench()
        self.analysis_instance.set_emit_fn(self.emulate_emit_to_frontend)
        self.trigger('connect')
        self.trigger('args', [cli_args, request_args])
        self.trigger('connected')

    def emulate_emit_to_frontend(self, signal, message):
        # capture outgoing messages instead of sending them to a frontend
        self.emitted_messages.append((signal, message))

    def trigger(self, action_name, message='__nomessagetoken__', **kwargs):
        """Trigger an `on` callback.

        :param str action_name: Name of the action to trigger.
        :param message: Message passed to the handler.
        :rtype: tornado.concurrent.Future
        """
        return self.meta.run_process(
            self.analysis_instance, action_name, message, **kwargs)
class CliArgs(databench.Analysis):
    """Expose the command line arguments to the frontend (test analysis)."""

    def on_connected(self):
        """Run as soon as a browser connects to this."""
        self.data['cli_args'] = self.cli_args

Analysis Output

6 |

Command line arguments: unknown

class ConnectionInterruption(databench.Analysis):
    """Count state updates to verify reconnects restore analysis state."""

    def on_connect(self):
        # initialize the counter once per analysis instance
        self.data['count'] = 0

    def on_state(self, state):
        # store the state and count how often it was set; per the README,
        # a reconnect must continue from the previous count, not reset it
        self.data['state'] = state
        self.data['count'] += 1

Analysis Output

6 |

The current state: unknown

7 |

The current count: unknown

8 | {% end %} 9 | 10 | 11 | 12 | {% block footer %} 13 | 40 | {% end %} 41 | -------------------------------------------------------------------------------- /databench/tests/analyses/footer.html: -------------------------------------------------------------------------------- 1 |

THE END

2 | -------------------------------------------------------------------------------- /databench/tests/analyses/head.html: -------------------------------------------------------------------------------- 1 | 6 | -------------------------------------------------------------------------------- /databench/tests/analyses/index.yaml: -------------------------------------------------------------------------------- 1 | description: Analyses for unit tests. 2 | build: touch build_test.txt 3 | static: 4 | - static/(.*): static/ 5 | - node_modules/(test_file\.txt): node_modules/ 6 | 7 | analyses: 8 | - name: parameters 9 | title: Parameters 10 | description: An analysis for unit testing action parameters. 11 | - name: parameters_py 12 | kernel: py 13 | title: Parameters with Python Kernel 14 | description: An analysis for unit testing action parameters. 15 | - name: cliargs 16 | title: Command Line Arguments 17 | - name: requestargs 18 | title: RequestArgs 19 | description: An analysis for unit testing action parameters. 
20 | - name: simple1 21 | title: Simple1 22 | watch: 23 | - simple1/*.md 24 | - name: simple1_py 25 | kernel: py 26 | title: Simple1 with Python Kernel 27 | watch: 28 | - simple1_py/*.html 29 | - simple1_py/*.py 30 | - name: simple1_pyspark 31 | kernel: pyspark 32 | title: Simple1 with PySpark Kernel 33 | watch: 34 | - simple1_pyspark/*.html 35 | - simple1_pyspark/*.py 36 | - name: simple2 37 | title: Simple2 38 | - name: simple3 39 | title: Simple3 40 | - name: connection_interruption 41 | title: Connection Interruption 42 | -------------------------------------------------------------------------------- /databench/tests/analyses/node_modules/test_file.txt: -------------------------------------------------------------------------------- 1 | placeholder 2 | -------------------------------------------------------------------------------- /databench/tests/analyses/parameters/README.md: -------------------------------------------------------------------------------- 1 | This is the text in the `README.md` file. 2 | This analysis is only used in unit tests. 
class Parameters(databench.Analysis):
    """Test analysis that exercises action-handler parameter passing."""

    @databench.on
    def test_fn(self, first_param, second_param=100):
        """Send both received parameters back to the frontend."""
        response = (first_param, second_param)
        yield self.emit('test_fn', response)

    @databench.on
    def test_action(self):
        """Handle an action that carries no message payload."""
        yield self.emit('test_action_ack')

    @databench.on
    def test_state(self, key, value):
        """Write a key-value pair into the analysis state."""
        yield self.set_state({key: value})

    @databench.on
    def test_set_data(self, key, value):
        """Write a key-value pair into the datastore."""
        yield self.data.set(key, value)

    @databench.on
    def test_class_data(self, key, value):
        """Write a key-value pair into the class-level datastore."""
        yield self.class_data.set(key, value)
class Parameters_Py(databench.Analysis):
    """Python-kernel variant of the parameter-passing test analysis."""

    @databench.on
    def test_fn(self, first_param, second_param=100):
        """Send both received parameters back to the frontend."""
        response = (first_param, second_param)
        yield self.emit('test_fn', response)

    @databench.on
    def test_action(self):
        """Handle an action that carries no message payload."""
        yield self.emit('test_action_ack')

    @databench.on
    def test_state(self, key, value):
        """Write a key-value pair into the analysis state."""
        yield self.set_state({key: value})

    @databench.on
    def test_set_data(self, key, value):
        """Write a key-value pair into the datastore."""
        yield self.data.set(key, value)

    @databench.on
    def test_class_data(self, key, value):
        """Write a key-value pair into the class-level datastore."""
        yield self.class_data.set(key, value)


if __name__ == "__main__":
    # Run as a standalone single-threaded kernel process.
    meta = databench_py.singlethread.Meta('parameters_py', Parameters_Py)
    meta.event_loop()
class RequestArgs(databench.Analysis):
    """Test analysis that reflects HTTP request arguments back to the client."""

    def on_connected(self):
        """Emit the request arguments as soon as the connection is up."""
        args = self.request_args
        self.emit('echo_request_args', args)
class Simple1(databench.Analysis):
    """Minimal test analysis with a status field and an echo action."""

    def on_connect(self):
        """Initialize the status as soon as a browser connects."""
        now_iso = datetime.datetime.now().isoformat()
        self.data['status'] = 'ready since {}'.format(now_iso)

    def on_ack(self, msg):
        """Handle the 'ack' action by updating the status."""
        self.data['status'] = 'acknowledged'

    def on_test_fn(self, first_param, second_param=100):
        """Echo the received parameters back to the frontend."""
        response = {
            'first_param': first_param,
            'second_param': second_param,
        }
        self.emit('test_fn', response)

Analysis Output

6 |

The current status: unknown

class Simple1_Py(databench_py.Analysis):
    """Python-kernel variant of the Simple1 test analysis."""

    def on_connect(self):
        """Initialize the status (after a short delay) on browser connect."""
        time.sleep(1)
        now_iso = datetime.datetime.now().isoformat()
        self.data['status'] = 'ready since {}'.format(now_iso)

    def on_ack(self, msg):
        """Handle the 'ack' action by updating the status."""
        time.sleep(1)
        self.data['status'] = 'acknowledged'

    def on_test_fn(self, first_param, second_param=100):
        """Echo the received parameters back to the frontend."""
        response = {
            'first_param': first_param,
            'second_param': second_param,
        }
        self.emit('test_fn', response)


if __name__ == "__main__":
    # Run as a standalone single-threaded kernel process.
    meta = databench_py.singlethread.Meta('simple1_py', Simple1_Py)
    meta.event_loop()

Analysis Output

6 |

The current status: unknown

class Simple1_Pyspark(databench_py.Analysis):
    """PySpark-kernel variant of the Simple1 test analysis."""

    def on_connect(self):
        """Initialize the status (after a short delay) on browser connect."""
        time.sleep(1)
        now_iso = datetime.datetime.now().isoformat()
        self.data['status'] = 'ready since {}'.format(now_iso)

    def on_ack(self, msg):
        """Handle the 'ack' action by updating the status."""
        time.sleep(1)
        self.data['status'] = 'acknowledged'


if __name__ == "__main__":
    # Run as a standalone single-threaded kernel process.
    meta = databench_py.singlethread.Meta('simple1_pyspark', Simple1_Pyspark)
    meta.event_loop()

Analysis Output

6 |

The current status: unknown

class Simple2(databench.Analysis):
    """Test analysis with a delayed status update and an 'ack' action."""

    def on_connect(self):
        """Initialize the status (after a short delay) on browser connect."""
        time.sleep(1)
        now_iso = datetime.datetime.now().isoformat()
        self.data['status'] = 'ready since {}'.format(now_iso)

    def on_ack(self, msg):
        """Handle the 'ack' action by updating the status."""
        time.sleep(1)
        self.data['status'] = 'acknowledged'

Analysis Output

6 |

The current status: unknown

class PostHandler(RequestHandler):
    """Echo back the 'data' form argument of a POST request."""

    def post(self):
        data = self.get_argument('data', 'No data received')
        self.write(data)


class GetHandler(RequestHandler):
    """Respond to GET requests with a fixed confirmation string."""

    def get(self):
        self.write('done')


# Extra (route suffix, handler, init kwargs) entries served alongside
# the analysis.
ROUTES = [
    ('post', PostHandler, {}),
    ('get', GetHandler, {}),
]

Analysis Output

6 |

The current status: unknown

7 | {% end %} 8 | 9 | 10 | 11 | {% block footer %} 12 | 27 | {% end %} 28 | -------------------------------------------------------------------------------- /databench/tests/analyses/static/test_file.txt: -------------------------------------------------------------------------------- 1 | placeholder 2 | -------------------------------------------------------------------------------- /databench/tests/analyses_broken/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/tests/analyses_broken/__init__.py -------------------------------------------------------------------------------- /databench/tests/analyses_broken/doesnotexist/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/tests/analyses_broken/doesnotexist/__init__.py -------------------------------------------------------------------------------- /databench/tests/analyses_broken/index.yaml: -------------------------------------------------------------------------------- 1 | title: Broken Analyses 2 | build: touch build_test.txt 3 | 4 | analyses: 5 | - name: doesnotexist 6 | title: Does Not Exist 7 | - name: doesnotexistatall 8 | title: Does Not Exist At All 9 | - title: noname 10 | -------------------------------------------------------------------------------- /databench/tests/standalone/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/databench/tests/standalone/__init__.py -------------------------------------------------------------------------------- /databench/tests/standalone/analysis.js: -------------------------------------------------------------------------------- 1 | /* 
/* global Databench */
/* global document */

// Standalone frontend: open the connection and wire the default UI bindings.
const connection = new Databench.Connection();
Databench.ui.wire(connection);

// Render "estimate ± uncertainty" whenever the 'pi' data field changes.
connection.on({ data: 'pi' }, (pi) => {
  const text = `${pi.estimate.toFixed(3)} ± ${pi.uncertainty.toFixed(3)}`;
  document.getElementById('pi').innerHTML = text;
});

connection.connect();
11 | 12 | 13 |
14 |
15 | 16 |
17 | 18 |
19 | π = 0.0 ± 1.0 20 |
class DummyPi(databench.Analysis):
    """Monte Carlo estimate of pi, used as a standalone test analysis."""

    @databench.on
    def connected(self):
        """Provide a default sample count when a client connects."""
        yield self.data.init({'samples': 100000})

    @databench.on
    def run(self):
        """Run the Monte Carlo simulation (triggered from the frontend)."""
        inside = 0
        for draws in range(1, self.data['samples']):
            # Sample a point in the unit square and count hits inside
            # the quarter circle of radius one.
            x = random.random()
            y = random.random()
            if x ** 2 + y ** 2 < 1.0:
                inside += 1

            # Only report back every 1000 draws.
            if draws % 1000 != 0:
                continue

            # Debug log for the frontend.
            yield self.emit('log', {'draws': draws, 'inside': inside})

            # Current estimate of pi and its statistical uncertainty.
            fraction = inside / draws
            pi = {
                'estimate': 4.0 * fraction,
                'uncertainty': (4.0 *
                                math.sqrt(draws * fraction *
                                          (1.0 - fraction)) / draws),
            }

            # Push the updated estimate to the frontend.
            yield self.set_state(pi=pi)

        yield self.emit('log', {'action': 'done'})

    @databench.on
    def samples(self, value):
        """Update the number of samples to draw."""
        yield self.set_state(samples=value)
import databench

# The exact docstring text is asserted below, so it must stay unchanged
# on the decorated functions.
EXPECTED_DOC = 'Function with docstring.'


@databench.on
def fn_with_doc():
    """Function with docstring."""
    pass


@databench.on_action('bla')
def fn_with_doc_2():
    """Function with docstring."""
    pass


def test_action_decorator_docstring():
    """The @databench.on decorator must preserve the docstring."""
    assert fn_with_doc.__doc__ == EXPECTED_DOC


def test_action_decorator_docstring_2():
    """The @databench.on_action decorator must preserve the docstring."""
    assert fn_with_doc_2.__doc__ == EXPECTED_DOC
self.assertEqual([ 27 | ('__process', {'id': 3, 'status': 'start'}), 28 | ('data', {'light': 'red'}), 29 | ('__process', {'id': 3, 'status': 'end'}), 30 | ], test.emitted_messages) 31 | 32 | @tornado.testing.gen_test 33 | def test_multiple_emits(self): 34 | test = AnalysisTest(Dummypi) 35 | yield test.trigger('run') 36 | self.assertIn(('log', {'action': 'done'}), test.emitted_messages) 37 | -------------------------------------------------------------------------------- /databench/tests/test_build.py: -------------------------------------------------------------------------------- 1 | import databench 2 | import os 3 | import subprocess 4 | import time 5 | import unittest 6 | 7 | 8 | _, ANALYSES_PATH = databench.App.get_analyses('databench.tests.analyses') 9 | _, ANALYSES_BROKEN_PATH = databench.App.get_analyses( 10 | 'databench.tests.analyses_broken') 11 | 12 | 13 | class Build(unittest.TestCase): 14 | def file_id(self, filename=None): 15 | if filename is None: 16 | filename = os.path.join(ANALYSES_PATH, 'build_test.txt') 17 | if os.path.exists(filename): 18 | return '{}'.format(os.path.getmtime(filename)) 19 | return 'file_not_found' 20 | 21 | def setUp(self): 22 | self.original_working_dir = os.getcwd() # original working directory 23 | self.coverage_file = os.path.join(os.getcwd(), '.coverage') 24 | 25 | def tearDown(self): 26 | os.chdir(self.original_working_dir) 27 | 28 | def test_build(self): 29 | before = self.file_id() 30 | 31 | os.chdir(os.path.join(ANALYSES_PATH, '..', '..')) 32 | subprocess.check_call(['databench', '--build', 33 | '--analyses', 'tests.analyses', 34 | '--coverage', self.coverage_file]) 35 | os.chdir(self.original_working_dir) 36 | 37 | time.sleep(1) 38 | after = self.file_id() 39 | self.assertNotEqual(before, after) 40 | 41 | def test_cwd_outside_analyses(self): 42 | before = self.file_id() 43 | 44 | os.chdir(os.path.join(ANALYSES_PATH, '..')) 45 | subprocess.check_call(['databench', '--build', 46 | '--coverage', self.coverage_file]) 47 
| os.chdir(self.original_working_dir) 48 | 49 | time.sleep(1) 50 | after = self.file_id() 51 | self.assertNotEqual(before, after) 52 | 53 | def test_cwd_inside_analyses(self): 54 | before = self.file_id() 55 | 56 | os.chdir(ANALYSES_PATH) 57 | subprocess.check_call(['databench', '--build', 58 | '--coverage', self.coverage_file]) 59 | os.chdir(self.original_working_dir) 60 | 61 | time.sleep(1) 62 | after = self.file_id() 63 | self.assertNotEqual(before, after) 64 | 65 | def test_broken_analyses(self): 66 | build_file = os.path.join(ANALYSES_BROKEN_PATH, 'build_test.txt') 67 | before = self.file_id(build_file) 68 | 69 | os.chdir(os.path.join(ANALYSES_BROKEN_PATH, '..', '..')) 70 | subprocess.check_call(['databench', '--build', 71 | '--analyses', 'tests.analyses_broken', 72 | '--coverage', self.coverage_file]) 73 | os.chdir(self.original_working_dir) 74 | 75 | time.sleep(1) 76 | after = self.file_id(build_file) 77 | self.assertNotEqual(before, after) 78 | -------------------------------------------------------------------------------- /databench/tests/test_datastore.py: -------------------------------------------------------------------------------- 1 | import databench 2 | import unittest 3 | 4 | 5 | class Datastore(unittest.TestCase): 6 | def setUp(self): 7 | self.n_callbacks = 0 8 | self.d = databench.Datastore('abcdef') 9 | self.d.subscribe(self.datastore_callback) 10 | self.after = {} 11 | 12 | def tearDown(self): 13 | self.d.close() 14 | 15 | def datastore_callback(self, key_value): 16 | self.n_callbacks += 1 17 | print('changed to {}'.format(key_value)) 18 | self.after.update(key_value) 19 | return 'callback return' 20 | 21 | def test_simple(self): 22 | self.d.set('test', 'trivial') 23 | self.assertEqual(self.after, {'test': 'trivial'}) 24 | 25 | def test_init(self): 26 | self.d.set('test', 'before-init') 27 | self.d.init({'unset_test': 'init'}) 28 | self.assertEqual(self.d['test'], 'before-init') 29 | self.assertEqual(self.d['unset_test'], 'init') 30 | 31 | 
def test_del_callback(self): 32 | self.n_callback2 = 0 33 | 34 | def callback2(key_value): 35 | self.n_callback2 += 1 36 | 37 | d2 = databench.Datastore('abcdef').subscribe(callback2) 38 | self.d.set('test', 'del-callback') 39 | d2.close() 40 | self.d.set('test', 'del-callback2') 41 | self.assertEqual(self.n_callback2, 1) 42 | 43 | def test_list(self): 44 | self.d.set('test', ['list']) 45 | self.assertEqual(self.after['test'], ['list']) 46 | 47 | def test_list_change(self): 48 | self.d.set('test', ['list']) 49 | self.d.set('test', ['list2']) 50 | self.assertEqual(self.after['test'], ['list2']) 51 | 52 | def test_list_change_element(self): 53 | self.d.set('test', ['list']) 54 | self.d.set('test', ['modified list']) 55 | self.assertEqual(self.after['test'], ['modified list']) 56 | 57 | def test_dict(self): 58 | self.d.set('test', {'key': 'value'}) 59 | self.assertEqual(self.after, {'test': {'key': 'value'}}) 60 | 61 | def test_dict_change(self): 62 | self.d.set('test', {'key': 'value'}) 63 | self.d.set('test', {'key': 'value2'}) 64 | self.assertEqual(self.after, {'test': {'key': 'value2'}}) 65 | 66 | def test_dict_change_element(self): 67 | self.d.set('test', {'key': 'value'}) 68 | self.d.set('test', {'key': 'modified value'}) 69 | self.assertEqual(self.after, {'test': {'key': 'modified value'}}) 70 | 71 | def test_dict_change_element2(self): 72 | self.d.set('test', {'key': 'value'}) 73 | all = self.d['test'] 74 | all['key'] = 'modified value' 75 | self.d.set('test', all) 76 | self.assertEqual(self.after, {'test': {'key': 'modified value'}}) 77 | 78 | def test_dict_change_element3(self): 79 | self.d.set('test', {'key': {'key2': 'value'}}) 80 | all = self.d['test'] 81 | all['key']['key2'] = 'modified' 82 | self.d.set('test', all) 83 | self.assertEqual(self.after, {'test': {'key': {'key2': 'modified'}}}) 84 | 85 | def test_dict_overwrite(self): 86 | self.d.set('test', {'key': 'value'}) 87 | self.d.set('test', {'key': 'modified value'}) 88 | self.assertEqual(self.after, 
{'test': {'key': 'modified value'}}) 89 | 90 | def test_dict_overwrite2(self): 91 | self.d.set('test', {'e': 1}) 92 | self.d.set('test', {'e': 1, 'h': 1}) 93 | self.assertEqual(self.after, {'test': {'e': 1, 'h': 1}}) 94 | 95 | def test_dict_update(self): 96 | self.d.set_state({'key': 'value'}) 97 | self.d.set_state({'key': 'mod', 'key2': 'new'}) 98 | self.assertEqual(self.after, {'key': 'mod', 'key2': 'new'}) 99 | 100 | def test_cycle(self): 101 | self.d.set('test', {'key': 'value'}) 102 | n_callbacks_before = self.n_callbacks 103 | test = self.d['test'] 104 | test['key'] = 'modified' 105 | self.d.set('test', test) 106 | self.assertEqual(self.n_callbacks, n_callbacks_before + 1) 107 | self.assertEqual(self.after['test'], {'key': 'modified'}) 108 | 109 | def test_cycle2(self): 110 | self.d.set('test', {'key': {'key2': 'value'}}) 111 | n_callbacks_before = self.n_callbacks 112 | self.d.set('test', {'key': {'key2': 'modified'}}) 113 | self.assertEqual(self.n_callbacks, n_callbacks_before + 1) 114 | self.assertEqual(self.after['test'], {'key': {'key2': 'modified'}}) 115 | 116 | def test_contains(self): 117 | self.d.set('test', 'contains') 118 | assert 'test' in self.d 119 | assert 'never-used-test' not in self.d 120 | 121 | def test_set_return(self): 122 | ret = self.d.set('test', 'set_return') 123 | assert 'test' in self.d 124 | self.assertEqual(ret, ['callback return']) 125 | 126 | def test_setstate(self): 127 | self.d.set('test', 'setstate') 128 | self.d.set_state({'test': 'modified'}) 129 | self.assertEqual(self.after['test'], 'modified') 130 | 131 | def test_setstate_multiple(self): 132 | self.d.set('test', 'setstate') 133 | f = self.d.set_state({'test': 'setstate_m', 134 | 'test2': 'setstate_modified2'}) 135 | self.assertEqual(self.after['test'], 'setstate_m') 136 | self.assertEqual(self.after['test2'], 'setstate_modified2') 137 | self.assertEqual(f, ['callback return', 'callback return']) 138 | 139 | def test_setstate_modify_multiple(self): 140 | 
self.d.set('test', 'setstate') 141 | f1 = self.d.set_state({'test': 'original', 'test2': 'original2'}) 142 | f2 = self.d.set_state({'test': 'original', 'test2': 'modified2'}) 143 | self.assertEqual(self.after['test'], 'original') 144 | self.assertEqual(self.after['test2'], 'modified2') 145 | self.assertEqual(f1, ['callback return', 'callback return']) 146 | self.assertEqual(f2, ['callback return']) 147 | 148 | def test_setstate_fn(self): 149 | self.d.set('test', 'setstate') 150 | self.d.set('cnt', 2) 151 | self.d.set_state(lambda ds: {'test': 'modified{}'.format(ds['cnt'])}) 152 | self.assertEqual(self.after['test'], 'modified2') 153 | 154 | def test_analysis_datastore(self): 155 | a = databench.Analysis().init_databench() 156 | a.set_emit_fn(lambda s, pl: None) 157 | a.data.subscribe(self.datastore_callback) 158 | a.data['test'] = 'analysis_datastore' 159 | 160 | print(self.after) 161 | self.assertEqual(self.after['test'], 'analysis_datastore') 162 | 163 | 164 | class DatastoreLegacy(unittest.TestCase): 165 | def setUp(self): 166 | self.n_callbacks = 0 167 | self.after = None 168 | self.d = databench.DatastoreLegacy('abcdef') 169 | self.d.on_change(self.datastore_callback) 170 | 171 | def tearDown(self): 172 | self.d.close() 173 | 174 | def datastore_callback(self, key, value): 175 | self.n_callbacks += 1 176 | print('{} changed to {}'.format(key, value)) 177 | self.after = value 178 | return 'callback return' 179 | 180 | def test_simple(self): 181 | self.d['test'] = 'trivial' 182 | self.assertEqual(self.after, 'trivial') 183 | 184 | def test_init(self): 185 | self.d['test'] = 'before-init' 186 | self.d.init({'unset_test': 'init'}) 187 | self.assertEqual(self.d['test'], 'before-init') 188 | self.assertEqual(self.d['unset_test'], 'init') 189 | 190 | def test_del_callback(self): 191 | self.n_callback2 = 0 192 | 193 | def callback2(key, value): 194 | self.n_callback2 += 1 195 | 196 | d2 = databench.DatastoreLegacy('abcdef').on_change(callback2) 197 | self.d['test'] = 
'del-callback' 198 | d2.close() 199 | self.d['test'] = 'del-callback2' 200 | self.assertEqual(self.n_callback2, 1) 201 | 202 | def test_update(self): 203 | self.d.update({'test': 'update'}) 204 | self.assertEqual(self.after, 'update') 205 | 206 | def test_delete(self): 207 | self.d.update({'test': 'delete'}) 208 | self.assertEqual(self.after, 'delete') 209 | del self.d['test'] 210 | self.assertNotIn('test', self.d) 211 | 212 | def test_list(self): 213 | self.d['test'] = ['list'] 214 | self.assertEqual(list(self.after), ['list']) 215 | 216 | def test_list_change(self): 217 | self.d['test'] = ['list'] 218 | self.d['test'] = ['list2'] 219 | self.assertEqual(list(self.after), ['list2']) 220 | 221 | def test_list_change_element(self): 222 | self.d['test'] = ['list'] 223 | self.d['test'][0] = 'modified list' 224 | self.assertEqual(list(self.after), ['modified list']) 225 | 226 | def test_dict(self): 227 | self.d['test'] = {'key': 'value'} 228 | self.assertEqual(self.after.to_native(), {'key': 'value'}) 229 | 230 | def test_dict_change(self): 231 | self.d['test'] = {'key': 'value'} 232 | self.d['test'] = {'key': 'value2'} 233 | self.assertEqual(self.after.to_native(), {'key': 'value2'}) 234 | 235 | def test_dict_change_element(self): 236 | self.d['test'] = {'key': 'value'} 237 | self.d['test']['key'] = 'modified value' 238 | self.assertEqual(self.after.to_native(), {'key': 'modified value'}) 239 | 240 | def test_dict_change_element2(self): 241 | self.d['test'] = {'key': 'value'} 242 | all = self.d['test'] 243 | all['key'] = 'modified value' 244 | self.d['test'] = all 245 | self.assertEqual(self.after.to_native(), {'key': 'modified value'}) 246 | 247 | def test_dict_change_element3(self): 248 | self.d['test'] = {'key': {'key2': 'value'}} 249 | all = self.d['test'] 250 | all['key']['key2'] = 'modified' 251 | self.d['test'] = all 252 | self.assertEqual(self.after.to_native(), {'key': {'key2': 'modified'}}) 253 | 254 | def test_dict_overwrite(self): 255 | self.d['test'] = 
{'key': 'value'} 256 | self.d['test'] = {'key': 'modified value'} 257 | self.assertEqual(self.after.to_native(), {'key': 'modified value'}) 258 | 259 | def test_dict_overwrite2(self): 260 | self.d['test'] = {'e': 1} 261 | self.d['test'] = {'e': 1, 'h': 1} 262 | self.assertEqual(self.after.to_native(), {'e': 1, 'h': 1}) 263 | 264 | def test_dict_update(self): 265 | self.d['test'] = {'key': 'value'} 266 | self.d['test'].update({'key': 'mod', 'key2': 'new'}) 267 | self.assertEqual(self.after.to_native(), {'key': 'mod', 'key2': 'new'}) 268 | 269 | def test_cycle(self): 270 | self.d['test'] = {'key': 'value'} 271 | n_callbacks_before = self.n_callbacks 272 | test = self.d['test'] 273 | test['key'] = 'modified' 274 | self.d['test'] = test 275 | self.assertEqual(self.n_callbacks, n_callbacks_before + 1) 276 | self.assertEqual(self.after.to_native(), {'key': 'modified'}) 277 | 278 | def test_cycle2(self): 279 | self.d['test'] = {'key': {'key2': 'value'}} 280 | n_callbacks_before = self.n_callbacks 281 | self.d['test']['key']['key2'] = 'modified' 282 | self.assertEqual(self.n_callbacks, n_callbacks_before + 1) 283 | self.assertEqual(self.after.to_native(), {'key': {'key2': 'modified'}}) 284 | 285 | def test_contains(self): 286 | self.d['test'] = 'contains' 287 | assert 'test' in self.d 288 | assert 'never-used-test' not in self.d 289 | 290 | def test_set_return(self): 291 | ret = self.d.set('test', 'set_return') 292 | assert 'test' in self.d 293 | self.assertEqual(ret, ['callback return']) 294 | 295 | 296 | if __name__ == '__main__': 297 | unittest.main() 298 | -------------------------------------------------------------------------------- /databench/tests/test_json_encoder.py: -------------------------------------------------------------------------------- 1 | from databench.utils import json_encoder_default 2 | import json 3 | import unittest 4 | 5 | 6 | class TestJsonEncoder(unittest.TestCase): 7 | def test_nan(self): 8 | data = json.dumps(float('NaN'), 
class NodeClient(unittest.TestCase):
    """Run the JavaScript client test suite through npm."""

    def setUp(self):
        # Transpile the client sources so that Node can run them.
        # Best effort: a failed build will surface in the npm test below.
        subprocess.call(['npm', 'run', 'build'])

    def test_node_client(self):
        # Fails the test when the npm test suite exits non-zero.
        subprocess.check_call(['npm', 'test'])


if __name__ == '__main__':
    unittest.main()
self.assertIn('This is the text in the `README.md` file.', data.text) 10 | self.assertIn('

This is the text in the README.md ' 11 | 'file.

', data.html) 12 | 13 | def test_rst(self): 14 | # contains README.rst 15 | data = databench.Readme('databench/tests/analyses/simple2') 16 | self.assertEqual('Rest of readme.\n', data.text) 17 | self.assertIn('

Rest of readme.

class Yodler(databench.Analysis):
    """A minimal analysis used to demonstrate the testing utilities."""

    @databench.on
    def echo(self, key, value):
        """Apply the given key and value to the analysis state."""
        yield self.set_state({key: value})


class Example(tornado.testing.AsyncTestCase):
    """Tests for the Yodler analysis."""

    @tornado.testing.gen_test
    def test_gentest(self):
        """Trigger the 'echo' action and inspect the emitted messages."""
        harness = databench.testing.AnalysisTest(Yodler)
        yield harness.trigger('echo', ['light', 'red'])
        self.assertIn(('data', {'light': 'red'}), harness.emitted_messages)

    # NOTE: the older stop/wait-callback pattern
    # (trigger(..., callback=self.stop) followed by self.wait()) no longer
    # works with current Tornado versions and was removed.
import matplotlib.pyplot as plt # noqa: F401 7 | 8 | 9 | class TestUtils(unittest.TestCase): 10 | def setUp(self): 11 | self.figure = plt.figure() 12 | ax = self.figure.add_subplot(1, 1, 1) 13 | ax.plot([1, 2, 3, 4]) 14 | 15 | def test_png(self): 16 | src = databench.utils.fig_to_src(self.figure, 'png') 17 | self.assertEqual(src[:43], 18 | 'data:image/png;base64,iVBORw0KGgoAAAANSUhEU') 19 | 20 | def test_svg(self): 21 | src = databench.utils.fig_to_src(self.figure, 'svg') 22 | self.assertEqual(src[:43], 23 | 'data:image/svg+xml;utf8,/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
@echo "Please use 'make <target>' where <target> is one of"
61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Databench.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Databench.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Databench" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Databench" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 
112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 
Public docs are built from certain branches of the GitHub repository using readthedocs.
To log to `logging` and 39 | send a message to the frontend about the log message, use 40 | ``self.emit('log', ...)``. Similarly, if the frontend emits a ``log`` action, 41 | a backend `logging` message will also be emitted. Similarly to 42 | ``log``, this also works for ``warn`` and ``error``. 43 | Also see :ref:`frontend_logging` in the JavaScript section. 44 | 45 | 46 | Routes 47 | ------ 48 | 49 | Add to the ``static`` map in ``index.yaml`` to serve static files. The ``static`` 50 | map is a regular expression to match the url against which contains a single capture 51 | bracket and the root folder. In this example configuration 52 | 53 | .. code-block:: yaml 54 | 55 | static: 56 | - static/(.*): folder_with_static_files/ 57 | 58 | any url starting with ``static/`` followed by any character sequence ``.*`` which is 59 | captured by the surrounding ``()`` is matched to that file in ``folder_with_static_files/``. 60 | 61 | For more specialized routes, add a ``routes.py`` file to your analysis with extra 62 | `tornado.web.RequestHandler` s 63 | and register them in a ``ROUTES`` variable. This is an example of a 64 | ``routes.py`` file: 65 | 66 | .. literalinclude:: ../databench/tests/analyses/simple2/routes.py 67 | :language: python 68 | 69 | 70 | Autoreload and Build 71 | -------------------- 72 | 73 | Autoreload watches all dependent Python files and rebuilds when any of them 74 | change. It can be deactivated with the command line option ``--no-watch``. 75 | Autoreload uses `tornado.autoreload` in the backend. 76 | 77 | To run a single build 78 | (e.g. before deploying a production setting for Databench), use the 79 | ``--build`` command line option. 80 | 81 | 82 | SSL 83 | --- 84 | 85 | Provide ``--ssl-certfile``, ``--ssl-keyfile`` and ``--ssl-port``. 
86 | 87 | 88 | Command Line and Request Arguments 89 | ---------------------------------- 90 | 91 | Command line parameters are in available in `.Analysis` instances as 92 | ``self.cli_args`` and the arguments from the 93 | http request are in ``self.request_args``. 94 | -------------------------------------------------------------------------------- /docs/backend_api.rst: -------------------------------------------------------------------------------- 1 | Python API 2 | ========== 3 | 4 | .. automodule:: databench 5 | 6 | .. autofunction:: databench.run 7 | 8 | 9 | Analysis 10 | -------- 11 | 12 | .. autoclass:: databench.Analysis 13 | :members: 14 | 15 | .. autofunction:: databench.on 16 | .. autofunction:: databench.on_action 17 | 18 | 19 | Meta 20 | ---- 21 | 22 | .. autoclass:: databench.Meta 23 | :members: 24 | 25 | 26 | Datastore 27 | --------- 28 | 29 | .. autoclass:: databench.Datastore 30 | :members: 31 | 32 | 33 | Utils 34 | ----- 35 | 36 | .. autofunction:: databench.utils.json_encoder_default 37 | .. autofunction:: databench.utils.fig_to_src 38 | .. autofunction:: databench.utils.png_to_src 39 | .. autofunction:: databench.utils.svg_to_src 40 | 41 | 42 | Testing 43 | ------- 44 | 45 | .. autoclass:: databench.testing.AnalysisTest 46 | :members: 47 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Databench documentation build configuration file, created by 4 | # sphinx-quickstart on Tue Jun 10 23:44:51 2014. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 
14 | 15 | import os 16 | import sys 17 | 18 | sys.path.insert(0, os.path.abspath('..')) 19 | import databench 20 | DATABENCH_VERSION = databench.__version__ 21 | 22 | # If extensions (or modules to document with autodoc) are in another directory, 23 | # add these directories to sys.path here. If the directory is relative to the 24 | # documentation root, use os.path.abspath to make it absolute, like shown here. 25 | #sys.path.insert(0, os.path.abspath('.')) 26 | 27 | # -- General configuration ------------------------------------------------ 28 | 29 | # If your documentation needs a minimal Sphinx version, state it here. 30 | #needs_sphinx = '1.0' 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 35 | extensions = [ 36 | 'alabaster', 37 | 'sphinx.ext.autodoc', 38 | 'sphinx.ext.intersphinx', 39 | 'sphinx.ext.viewcode', 40 | ] 41 | 42 | primary_domain = 'py' 43 | default_role = 'py:obj' 44 | 45 | autodoc_member_order = 'bysource' 46 | autoclass_content = 'both' 47 | 48 | # workaround for 'viewcode' extension on readthedocs 49 | autodoc_docstring_signature = False 50 | 51 | intersphinx_mapping = { 52 | 'flask': ('http://flask.pocoo.org/docs/', None), 53 | 'tornado': ('http://www.tornadoweb.org/en/stable/', None), 54 | 'python': ('https://docs.python.org/3.6', None), 55 | 'matplotlib': ('https://matplotlib.org/', None), 56 | 'numpy': ('https://docs.scipy.org/doc/numpy-dev/', None), 57 | } 58 | 59 | # Add any paths that contain templates here, relative to this directory. 60 | templates_path = ['_templates'] 61 | 62 | # The suffix of source filenames. 63 | source_suffix = '.rst' 64 | 65 | # The encoding of source files. 66 | #source_encoding = 'utf-8-sig' 67 | 68 | # The master toctree document. 69 | master_doc = 'index' 70 | 71 | # General information about the project. 
72 | project = u'Databench' 73 | copyright = u' 2014-2018 Sven Kreiss and contributors' 74 | 75 | # The version info for the project you're documenting, acts as replacement for 76 | # |version| and |release|, also used in various other places throughout the 77 | # built documents. 78 | # 79 | # The short X.Y version. 80 | version = DATABENCH_VERSION 81 | # The full version, including alpha/beta/rc tags. 82 | release = DATABENCH_VERSION 83 | 84 | # The language for content autogenerated by Sphinx. Refer to documentation 85 | # for a list of supported languages. 86 | #language = None 87 | 88 | # There are two options for replacing |today|: either, you set today to some 89 | # non-false value, then it is used: 90 | #today = '' 91 | # Else, today_fmt is used as the format for a strftime call. 92 | #today_fmt = '%B %d, %Y' 93 | 94 | # List of patterns, relative to source directory, that match files and 95 | # directories to ignore when looking for source files. 96 | exclude_patterns = ['_build'] 97 | 98 | # The reST default role (used for this markup: `text`) to use for all 99 | # documents. 100 | #default_role = None 101 | 102 | # If true, '()' will be appended to :func: etc. cross-reference text. 103 | #add_function_parentheses = True 104 | 105 | # If true, the current module name will be prepended to all description 106 | # unit titles (such as .. function::). 107 | #add_module_names = True 108 | 109 | # If true, sectionauthor and moduleauthor directives will be shown in the 110 | # output. They are ignored by default. 111 | #show_authors = False 112 | 113 | # The name of the Pygments (syntax highlighting) style to use. 114 | #pygments_style = 'sphinx' 115 | 116 | # A list of ignored prefixes for module index sorting. 117 | #modindex_common_prefix = [] 118 | 119 | # If true, keep warnings as "system message" paragraphs in the built documents. 
120 | #keep_warnings = False 121 | 122 | 123 | # -- Options for HTML output ---------------------------------------------- 124 | 125 | # The theme to use for HTML and HTML Help pages. See the documentation for 126 | # a list of builtin themes. 127 | # html_theme = 'default' 128 | 129 | html_theme = 'alabaster' 130 | html_sidebars = { 131 | '**': [ 132 | 'about.html', 'navigation.html', # 'searchbox.html', 'donate.html', 133 | ] 134 | } 135 | 136 | html_theme_options = { 137 | 'github_user': 'svenkreiss', 138 | 'github_repo': 'databench', 139 | 'github_button': True, 140 | 'github_type': 'star', 141 | 'travis_button': False, 142 | # 'analytics_id': 'UA-4070485-2', 143 | 'extra_nav_links': { 144 | 'Live Demos': 'http://databench-examples.trivial.io', 145 | 'Examples': 'https://github.com/svenkreiss/databench_examples', 146 | 'GitHub': 'https://github.com/svenkreiss/databench', 147 | }, 148 | 'show_powered_by': False, 149 | 'description': open('description.txt', 'r').read(), 150 | 151 | # style 152 | 'font_family': 'sans-serif', 153 | 'font_size': '15px', 154 | 'code_font_size': '13px', 155 | 'caption_font_size': '13px', 156 | } 157 | 158 | # Theme options are theme-specific and customize the look and feel of a theme 159 | # further. For a list of options available for each theme, see the 160 | # documentation. 161 | #html_theme_options = {} 162 | 163 | # Add any paths that contain custom themes here, relative to this directory. 164 | #html_theme_path = [] 165 | 166 | # The name for this set of Sphinx documents. If None, it defaults to 167 | # " v documentation". 168 | #html_title = None 169 | 170 | # A shorter title for the navigation bar. Default is the same as html_title. 171 | #html_short_title = None 172 | 173 | # The name of an image file (relative to this directory) to place at the top 174 | # of the sidebar. 175 | html_logo = 'images/logo-w600.png' 176 | 177 | # The name of an image file (within the static path) to use as favicon of the 178 | # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 179 | # pixels large. 180 | html_favicon = 'images/favicon.ico' 181 | 182 | # Add any paths that contain custom static files (such as style sheets) here, 183 | # relative to this directory. They are copied after the builtin static files, 184 | # so a file named "default.css" will overwrite the builtin "default.css". 185 | html_static_path = ['./custom.css'] 186 | 187 | # Add any extra paths that contain custom files (such as robots.txt or 188 | # .htaccess) here, relative to this directory. These files are copied 189 | # directly to the root of the documentation. 190 | #html_extra_path = [] 191 | 192 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 193 | # using the given strftime format. 194 | #html_last_updated_fmt = '%b %d, %Y' 195 | 196 | # If true, SmartyPants will be used to convert quotes and dashes to 197 | # typographically correct entities. 198 | #html_use_smartypants = True 199 | 200 | # Custom sidebar templates, maps document names to template names. 201 | #html_sidebars = {} 202 | 203 | # Additional templates that should be rendered to pages, maps page names to 204 | # template names. 205 | #html_additional_pages = {} 206 | 207 | # If false, no module index is generated. 208 | #html_domain_indices = True 209 | 210 | # If false, no index is generated. 211 | #html_use_index = True 212 | 213 | # If true, the index is split into individual pages for each letter. 214 | #html_split_index = False 215 | 216 | # If true, links to the reST sources are added to the pages. 217 | html_show_sourcelink = False 218 | 219 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 220 | #html_show_sphinx = True 221 | 222 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
223 | #html_show_copyright = True 224 | 225 | # If true, an OpenSearch description file will be output, and all pages will 226 | # contain a tag referring to it. The value of this option must be the 227 | # base URL from which the finished HTML is served. 228 | #html_use_opensearch = '' 229 | 230 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 231 | #html_file_suffix = None 232 | 233 | # Output file base name for HTML help builder. 234 | htmlhelp_basename = 'Databenchdoc' 235 | -------------------------------------------------------------------------------- /docs/custom.css: -------------------------------------------------------------------------------- 1 | table.docutils td, table.docutils th { 2 | border: 1px solid #ccc; 3 | } 4 | 5 | .hideinsphinx { 6 | display: none; 7 | } 8 | 9 | div.sphinxsidebarwrapper img.logo { 10 | max-width: 70%; 11 | margin-bottom: 30px; 12 | } 13 | 14 | div.sphinxsidebar ul li.toctree-l1 > a, 15 | div.sphinxsidebar ul li.toctree-l2 > a { 16 | font-size: 100%; 17 | } 18 | 19 | div.footer, 20 | .blurb { 21 | font-size: 90%; 22 | } 23 | -------------------------------------------------------------------------------- /docs/deploy.rst: -------------------------------------------------------------------------------- 1 | 2 | Deployment 3 | ========== 4 | 5 | The standard use case is to run ``databench`` locally. However, Databench can also be deployed on servers/platforms that support deploying WebSocket applications. 6 | 7 | 8 | Heroku 9 | ++++++ 10 | 11 | You need a ``Procfile`` file 12 | 13 | .. code-block:: bash 14 | 15 | web: databench 16 | 17 | and your ``requirements.txt`` file. 18 | Databench will pick up the environment variable ``PORT``. 19 | An example repository that is deployed on Heroku is `databench_examples_viewer`_. 20 | 21 | .. 
_`databench_examples_viewer`: https://github.com/svenkreiss/databench_examples_viewer 22 | 23 | 24 | Local Docker 25 | ++++++++++++ 26 | 27 | It is helpful to build and run the Docker image locally when developing: 28 | 29 | .. code-block:: bash 30 | 31 | docker build --tag=databench . 32 | docker run --rm -p 0.0.0.0:5000:5000 -i -t databench 33 | 34 | 35 | AWS Elastic Beanstalk 36 | +++++++++++++++++++++ 37 | 38 | AWS Elastic Beanstalk builds a new Docker image from the Dockerfile. You can also run those images locally (see below), but this is not necessary for deploying to AWS. 39 | 40 | Get the AWS Elastic Beanstalk command line client ``eb`` here: http://aws.amazon.com/code/6752709412171743 41 | or ``brew install aws-elasticbeanstalk``. Its interface is oriented on git commands. So you can go to your project's directory (which is a git repository) initialize the project with ``eb init``. Answer a few questions. Once done, deploy the app with ``eb start``. That creates the environment. Once an environment is created, deploy with ``eb push``. 42 | 43 | Example ``eb init`` options: 44 | 45 | * Access Key ID: xxxxxxx 46 | * Secret Access Key: xxxxxxx 47 | * region: ``1) US East (Virginia)`` 48 | * application name: ``databench_examples`` 49 | * env name: ``databenchexamples_env`` 50 | * environment tier: ``1) WebServer::Standard::1.0`` 51 | * solution stack: ``41) 64bit Amazon Linux 2014.03 v1.0.1 running Docker 1.0.0`` 52 | * env type: ``2) SingleInstance`` 53 | * create RDS DB instance: no 54 | * instance profile: pick one 55 | 56 | 57 | Troubleshooting: 58 | 59 | * ``no module named boto``: do ``pip install boto`` before ``eb start``. 60 | * the web console is at ``https://console.aws.amazon.com/elasticbeanstalk/`` and make sure you are looking at the right region which can be selected in the top-right corner 61 | * ``eb logs`` shows more details 62 | * If deployments become even slower, check ``/var/log/docker-ps.log`` (part of the logs). 
It could be that some other docker image is still building in the background from a previous ``eb push``. 63 | * ``eb start`` might timeout in your terminal. The Web interface usually shows "updating" a bit longer and it should finish within 10 minutes. The timeout does not affect the underlying build process. 64 | 65 | -------------------------------------------------------------------------------- /docs/description.txt: -------------------------------------------------------------------------------- 1 | Databench is a data analysis tool using 2 | Tornado and WebSockets. 3 | Check out the live demos. 4 | -------------------------------------------------------------------------------- /docs/dev.rst: -------------------------------------------------------------------------------- 1 | Development 2 | ----------- 3 | 4 | To contribute to Databench, fork the GitHub repository and then follow these 5 | steps: 6 | 7 | .. code-block:: bash 8 | 9 | # clone the repository to your local machine 10 | git clone https://github.com//databench.git 11 | cd databench 12 | 13 | # create a virtual environment and activate it 14 | virtualenv venv 15 | source venv/bin/activate 16 | # install this version in editable mode 17 | pip install -e .[tests] 18 | 19 | # install JavaScript dependencies 20 | npm install 21 | # build the JavaScript 22 | webpack 23 | 24 | 25 | Now you can: 26 | 27 | .. 
code-block:: bash 28 | 29 | # run Databench 30 | databench --log DEBUG 31 | # and open http://localhost:5000 in a web browser 32 | 33 | # run tests 34 | nosetests -vv --with-coverage --cover-erase --cover-inclusive 35 | 36 | # lint Python 37 | flake8 38 | 39 | # lint JavaScript 40 | npm run lint 41 | 42 | # validate html (with Databench running) 43 | localcrawl --start http://localhost:5000 44 | html5validator --root _crawled 45 | 46 | # create JavaScript docs at docs/jsdoc/index.html 47 | npm run typedoc 48 | 49 | # create Python docs at docs/_build/html/index.html 50 | cd docs; make html 51 | 52 | For new contributions, create a feature branch and submit a Pull Request. 53 | -------------------------------------------------------------------------------- /docs/esdoc-style.css: -------------------------------------------------------------------------------- 1 | h3 { background: none; font-weight: bold; } 2 | h4 { font-weight: normal; display: inline; } 3 | 4 | .inherited-summary { display: none; } 5 | [data-ice="constructorSummary"] { display: none; } 6 | [data-ice="staticMethodSummary"] { display: none; } 7 | [data-ice="override"] { display: none; } 8 | 9 | .content .detail > div[data-ice="extendsChain"] { margin: 0 0 15px 10px; font-style: italic; } 10 | .content .detail > div[data-ice="extendsChain"] > div { display: inline; margin: 0 0.5em; } 11 | .content .detail h4 + :not(pre) { margin-left: 0; } 12 | -------------------------------------------------------------------------------- /docs/frontend.rst: -------------------------------------------------------------------------------- 1 | JavaScript Frontend 2 | =================== 3 | 4 | This is section contains an overview of techniques and best practices that can 5 | be used on the frontend, followed by a short overview of the frontend API and 6 | a section on UI elements (buttons, text boxes, sliders, etc). 7 | 8 | 9 | .. 
_frontend-overview: 10 | 11 | Additional Views 12 | ---------------- 13 | 14 | Next to the ``index.html``, you can create other html files like this 15 | ``log.html`` file: 16 | 17 | .. code-block:: html 18 | 19 | {% extends "analysis.html" %} 20 | 21 | 22 | {% block footer %} 23 | 33 | {% end %} 34 | 35 | which will automatically be available at the url ending with ``log.html``. 36 | 37 | 38 | 39 | HTML Templates 40 | -------------- 41 | 42 | Templates are rendered buy Tornado's template engine. Databench provides 43 | a visual frame for your analysis which you can extend from with 44 | ``{% extends "analysis.html" %}``. Example: 45 | 46 | .. literalinclude:: ../databench/analyses_packaged/scaffold/index.html 47 | :language: html 48 | 49 | Modify the ``base.html``, ``analysis.html`` or any other html template file 50 | by placing a ``base.html`` or ``analysis.html`` file in your analyses path. 51 | Use ``analyses/static`` for static assets like logos 52 | and favicons which is exposed at ``/static``. 53 | 54 | Default ``analyses/base.html``: 55 | 56 | .. literalinclude:: ../databench/templates/base.html 57 | :language: html 58 | 59 | Default ``analyses/analysis.html``: 60 | 61 | .. literalinclude:: ../databench/templates/analysis.html 62 | :language: html 63 | 64 | 65 | Extensions 66 | ---------- 67 | 68 | Databench is supposed to go out of your way and work well with many 69 | frontend frameworks and tools. For example, it works well with Twitter 70 | Bootstrap, React, Font Awesome, MathJax, and many more. 71 | 72 | 73 | .. _frontend_logging: 74 | 75 | Logging 76 | ------- 77 | 78 | .. versionchanged:: 0.7 79 | 80 | Use ``console.log()`` to log to the console as usual. To log to the console and 81 | send a message to the backend about the log message, use 82 | ``d.emit('log', ...)``. Similarly, if the backend emits a ``log`` action, the 83 | frontend console as well as ``databench.ui.Log`` will show it. 
Similarly to 84 | ``log``, this also works for ``warn`` and ``error``. 85 | Also see :ref:`backend_logging` in the Python section. 86 | 87 | 88 | Static Files 89 | ------------ 90 | 91 | To add a static file to an analysis, place it in the analysis folder. Static 92 | files in this folder are exposed at the ``//static/`` url. 93 | For example, to add ``angular.js`` to an analysis of the name *angular* 94 | (see for example the `angular analysis in the Databench examples`_), add the 95 | file ``angular.js`` to the folder ``analyses/angular/`` and include it in 96 | ``index.html`` with: 97 | 98 | .. code-block:: html 99 | 100 | 101 | 102 | You can also add static files to *all* analyses by creating a folder 103 | ``analyses/static`` and placing the static file in this folder. The URL 104 | to access the files is ``/static/my_static_file.png``. This is 105 | the same folder that is used for a custom header logo; 106 | see :ref:`analyses_configurations`. 107 | 108 | 109 | Node Modules 110 | ------------ 111 | 112 | Databench looks for a ``static`` and a ``node_modules`` folder first in the 113 | analyses folder and then in the current working directory. 114 | 115 | .. code-block:: bash 116 | 117 | npm init # creates package.json interactively 118 | npm install --save d3 # install d3 and add as dependency to packages.json 119 | 120 | to then access it with 121 | 122 | .. code-block:: html 123 | 124 | 125 | 126 | in html. The `databench_examples repository`_ contains analyses that use 127 | static files and Node packages. 128 | 129 | 130 | Running the Backend at a Custom Location 131 | ---------------------------------------- 132 | 133 | You can also include Databench in websites. You need the Databench JavaScript 134 | library and configure the location of your Databench backend: 135 | 136 | .. 
code-block:: javascript 137 | 138 | var d = Databench.Connection('ws://databench-examples.trivial.io/simplepi/ws'); 139 | 140 | which connects to the backend of the `public and live example of simplepi`_. 141 | When you connect to your own backend, you will have to invoke databench with 142 | 143 | .. code-block:: bash 144 | 145 | databench --host=0.0.0.0 146 | 147 | to allow non-local access. 148 | 149 | WARNING: Databench was developed for deployment in trusted environments. 150 | You need to handle security yourself, e.g. by running Databench on an 151 | isolated server. 152 | 153 | .. _`angular analysis in the Databench examples`: https://github.com/svenkreiss/databench_examples/tree/master/analyses/angular 154 | .. _`databench_examples repository`: https://github.com/svenkreiss/databench_examples/ 155 | .. _`public and live example of simplepi`: http://databench-examples.trivial.io/simplepi/ 156 | -------------------------------------------------------------------------------- /docs/frontend_api.rst: -------------------------------------------------------------------------------- 1 | .. _frontend-api: 2 | 3 | 4 | JavaScript API 5 | ============== 6 | 7 | ``databench.js`` is exposed at ``/_static/databench.js``. Please see the 8 | `complete JavaScript API reference`_. 9 | 10 | .. image:: images/JavaScript-docs.png 11 | :alt: JavaScript API Reference. 12 | :height: 300 13 | :target: http://www.svenkreiss.com/databench/ 14 | 15 | .. 
_`complete JavaScript API reference`: http://www.svenkreiss.com/databench/ 16 | -------------------------------------------------------------------------------- /docs/images/JavaScript-docs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/JavaScript-docs.png -------------------------------------------------------------------------------- /docs/images/Python-docs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/Python-docs.png -------------------------------------------------------------------------------- /docs/images/bagofcharsd3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/bagofcharsd3.png -------------------------------------------------------------------------------- /docs/images/dataflow_datastore_state.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/dataflow_datastore_state.png -------------------------------------------------------------------------------- /docs/images/dataflow_frontend_state.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/dataflow_frontend_state.png -------------------------------------------------------------------------------- /docs/images/favicon.ico: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/favicon.ico -------------------------------------------------------------------------------- /docs/images/flowers.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/flowers.gif -------------------------------------------------------------------------------- /docs/images/flowers_demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/flowers_demo.png -------------------------------------------------------------------------------- /docs/images/logo-w100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/logo-w100.png -------------------------------------------------------------------------------- /docs/images/logo-w600.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/logo-w600.png -------------------------------------------------------------------------------- /docs/images/mpld3_heart_path.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/mpld3_heart_path.png -------------------------------------------------------------------------------- /docs/images/mpld3pi_demo.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/mpld3pi_demo.png -------------------------------------------------------------------------------- /docs/images/simplepi_demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/images/simplepi_demo.png -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. Databench documentation master file, created by 2 | sphinx-quickstart on Tue Jun 10 23:44:51 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | .. include:: ../README.rst 7 | 8 | 9 | .. _overview: 10 | 11 | Overview 12 | -------- 13 | 14 | Databench provides the executables ``scaffold-databench`` and ``databench``, Python modules for the backend and a JavaScript library for the frontend. ``scaffold-databench helloworld`` creates an analysis template called *helloworld* in the current working directory. Running ``databench`` creates a local web server which you can access at ``http://localhost:5000``. A good way to start is to jump right into :ref:`quickstart`. 15 | 16 | Some features are shown in the `live demos`_. They do not include examples with ``matplotlib``, parallelization or database interfaces (like asynchronously subscribing to a Redis channel) but those examples are available in the `databench_examples`_ repository. 17 | 18 | .. image:: images/flowers_demo.png 19 | :alt: Preview of flowers demo. 20 | :height: 270 21 | :target: http://databench-examples.trivial.io/flowers/ 22 | 23 | .. image:: images/bagofcharsd3.png 24 | :alt: Preview of bag-of-chars demo. 25 | :height: 270 26 | :target: http://databench-examples.trivial.io/bagofcharsd3/ 27 | 28 | .. 
image:: images/simplepi_demo.png 29 | :alt: Preview of simplepi demo. 30 | :height: 270 31 | :target: http://databench-examples.trivial.io/simplepi/ 32 | 33 | .. _`first blog post`: http://www.svenkreiss.com/blog/databench-initial/ 34 | .. _`slides from Hack and Tell NY`: https://speakerdeck.com/svenkreiss/databench 35 | .. _`live demos`: http://databench-examples.trivial.io/ 36 | .. _`databench_examples`: https://github.com/svenkreiss/databench_examples 37 | .. _`markdown`: http://daringfireball.net/projects/markdown/syntax 38 | .. _`MathJax`: http://www.mathjax.org/ 39 | .. _`angular.js`: https://angularjs.org/ 40 | 41 | 42 | Contents 43 | -------- 44 | 45 | .. toctree:: 46 | :maxdepth: 2 47 | 48 | self 49 | quickstart 50 | backend 51 | backend_api 52 | frontend 53 | frontend_api 54 | deploy 55 | dev 56 | 57 | 58 | .. Indices and tables 59 | .. ================== 60 | 61 | .. * :ref:`genindex` 62 | .. * :ref:`modindex` 63 | .. * :ref:`search` 64 | 65 | -------------------------------------------------------------------------------- /docs/quickstart.rst: -------------------------------------------------------------------------------- 1 | .. _quickstart: 2 | 3 | Quickstart 4 | ========== 5 | 6 | Install ``databench`` as shown at the top of the :ref:`overview` page. To start 7 | a new analysis called *helloworld*, use ``scaffold-databench helloworld`` which 8 | creates a directory structure like this: 9 | 10 | .. code-block:: bash 11 | 12 | - workingDir/ 13 | - analyses/ 14 | - __init__.py 15 | - index.yaml 16 | - helloworld/ 17 | - __init__.py 18 | - analysis.py 19 | - index.html 20 | - thumbnail.png (optional) 21 | 22 | At this point you are all set up and can run ``databench``, view the analysis 23 | in a browser at http://localhost:5000 and start modifying the analysis source code. 24 | 25 | To understand the structure, this is a walk-through of the steps that just 26 | happened in ``scaffold-databench``. 
First, add the analyses to the 27 | ``analyses/index.yaml`` file: 28 | 29 | .. code-block:: yaml 30 | 31 | title: My Analyses 32 | description: Describing my analyses with words. 33 | 34 | analyses: 35 | - name: helloworld 36 | title: Hello World! 37 | description: A hello world example. 38 | 39 | Next, create the helloworld backend in ``analyses/helloworld/analysis.py``: 40 | 41 | .. code-block:: python 42 | 43 | import databench 44 | 45 | 46 | class HelloWorld(databench.Analysis): 47 | 48 | def on_connect(self): 49 | """Run as soon as a browser connects to this.""" 50 | self.data['status'] = 'Hello World' 51 | 52 | 53 | And the frontend in ``analyses/helloworld/index.html``: 54 | 55 | .. code-block:: html 56 | 57 | {% extends "analysis.html" %} 58 | 59 | 60 | {% block analysis %} 61 |

62 | {% end %} 63 | 64 | 65 | {% block footer %} 66 | 75 | {% end %} 76 | 77 | Now you can run the executable ``databench`` in your ``workingDir`` folder 78 | (outside of ``analyses``) which creates a webserver and you can open 79 | http://localhost:5000 in your webbrowser. The command line options ``--host`` 80 | and ``--port`` set the host and port of the webserver ``--log`` changes the 81 | loglevel. For example, calling ``databench --log=DEBUG`` enables all messages; 82 | the options are ``NOTSET``, ``DEBUG``, ``INFO``, ``WARNING``, ``ERROR`` and 83 | ``CRITICAL``. Running databench in ``WARNING`` or ``INFO`` enables autoreloading 84 | on code changes. You can also create a ``requirements.txt`` file containing 85 | other Python packages your analysis needs. An example of this setup is the 86 | `databench_examples`_ repository. 87 | 88 | .. _`databench_examples`: https://github.com/svenkreiss/databench_examples 89 | 90 | 91 | **Without a template**: The analysis can also be run without a template. You 92 | can replace ``index.html`` with 93 | 94 | .. code-block:: html 95 | 96 | 97 | 98 | Hello World 99 | 100 |

101 | 102 | 103 | 112 | 113 | 114 | 115 | You can find the result of this tutorial in the `helloworld analysis of the databench_examples`_ repo. 116 | 117 | .. _`helloworld analysis of the databench_examples`: https://github.com/svenkreiss/databench_examples 118 | 119 | 120 | Data flow 121 | --------- 122 | 123 | At the lowest level, Databench communicates between frontend and backend by 124 | sending messages on a long-lived bidirectional WebSocket connection. That means 125 | that both frontend and backend can signal to the other end a change in state 126 | or transmit an action without being polled. 127 | 128 | Depending on where state is stored (and that can be mixed within an analysis), 129 | two models for data flow are often used. First, a model where state is stored 130 | in a Datastore in the backend. This datastore can be a store like Redis that is 131 | shared across instances of the Python backend. 132 | 133 | .. image:: images/dataflow_datastore_state.png 134 | :alt: data flow with state stored in datastore 135 | 136 | Second, transient state -- state that is deleted at the end of a session 137 | and is usually concerned with the user's UI -- is stored in the frontend. 138 | In this case, the backend only sends actions but not state to the frontend. 139 | The frontend can also send actions to the backend. 140 | 141 | .. 
image:: images/dataflow_frontend_state.png 142 | :alt: data flow with state stored in frontend 143 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | Sphinx==1.6.7 2 | -------------------------------------------------------------------------------- /docs/version_index/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/version_index/.nojekyll -------------------------------------------------------------------------------- /docs/version_index/CNAME: -------------------------------------------------------------------------------- 1 | databench.trivial.io -------------------------------------------------------------------------------- /docs/version_index/circle.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | pre: 3 | - sudo pip install html5validator 4 | test: 5 | override: 6 | - html5validator 7 | -------------------------------------------------------------------------------- /docs/version_index/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/docs/version_index/favicon.ico -------------------------------------------------------------------------------- /docs/version_index/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Databench Docs 10 | 11 | 12 | 13 | 14 | 15 | 30 | 31 | 32 | 35 | 36 | Databench logo 37 | 38 |

Databench Docs

39 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /js/docs/index.md: -------------------------------------------------------------------------------- 1 | This is the documentation for **databench.js**, the JavaScript part of Databench. 2 | The main documentation is at [databench.trivial.io](http://databench.trivial.io). 3 | 4 | 5 | Databench.js exports `{` [[Connection]], [connect](globals.html#connect), 6 | [attach](globals.html#attach), [[ui]] `}`. 7 | -------------------------------------------------------------------------------- /js/src/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Databench 3 | */ 4 | 5 | export * from './connection'; 6 | export * from './ui'; 7 | -------------------------------------------------------------------------------- /logo/create.py: -------------------------------------------------------------------------------- 1 | """Creates an SVG of the Databench logo. Optionally also a png.""" 2 | 3 | import os 4 | import random 5 | import svgwrite 6 | 7 | DATA = [ 8 | [0, 1, 1, 1, 1, 0, 1, 1], 9 | [1, 1, 0, 1, 1, 1, 1, 0], 10 | [1, 1, 1, 1, 0, 1, 0, 1], 11 | [1, 0, 1, 0, 1, 1, 1, 1], 12 | [0, 1, 1, 1, 1, 0, 1, 1], 13 | [1, 1, 0, 1, 0, 1, 1, 0], 14 | [0, 1, 1, 1, 1, 0, 1, 1], 15 | [1, 0, 1, 0, 1, 1, 0, 1], 16 | ] 17 | 18 | 19 | def color(x, y): 20 | """triangles. 
21 | 22 | Colors: 23 | - http://paletton.com/#uid=70l150klllletuehUpNoMgTsdcs shade 2 24 | """ 25 | 26 | if (x-4) > (y-4) and -(y-4) <= (x-4): 27 | # right 28 | return "#CDB95B" 29 | elif (x-4) > (y-4) and -(y-4) > (x-4): 30 | # top 31 | return "#CD845B" 32 | elif (x-4) <= (y-4) and -(y-4) <= (x-4): 33 | # bottom 34 | return "#57488E" 35 | elif (x-4) <= (y-4) and -(y-4) > (x-4): 36 | # left 37 | return "#3B8772" 38 | 39 | # should not happen 40 | return "black" 41 | 42 | 43 | def simple(svg_document, x, y, v): 44 | if v == 1: 45 | svg_document.add(svg_document.rect(insert=(x*16, y*16), 46 | size=("16px", "16px"), 47 | # rx="2px", 48 | # stroke_width="1", 49 | # stroke=color(x, y), 50 | fill=color(x, y))) 51 | 52 | 53 | def smaller(svg_document, x, y, v): 54 | # from center 55 | distance2 = (x-3.5)**2 + (y-3.5)**2 56 | max_distance2 = 2 * 4**2 57 | 58 | if v == 1: 59 | size = 16.0*(1.0 - distance2/max_distance2) 60 | number_of_cubes = int(16**2 / (size**2)) 61 | for i in xrange(number_of_cubes): 62 | xi = x*16 + 1 + random.random()*(14.0-size) 63 | yi = y*16 + 1 + random.random()*(14.0-size) 64 | sizepx = str(size)+"px" 65 | svg_document.add(svg_document.rect(insert=(xi, yi), 66 | size=(sizepx, sizepx), 67 | rx="2px", 68 | stroke_width="1", 69 | stroke=color(x, y), 70 | fill=color(x, y))) 71 | 72 | 73 | def main(): 74 | svg_favicon = svgwrite.Drawing(filename="favicon.svg", 75 | size=("128px", "128px")) 76 | svg_document = svgwrite.Drawing(filename="logo.svg", 77 | size=("128px", "128px")) 78 | for y, r in enumerate(DATA): 79 | for x, v in enumerate(r): 80 | simple(svg_favicon, x, y, v) 81 | smaller(svg_document, x, y, v) 82 | print(svg_document.tostring()) 83 | svg_favicon.save() 84 | svg_document.save() 85 | 86 | # create pngs 87 | os.system('svg2png logo.svg --width=100 --height=100') 88 | os.system('svg2png logo.svg --width=600 --height=600') 89 | favicon_sizes = [16, 32, 48, 128, 256] 90 | for s in favicon_sizes: 91 | os.system('svg2png favicon.svg 
--width='+str(s)+' --height='+str(s)) 92 | png_favicon_names = ['favicon-w'+str(s)+'.png' for s in favicon_sizes] 93 | os.system('convert ' + (' '.join(png_favicon_names)) + 94 | ' -colors 256 favicon.ico') 95 | 96 | 97 | if __name__ == "__main__": 98 | random.seed(42) 99 | main() 100 | -------------------------------------------------------------------------------- /logo/favicon-w128.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/logo/favicon-w128.png -------------------------------------------------------------------------------- /logo/favicon-w16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/logo/favicon-w16.png -------------------------------------------------------------------------------- /logo/favicon-w256.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/logo/favicon-w256.png -------------------------------------------------------------------------------- /logo/favicon-w32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/logo/favicon-w32.png -------------------------------------------------------------------------------- /logo/favicon-w48.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/logo/favicon-w48.png -------------------------------------------------------------------------------- /logo/favicon.ico: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/logo/favicon.ico -------------------------------------------------------------------------------- /logo/favicon.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /logo/logo-w100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/logo/logo-w100.png -------------------------------------------------------------------------------- /logo/logo-w600.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/svenkreiss/databench/99d4adad494b60a42af6b8bfba94dd0c41ba0786/logo/logo-w600.png -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "databench", 3 | "version": "0.7.0", 4 | "description": "Real-time data analysis tool.", 5 | "readme": "./js/docs/index.md", 6 | "main": "./js/build/commonjs/databench.js", 7 | "types": "./js/build/commonjs/index.d.ts", 8 | "directories": { 9 | "doc": "docs/jsdoc", 10 | "test": "js/tests" 11 | }, 12 | "dependencies": { 13 | "@types/request": "^2.47.0", 14 | "@types/websocket": "0.0.36", 15 | "request": "^2.86.0", 16 | "websocket": "^1.0.26" 17 | }, 18 | "devDependencies": { 19 | "@types/chai": "^4.1.3", 20 | "@types/mocha": "^2.2.48", 21 | "awesome-typescript-loader": "^3.5.0", 22 | "babel-core": "^6.26.3", 23 | "babel-eslint": "^8.2.3", 24 | "babel-loader": "^7.1.4", 25 | "babel-preset-env": "^1.7.0", 26 | "chai": "^4.1.2", 27 | "eslint": "^4.19.1", 28 | "eslint-config-airbnb-base": "^12.1.0", 29 | "eslint-plugin-import": "^2.12.0", 30 | "json-loader": "^0.5.7", 31 | "mocha": "^5.2.0", 32 | 
"source-map-loader": "^0.2.3", 33 | "tslint": "^5.10.0", 34 | "typedoc": "^0.9.0", 35 | "typescript": "^2.8.3", 36 | "webpack": "^3.12.0" 37 | }, 38 | "eslintConfig": { 39 | "parser": "babel-eslint" 40 | }, 41 | "scripts": { 42 | "build": "webpack", 43 | "test": "tsc && mocha -t 5000 js/build/commonjs/test.js", 44 | "lint": "tslint -c tslint.json js/src/*.ts && eslint databench/analyses_packaged/**/*.js", 45 | "typedoc": "typedoc --out docs/typedoc/ --excludePrivate --excludeNotExported --excludeExternals --mode file --readme js/docs/index.md js/src/" 46 | }, 47 | "repository": { 48 | "type": "git", 49 | "url": "git+https://github.com/svenkreiss/databench.git" 50 | }, 51 | "author": "@svenkreiss", 52 | "license": "MIT", 53 | "bugs": { 54 | "url": "https://github.com/svenkreiss/databench/issues" 55 | }, 56 | "homepage": "http://databench.trivial.io" 57 | } 58 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = H301 3 | exclude = venv*/,logo,docs,build,dist 4 | 5 | [coverage:run] 6 | include = databench/* 7 | omit = 8 | databench/scaffold.py 9 | databench/cli.py 10 | databench/analyses_packaged/* 11 | **/*_html.generated.py 12 | 13 | [coverage:report] 14 | show_missing = True 15 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | 4 | # extract version from __init__.py 5 | with open('databench/__init__.py', 'r') as f: 6 | version_line = [l for l in f if l.startswith('__version__')][0] 7 | VERSION = version_line.split('=')[1].strip()[1:-1] 8 | 9 | 10 | setup( 11 | name='databench', 12 | version=VERSION, 13 | packages=['databench', 'databench.analyses_packaged', 14 | 'databench_py', 'databench_py.singlethread', 15 | 'databench.tests', 16 | 
'databench.tests.analyses', 'databench.tests.analyses_broken'], 17 | license='MIT', 18 | description='Realtime data analysis tool.', 19 | long_description=open('README.rst').read(), 20 | author='Sven Kreiss', 21 | author_email='me@svenkreiss.com', 22 | url='https://github.com/svenkreiss/databench', 23 | 24 | include_package_data=True, 25 | 26 | install_requires=[ 27 | 'docutils>=0.12', 28 | 'future>=0.15', 29 | 'markdown>=2.6.5', 30 | 'pyyaml>=3.11', 31 | 'pyzmq>=4.3.1', 32 | 'tornado>=4.3', 33 | 'wrapt>=1.10.11', 34 | ], 35 | entry_points={ 36 | 'console_scripts': [ 37 | 'databench = databench.cli:main', 38 | 'scaffold-databench = databench.scaffold:main', 39 | ] 40 | }, 41 | extras_require={ 42 | 'tests': [ 43 | 'coverage>=4.4.2', 44 | 'ghp-import>=0.4.1', 45 | 'hacking>=0.11.0', # imports a pinned version of flake8 46 | 'html5validator>=0.2.6', 47 | 'localcrawl>=0.2.3', 48 | 'matplotlib>=1.5.1', 49 | 'nose>=1.3.4', 50 | 'requests>=2.9.1', 51 | 'websocket-client>=0.35.0', 52 | ], 53 | }, 54 | 55 | tests_require=[ 56 | 'nose>=1.3.4', 57 | ], 58 | test_suite='nose.collector', 59 | 60 | classifiers=[ 61 | 'Development Status :: 4 - Beta', 62 | 'Intended Audience :: Developers', 63 | 'Intended Audience :: Science/Research', 64 | 'Natural Language :: English', 65 | 'License :: OSI Approved :: MIT License', 66 | 'Operating System :: OS Independent', 67 | 'Programming Language :: Python', 68 | 'Programming Language :: Python :: 2', 69 | 'Programming Language :: Python :: 2.7', 70 | 'Programming Language :: Python :: 3', 71 | 'Programming Language :: Python :: 3.4', 72 | 'Programming Language :: Python :: 3.5', 73 | 'Programming Language :: Python :: 3.6', 74 | ] 75 | ) 76 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "es5", 5 | "lib": ["dom", "es2015.promise", "es5"], 
6 | "noImplicitAny": true, 7 | "suppressImplicitAnyIndexErrors": true, 8 | "strictNullChecks": true, 9 | "sourceMap": true, 10 | "outDir": "js/build/commonjs", 11 | "declaration": true 12 | }, 13 | "include": [ 14 | "js/src/**/*" 15 | ], 16 | "exclude": [ 17 | "node_modules/" 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /tslint.json: -------------------------------------------------------------------------------- 1 | { 2 | "jsRules": { 3 | "class-name": true, 4 | "comment-format": [ 5 | true, 6 | "check-space" 7 | ], 8 | "indent": [ 9 | true, 10 | "spaces" 11 | ], 12 | "no-duplicate-variable": true, 13 | "no-eval": true, 14 | "no-trailing-whitespace": true, 15 | "no-unsafe-finally": true, 16 | "one-line": [ 17 | true, 18 | "check-open-brace", 19 | "check-whitespace" 20 | ], 21 | "quotemark": [ 22 | true, 23 | "single" 24 | ], 25 | "semicolon": [ 26 | true, 27 | "always" 28 | ], 29 | "triple-equals": [ 30 | true, 31 | "allow-null-check" 32 | ], 33 | "variable-name": [ 34 | true, 35 | "ban-keywords" 36 | ], 37 | "whitespace": [ 38 | true, 39 | "check-branch", 40 | "check-decl", 41 | "check-operator", 42 | "check-separator", 43 | "check-type" 44 | ] 45 | }, 46 | "rules": { 47 | "class-name": true, 48 | "comment-format": [ 49 | true, 50 | "check-space" 51 | ], 52 | "indent": [ 53 | true, 54 | "spaces" 55 | ], 56 | "no-eval": true, 57 | "no-internal-module": true, 58 | "no-trailing-whitespace": true, 59 | "no-unsafe-finally": true, 60 | "no-var-keyword": true, 61 | "one-line": [ 62 | true, 63 | "check-open-brace", 64 | "check-whitespace" 65 | ], 66 | "quotemark": [ 67 | true, 68 | "single" 69 | ], 70 | "semicolon": [ 71 | true, 72 | "always" 73 | ], 74 | "triple-equals": [ 75 | true, 76 | "allow-null-check" 77 | ], 78 | "typedef-whitespace": [ 79 | true, 80 | { 81 | "call-signature": "nospace", 82 | "index-signature": "nospace", 83 | "parameter": "nospace", 84 | "property-declaration": "nospace", 85 | 
"variable-declaration": "nospace" 86 | } 87 | ], 88 | "variable-name": [ 89 | true, 90 | "ban-keywords" 91 | ], 92 | "whitespace": [ 93 | true, 94 | "check-branch", 95 | "check-decl", 96 | "check-operator", 97 | "check-separator", 98 | "check-type" 99 | ] 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | 3 | // for output to databench/static/databench.js 4 | { 5 | context: __dirname, 6 | entry: { 7 | databench: './js/src/index.ts', 8 | }, 9 | output: { 10 | path: __dirname, 11 | library: 'Databench', 12 | libraryTarget: 'umd', 13 | filename: 'databench/static/databench.js', 14 | }, 15 | 16 | devtool: 'source-map', 17 | 18 | resolve: { 19 | extensions: ['.ts', '.tsx', '.js', '.json'], 20 | }, 21 | 22 | module: { 23 | rules: [ 24 | { test: /\.jsx?$/, loader: 'babel-loader' }, 25 | { test: /\.tsx?$/, loader: 'awesome-typescript-loader' }, 26 | { enforce: 'pre', test: /\.js$/, loader: 'source-map-loader' } 27 | ] 28 | }, 29 | }, 30 | 31 | // for output to js/build/commonjs 32 | { 33 | context: __dirname, 34 | entry: { 35 | databench: './js/src/index.ts', 36 | }, 37 | output: { 38 | path: __dirname, 39 | library: 'Databench', 40 | libraryTarget: 'umd', 41 | filename: 'js/build/commonjs/databench.js', 42 | }, 43 | 44 | devtool: 'source-map', 45 | 46 | resolve: { 47 | extensions: ['.ts', '.tsx', '.js', '.json'], 48 | }, 49 | 50 | module: { 51 | rules: [ 52 | { test: /\.jsx?$/, loader: 'babel-loader' }, 53 | { test: /\.tsx?$/, loader: 'awesome-typescript-loader' }, 54 | { enforce: 'pre', test: /\.js$/, loader: 'source-map-loader' } 55 | ] 56 | }, 57 | }]; 58 | --------------------------------------------------------------------------------