├── .gitignore ├── AUTHORS.rst ├── CONTRIBUTING.rst ├── HISTORY.rst ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.rst ├── bin └── sniffer.py ├── docs ├── Makefile ├── authors.rst ├── conf.py ├── contributing.rst ├── history.rst ├── index.rst ├── installation.rst ├── make.bat ├── readme.rst └── usage.rst ├── examples ├── chat │ ├── bootstrap.min.css │ ├── chat.html │ ├── chat.py │ ├── jquery-2.0.3.min.js │ └── jquery.json-2.2.min.js ├── fosdem_2015 │ ├── README │ ├── api+power.py │ ├── api.py │ ├── app.jsx │ ├── bower.json │ ├── index.html │ ├── package.json │ ├── power.py │ ├── randomizer.py │ ├── webpack.js │ ├── webpack.prod.js │ └── worker.py └── todo_react │ ├── app.py │ ├── bower.json │ ├── index.html │ ├── todo │ └── todo.js ├── requirements.txt ├── setup.cfg ├── setup.py ├── skeletons └── all-in-one │ ├── README.md │ ├── cookiecutter.json │ └── {{cookiecutter.project_name}} │ └── app.py ├── tests ├── __init__.py ├── backend │ ├── __init__.py │ ├── test_memory.py │ └── test_mongodb.py ├── discovery_medium │ ├── __init__.py │ ├── test_memory.py │ └── test_udp_discovery.py ├── medium │ ├── __init__.py │ ├── test_memory_medium.py │ ├── test_zeromq_medium.py │ └── utils.py ├── services │ ├── __init__.py │ └── test_http_interface.py ├── test_resources.py ├── test_service.py ├── test_worker.py └── utils.py └── zeroservices ├── __init__.py ├── backend ├── __init__.py └── mongodb.py ├── discovery ├── __init__.py ├── memory.py └── udp.py ├── exceptions.py ├── medium ├── __init__.py ├── memory.py └── zeromq.py ├── memory.py ├── query.py ├── resources.py ├── service.py ├── services ├── __init__.py ├── http_client.py ├── http_interface.py └── realtime.py ├── utils.py └── validation.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | 
develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # PyInstaller 26 | # Usually these files are written by a python script from a template 27 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 28 | *.manifest 29 | *.spec 30 | 31 | # Installer logs 32 | pip-log.txt 33 | pip-delete-this-directory.txt 34 | 35 | # Unit test / coverage reports 36 | htmlcov/ 37 | .tox/ 38 | .coverage 39 | .cache 40 | nosetests.xml 41 | coverage.xml 42 | 43 | # Translations 44 | *.mo 45 | *.pot 46 | 47 | # Django stuff: 48 | *.log 49 | 50 | # Sphinx documentation 51 | docs/_build/ 52 | 53 | # PyBuilder 54 | target/ 55 | 56 | node_modules/ 57 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * Boris Feld 9 | 10 | Contributors 11 | ------------ 12 | 13 | None yet. Why not be the first? 14 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Contributing 3 | ============ 4 | 5 | Contributions are welcome, and they are greatly appreciated! Every 6 | little bit helps, and credit will always be given. 7 | 8 | You can contribute in many ways: 9 | 10 | Types of Contributions 11 | ---------------------- 12 | 13 | Report Bugs 14 | ~~~~~~~~~~~ 15 | 16 | Report bugs at https://github.com/lothiraldan/zeroservices/issues. 17 | 18 | If you are reporting a bug, please include: 19 | 20 | * Your operating system name and version. 21 | * Any details about your local setup that might be helpful in troubleshooting. 22 | * Detailed steps to reproduce the bug. 
23 | 24 | Fix Bugs 25 | ~~~~~~~~ 26 | 27 | Look through the GitHub issues for bugs. Anything tagged with "bug" 28 | is open to whoever wants to implement it. 29 | 30 | Implement Features 31 | ~~~~~~~~~~~~~~~~~~ 32 | 33 | Look through the GitHub issues for features. Anything tagged with "feature" 34 | is open to whoever wants to implement it. 35 | 36 | Write Documentation 37 | ~~~~~~~~~~~~~~~~~~~ 38 | 39 | ZeroServices could always use more documentation, whether as part of the 40 | official ZeroServices docs, in docstrings, or even on the web in blog posts, 41 | articles, and such. 42 | 43 | Submit Feedback 44 | ~~~~~~~~~~~~~~~ 45 | 46 | The best way to send feedback is to file an issue at https://github.com/lothiraldan/zeroservices/issues. 47 | 48 | If you are proposing a feature: 49 | 50 | * Explain in detail how it would work. 51 | * Keep the scope as narrow as possible, to make it easier to implement. 52 | * Remember that this is a volunteer-driven project, and that contributions 53 | are welcome :) 54 | 55 | Get Started! 56 | ------------ 57 | 58 | Ready to contribute? Here's how to set up `zeroservices` for local development. 59 | 60 | 1. Fork the `zeroservices` repo on GitHub. 61 | 2. Clone your fork locally:: 62 | 63 | $ git clone git@github.com:your_name_here/zeroservices.git 64 | 65 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 66 | 67 | $ mkvirtualenv zeroservices 68 | $ cd zeroservices/ 69 | $ python setup.py develop 70 | 71 | 4. Create a branch for local development:: 72 | 73 | $ git checkout -b name-of-your-bugfix-or-feature 74 | 75 | Now you can make your changes locally. 76 | 77 | 5. 
When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: 78 | 79 | $ flake8 zeroservices tests 80 | $ python setup.py test 81 | $ tox 82 | 83 | To get flake8 and tox, just pip install them into your virtualenv. 84 | 85 | 6. Commit your changes and push your branch to GitHub:: 86 | 87 | $ git add . 88 | $ git commit -m "Your detailed description of your changes." 89 | $ git push origin name-of-your-bugfix-or-feature 90 | 91 | 7. Submit a pull request through the GitHub website. 92 | 93 | Pull Request Guidelines 94 | ----------------------- 95 | 96 | Before you submit a pull request, check that it meets these guidelines: 97 | 98 | 1. The pull request should include tests. 99 | 2. If the pull request adds functionality, the docs should be updated. Put 100 | your new functionality into a function with a docstring, and add the 101 | feature to the list in README.rst. 102 | 3. The pull request should work for Python 2.6, 2.7, 3.3, and 3.4, and for PyPy. Check 103 | https://travis-ci.org/lothiraldan/zeroservices/pull_requests 104 | and make sure that the tests pass for all supported Python versions. 105 | 106 | Tips 107 | ---- 108 | 109 | To run a subset of tests:: 110 | 111 | $ python -m unittest tests.test_zeroservices 112 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | .. :changelog: 2 | 3 | History 4 | ------- 5 | 6 | 0.1.0 (2015-01-02) 7 | --------------------- 8 | 9 | * First release on PyPI. 
10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013 Boris Feld 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
21 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include HISTORY.rst 4 | include LICENSE 5 | include README.rst 6 | 7 | recursive-include tests * 8 | recursive-exclude * __pycache__ 9 | recursive-exclude * *.py[co] 10 | 11 | recursive-include docs *.rst conf.py Makefile make.bat 12 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean-pyc clean-build docs clean 2 | 3 | help: 4 | @echo "clean - remove all build, test, coverage and Python artifacts" 5 | @echo "clean-build - remove build artifacts" 6 | @echo "clean-pyc - remove Python file artifacts" 7 | @echo "clean-test - remove test and coverage artifacts" 8 | @echo "lint - check style with flake8" 9 | @echo "test - run tests quickly with the default Python" 10 | @echo "test-all - run tests on every Python version with tox" 11 | @echo "coverage - check code coverage quickly with the default Python" 12 | @echo "docs - generate Sphinx HTML documentation, including API docs" 13 | @echo "release - package and upload a release" 14 | @echo "dist - package" 15 | @echo "install - install the package to the active Python's site-packages" 16 | 17 | clean: clean-build clean-pyc clean-test 18 | 19 | clean-build: 20 | rm -fr build/ 21 | rm -fr dist/ 22 | rm -fr .eggs/ 23 | find . -name '*.egg-info' -exec rm -fr {} + 24 | find . -name '*.egg' -exec rm -f {} + 25 | 26 | clean-pyc: 27 | find . -name '*.pyc' -exec rm -f {} + 28 | find . -name '*.pyo' -exec rm -f {} + 29 | find . -name '*~' -exec rm -f {} + 30 | find . 
-name '__pycache__' -exec rm -fr {} + 31 | 32 | clean-test: 33 | rm -fr .tox/ 34 | rm -f .coverage 35 | rm -fr htmlcov/ 36 | 37 | lint: 38 | flake8 zeroservices tests 39 | 40 | test: 41 | python setup.py test 42 | 43 | test-all: 44 | tox 45 | 46 | coverage: 47 | coverage run --source zeroservices setup.py test 48 | coverage report -m 49 | coverage html 50 | open htmlcov/index.html 51 | 52 | docs: 53 | rm -f docs/zeroservices.rst 54 | rm -f docs/modules.rst 55 | sphinx-apidoc -o docs/ zeroservices 56 | $(MAKE) -C docs clean 57 | $(MAKE) -C docs html 58 | open docs/_build/html/index.html 59 | 60 | release: clean 61 | python setup.py sdist upload 62 | python setup.py bdist_wheel upload 63 | 64 | dist: clean 65 | python setup.py sdist 66 | python setup.py bdist_wheel 67 | ls -l dist 68 | 69 | install: clean 70 | python setup.py install 71 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ZeroServices 2 | ============ 3 | 4 | Network services made easy and Micro-Services architectures made so easy. 5 | 6 | - Free software: MIT license 7 | - Documentation: https://zeroservices.readthedocs.org. 8 | 9 | QuickStart 10 | ---------- 11 | 12 | Let’s imagine you want an API exposing two resources Foo and Bar. 13 | 14 | First some imports: 15 | 16 | :: 17 | 18 | from zeroservices import ZeroMQMedium, ResourceService, RealtimeResourceService 19 | from zeroservices.backend.mongodb import MongoDBCollection, MongoDBResource 20 | from zeroservices.services import get_http_interface 21 | 22 | Now we declare a Service, it’s one node in the cluster: 23 | 24 | :: 25 | 26 | service = RealtimeResourceService('test', ZeroMQMedium(port_random=True)) 27 | 28 | We give it a name ``test`` and tell it to use ZeroMQ for communication. 29 | 30 | Now let’s register our resources. 
31 | 32 | :: 33 | 34 | service.register_resource(MongoDBCollection("foo", database_name="test")) 35 | service.register_resource(MongoDBCollection("bar", database_name="test")) 36 | 37 | We declare two resources, ``foo`` and ``bar``, both using the ``test`` 38 | MongoDB database. 39 | 40 | Last but not the least, we still need our API but before we need to 41 | declare how users will be authenticated, let’s allow all users right 42 | now: 43 | 44 | :: 45 | 46 | class Auth(object): 47 | 48 | def authorized(self, handler, resource, method): 49 | return True 50 | 51 | And now let’s add an API using this Auth logic: 52 | 53 | :: 54 | 55 | api = get_http_interface(service, port='5001', auth=Auth(), allowed_origins="*") 56 | 57 | We use the get\_http\_interface, pass the service and our custom Auth 58 | object. We tell the API to listen on 5001 port and allow all origins 59 | (for CORS requests). 60 | 61 | And now let’s start the whole service: 62 | 63 | :: 64 | 65 | service.main() 66 | 67 | The whole example file will look like: 68 | 69 | :: 70 | 71 | from zeroservices import ZeroMQMedium, ResourceService, RealtimeResourceService 72 | from zeroservices.backend.mongodb import MongoDBCollection, MongoDBResource 73 | from zeroservices.services import get_http_interface 74 | 75 | 76 | class Auth(object): 77 | 78 | def authorized(self, handler, resource, method): 79 | return True 80 | 81 | 82 | if __name__ == '__main__': 83 | service = RealtimeResourceService('test', ZeroMQMedium(port_random=True)) 84 | api = get_http_interface(service, port='5001', auth=Auth(), allowed_origins="*") 85 | service.register_resource(MongoDBCollection("foo", "test")) 86 | service.register_resource(MongoDBCollection("bar", "test")) 87 | 88 | service.main() 89 | 90 | Let’s play with our API now (I use the awesome httpie project for 91 | examples): 92 | 93 | :: 94 | 95 | $> http localhost:5001 96 | HTTP/1.1 200 OK 97 | Access-Control-Allow-Headers: X-CUSTOM-ACTION 98 | Access-Control-Allow-Origin: * 
99 | Content-Length: 20 100 | Content-Type: text/html; charset=UTF-8 101 | Date: Mon, 23 Feb 2015 22:43:19 GMT 102 | Etag: "af6572026125710f90d41f0ffe6e63e6f4089ece" 103 | Server: TornadoServer/4.0.2 104 | 105 | Hello world from api 106 | 107 | Nothing fancy here, let’s try to play with our foo resource (I will drop 108 | some useless headers for readability): 109 | 110 | :: 111 | 112 | $> http localhost:5001/foo/ 113 | HTTP/1.1 200 OK 114 | Content-Type: application/json 115 | 116 | [] 117 | 118 | Let’s try to add a new foo resource: 119 | 120 | :: 121 | 122 | $> http POST localhost:5001/foo/ resource_id=#1 resource_data:='{"hello": "world"}' 123 | 124 | Skeleton generator 125 | ------------------ 126 | 127 | You will find in the skeletons directory some cookiecutter templates that 128 | you can use to quickstart a new project or play quickly with 129 | ZeroServices. 130 | 131 | Install cookiecutter (https://github.com/audreyr/cookiecutter), go into 132 | one subdirectory and type: 133 | 134 | :: 135 | 136 | cookiecutter . 137 | 138 | Answer all questions and then you will have a directory with your brand 139 | new project.
import logging

import asyncio
from zeroservices import ZeroMQMedium, BaseService
from zeroservices.discovery import UdpDiscoveryMedium


class SnifferService(BaseService):
    """Passive service that prints every event observed on the network.

    It joins the cluster like any other node but only listens: each
    incoming event is dumped to stdout in a ``[type] args kwargs`` line.
    """

    def __init__(self, *args, **kwargs):
        super(SnifferService, self).__init__(*args, **kwargs)
        # Silence our own service/medium chatter so that only the
        # sniffed events reach stdout.
        for noisy_logger in (self.logger, self.medium.logger):
            noisy_logger.setLevel(logging.ERROR)

    def on_event(self, message_type, *args, **kwargs):
        """Print the raw event, then yield control back to the loop."""
        print("[{}] {} {}".format(message_type, args, kwargs))
        # Tiny sleep: cooperative yield so the loop can keep serving
        # other callbacks between events.
        yield from asyncio.sleep(0.0000001)


def main():
    """Wire a sniffer service onto a ZeroMQ medium and run it forever."""
    event_loop = asyncio.get_event_loop()
    transport = ZeroMQMedium(event_loop, UdpDiscoveryMedium)
    service = SnifferService('sniffer', transport)
    event_loop.run_until_complete(service.start())
    event_loop.run_forever()


if __name__ == '__main__':
    main()
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/zeroservices.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/zeroservices.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/zeroservices" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/zeroservices" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 
112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 
163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../AUTHORS.rst 2 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # zeroservices documentation build configuration file, created by 5 | # sphinx-quickstart on Tue Jul 9 22:26:36 2013. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | 19 | # If extensions (or modules to document with autodoc) are in another 20 | # directory, add these directories to sys.path here. If the directory is 21 | # relative to the documentation root, use os.path.abspath to make it 22 | # absolute, like shown here. 
23 | #sys.path.insert(0, os.path.abspath('.')) 24 | 25 | # Get the project root dir, which is the parent dir of this 26 | cwd = os.getcwd() 27 | project_root = os.path.dirname(cwd) 28 | 29 | # Insert the project root dir as the first element in the PYTHONPATH. 30 | # This lets us ensure that the source package is imported, and that its 31 | # version is used. 32 | sys.path.insert(0, project_root) 33 | 34 | import zeroservices 35 | 36 | # -- General configuration --------------------------------------------- 37 | 38 | # If your documentation needs a minimal Sphinx version, state it here. 39 | #needs_sphinx = '1.0' 40 | 41 | # Add any Sphinx extension module names here, as strings. They can be 42 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 43 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] 44 | 45 | # Add any paths that contain templates here, relative to this directory. 46 | templates_path = ['_templates'] 47 | 48 | # The suffix of source filenames. 49 | source_suffix = '.rst' 50 | 51 | # The encoding of source files. 52 | #source_encoding = 'utf-8-sig' 53 | 54 | # The master toctree document. 55 | master_doc = 'index' 56 | 57 | # General information about the project. 58 | project = u'ZeroServices' 59 | copyright = u'2015, Boris Feld' 60 | 61 | # The version info for the project you're documenting, acts as replacement 62 | # for |version| and |release|, also used in various other places throughout 63 | # the built documents. 64 | # 65 | # The short X.Y version. 66 | version = zeroservices.__version__ 67 | # The full version, including alpha/beta/rc tags. 68 | release = zeroservices.__version__ 69 | 70 | # The language for content autogenerated by Sphinx. Refer to documentation 71 | # for a list of supported languages. 
72 | #language = None 73 | 74 | # There are two options for replacing |today|: either, you set today to 75 | # some non-false value, then it is used: 76 | #today = '' 77 | # Else, today_fmt is used as the format for a strftime call. 78 | #today_fmt = '%B %d, %Y' 79 | 80 | # List of patterns, relative to source directory, that match files and 81 | # directories to ignore when looking for source files. 82 | exclude_patterns = ['_build'] 83 | 84 | # The reST default role (used for this markup: `text`) to use for all 85 | # documents. 86 | #default_role = None 87 | 88 | # If true, '()' will be appended to :func: etc. cross-reference text. 89 | #add_function_parentheses = True 90 | 91 | # If true, the current module name will be prepended to all description 92 | # unit titles (such as .. function::). 93 | #add_module_names = True 94 | 95 | # If true, sectionauthor and moduleauthor directives will be shown in the 96 | # output. They are ignored by default. 97 | #show_authors = False 98 | 99 | # The name of the Pygments (syntax highlighting) style to use. 100 | pygments_style = 'sphinx' 101 | 102 | # A list of ignored prefixes for module index sorting. 103 | #modindex_common_prefix = [] 104 | 105 | # If true, keep warnings as "system message" paragraphs in the built 106 | # documents. 107 | #keep_warnings = False 108 | 109 | 110 | # -- Options for HTML output ------------------------------------------- 111 | 112 | # The theme to use for HTML and HTML Help pages. See the documentation for 113 | # a list of builtin themes. 114 | html_theme = 'default' 115 | 116 | # Theme options are theme-specific and customize the look and feel of a 117 | # theme further. For a list of options available for each theme, see the 118 | # documentation. 119 | #html_theme_options = {} 120 | 121 | # Add any paths that contain custom themes here, relative to this directory. 122 | #html_theme_path = [] 123 | 124 | # The name for this set of Sphinx documents. 
If None, it defaults to 125 | # " v documentation". 126 | #html_title = None 127 | 128 | # A shorter title for the navigation bar. Default is the same as 129 | # html_title. 130 | #html_short_title = None 131 | 132 | # The name of an image file (relative to this directory) to place at the 133 | # top of the sidebar. 134 | #html_logo = None 135 | 136 | # The name of an image file (within the static path) to use as favicon 137 | # of the docs. This file should be a Windows icon file (.ico) being 138 | # 16x16 or 32x32 pixels large. 139 | #html_favicon = None 140 | 141 | # Add any paths that contain custom static files (such as style sheets) 142 | # here, relative to this directory. They are copied after the builtin 143 | # static files, so a file named "default.css" will overwrite the builtin 144 | # "default.css". 145 | html_static_path = ['_static'] 146 | 147 | # If not '', a 'Last updated on:' timestamp is inserted at every page 148 | # bottom, using the given strftime format. 149 | #html_last_updated_fmt = '%b %d, %Y' 150 | 151 | # If true, SmartyPants will be used to convert quotes and dashes to 152 | # typographically correct entities. 153 | #html_use_smartypants = True 154 | 155 | # Custom sidebar templates, maps document names to template names. 156 | #html_sidebars = {} 157 | 158 | # Additional templates that should be rendered to pages, maps page names 159 | # to template names. 160 | #html_additional_pages = {} 161 | 162 | # If false, no module index is generated. 163 | #html_domain_indices = True 164 | 165 | # If false, no index is generated. 166 | #html_use_index = True 167 | 168 | # If true, the index is split into individual pages for each letter. 169 | #html_split_index = False 170 | 171 | # If true, links to the reST sources are added to the pages. 172 | #html_show_sourcelink = True 173 | 174 | # If true, "Created using Sphinx" is shown in the HTML footer. 175 | # Default is True. 
176 | #html_show_sphinx = True 177 | 178 | # If true, "(C) Copyright ..." is shown in the HTML footer. 179 | # Default is True. 180 | #html_show_copyright = True 181 | 182 | # If true, an OpenSearch description file will be output, and all pages 183 | # will contain a tag referring to it. The value of this option 184 | # must be the base URL from which the finished HTML is served. 185 | #html_use_opensearch = '' 186 | 187 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 188 | #html_file_suffix = None 189 | 190 | # Output file base name for HTML help builder. 191 | htmlhelp_basename = 'zeroservicesdoc' 192 | 193 | 194 | # -- Options for LaTeX output ------------------------------------------ 195 | 196 | latex_elements = { 197 | # The paper size ('letterpaper' or 'a4paper'). 198 | #'papersize': 'letterpaper', 199 | 200 | # The font size ('10pt', '11pt' or '12pt'). 201 | #'pointsize': '10pt', 202 | 203 | # Additional stuff for the LaTeX preamble. 204 | #'preamble': '', 205 | } 206 | 207 | # Grouping the document tree into LaTeX files. List of tuples 208 | # (source start file, target name, title, author, documentclass 209 | # [howto/manual]). 210 | latex_documents = [ 211 | ('index', 'zeroservices.tex', 212 | u'ZeroServices Documentation', 213 | u'Boris Feld', 'manual'), 214 | ] 215 | 216 | # The name of an image file (relative to this directory) to place at 217 | # the top of the title page. 218 | #latex_logo = None 219 | 220 | # For "manual" documents, if this is true, then toplevel headings 221 | # are parts, not chapters. 222 | #latex_use_parts = False 223 | 224 | # If true, show page references after internal links. 225 | #latex_show_pagerefs = False 226 | 227 | # If true, show URL addresses after external links. 228 | #latex_show_urls = False 229 | 230 | # Documents to append as an appendix to all manuals. 231 | #latex_appendices = [] 232 | 233 | # If false, no module index is generated. 
234 | #latex_domain_indices = True 235 | 236 | 237 | # -- Options for manual page output ------------------------------------ 238 | 239 | # One entry per manual page. List of tuples 240 | # (source start file, name, description, authors, manual section). 241 | man_pages = [ 242 | ('index', 'zeroservices', 243 | u'ZeroServices Documentation', 244 | [u'Boris Feld'], 1) 245 | ] 246 | 247 | # If true, show URL addresses after external links. 248 | #man_show_urls = False 249 | 250 | 251 | # -- Options for Texinfo output ---------------------------------------- 252 | 253 | # Grouping the document tree into Texinfo files. List of tuples 254 | # (source start file, target name, title, author, 255 | # dir menu entry, description, category) 256 | texinfo_documents = [ 257 | ('index', 'zeroservices', 258 | u'ZeroServices Documentation', 259 | u'Boris Feld', 260 | 'zeroservices', 261 | 'One line description of project.', 262 | 'Miscellaneous'), 263 | ] 264 | 265 | # Documents to append as an appendix to all manuals. 266 | #texinfo_appendices = [] 267 | 268 | # If false, no module index is generated. 269 | #texinfo_domain_indices = True 270 | 271 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 272 | #texinfo_show_urls = 'footnote' 273 | 274 | # If true, do not generate a @detailmenu in the "Top" node's menu. 275 | #texinfo_no_detailmenu = False 276 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. 
include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. zeroservices documentation master file, created by 2 | sphinx-quickstart on Tue Jul 9 22:26:36 2013. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to ZeroServices's documentation! 7 | ====================================== 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | readme 15 | installation 16 | usage 17 | contributing 18 | authors 19 | history 20 | 21 | Indices and tables 22 | ================== 23 | 24 | * :ref:`genindex` 25 | * :ref:`modindex` 26 | * :ref:`search` 27 | 28 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | At the command line:: 6 | 7 | $ easy_install zeroservices 8 | 9 | Or, if you have virtualenvwrapper installed:: 10 | 11 | $ mkvirtualenv zeroservices 12 | $ pip install zeroservices 13 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. 
html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\zeroservices.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\zeroservices.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 
129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 
197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. 
include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Usage 3 | ======== 4 | 5 | To use ZeroServices in a project:: 6 | 7 | import zeroservices 8 | -------------------------------------------------------------------------------- /examples/chat/chat.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 11 | 12 | 13 |
14 |
15 |
16 |
17 |

Zeroservices Demo

18 |

Network services made easy.

19 |
20 | 21 |
22 |
23 | 24 |
25 |

26 |
27 | 28 |
29 | 30 | 31 |
32 |
33 |
34 | 35 |
36 |

Users

37 |
    38 |
  • {{ name }} (You)
  • 39 |
40 |
41 |
42 |
43 | 44 | 166 | 167 | 168 | 169 | -------------------------------------------------------------------------------- /examples/chat/chat.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import aiohttp 3 | from aiohttp import web 4 | 5 | import sys 6 | import json 7 | 8 | from zeroservices import BaseService 9 | from zeroservices import ZeroMQMedium 10 | from zeroservices.discovery import UdpDiscoveryMedium 11 | 12 | from jinja2 import Environment, FileSystemLoader 13 | 14 | 15 | clients = set() 16 | 17 | 18 | class ChatService(BaseService): 19 | 20 | @asyncio.coroutine 21 | def on_event(self, message_type, **kwargs): 22 | """Called when a multicast message is received 23 | """ 24 | msg = {'type': message_type} 25 | msg.update(kwargs) 26 | self.send_to_all_clients(json.dumps(msg)) 27 | 28 | @asyncio.coroutine 29 | def on_message(self, message_type, **kwargs): 30 | """Called when an unicast message is received 31 | """ 32 | msg = {'type': message_type} 33 | msg.update(kwargs) 34 | self.send_to_all_clients(json.dumps(msg)) 35 | 36 | def on_peer_join(self, node_info): 37 | """Called when a new peer joins 38 | """ 39 | msg = json.dumps({'type': 'user_join', 'id': node_info['node_id'], 'name': node_info['name']}) 40 | self.send_to_all_clients(msg) 41 | 42 | def on_peer_leave(self, node_info): 43 | """Called when a peer leaves 44 | """ 45 | msg = json.dumps({'type': 'user_leave', 'id': node_info['node_id'], 'name': node_info['name']}) 46 | self.send_to_all_clients(msg) 47 | 48 | def send_to_all_clients(self, msg): 49 | for client in clients: 50 | client.send_str(msg) 51 | 52 | loop = asyncio.get_event_loop() 53 | medium = ZeroMQMedium(loop, UdpDiscoveryMedium, node_id=sys.argv[1]) 54 | s = ChatService(sys.argv[1], medium) 55 | 56 | env = Environment(loader=FileSystemLoader('.')) 57 | 58 | @asyncio.coroutine 59 | def main_handler(request): 60 | template = env.get_template('chat.html') 61 | content = 
template.render(port=int(sys.argv[2]), name=sys.argv[1]) 62 | return web.Response(text=content, content_type="text/html") 63 | 64 | 65 | @asyncio.coroutine 66 | def websocket_handler(request): 67 | 68 | ws = web.WebSocketResponse() 69 | ws.start(request) 70 | 71 | clients.add(ws) 72 | for node_id, node in s.directory.items(): 73 | msg = json.dumps({'type': 'user_join', 74 | 'id': node_id, 'name': node['name']}) 75 | ws.send_str(msg) 76 | 77 | while True: 78 | msg = yield from ws.receive() 79 | 80 | if msg.tp == aiohttp.MsgType.text: 81 | if msg.data == 'close': 82 | clients.remove(ws) 83 | yield from ws.close() 84 | else: 85 | message = json.loads(msg.data) 86 | 87 | if message['type'] == 'message': 88 | msg = {'username': sys.argv[1], 'message': message['data']['message']} 89 | yield from s.publish(str(message['type']), msg) 90 | elif message['type'] == 'direct_message': 91 | msg = {'from': sys.argv[1], 'message': message['data']['message']} 92 | yield from s.send(message['data']['to'], msg, 93 | message_type=str(message['type']), 94 | wait_response=False) 95 | elif msg.tp == aiohttp.MsgType.close: 96 | clients.remove(ws) 97 | elif msg.tp == aiohttp.MsgType.error: 98 | clients.remove(ws) 99 | print('ws connection closed with exception %s', 100 | ws.exception()) 101 | 102 | return ws 103 | 104 | app = web.Application() 105 | app.router.add_route('GET', '/', main_handler) 106 | app.router.add_static('/static', '.') 107 | app.router.add_route('*', '/websocket', websocket_handler) 108 | 109 | if __name__ == '__main__': 110 | handler = app.make_handler() 111 | f = loop.create_server(handler, '0.0.0.0', int(sys.argv[2])) 112 | srv = loop.run_until_complete(f) 113 | loop.run_until_complete(s.start()) 114 | print('serving on', srv.sockets[0].getsockname()) 115 | try: 116 | loop.run_forever() 117 | except KeyboardInterrupt: 118 | pass 119 | finally: 120 | loop.run_until_complete(handler.finish_connections(1.0)) 121 | srv.close() 122 | s.close() 123 | 
loop.run_until_complete(srv.wait_closed()) 124 | loop.run_until_complete(app.finish()) 125 | loop.close() 126 | -------------------------------------------------------------------------------- /examples/chat/jquery.json-2.2.min.js: -------------------------------------------------------------------------------- 1 | 2 | (function($){$.toJSON=function(o) 3 | {if(typeof(JSON)=='object'&&JSON.stringify) 4 | return JSON.stringify(o);var type=typeof(o);if(o===null) 5 | return"null";if(type=="undefined") 6 | return undefined;if(type=="number"||type=="boolean") 7 | return o+"";if(type=="string") 8 | return $.quoteString(o);if(type=='object') 9 | {if(typeof o.toJSON=="function") 10 | return $.toJSON(o.toJSON());if(o.constructor===Date) 11 | {var month=o.getUTCMonth()+1;if(month<10)month='0'+month;var day=o.getUTCDate();if(day<10)day='0'+day;var year=o.getUTCFullYear();var hours=o.getUTCHours();if(hours<10)hours='0'+hours;var minutes=o.getUTCMinutes();if(minutes<10)minutes='0'+minutes;var seconds=o.getUTCSeconds();if(seconds<10)seconds='0'+seconds;var milli=o.getUTCMilliseconds();if(milli<100)milli='0'+milli;if(milli<10)milli='0'+milli;return'"'+year+'-'+month+'-'+day+'T'+ 12 | hours+':'+minutes+':'+seconds+'.'+milli+'Z"';} 13 | if(o.constructor===Array) 14 | {var ret=[];for(var i=0;i 56 | {this.props.id} 57 | {this.props.value} 58 | {status} 59 | {this.props.result} 60 | 61 | ) 62 | } 63 | } 64 | 65 | class Table extends React.Component { 66 | 67 | render() { 68 | 69 | let rows = []; 70 | let resources = tree.get(); 71 | for (let key in resources) { 72 | rows.push(); 73 | } 74 | 75 | return ( 76 |
77 |

Resources

78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | {rows} 89 | 90 |
IDValueStatusResult (value * value)
91 |
92 | ) 93 | } 94 | } 95 | 96 | var Power = React.createClass({ 97 | 98 | render: function() { 99 | return ( 100 |
101 |

{this.props.resource_id} power

102 |

Description: {this.props.resource_data.description}

103 |

Status: {this.props.resource_data.status}

104 |

Value: {this.props.resource_data.value}

105 |

Result: {this.props.resource_data.result}

106 |
107 | ); 108 | } 109 | }); 110 | 111 | let query = axios.get("http://localhost:5001/power").then(function(response) { 112 | let data = response.data; 113 | for(let key in data) { 114 | let resource_id = data[key].resource_id; 115 | let resource_data = data[key].resource_data; 116 | tree.set(resource_id, resource_data); 117 | } 118 | }); 119 | 120 | var render = function() { 121 | ReactDom.render(, document.getElementById('content')); 122 | } 123 | 124 | 125 | tree.on('update', render); 126 | 127 | render(); 128 | -------------------------------------------------------------------------------- /examples/fosdem_2015/bower.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "smartforge", 3 | "version": "0.0.0", 4 | "homepage": "https://github.com/Lothiraldan/SmartForge", 5 | "authors": [ 6 | "FELD Boris " 7 | ], 8 | "license": "Copyright", 9 | "private": true, 10 | "ignore": [ 11 | "**/.*", 12 | "node_modules", 13 | "bower_components", 14 | "test", 15 | "tests" 16 | ], 17 | "dependencies": { 18 | "bootstrap": "~3.3.5" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /examples/fosdem_2015/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Hello React/ZeroServices 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 |
14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /examples/fosdem_2015/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "miniflux-todomvc", 3 | "description": "Demonstrating a simple Flux implementation", 4 | "author": "Jorin Vogel", 5 | "devDependencies": { 6 | "babel": "^5.6.14", 7 | "babel-core": "^5.6.15", 8 | "babel-eslint": "^3.1.19", 9 | "babel-loader": "^5.2.2", 10 | "babel-runtime": "", 11 | "classnames": "^2.1.2", 12 | "es6-promise": "^2.3.0", 13 | "immutable": "^3.7.4", 14 | "node-libs-browser": "^0.5.2", 15 | "node-uuid": "^1.4.3", 16 | "react": "0.14.0-rc1", 17 | "react-hot-loader": "^1.2.7", 18 | "react-dom": "0.14.0-rc1", 19 | "react-router": "git+ssh://git@github.com:rackt/react-router.git", 20 | "history": "", 21 | "lodash": "", 22 | "react-mixin": "", 23 | "baobab": "", 24 | "baobab-react": "", 25 | "standard": "^4.3.1", 26 | "tap-spec": "^4.0.2", 27 | "tape": "^4.0.0", 28 | "watch": "^0.16.0", 29 | "webpack": "^1.10.0", 30 | "webpack-dev-server": "^1.10.1", 31 | "css-loader": "", 32 | "style-loader": "", 33 | "url-loader": "", 34 | "file-loader": "", 35 | "axios": "", 36 | "events": "", 37 | "mapify": "", 38 | "bower": "" 39 | }, 40 | "scripts": { 41 | "start": "node webpack.js", 42 | "lint": "standard app", 43 | "build": "npm run lint && webpack --config webpack.prod.js", 44 | "build-dev": "CLIENT_ENV=development npm run build", 45 | "test": "npm run lint && babel-node app/test.js | tap-spec", 46 | "watch-tests": "watch 'npm run test' app" 47 | }, 48 | "standard": { 49 | "parser": "babel-eslint" 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /examples/fosdem_2015/power.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from zeroservices import ZeroMQMedium, ResourceService 3 | from zeroservices.discovery import 
UdpDiscoveryMedium 4 | from zeroservices.backend.mongodb import MongoDBCollection 5 | 6 | 7 | class PowerCollection(MongoDBCollection): 8 | 9 | def __init__(self, *args, **kwargs): 10 | super(PowerCollection, self).__init__(*args, **kwargs) 11 | self.collection.ensure_index([('description', 'text')]) 12 | 13 | 14 | def main(): 15 | loop = asyncio.get_event_loop() 16 | medium = ZeroMQMedium(loop, UdpDiscoveryMedium) 17 | service = ResourceService('power_service', medium) 18 | service.register_resource(PowerCollection("power", "db")) 19 | loop.run_until_complete(service.start()) 20 | loop.run_forever() 21 | 22 | if __name__ == '__main__': 23 | main() 24 | -------------------------------------------------------------------------------- /examples/fosdem_2015/randomizer.py: -------------------------------------------------------------------------------- 1 | import pymongo 2 | import random 3 | import time 4 | import json 5 | import requests 6 | 7 | NUMBER = 20 8 | 9 | 10 | # Clean the DB 11 | con = pymongo.MongoClient() 12 | con.drop_database('db') 13 | 14 | time.sleep(2) 15 | 16 | # Create 100 new power 17 | for i in range(NUMBER): 18 | print 19 | resource = {"value": i, "status": "pending"} 20 | resp = requests.post('http://localhost:5001/power/', 21 | data=json.dumps({"resource_id": str(i), "resource_data": resource})) 22 | resp.raise_for_status() 23 | 24 | # Sleep 25 | 26 | time.sleep(5) 27 | 28 | while True: 29 | value = random.randint(0, 999) 30 | resource_id = random.randint(0, NUMBER-1) 31 | 32 | resp = requests.patch('http://localhost:5001/power/{}'.format(resource_id), 33 | data=json.dumps({"patch": {"$set": {"value": value, "status": "pending"}}})) 34 | time.sleep(0.05) 35 | -------------------------------------------------------------------------------- /examples/fosdem_2015/webpack.js: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | var webpack = require('webpack') 3 | var WebpackDevServer 
= require('webpack-dev-server') 4 | 5 | var config = { 6 | devtool: 'cheap-module-eval-source-map', 7 | entry: [ 8 | 'webpack-dev-server/client?http://localhost:3333', 9 | 'webpack/hot/only-dev-server', 10 | './app.jsx' 11 | ], 12 | output: { 13 | filename: 'bundle.js', 14 | path: __dirname, 15 | publicPath: '' 16 | }, 17 | plugins: [ 18 | new webpack.HotModuleReplacementPlugin(), 19 | new webpack.NoErrorsPlugin() 20 | ], 21 | resolve: { 22 | extensions: ['', '.js', '.jsx'], 23 | // Tell webpack to look for required files in bower and node 24 | modulesDirectories: ['bower_components', 'node_modules'] 25 | }, 26 | module: { 27 | loaders: [ 28 | { 29 | test: /\.jsx?$/, 30 | loaders: ['react-hot', 'babel?optional[]=runtime&stage=0'], 31 | include: path.join(__dirname, 'app') 32 | }, 33 | { 34 | test: /\.css$/, loader: "style-loader!css-loader" 35 | }, 36 | { test: /\.woff(\?v=\d+\.\d+\.\d+)?$/, loader: "url?limit=10000&minetype=application/font-woff" }, 37 | { test: /\.woff2(\?v=\d+\.\d+\.\d+)?$/, loader: "url?limit=10000&minetype=application/font-woff" }, 38 | { test: /\.ttf(\?v=\d+\.\d+\.\d+)?$/, loader: "url?limit=10000&minetype=application/octet-stream" }, 39 | { test: /\.eot(\?v=\d+\.\d+\.\d+)?$/, loader: "file" }, 40 | { test: /\.svg(\?v=\d+\.\d+\.\d+)?$/, loader: "url?limit=10000&minetype=image/svg+xml" }, 41 | { test: /\.jpg$/, loader: "file" }, 42 | { test: /\.png$/, loader: "file" }, 43 | ] 44 | } 45 | } 46 | 47 | new WebpackDevServer(webpack(config), { 48 | publicPath: config.output.publicPath, 49 | hot: true, 50 | historyApiFallback: true 51 | }).listen(3333, 'localhost', function (err, result) { 52 | if (err) { 53 | console.log(err) 54 | } 55 | 56 | console.log('Listening at localhost:3333') 57 | }) 58 | -------------------------------------------------------------------------------- /examples/fosdem_2015/webpack.prod.js: -------------------------------------------------------------------------------- 1 | var webpack = require('webpack') 2 | 3 | 
module.exports = { 4 | entry: { 5 | app: './app.jsx' 6 | }, 7 | output: { 8 | filename: 'bundle.js', 9 | path: __dirname, 10 | publicPath: '' 11 | }, 12 | plugins: [ 13 | new webpack.optimize.OccurenceOrderPlugin(), 14 | new webpack.NoErrorsPlugin(), 15 | new webpack.DefinePlugin({ 16 | 'process.env': { 17 | 'NODE_ENV': '"' + (process.env.CLIENT_ENV || 'production') + '"' 18 | } 19 | }), 20 | new webpack.optimize.UglifyJsPlugin({ 21 | compressor: { 22 | warnings: false 23 | } 24 | }) 25 | ], 26 | resolve: { 27 | extensions: ['', '.js', '.jsx'] 28 | }, 29 | module: { 30 | loaders: [ 31 | { test: /\.jsx?$/, loaders: ['babel?optional[]=runtime&stage=0'], exclude: /node_modules/ }, 32 | { test: /\.css$/, loader: 'style-loader!css-loader' }, 33 | ] 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /examples/fosdem_2015/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from zeroservices import ResourceWorker, ZeroMQMedium 4 | from zeroservices.discovery import UdpDiscoveryMedium 5 | 6 | 7 | class PowerWorker(ResourceWorker): 8 | 9 | def __init__(self, *args, **kwargs): 10 | super().__init__(*args, **kwargs) 11 | # Process each resource in status building 12 | self.register(self.do_build, 'power', status='pending') 13 | 14 | def do_build(self, resource_name, resource_data, resource_id, action): 15 | power = resource_data['value'] * resource_data['value'] 16 | yield from self.send(collection_name='power', 17 | resource_id=resource_id, 18 | action='patch', patch={"$set": {'result': power, 19 | 'status': 'done'}}) 20 | 21 | if __name__ == '__main__': 22 | loop = asyncio.get_event_loop() 23 | medium = ZeroMQMedium(loop, UdpDiscoveryMedium) 24 | worker = PowerWorker('PowerWorker', medium) 25 | loop.run_until_complete(worker.start()) 26 | loop.run_forever() 27 | -------------------------------------------------------------------------------- 
/examples/todo_react/app.py: -------------------------------------------------------------------------------- 1 | import pymongo 2 | 3 | from zeroservices import ResourceService, ZeroMQMedium 4 | from zeroservices.backend.mongodb import MongoDBCollection, MongoDBResource 5 | from zeroservices.memory import MemoryCollection, MemoryCollection 6 | from zeroservices.resources import is_callable 7 | 8 | import sys 9 | from zmq.eventloop import ioloop, zmqstream 10 | ioloop.install() 11 | import logging 12 | import json 13 | import tornado 14 | 15 | from functools import wraps 16 | from base64 import decodestring 17 | from tornado import gen 18 | from tornado import web 19 | from tornado import websocket 20 | 21 | from zeroservices import ResourceService, ZeroMQMedium 22 | from zeroservices.services import get_http_interface, BasicAuth 23 | 24 | from zeroservices.utils import accumulate 25 | 26 | 27 | # Http utils 28 | 29 | class Auth(object): 30 | 31 | def authorized(self, handler, resource, method): 32 | return True 33 | 34 | 35 | # APP 36 | class TODOService(ResourceService): 37 | 38 | def on_event(self, message_type, data): 39 | self.logger.info("On event %s", locals()) 40 | application.clients[0].publishToRoom('*', 'event', data) 41 | 42 | topics = accumulate(message_type.split('.'), lambda x, y: '.'.join((x, y))) 43 | 44 | for topic in topics: 45 | application.clients[0].publishToRoom(topic, 'event', data) 46 | 47 | 48 | if __name__ == '__main__': 49 | todo = TODOService('todo_mvc', ZeroMQMedium(port_random=True)) 50 | application = get_http_interface(todo, port=5001, auth=Auth(), allowed_origins="*") 51 | todo.register_resource(MongoDBCollection("todo_list")) 52 | todo.register_resource(MongoDBCollection("todo_item")) 53 | todo.main() 54 | -------------------------------------------------------------------------------- /examples/todo_react/bower.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "smartforge", 3 
| "version": "0.0.0", 4 | "homepage": "https://github.com/Lothiraldan/SmartForge", 5 | "authors": [ 6 | "FELD Boris " 7 | ], 8 | "license": "Copyright", 9 | "private": true, 10 | "ignore": [ 11 | "**/.*", 12 | "node_modules", 13 | "bower_components", 14 | "test", 15 | "tests" 16 | ], 17 | "dependencies": { 18 | "jquery": "~2.1.1", 19 | "sockjs": "~0.3.4", 20 | "sockjs_reconnect": "knowitnothing/sockjs_reconnect", 21 | "react": "~0.12.0" 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /examples/todo_react/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Hello React/ZeroServices 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 |
15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /examples/todo_react/todo: -------------------------------------------------------------------------------- 1 | CORS 2 | Access-Control-Allow-Origin 3 | OPTIONS 4 | on_event multi_room 5 | publish -> self.on_event 6 | -------------------------------------------------------------------------------- /examples/todo_react/todo.js: -------------------------------------------------------------------------------- 1 | /** @jsx React.DOM */ 2 | var TodoItem = React.createClass({ 3 | componentDidMount: function() { 4 | $.get("http://localhost:5001/todo_item/" + this.props.resource_id + '/', function(result) { 5 | if (this.isMounted()) { 6 | this.setState(result); 7 | } 8 | }.bind(this)); 9 | }, 10 | 11 | render: function() { 12 | return ( 13 |
  • {this.props.resource_id}
  • 14 | ); 15 | } 16 | }) 17 | 18 | var TodoList = React.createClass({ 19 | handleSubmit: function(e) { 20 | e.preventDefault(); 21 | var new_item_name = this.refs[this.props.resource_id + '_new_item_name'].getDOMNode().value.trim(); 22 | this.refs[this.props.resource_id + '_new_item_name'].getDOMNode().value = ''; 23 | 24 | // Create new item 25 | $.ajax({ 26 | url: "http://localhost:5001/todo_item/" + new_item_name + "/", 27 | type: "POST", 28 | data: JSON.stringify({"resource_data": {}}), 29 | dataType: "json", 30 | beforeSend: function(x) { 31 | if (x && x.overrideMimeType) { 32 | x.overrideMimeType("application/json;charset=UTF-8"); 33 | } 34 | }, 35 | }); 36 | 37 | // Link it 38 | $.ajax({ 39 | url: "http://localhost:5001/todo_list/" + this.props.resource_id + "/", 40 | type: "POST", 41 | headers: { 42 | "X-CUSTOM-ACTION": "add_link", 43 | }, 44 | data: JSON.stringify({"relation": 'children', 'target_id': ['todo_item', new_item_name], 'title': 'Item'}), 45 | dataType: "json", 46 | beforeSend: function(x) { 47 | if (x && x.overrideMimeType) { 48 | x.overrideMimeType("application/json;charset=UTF-8"); 49 | } 50 | }, 51 | }); 52 | }, 53 | 54 | render: function() { 55 | var links = this.props.resource_data._links || {'children': []}; 56 | 57 | return ( 58 |
    59 |

    {this.props.resource_id} list

    60 |
      61 | {links.children.map(function(link) { 62 | return 63 | })} 64 |
    65 |
    66 | 67 | 68 | 69 |
    70 | ); 71 | } 72 | }) 73 | 74 | var TodoApp = React.createClass({ 75 | getInitialState: function() { 76 | this.sock = new SockReconnect('http://localhost:5001/realtime', null, null, this.onmessage, this.on_connect); 77 | this.sock.connect(); 78 | 79 | return {'lists': {}, 'items': {}}; 80 | }, 81 | 82 | componentDidMount: function() { 83 | $.get("http://localhost:5001/todo_list/", function(result) { 84 | if (this.isMounted()) { 85 | var lists = {}; 86 | var result_lists = result; 87 | for(i in result_lists) { 88 | lists[result_lists[i].resource_id] = result_lists[i]; 89 | } 90 | this.setState({'lists': lists}); 91 | } 92 | }.bind(this)); 93 | }, 94 | 95 | onmessage: function(evt) { 96 | var evt = evt.data; 97 | if(evt.data.action == 'patch') { 98 | this.update(evt.data.data.patch['$set']); 99 | } 100 | else if(evt.data.action == 'add_link') { 101 | var lists = this.state.lists; 102 | var resource = lists[evt.data.resource_id].resource_data; 103 | if (resource._links == undefined) { 104 | resource._links = {}; 105 | } 106 | var links = resource._links; 107 | if(links[evt.data.relation] == undefined) { 108 | links[evt.data.relation] = []; 109 | } 110 | links[evt.data.relation].push({'title': evt.data.title, 'target_id': evt.data.target_id}); 111 | this.setState({'lists': lists}); 112 | } 113 | else if(evt.data.action == 'create') { 114 | if(evt.data.resource_name == 'todo_list') { 115 | // Register to resource events 116 | this.register_resource_event(evt.data.resource_id); 117 | 118 | // Add list 119 | var lists = this.state.lists; 120 | lists[evt.data.resource_id] = evt.data; 121 | this.setState({'lists': lists}); 122 | } 123 | } 124 | }, 125 | 126 | register_resource_event: function(resource_id) { 127 | this.sock.send(JSON.stringify({'name': 'join', 'data': 128 | {'topic': 'todo_list.add_link.' + resource_id}})); 129 | this.sock.send(JSON.stringify({'name': 'join', 'data': 130 | {'topic': 'todo_list.patch.' 
+ resource_id}})); 131 | this.sock.send(JSON.stringify({'name': 'join', 'data': 132 | {'topic': 'todo_list.delete.' + resource_id}})); 133 | }, 134 | 135 | on_connect: function() { 136 | this.sock.send(JSON.stringify({'name': 'join', 'data': 137 | {'topic': 'todo_list.create'}})); 138 | 139 | for(i in this.state.lists) { 140 | // Register to already retrieved resources events 141 | this.register_resource_event(this.state.lists[i].resource_id); 142 | } 143 | }, 144 | 145 | handleNewList: function(e) { 146 | e.preventDefault(); 147 | var new_list_name = this.refs.new_list_name.getDOMNode().value.trim(); 148 | this.refs.new_list_name.getDOMNode().value = ''; 149 | $.ajax({ 150 | url: "http://localhost:5001/todo_list/" + new_list_name + "/", 151 | type: "POST", 152 | data: JSON.stringify({"resource_data": {}}), 153 | dataType: "json", 154 | beforeSend: function(x) { 155 | if (x && x.overrideMimeType) { 156 | x.overrideMimeType("application/json;charset=UTF-8"); 157 | } 158 | }, 159 | }); 160 | }, 161 | 162 | render: function() { 163 | 164 | var lists = []; 165 | for(list_id in this.state.lists) { 166 | var list = this.state.lists[list_id]; 167 | lists.push() 168 | } 169 | 170 | return ( 171 |
    172 |
    173 | 174 | 175 | 176 | 177 | {lists} 178 | 179 |
    180 | ); 181 | } 182 | }); 183 | 184 | React.render(, document.body); 185 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pyzmq 2 | pymongo>=3.0 3 | mock 4 | voluptuous 5 | requests 6 | responses 7 | nose 8 | six 9 | aiozmq 10 | aiohttp 11 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [wheel] 2 | universal = 1 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | try: 6 | from setuptools import setup 7 | except ImportError: 8 | from distutils.core import setup 9 | 10 | 11 | with open('README.rst') as readme_file: 12 | readme = readme_file.read() 13 | 14 | with open('HISTORY.rst') as history_file: 15 | history = history_file.read().replace('.. 
:changelog:', '') 16 | 17 | with open('requirements.txt') as requirements_file: 18 | requirements = requirements_file.read().splitlines() 19 | 20 | test_requirements = [ 21 | ] 22 | 23 | setup( 24 | name='zeroservices', 25 | version='0.1.0', 26 | description="Network services made easy and Micro-Services architectures made fucking easy.", 27 | long_description=readme + '\n\n' + history, 28 | author="Boris Feld", 29 | author_email='lothiraldan@gmail.com', 30 | url='https://github.com/lothiraldan/zeroservices', 31 | packages=[ 32 | 'zeroservices', 'zeroservices.medium', 33 | 'zeroservices.backend', 'zeroservices.services', 34 | 'zeroservices.discovery' 35 | ], 36 | package_dir={'zeroservices': 37 | 'zeroservices'}, 38 | include_package_data=True, 39 | install_requires=requirements, 40 | license="MIT", 41 | zip_safe=False, 42 | keywords='zeroservices', 43 | classifiers=[ 44 | 'Development Status :: 2 - Pre-Alpha', 45 | 'Intended Audience :: Developers', 46 | 'License :: OSI Approved :: MIT License', 47 | 'Natural Language :: English', 48 | "Programming Language :: Python :: 2", 49 | 'Programming Language :: Python :: 2.6', 50 | 'Programming Language :: Python :: 2.7', 51 | 'Programming Language :: Python :: 3', 52 | 'Programming Language :: Python :: 3.3', 53 | 'Programming Language :: Python :: 3.4', 54 | ], 55 | test_suite='tests', 56 | tests_require=test_requirements 57 | ) 58 | -------------------------------------------------------------------------------- /skeletons/all-in-one/README.md: -------------------------------------------------------------------------------- 1 | # All-in-one skeleton generator 2 | 3 | A cookiecutter template for creating an all-in-one ZeroServices project exposing a realtime API for several resources, 4 | all in one file and one process. 5 | 6 | ## Use it now 7 | 8 | pip install cookiecutter 9 | cookiecutter . 
10 | 11 | -------------------------------------------------------------------------------- /skeletons/all-in-one/cookiecutter.json: -------------------------------------------------------------------------------- 1 | { 2 | "project_name": "sample_project_all_in_one", 3 | "mongodb_database": "test", 4 | "resources (separated by comma)": "", 5 | "api_port": 5001, 6 | "realtime_api": false 7 | } 8 | -------------------------------------------------------------------------------- /skeletons/all-in-one/{{cookiecutter.project_name}}/app.py: -------------------------------------------------------------------------------- 1 | from zeroservices import MemoryMedium, ResourceService, RealtimeResourceService 2 | from zeroservices.backend.mongodb import MongoDBCollection, MongoDBResource 3 | from zeroservices.discovery import MemoryDiscoveryMedium 4 | from zeroservices.services import get_http_interface 5 | import asyncio 6 | 7 | 8 | # TODO implement your Auth logic here 9 | class Auth(object): 10 | 11 | def authorized(self, handler, resource, method): 12 | return True 13 | 14 | 15 | def main(): 16 | loop = asyncio.get_event_loop() 17 | medium = MemoryMedium(loop, MemoryDiscoveryMedium) 18 | 19 | {% if cookiecutter.realtime_api %} 20 | service = RealtimeResourceService('{{cookiecutter.project_name}}', medium) 21 | {% else %} 22 | service = ResourceService('{{cookiecutter.project_name}}', medium) 23 | {% endif %} 24 | 25 | # Get the HTTP interface and start listening 26 | api = get_http_interface(service, loop, port='{{cookiecutter.api_port}}', auth=Auth(), allowed_origins="*") 27 | api = loop.run_until_complete(api) 28 | 29 | {% for resource in cookiecutter['resources (separated by comma)'].split(',') -%} 30 | service.register_resource(MongoDBCollection("{{resource}}", "{{cookiecutter.mongodb_database}}")) 31 | {% endfor %} 32 | 33 | # Start the service with API and resources 34 | loop.run_until_complete(service.start()) 35 | loop.run_forever() 36 | 37 | 38 | if __name__ == 
'__main__': 39 | main() 40 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lothiraldan/ZeroServices/c6b0bdf755be6bcc0ff6070aabcfb36f0e4c2f37/tests/__init__.py -------------------------------------------------------------------------------- /tests/backend/__init__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from asyncio import coroutine 4 | from zeroservices import BaseService 5 | from zeroservices.resources import NoActionHandler 6 | from ..utils import test_medium, TestCase, _async_test, _create_test_resource_service 7 | 8 | try: 9 | from unittest.mock import Mock 10 | except ImportError: 11 | from mock import Mock 12 | 13 | 14 | class _BaseCollectionTestCase(TestCase): 15 | 16 | def setUp(self): 17 | asyncio.set_event_loop(None) 18 | self.loop = asyncio.new_event_loop() 19 | 20 | self.service = _create_test_resource_service('test_service', self.loop) 21 | self.loop.run_until_complete(self.service.start()) 22 | self.service2 = _create_test_resource_service('test_listener', self.loop) 23 | self.loop.run_until_complete(self.service2.start()) 24 | 25 | # Resource 26 | self.resource_id = 'UUID-1' 27 | self.resource_data = {'field1': 1, 'field2': 2} 28 | self.resource_name = 'test_collection' 29 | 30 | self.event_payload = {'resource_id': self.resource_id, 31 | 'resource_name': self.resource_name} 32 | 33 | self.maxDiff = None 34 | 35 | def tearDown(self): 36 | self.service.close() 37 | self.service2.close() 38 | self.loop.stop() 39 | self.loop.close() 40 | self.service.medium.check_leak() 41 | self.service2.medium.check_leak() 42 | 43 | def _create(self, resource_data, resource_id): 44 | message = {'action': 'create', 'resource_id': resource_id, 45 | 'resource_data': resource_data} 46 | yield from self.collection.on_message(**message) 
47 | 48 | @_async_test 49 | def test_create(self): 50 | message = {'action': 'create', 'resource_id': self.resource_id, 51 | 'resource_data': self.resource_data} 52 | 53 | result = yield from self.collection.on_message(**message) 54 | self.assertEqual(result, {'resource_id': self.resource_id}) 55 | 56 | expected_payload = self.event_payload.copy() 57 | expected_payload.update({'action': 'create', 58 | 'resource_data': self.resource_data}) 59 | 60 | event_topic = '%s.create.%s' % (self.resource_name, self.resource_id) 61 | self.service2.on_event_mock.assert_called_once_with(event_topic, 62 | **expected_payload) 63 | 64 | self.service2.on_event_mock.reset_mock() 65 | 66 | @_async_test 67 | def test_get(self): 68 | yield from self.test_create() 69 | 70 | message = {'action': 'get', 'resource_id': self.resource_id} 71 | 72 | result = yield from self.collection.on_message(**message) 73 | self.assertEqual(result, 74 | {'resource_id': self.resource_id, 75 | 'resource_data': self.resource_data}) 76 | 77 | @_async_test 78 | def test_update(self): 79 | yield from self.test_create() 80 | 81 | patch = {'field3': 3, 'field4': 4} 82 | query = {'$set': patch} 83 | 84 | message = {'action': 'patch', 'resource_id': self.resource_id, 85 | 'patch': query} 86 | 87 | expected_document = self.resource_data.copy() 88 | expected_document.update(patch) 89 | 90 | result = yield from self.collection.on_message(**message) 91 | self.assertEqual(result, 92 | expected_document) 93 | 94 | message = {'action': 'get', 'resource_id': self.resource_id} 95 | 96 | result = yield from self.collection.on_message(**message) 97 | self.assertEqual(result, 98 | {'resource_id': self.resource_id, 99 | 'resource_data': expected_document}) 100 | 101 | expected_payload = self.event_payload.copy() 102 | expected_payload.update({'action': 'patch', 'patch': query}) 103 | 104 | event_topic = '%s.patch.%s' % (self.resource_name, self.resource_id) 105 | self.service2.on_event_mock.assert_called_once_with(event_topic, 
106 | **expected_payload) 107 | 108 | @_async_test 109 | def test_delete(self): 110 | yield from self.test_create() 111 | 112 | message = {'action': 'delete', 'resource_id': self.resource_id} 113 | 114 | result = yield from self.collection.on_message(**message) 115 | self.assertEqual(result, 116 | 'OK') 117 | 118 | message = {'action': 'get', 'resource_id': self.resource_id} 119 | 120 | result = yield from self.collection.on_message(**message) 121 | self.assertEqual(result, 122 | 'NOK') 123 | 124 | expected_payload = self.event_payload.copy() 125 | expected_payload.update({'action': 'delete'}) 126 | 127 | event_topic = '%s.delete.%s' % (self.resource_name, self.resource_id) 128 | self.service2.on_event_mock.assert_called_once_with(event_topic, 129 | **expected_payload) 130 | 131 | # Add another link on same relation 132 | @_async_test 133 | def test_add_link(self): 134 | yield from self.test_create() 135 | 136 | relation = 'relation_type' 137 | target_id = ['collection', 'target'] 138 | title = 'title' 139 | message = {'action': 'add_link', 'resource_id': self.resource_id, 140 | 'relation': relation, 'target_id': target_id, 141 | 'title': title} 142 | 143 | result = yield from self.collection.on_message(**message) 144 | self.assertEqual(result, 145 | "OK") 146 | 147 | # Check that document is updated 148 | expected_data = self.resource_data.copy() 149 | expected_data.update({'_links': 150 | {relation: [{"target_id": target_id, 151 | "title": title}], 152 | 'latest': {target_id[0]: target_id}}}) 153 | expected_document = {'resource_id': self.resource_id, 154 | 'resource_data': expected_data} 155 | 156 | 157 | message = {'action': 'get', 'resource_id': self.resource_id} 158 | result = yield from self.collection.on_message(**message) 159 | self.assertEqual(result, 160 | expected_document) 161 | 162 | # Check event payload 163 | expected_payload = self.event_payload.copy() 164 | expected_payload.update({'action': 'add_link', 'target_id': target_id, 165 | 'title': 
title, 'relation': relation}) 166 | 167 | event_topic = '%s.add_link.%s' % (self.resource_name, self.resource_id) 168 | self.service2.on_event_mock.assert_called_once_with(event_topic, 169 | **expected_payload) 170 | 171 | # Add another link on same relation 172 | relation = 'relation_type' 173 | target_id2 = ['collection', 'target2'] 174 | title2 = 'title2' 175 | message = {'action': 'add_link', 'resource_id': self.resource_id, 176 | 'relation': relation, 'target_id': target_id2, 177 | 'title': title2} 178 | result = yield from self.collection.on_message(**message) 179 | self.assertEqual(result, 180 | "OK") 181 | 182 | # Check that document is updated 183 | expected_data = self.resource_data.copy() 184 | expected_data.update({'_links': 185 | {relation: [{"target_id": target_id, 186 | "title": title}, 187 | {"target_id": target_id2, 188 | "title": title2}], 189 | 'latest': {target_id2[0]: target_id2}}}) 190 | expected_document = {'resource_id': self.resource_id, 191 | 'resource_data': expected_data} 192 | 193 | message = {'action': 'get', 'resource_id': self.resource_id} 194 | result = yield from self.collection.on_message(**message) 195 | self.assertEqual(result, 196 | expected_document) 197 | 198 | # Add a third link on another relation 199 | relation2 = 'relation_type2' 200 | target_id3 = ['foo', 'bar'] 201 | title3 = 'title3' 202 | message = {'action': 'add_link', 'resource_id': self.resource_id, 203 | 'relation': relation2, 'target_id': target_id3, 204 | 'title': title3} 205 | result = yield from self.collection.on_message(**message) 206 | self.assertEqual(result, 207 | "OK") 208 | 209 | # Check that document is updated 210 | expected_data = self.resource_data.copy() 211 | expected_data.update({'_links': 212 | {relation: [{"target_id": target_id, 213 | "title": title}, 214 | {"target_id": target_id2, 215 | "title": title2}], 216 | relation2: [{"target_id": target_id3, 217 | "title": title3}], 218 | 'latest': {target_id2[0]: target_id2, 219 | target_id3[0]: 
target_id3}}}) 220 | expected_document = {'resource_id': self.resource_id, 221 | 'resource_data': expected_data} 222 | 223 | message = {'action': 'get', 'resource_id': self.resource_id} 224 | result = yield from self.collection.on_message(**message) 225 | self.assertEqual(result, 226 | expected_document) 227 | 228 | @_async_test 229 | def test_list(self): 230 | message = {'action': 'list'} 231 | 232 | # Check that list doesn't return anything 233 | result = yield from self.collection.on_message(**message) 234 | self.assertEqual(result, []) 235 | 236 | # Create a doc 237 | yield from self.test_create() 238 | 239 | # Check that list return the document 240 | result = yield from self.collection.on_message(**message) 241 | self.assertEqual(result, 242 | [{'resource_id': self.resource_id, 243 | 'resource_data': self.resource_data}]) 244 | 245 | @_async_test 246 | def test_list_filter(self): 247 | doc_1 = ({'field1': 1, 'field2': 2}, 'UUID-1') 248 | doc_2 = ({'field1': 3, 'field2': 2}, 'UUID-2') 249 | doc_3 = ({'field1': 1, 'field2': 4}, 'UUID-3') 250 | docs = (doc_1, doc_2, doc_3) 251 | 252 | for doc in docs: 253 | yield from self._create(*doc) 254 | 255 | # All docs 256 | message = {'action': 'list'} 257 | expected = [{'resource_id': x[1], 'resource_data': x[0]} for x in 258 | docs] 259 | result = yield from self.collection.on_message(**message) 260 | self.assertItemsEqual(result, 261 | expected) 262 | 263 | # Field1 = 1 264 | message = {'action': 'list', 'where': {'field1': 1}} 265 | expected = [{'resource_id': x[1], 'resource_data': x[0]} for x in 266 | docs if x[0]['field1'] == 1] 267 | result = yield from self.collection.on_message(**message) 268 | self.assertItemsEqual(result, 269 | expected) 270 | 271 | # Field1 = 3 272 | message = {'action': 'list', 'where': {'field1': 3}} 273 | expected = [{'resource_id': x[1], 'resource_data': x[0]} for x in 274 | docs if x[0]['field1'] == 3] 275 | result = yield from self.collection.on_message(**message) 276 | 
self.assertItemsEqual(result, 277 | expected) 278 | 279 | # Field2 = 2 280 | message = {'action': 'list', 'where': {'field2': 2}} 281 | expected = [{'resource_id': x[1], 'resource_data': x[0]} for x in 282 | docs if x[0]['field2'] == 2] 283 | result = yield from self.collection.on_message(**message) 284 | self.assertItemsEqual(result, 285 | expected) 286 | 287 | @_async_test 288 | def test_bad_action(self): 289 | message = {'action': 'unknown', 'resource_id': self.resource_id, 290 | 'resource_data': self.resource_data} 291 | 292 | with self.assertRaises(NoActionHandler): 293 | yield from self.collection.on_message(**message) 294 | -------------------------------------------------------------------------------- /tests/backend/test_memory.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from zeroservices.memory import MemoryCollection 4 | from ..utils import test_medium 5 | from . import _BaseCollectionTestCase 6 | 7 | try: 8 | from unittest.mock import Mock 9 | except ImportError: 10 | from mock import Mock 11 | 12 | 13 | class MemoryCollectionTestCase(_BaseCollectionTestCase): 14 | 15 | def setUp(self): 16 | super(MemoryCollectionTestCase, self).setUp() 17 | self.collection = MemoryCollection(self.resource_name) 18 | self.collection.service = self.service 19 | -------------------------------------------------------------------------------- /tests/backend/test_mongodb.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from zeroservices.backend.mongodb import MongoDBCollection 4 | from . 
import _BaseCollectionTestCase 5 | 6 | from ..utils import TestCase, _create_test_resource_service, _async_test 7 | 8 | try: 9 | from unittest.mock import Mock 10 | except ImportError: 11 | from mock import Mock 12 | 13 | 14 | class MongoDBCollectionTestCase(_BaseCollectionTestCase): 15 | 16 | def setUp(self): 17 | super(MongoDBCollectionTestCase, self).setUp() 18 | self.database_name = 'test' 19 | self.collection = MongoDBCollection(self.resource_name, 20 | self.database_name) 21 | self.collection.service = self.service 22 | 23 | def tearDown(self): 24 | super().tearDown() 25 | self.collection.collection.drop() 26 | 27 | 28 | class MongoDBTestCase(TestCase): 29 | 30 | def setUp(self): 31 | self.database_name = 'test' 32 | self.resource_name = 'test_resource' 33 | 34 | asyncio.set_event_loop(None) 35 | self.loop = asyncio.new_event_loop() 36 | 37 | self.service = _create_test_resource_service('test_service', self.loop) 38 | self.collection = MongoDBCollection(self.resource_name, 39 | database_name=self.database_name) 40 | self.collection.service = self.service 41 | 42 | def tearDown(self): 43 | self.collection.collection.drop() 44 | 45 | @_async_test 46 | def test_custom_database(self): 47 | # Create a resource 48 | resource_id = 'UUID1' 49 | message_args = {'resource_data': {'kwarg_1': 1, 'kwarg_2': 2}, 50 | 'resource_id': resource_id} 51 | query = {'action': 'create'} 52 | query.update(message_args) 53 | 54 | result = yield from self.collection.on_message(**query) 55 | self.assertEqual(result, 56 | {'resource_id': 'UUID1'}) 57 | 58 | # Check that resource exists 59 | resource_list = yield from self.collection.on_message(action='list') 60 | self.assertEqual(resource_list, 61 | [message_args]) 62 | 63 | # On a separate database, check that resource doesn't exists 64 | collection2 = MongoDBCollection(self.resource_name, 65 | database_name='other') 66 | 67 | resource_list = yield from collection2.on_message(action='list') 68 | self.assertEqual(resource_list, []) 69 | 
-------------------------------------------------------------------------------- /tests/discovery_medium/__init__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import unittest 3 | from zeroservices import BaseService 4 | from ..utils import test_medium, TestCase, _async_test 5 | 6 | try: 7 | from unittest.mock import Mock 8 | except ImportError: 9 | from mock import Mock 10 | 11 | 12 | class _BaseDiscoveryMediumTestCase(TestCase): 13 | 14 | def setUp(self): 15 | self.loop = asyncio.new_event_loop() 16 | asyncio.set_event_loop(None) 17 | 18 | self.mock_1 = Mock() 19 | self.mock_1.__name__ = 'Mock1' 20 | self.mock_2 = Mock() 21 | self.mock_2.__name__ = 'Mock2' 22 | 23 | self.node_info_1 = {'node_id': '#ID1', 'service_info': {'name': '#S1'}} 24 | self.node_info_2 = {'node_id': '#ID2', 'service_info': {'name': '#S2'}} 25 | 26 | self.first_discovery_medium = self.loop.run_until_complete(self.get_medium(self.mock_1, self.loop, self.node_info_1)) 27 | self.second_discovery_medium = self.loop.run_until_complete(self.get_medium(self.mock_2, self.loop, self.node_info_2)) 28 | 29 | def tearDown(self): 30 | self.first_discovery_medium.close() 31 | self.second_discovery_medium.close() 32 | self.loop.stop() 33 | self.loop.close() 34 | self.first_discovery_medium.check_leak() 35 | self.second_discovery_medium.check_leak() 36 | 37 | @_async_test 38 | def test_discovery(self): 39 | yield from self.first_discovery_medium.send_registration_infos() 40 | yield from self.second_discovery_medium.send_registration_infos() 41 | 42 | # Wait some time for message propagation 43 | yield from asyncio.sleep(0.01, loop=self.loop) 44 | 45 | # Check that we received some informations in second medium 46 | self.assertEqual(self.mock_2.call_count, 1) 47 | call = self.mock_2.call_args[0] 48 | self.assertEqual(call[0], 'register') 49 | self.assertDictIsSubset(self.node_info_1, call[1]) 50 | address_1 = call[1]['address'] 51 | 52 | # Check 
that we receive some informations in first medium 53 | self.assertEqual(self.mock_1.call_count, 1) 54 | call = self.mock_1.call_args[0] 55 | self.assertEqual(call[0], 'register') 56 | self.assertDictIsSubset(self.node_info_2, call[1]) 57 | address_2 = call[1]['address'] 58 | 59 | self.assertEqual(address_1, address_2) 60 | 61 | @_async_test 62 | def test_node_info_immutability(self): 63 | # Mutate node info 1 and check that the right message is sent 64 | self.node_info_1['node_id'] = 'New #ID1' 65 | 66 | yield from self.first_discovery_medium.send_registration_infos() 67 | 68 | 69 | # Wait some time for message propagation 70 | yield from asyncio.sleep(0.01, loop=self.loop) 71 | 72 | # Check that we received some informations in second medium 73 | self.assertEqual(self.mock_2.call_count, 1) 74 | call = self.mock_2.call_args[0] 75 | self.assertEqual(call[0], 'register') 76 | self.assertDictIsSubset({'node_id': '#ID1'}, call[1]) 77 | -------------------------------------------------------------------------------- /tests/discovery_medium/test_memory.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from zeroservices.discovery import MemoryDiscoveryMedium 3 | 4 | from . import _BaseDiscoveryMediumTestCase 5 | 6 | 7 | class MemoryDiscoveryMediumTestCase(_BaseDiscoveryMediumTestCase): 8 | 9 | @asyncio.coroutine 10 | def get_medium(self, callback, loop, node_infos): 11 | medium = MemoryDiscoveryMedium(asyncio.coroutine(callback), loop, node_infos) 12 | yield from medium.start() 13 | return medium 14 | -------------------------------------------------------------------------------- /tests/discovery_medium/test_udp_discovery.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from zeroservices.discovery import UdpDiscoveryMedium 3 | 4 | from . 
import _BaseDiscoveryMediumTestCase 5 | 6 | 7 | class UdpDiscoveryMediumTestCase(_BaseDiscoveryMediumTestCase): 8 | 9 | @asyncio.coroutine 10 | def get_medium(self, callback, loop, node_infos): 11 | medium = UdpDiscoveryMedium(callback, loop, node_infos) 12 | yield from medium.start() 13 | return medium 14 | -------------------------------------------------------------------------------- /tests/medium/__init__.py: -------------------------------------------------------------------------------- 1 | import zmq 2 | import json 3 | import time 4 | import socket 5 | import asyncio 6 | 7 | from datetime import timedelta 8 | from time import sleep, time 9 | from copy import copy 10 | 11 | try: 12 | from unittest.mock import Mock, call 13 | except ImportError: 14 | from mock import Mock, call 15 | 16 | from zeroservices.medium.zeromq import ZeroMQMedium 17 | from zeroservices.discovery import MemoryDiscoveryMedium 18 | from .utils import generate_zeromq_medium 19 | from ..utils import TestCase, _async_test 20 | 21 | 22 | class _BaseMediumTestCase(TestCase): 23 | 24 | def setUp(self): 25 | self.loop = asyncio.new_event_loop() 26 | asyncio.set_event_loop(None) 27 | 28 | self.medium_1 = self.loop.run_until_complete(self.get_medium(self.loop)) 29 | self.medium_2 = self.loop.run_until_complete(self.get_medium(self.loop)) 30 | 31 | def tearDown(self): 32 | self.medium_1.close() 33 | self.medium_2.close() 34 | self.loop.stop() 35 | self.loop.close() 36 | self.medium_1.check_leak() 37 | self.medium_2.check_leak() 38 | 39 | @_async_test 40 | def test_register(self): 41 | yield from asyncio.sleep(0.1, loop=self.loop) 42 | 43 | expected_medium_2 = copy(self.medium_2.get_node_info()) 44 | expected_medium_2['address'] = '127.0.0.1' 45 | del expected_medium_2['service_info'] 46 | 47 | self.assertEquals(self.medium_1.get_directory(), 48 | {self.medium_2.node_id: expected_medium_2}) 49 | 50 | expected_medium_1 = copy(self.medium_1.get_node_info()) 51 | expected_medium_1['address'] = 
'127.0.0.1' 52 | del expected_medium_1['service_info'] 53 | 54 | self.assertEquals(self.medium_2.get_directory(), 55 | {self.medium_1.node_id: expected_medium_1}) 56 | 57 | @_async_test 58 | def test_send_no_response(self): 59 | yield from asyncio.sleep(0.1, loop=self.loop) 60 | 61 | message_type = 'MYMESSAGETYPE' 62 | message = {'foo': 'bar'} 63 | on_message = self.medium_2.service.on_message_mock 64 | 65 | on_message.return_value = None 66 | 67 | result = yield from self.medium_1.send(self.medium_2.node_id, 68 | message, 69 | message_type=message_type, 70 | wait_response=False) 71 | yield from asyncio.sleep(0.1, loop=self.loop) 72 | 73 | self.assertEqual(result, None) 74 | 75 | self.assertEqual(on_message.call_count, 1) 76 | on_message.assert_called_with(message_type=message_type, **message) 77 | 78 | @_async_test 79 | def test_send_response(self): 80 | yield from asyncio.sleep(0.1, loop=self.loop) 81 | 82 | return_value = {'data': 'ReturnValue'} 83 | message_type = 'MYMESSAGETYPE' 84 | message = {'foo': 'bar'} 85 | self.medium_2.service.on_message_mock.return_value = return_value 86 | 87 | result = yield from self.medium_1.send(self.medium_2.node_id, 88 | message, 89 | message_type=message_type) 90 | 91 | self.assertEqual(result, return_value) 92 | 93 | @_async_test 94 | def test_pub_sub(self): 95 | yield from asyncio.sleep(0.1, loop=self.loop) 96 | 97 | event_data = {'data': 'foo'} 98 | event_type = 'EVENT_TYPE' 99 | 100 | yield from self.medium_1.publish(event_type, event_data) 101 | yield from asyncio.sleep(0.1, loop=self.loop) 102 | 103 | on_event = self.medium_2.service.on_event_mock 104 | self.assertEqual(on_event.call_count, 1) 105 | on_event.assert_called_with(event_type, **event_data) 106 | 107 | @_async_test 108 | def test_pub_sub_custom_event_listener(self): 109 | yield from asyncio.sleep(0.1, loop=self.loop) 110 | 111 | mock = Mock() 112 | 113 | @asyncio.coroutine 114 | def custom_event_listener(*args, **kwargs): 115 | return mock(*args, **kwargs) 
116 | 117 | self.medium_2.add_event_listener(custom_event_listener) 118 | 119 | event_data = {'data': 'foo'} 120 | event_type = 'EVENT_TYPE' 121 | 122 | yield from self.medium_1.publish(event_type, event_data) 123 | yield from asyncio.sleep(0.1, loop=self.loop) 124 | 125 | self.assertEqual(mock.call_count, 1) 126 | mock.assert_called_with(event_type, event_data) 127 | 128 | @_async_test 129 | def test_periodic_call(self): 130 | periodic_mock = Mock() 131 | 132 | @asyncio.coroutine 133 | def periodic_called(*args, **kwargs): 134 | return periodic_mock(*args, **kwargs) 135 | 136 | self.medium_1.periodic_call(periodic_called, 0.1) 137 | 138 | yield from asyncio.sleep(0.15, loop=self.loop) 139 | 140 | self.assertEqual(periodic_mock.call_count, 1) 141 | 142 | yield from asyncio.sleep(0.14, loop=self.loop) 143 | 144 | self.assertEqual(periodic_mock.call_count, 2) 145 | -------------------------------------------------------------------------------- /tests/medium/test_memory_medium.py: -------------------------------------------------------------------------------- 1 | import zmq 2 | import json 3 | import time 4 | import socket 5 | import asyncio 6 | 7 | from datetime import timedelta 8 | from time import sleep, time 9 | 10 | try: 11 | from unittest.mock import Mock, call 12 | except ImportError: 13 | from mock import Mock, call 14 | 15 | from zeroservices.medium.memory import MemoryMedium 16 | from zeroservices.discovery import MemoryDiscoveryMedium 17 | from .utils import generate_zeromq_medium 18 | from ..utils import TestCase, _async_test, TestService 19 | 20 | from . 
import _BaseMediumTestCase 21 | 22 | 23 | class MemoryMediumTestCase(_BaseMediumTestCase): 24 | 25 | @asyncio.coroutine 26 | def get_medium(self, loop): 27 | medium = MemoryMedium(loop=loop, discovery_class=MemoryDiscoveryMedium) 28 | medium.set_service(TestService('test_service', medium)) 29 | yield from medium.start() 30 | return medium 31 | 32 | def tearDown(self): 33 | super(MemoryMediumTestCase, self).tearDown() 34 | MemoryMedium.reset() 35 | 36 | @_async_test 37 | def test_periodic_call(self): 38 | periodic_called = Mock() 39 | 40 | @asyncio.coroutine 41 | def mock_wrapper(): 42 | return periodic_called() 43 | 44 | self.medium_1.periodic_call(mock_wrapper, 0.1) 45 | 46 | yield from self.medium_1.call_callbacks() 47 | 48 | self.assertEqual(periodic_called.call_count, 1) 49 | 50 | 51 | if __name__ == '__main__': 52 | unittest.main() 53 | -------------------------------------------------------------------------------- /tests/medium/test_zeromq_medium.py: -------------------------------------------------------------------------------- 1 | import zmq 2 | import json 3 | import time 4 | import socket 5 | import asyncio 6 | 7 | from datetime import timedelta 8 | from time import sleep, time 9 | 10 | try: 11 | from unittest.mock import Mock, call 12 | except ImportError: 13 | from mock import Mock, call 14 | 15 | from zeroservices.medium.zeromq import ZeroMQMedium 16 | from zeroservices.discovery import MemoryDiscoveryMedium 17 | from .utils import generate_zeromq_medium 18 | from ..utils import TestCase, _async_test, TestService 19 | 20 | from . 
import _BaseMediumTestCase 21 | 22 | 23 | class ZeroMQMediumTestCase(_BaseMediumTestCase): 24 | 25 | @asyncio.coroutine 26 | def get_medium(self, loop): 27 | medium = ZeroMQMedium(loop=loop, discovery_class=MemoryDiscoveryMedium) 28 | medium.set_service(TestService('test_service', medium)) 29 | yield from medium.start() 30 | return medium 31 | 32 | 33 | if __name__ == '__main__': 34 | unittest.main() 35 | -------------------------------------------------------------------------------- /tests/medium/utils.py: -------------------------------------------------------------------------------- 1 | try: 2 | from unittest.mock import Mock, create_autospec 3 | except ImportError: 4 | from mock import Mock, create_autospec 5 | 6 | 7 | from threading import Thread 8 | 9 | from zeroservices.medium.zeromq import ZeroMQMedium 10 | from zeroservices.service import BaseService 11 | 12 | def run_poller_for(medium, timeout): 13 | thread = Thread(target=medium.loop, args=(timeout,)) 14 | thread.start() 15 | 16 | def generate_zeromq_medium(service_info, node_id=None, ioloop=None): 17 | 18 | service = create_autospec(BaseService, True) 19 | service.name = service_info['name'] 20 | service.service_info.return_value = service_info 21 | service.medium = ZeroMQMedium(port_random=True, node_id=node_id, 22 | ioloop=ioloop) 23 | service.medium.set_service(service) 24 | 25 | def stop_loop(*args, **kwargs): 26 | service.medium.stop() 27 | 28 | # Set side_effect 29 | service.on_registration_message.side_effect = stop_loop 30 | service.on_event.side_effect = stop_loop 31 | service.on_message.side_effect = stop_loop 32 | service.on_peer_join.side_effect = stop_loop 33 | service.on_peer_leave.side_effect = stop_loop 34 | 35 | return service 36 | -------------------------------------------------------------------------------- /tests/services/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Lothiraldan/ZeroServices/c6b0bdf755be6bcc0ff6070aabcfb36f0e4c2f37/tests/services/__init__.py -------------------------------------------------------------------------------- /tests/services/test_http_interface.py: -------------------------------------------------------------------------------- 1 | import json 2 | import asyncio 3 | import random 4 | from aiohttp import request 5 | 6 | from base64 import b64encode 7 | from zeroservices.services import get_http_interface, BasicAuth 8 | from zeroservices.resources import ResourceService 9 | from zeroservices.exceptions import UnknownService 10 | from ..utils import sample_collection, TestCase, _create_test_resource_service, _async_test, sample_collection 11 | from urllib.parse import quote_plus 12 | 13 | from zeroservices import ResourceService, ResourceCollection, ResourceWorker 14 | from zeroservices.resources import NoActionHandler, is_callable, Resource 15 | from zeroservices.exceptions import UnknownService, ResourceException 16 | from zeroservices.discovery.memory import MemoryDiscoveryMedium 17 | 18 | try: 19 | from unittest.mock import Mock, call, sentinel, create_autospec 20 | except ImportError: 21 | from mock import Mock, call, sentinel, create_autospec 22 | 23 | 24 | class TestAuth(BasicAuth): 25 | 26 | def authorized(self, handler, resource, method): 27 | return True 28 | 29 | 30 | class TestBasicAuth(BasicAuth): 31 | 32 | def __init__(self, username, password): 33 | self.username = username 34 | self.password = password 35 | 36 | def check_auth(self, username, password, resource, method): 37 | return username == self.username and password == self.password 38 | 39 | 40 | class HttpInterfaceTestCase(TestCase): 41 | 42 | def setUp(self): 43 | asyncio.set_event_loop(None) 44 | self.loop = asyncio.new_event_loop() 45 | 46 | self.name = "TestService1" 47 | self.service = _create_test_resource_service(self.name, loop=self.loop) 48 | self.node_id = self.service.medium.node_id 49 
| 50 | self.resource_name = 'TestResource' 51 | self.collection = sample_collection(self.resource_name) 52 | self.service.register_resource(self.collection) 53 | 54 | self.app = self.loop.run_until_complete(self.get_app()) 55 | super(HttpInterfaceTestCase, self).setUp() 56 | 57 | def tearDown(self): 58 | self.service.close() 59 | self.loop.stop() 60 | self.loop.close() 61 | self.service.medium.check_leak() 62 | 63 | def get_app(self): 64 | self.port = self.get_http_port() 65 | self.app = yield from get_http_interface(self.service, self.loop, 66 | port=self.port, 67 | auth=self.get_auth()) 68 | return self.app 69 | 70 | def get_auth(self): 71 | return TestAuth() 72 | 73 | def get_http_port(self): 74 | return random.randint(6000, 65534) 75 | 76 | def _full_url(self, path): 77 | return 'http://127.0.0.1:{port}{path}'.format(port=self.port, path=path) 78 | 79 | def reverse_url(self, endpoint, **kwargs): 80 | if kwargs: 81 | return self.app.router[endpoint].url(parts=kwargs) 82 | return self.app.router[endpoint].url() 83 | 84 | def get_endpoint(self, endpoint): 85 | return self.get(self._full_url(self.reverse_url(endpoint))) 86 | 87 | def get(self, full_url): 88 | result = yield from request('GET', full_url, loop=self.loop) 89 | return result 90 | 91 | def post(self, full_url, data): 92 | data = json.dumps(data) 93 | result = yield from request('POST', full_url, loop=self.loop, data=data) 94 | return result 95 | 96 | def delete(self, full_url): 97 | result = yield from request('DELETE', full_url, loop=self.loop) 98 | return result 99 | 100 | def patch(self, full_url, data): 101 | data = json.dumps(data) 102 | result = yield from request('PATCH', full_url, loop=self.loop, data=data) 103 | return result 104 | 105 | def options(self, full_url): 106 | result = yield from request('OPTIONS', full_url, loop=self.loop) 107 | return result 108 | 109 | 110 | class HttpInterfaceMainTestCase(HttpInterfaceTestCase): 111 | 112 | @_async_test 113 | def test_get_main(self): 114 | 
result = yield from self.get_endpoint("main") 115 | self.assertEqual(result.status, 200) 116 | result_content = yield from result.json() 117 | self.assertEqual(result_content, {"resources": [self.resource_name]}) 118 | 119 | 120 | class HttpInterfaceCollectionTestCase(HttpInterfaceTestCase): 121 | 122 | def setUp(self): 123 | super().setUp() 124 | self.url = self._full_url(self.reverse_url("collection", collection=self.resource_name)) 125 | 126 | self.custom_action = 'custom_action' 127 | 128 | self.custom_action_url = self._full_url(self.reverse_url("collection_custom_action", 129 | collection=self.resource_name, 130 | action=self.custom_action)) 131 | 132 | self.collection_bad_url = self._full_url(self.reverse_url("collection", collection="bad")) 133 | 134 | self.resource = {'resource_id': '#1', 'resource_data': {'foo': 'bar'}} 135 | 136 | @_async_test 137 | def test_create(self): 138 | result = yield from self.post(self.url, data=self.resource) 139 | 140 | self.assertEqual(result.status, 201) 141 | self.assertEqual(result.headers["Content-Type"], "application/json") 142 | 143 | response = yield from result.json() 144 | self.assertEqual(response, {'resource_id': self.resource['resource_id']}) 145 | 146 | resource_list = yield from self.collection.list() 147 | self.assertEqual(resource_list, [self.resource]) 148 | 149 | @_async_test 150 | def test_list(self): 151 | yield from self.test_create() 152 | 153 | result = yield from self.get(self.url) 154 | self.assertEqual(result.status, 200) 155 | self.assertEqual(result.headers["Content-Type"], "application/json") 156 | response = yield from result.json() 157 | self.assertEqual(response, [self.resource]) 158 | 159 | @_async_test 160 | def test_list_on_unknown_collection(self): 161 | result = yield from self.get(self.collection_bad_url) 162 | 163 | self.assertEqual(result.status, 404) 164 | self.assertEqual(result.headers["Content-Type"], "application/json") 165 | response = yield from result.json() 166 | 
self.assertEqual(response, 167 | {'error': 'Unknown service bad'}) 168 | 169 | @_async_test 170 | def test_custom_action_on_collection(self): 171 | data = {'foo': 'bar'} 172 | 173 | result = yield from self.post(self.custom_action_url, data=data) 174 | self.assertEqual(result.status, 200) 175 | 176 | response = yield from result.json() 177 | self.assertEqual(response, 42) 178 | 179 | 180 | class HttpInterfaceResourceTestCase(HttpInterfaceTestCase): 181 | 182 | def setUp(self): 183 | super(HttpInterfaceResourceTestCase, self).setUp() 184 | self.resource_id = "1" 185 | self.resource_data = {'foo': 'bar', 'test': 'sample'} 186 | self.resource = {'resource_id': self.resource_id, 187 | 'resource_data': self.resource_data} 188 | self.patch_body = {"$set": {'foo': 'not_bar', 'other': 'bar'}} 189 | 190 | self.expected_updated_resource = {'foo': 'not_bar', 'test': 'sample', 191 | 'other': 'bar'} 192 | 193 | self.url = self._full_url(self.reverse_url("resource", collection=self.resource_name, 194 | resource_id=self.resource_id)) 195 | 196 | self.collection_url = self._full_url(self.reverse_url("collection", 197 | collection=self.resource_name)) 198 | 199 | 200 | self.custom_action = 'custom_action' 201 | 202 | self.custom_action_url = self._full_url(self.reverse_url("resource_custom_action", 203 | collection=self.resource_name, 204 | resource_id=self.resource_id, 205 | action=self.custom_action)) 206 | 207 | @asyncio.coroutine 208 | def _create_resource(): 209 | result = yield from self.post(self.collection_url, data=self.resource) 210 | yield from result.text() 211 | 212 | self.loop.run_until_complete(_create_resource()) 213 | 214 | @_async_test 215 | def test_get(self): 216 | 217 | result = yield from self.get(self.url) 218 | 219 | self.assertEqual(result.status, 200) 220 | self.assertEqual(result.headers["Content-Type"], "application/json") 221 | 222 | response = yield from result.json() 223 | self.assertEqual(response, self.resource) 224 | 225 | @_async_test 226 | def 
test_delete(self): 227 | 228 | result = yield from self.delete(self.url) 229 | self.assertEqual(result.status, 204) 230 | 231 | resource_list = yield from self.collection.list() 232 | self.assertEqual(resource_list, []) 233 | 234 | @_async_test 235 | def test_patch(self): 236 | 237 | result = yield from self.patch(self.url, data={'patch': self.patch_body}) 238 | 239 | self.assertEqual(result.status, 200) 240 | self.assertEqual(result.headers["Content-Type"], "application/json") 241 | 242 | response = yield from result.json() 243 | self.assertEqual(response, self.expected_updated_resource) 244 | 245 | @_async_test 246 | def test_custom_action_on_resource(self): 247 | data = {'foo': 'bar'} 248 | 249 | result = yield from self.post(self.custom_action_url, data=data) 250 | self.assertEqual(result.status, 200) 251 | self.assertEqual(result.headers["Content-Type"], "application/json") 252 | 253 | response = yield from result.json() 254 | self.assertEqual(response, 42) 255 | 256 | @_async_test 257 | def test_get_resource_id_urlencoded(self): 258 | resource_id = 'feature/test' 259 | 260 | resource_data = {'resource_id': resource_id, 261 | 'resource_data': {'foo': 'bar'}} 262 | 263 | result = yield from self.post(self.collection_url, data=resource_data) 264 | self.assertEqual(result.status, 201) 265 | 266 | resource_id = quote_plus(resource_id) 267 | 268 | url = self._full_url(self.reverse_url("resource", collection=self.resource_name, 269 | resource_id=resource_id)) 270 | 271 | result = yield from self.get(url) 272 | self.assertEqual(result.status, 200) 273 | self.assertEqual(result.headers["Content-Type"], "application/json") 274 | 275 | response = yield from result.json() 276 | self.assertEqual(response, resource_data) 277 | 278 | 279 | class HttpInterfaceCORSWildCardTestCase(HttpInterfaceTestCase): 280 | 281 | def get_app(self): 282 | self.port = self.get_http_port() 283 | self.app = yield from get_http_interface(self.service, self.loop, 284 | port=self.port, 285 | 
auth=self.get_auth(), 286 | allowed_origins="*") 287 | return self.app 288 | 289 | @_async_test 290 | def test_CORS_main(self): 291 | main_url = self._full_url(self.reverse_url("main")) 292 | result = yield from self.options(main_url) 293 | self.assertEqual(result.status, 200) 294 | self.assertEqual(result.headers['Access-Control-Allow-Origin'], '*') 295 | 296 | @_async_test 297 | def test_CORS_collection(self): 298 | url = self._full_url(self.reverse_url("collection", collection="collection")) 299 | result = yield from self.options(url) 300 | self.assertEqual(result.status, 200) 301 | self.assertEqual(result.headers['Access-Control-Allow-Origin'], '*') 302 | 303 | @_async_test 304 | def test_CORS_collection_custom_action(self): 305 | url = self._full_url(self.reverse_url("collection_custom_action", 306 | collection="collection", 307 | action="action")) 308 | result = yield from self.options(url) 309 | self.assertEqual(result.status, 200) 310 | self.assertEqual(result.headers['Access-Control-Allow-Origin'], '*') 311 | 312 | @_async_test 313 | def test_CORS_resource(self): 314 | url = self._full_url(self.reverse_url("resource", collection="collection", 315 | resource_id="resource_id")) 316 | result = yield from self.options(url) 317 | self.assertEqual(result.status, 200) 318 | self.assertEqual(result.headers['Access-Control-Allow-Origin'], '*') 319 | 320 | @_async_test 321 | def test_CORS_test_CORS_resource_custom_action(self): 322 | url = self._full_url(self.reverse_url("resource_custom_action", 323 | collection="collection", 324 | resource_id="resource_id", 325 | action="action")) 326 | result = yield from self.options(url) 327 | self.assertEqual(result.status, 200) 328 | self.assertEqual(result.headers['Access-Control-Allow-Origin'], '*') 329 | 330 | # class HttpInterfaceResourceIdSlash(HttpInterfaceTestCase): 331 | 332 | # def setUp(self): 333 | # super(HttpInterfaceResourceIdSlash, self).setUp() 334 | # self.resource_id = "feature/test" 335 | # self.url = 
self.app.reverse_url("resource", self.collection_name, 336 | # self.resource_id) 337 | 338 | # def test_get(self): 339 | # self.sentinel = {'_id': self.resource_id} 340 | # self.service.send.return_value = self.sentinel 341 | 342 | # result = self.fetch(self.url) 343 | # self.assertEqual(result.code, 200) 344 | # self.assertEqual(result.headers["Content-Type"], "application/json") 345 | # self.assertEqual(json.loads(result.body.decode('utf-8')), 346 | # self.sentinel) 347 | 348 | # self.assertEqual(self.service.send.call_args, 349 | # call(collection=self.collection_name, action="get", 350 | # resource_id=self.resource_id)) 351 | 352 | 353 | 354 | # class HttpInterfaceBasicAuthTestCase(HttpInterfaceTestCase): 355 | 356 | # def setUp(self): 357 | # self.username = "username" 358 | # self.password = "VERYSECURETOKEN" 359 | # super(HttpInterfaceBasicAuthTestCase, self).setUp() 360 | 361 | # def get_auth(self): 362 | # return TestBasicAuth(self.username, self.password) 363 | 364 | # def get_auth_header(self, username, password=''): 365 | # if password: 366 | # auth_header = '{0}:{1}'.format(username, password) 367 | # else: 368 | # auth_header = username 369 | # return b64encode(auth_header.encode('utf-8')).decode('utf-8') 370 | 371 | # def test_without_header(self): 372 | # result = self.fetch(self.app.reverse_url("main")) 373 | # self.assertEqual(result.code, 401) 374 | # self.assertEqual(result.headers['WWW-Authenticate'], 'Basic realm=tmr') 375 | 376 | # def test_empty_header(self): 377 | # url = self.app.reverse_url("main") 378 | # result = self.fetch(url, headers={'Authorization': ''}) 379 | 380 | # self.assertEqual(result.code, 401) 381 | # self.assertEqual(result.headers['WWW-Authenticate'], 'Basic realm=tmr') 382 | 383 | # def test_bad_header(self): 384 | # url = self.app.reverse_url("main") 385 | # result = self.fetch(url, headers={'Authorization': 'BadHeader'}) 386 | 387 | # self.assertEqual(result.code, 401) 388 | # 
self.assertEqual(result.headers['WWW-Authenticate'], 'Basic realm=tmr') 389 | 390 | # def test_bad_header_value(self): 391 | # url = self.app.reverse_url("main") 392 | # result = self.fetch(url, headers={'Authorization': 'Basic NOPE=+/'}) 393 | 394 | # self.assertEqual(result.code, 401) 395 | # self.assertEqual(result.headers['WWW-Authenticate'], 'Basic realm=tmr') 396 | 397 | # def test_bad_header_value(self): 398 | # url = self.app.reverse_url("main") 399 | # result = self.fetch(url, headers={'Authorization': 'Basic NOPE=+/'}) 400 | 401 | # self.assertEqual(result.code, 401) 402 | # self.assertEqual(result.headers['WWW-Authenticate'], 'Basic realm=tmr') 403 | 404 | # def test_bad_header_not_b64(self): 405 | # url = self.app.reverse_url("main") 406 | # result = self.fetch(url, headers={'Authorization': 'Basic NOPE'}) 407 | 408 | # self.assertEqual(result.code, 401) 409 | # self.assertEqual(result.headers['WWW-Authenticate'], 'Basic realm=tmr') 410 | 411 | # def test_header_missing_password(self): 412 | # url = self.app.reverse_url("main") 413 | # auth_header = b64encode(self.username.encode('utf-8')).decode('utf-8') 414 | # result = self.fetch(url, headers={'Authorization': 415 | # 'Basic {0}'.format(auth_header)}) 416 | 417 | # self.assertEqual(result.code, 401) 418 | # self.assertEqual(result.headers['WWW-Authenticate'], 'Basic realm=tmr') 419 | 420 | # def test_header_missing_password_not_b64(self): 421 | # url = self.app.reverse_url("main") 422 | # auth_header = self.get_auth_header(self.username) 423 | # result = self.fetch(url, headers={'Authorization': 424 | # 'Basic {0}'.format(auth_header)}) 425 | 426 | # self.assertEqual(result.code, 401) 427 | # self.assertEqual(result.headers['WWW-Authenticate'], 'Basic realm=tmr') 428 | 429 | # def test_header_bad_password(self): 430 | # url = self.app.reverse_url("main") 431 | # auth_header = self.get_auth_header(self.username, self.password[:-1]) 432 | # result = self.fetch(url, headers={'Authorization': 433 | # 
'Basic {0}'.format(auth_header)}) 434 | 435 | # self.assertEqual(result.code, 403) 436 | 437 | # def test_good_header(self): 438 | # url = self.app.reverse_url("main") 439 | # auth_header = self.get_auth_header(self.username, self.password) 440 | # result = self.fetch(url, headers={'Authorization': 441 | # 'Basic {0}'.format(auth_header)}) 442 | 443 | # self.assertEqual(result.code, 200) 444 | -------------------------------------------------------------------------------- /tests/test_resources.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import asyncio 3 | import unittest 4 | 5 | from zeroservices import ResourceService, ResourceCollection, ResourceWorker 6 | from zeroservices.resources import NoActionHandler, is_callable, Resource 7 | from zeroservices.exceptions import UnknownService, ResourceException 8 | from zeroservices.discovery.memory import MemoryDiscoveryMedium 9 | from .utils import test_medium, sample_collection, TestCase, _create_test_resource_service, _async_test 10 | from copy import deepcopy 11 | 12 | 13 | try: 14 | from unittest.mock import call, Mock, patch, sentinel 15 | except ImportError: 16 | from mock import call, Mock, patch, sentinel 17 | 18 | 19 | class ResourceServiceTestCase(TestCase): 20 | 21 | def setUp(self): 22 | asyncio.set_event_loop(None) 23 | self.loop = asyncio.new_event_loop() 24 | 25 | self.name1 = "TestService1" 26 | self.service1 = _create_test_resource_service(self.name1, loop=self.loop) 27 | self.node_id1 = self.service1.medium.node_id 28 | 29 | self.name2 = "TestService2" 30 | self.service2 = _create_test_resource_service(self.name2, loop=self.loop) 31 | self.node_id2 = self.service2.medium.node_id 32 | 33 | # Create a resource 34 | self.resource = 'TestResource' 35 | self.resource_data = {'key': 'value', 'key2': 'value2'} 36 | self.resource_id = 'UUID1' 37 | 38 | self.collection = sample_collection(self.resource) 39 | 40 | 
self.service1.register_resource(self.collection) 41 | 42 | message = {'action': 'create', 'resource_id': self.resource_id, 43 | 'resource_data': self.resource_data} 44 | 45 | self.loop.run_until_complete(self.collection.on_message(**message)) 46 | 47 | def tearDown(self): 48 | self.service1.close() 49 | self.service2.close() 50 | self.loop.stop() 51 | self.loop.close() 52 | self.service1.medium.check_leak() 53 | self.service2.medium.check_leak() 54 | 55 | def test_service_info(self): 56 | service = _create_test_resource_service("test_service_info", loop=self.loop) 57 | service_info = service.service_info() 58 | self.assertEqual(service_info['name'], "test_service_info") 59 | self.assertEqual(list(service_info['resources']), []) 60 | self.assertEqual(service_info['node_type'], 'node') 61 | 62 | def test_resource_registration(self): 63 | service_info = self.service1.service_info() 64 | self.assertEqual(service_info['name'], self.name1) 65 | self.assertEqual(list(service_info['resources']), [self.resource]) 66 | 67 | @_async_test 68 | def test_resource_send(self): 69 | action = 'list' 70 | 71 | yield from self.service1.start() 72 | yield from self.service2.start() 73 | 74 | call_request = {'collection_name': self.resource, 'action': action} 75 | result = yield from self.service2.send(**call_request) 76 | 77 | self.assertEqual(result, [{'resource_data': self.resource_data, 78 | 'resource_id': self.resource_id}]) 79 | 80 | @_async_test 81 | def test_resource_send_exception(self): 82 | action = 'list' 83 | 84 | yield from self.service1.start() 85 | yield from self.service2.start() 86 | 87 | call_request = {'collection_name': self.resource, 'action': action, 88 | 'resource_id': self.resource_id} 89 | 90 | with self.assertRaises(ResourceException) as cm: 91 | yield from self.service2.send(**call_request) 92 | 93 | self.assertEquals(cm.exception.error_message, "No handler for action list") 94 | 95 | @_async_test 96 | def test_resource_send_unknown_service(self): 97 | yield 
from self.service1.start() 98 | yield from self.service2.start() 99 | 100 | call_request = {'collection_name': "NotFound", 'action': 'list'} 101 | 102 | with self.assertRaises(UnknownService): 103 | yield from self.service2.send(**call_request) 104 | 105 | @_async_test 106 | def test_resource_send_to_itself(self): 107 | yield from self.service1.start() 108 | yield from self.service2.start() 109 | 110 | call_request = {'collection_name': self.resource, 'action': 'list'} 111 | result = yield from self.service1.send(**call_request) 112 | 113 | self.assertEqual( 114 | result, 115 | [{'resource_data': self.resource_data, 116 | 'resource_id': self.resource_id}]) 117 | 118 | @_async_test 119 | def test_resource_publish_to_itself(self): 120 | self.service1.on_event_mock.reset_mock() 121 | 122 | publish_message = {'type': 'new', '_id': 'foo', 123 | 'resource_data': 'bar', 'resource_name': self.resource} 124 | topic = '{}.{}'.format(self.resource, 'action') 125 | 126 | yield from self.service1.publish(topic, publish_message) 127 | 128 | self.assertEquals(self.service1.on_event_mock.call_args_list, 129 | [call(topic, publish_message)]) 130 | 131 | # class ResourceWorkerTestCase(TestCase): 132 | 133 | # def setUp(self): 134 | # self.name = "TestService" 135 | # self.medium = test_medium() 136 | # self.resource_name = 'resource' 137 | 138 | # self.service = ResourceService(self.name, self.medium) 139 | # collection = sample_collection(self.resource_name) 140 | # self.service.register_resource(collection) 141 | 142 | # self.worker_base_name = "sample_worker" 143 | 144 | # def test_worker_info(self): 145 | 146 | # worker = ResourceWorker(self.worker_base_name, self.medium) 147 | 148 | # service_info = worker.service_info() 149 | # self.assertEqual(service_info['node_type'], 'worker') 150 | # self.assertItemsEqual(service_info['resources'], []) 151 | 152 | # self.assertEqual(self.medium.register.call_count, 0) 153 | 154 | # def test_worker_registration(self): 155 | 156 | # 
resource_name = self.resource_name 157 | 158 | # class WorkerSample(ResourceWorker): 159 | 160 | # def __init__(self, *args, **kwargs): 161 | # super(WorkerSample, self).__init__(*args, **kwargs) 162 | # self.register(self.sample_action, resource_name) 163 | 164 | # def sample_action(self, resource): 165 | # pass 166 | 167 | # worker = WorkerSample(self.worker_base_name, self.medium) 168 | 169 | # service_info = worker.service_info() 170 | # self.assertEqual(service_info['node_type'], 'worker') 171 | # self.assertItemsEqual(service_info['resources'], 172 | # [self.resource_name]) 173 | 174 | # self.assertEqual(self.medium.subscribe.call_count, 1) 175 | # self.assertEqual(self.medium.subscribe.call_args, 176 | # call(self.resource_name)) 177 | 178 | # def test_worker_registration_matcher(self): 179 | 180 | # resource_name = self.resource_name 181 | # matcher = {'key': 'value'} 182 | 183 | # class WorkerSample(ResourceWorker): 184 | 185 | # def __init__(self, *args, **kwargs): 186 | # super(WorkerSample, self).__init__(*args, **kwargs) 187 | # self.register(self.sample_action, resource_name, **matcher) 188 | 189 | # def sample_action(self, resource): 190 | # pass 191 | 192 | # worker = WorkerSample(self.worker_base_name, self.medium) 193 | 194 | # service_info = worker.service_info() 195 | # self.assertEqual(service_info['node_type'], 'worker') 196 | # self.assertItemsEqual(service_info['resources'], 197 | # [self.resource_name]) 198 | 199 | # self.assertEqual(self.medium.subscribe.call_count, 1) 200 | # self.assertEqual(self.medium.subscribe.call_args, 201 | # call(self.resource_name)) 202 | 203 | -------------------------------------------------------------------------------- /tests/test_service.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from copy import copy 4 | 5 | from zeroservices import BaseService 6 | from zeroservices.medium.memory import MemoryMedium 7 | from zeroservices.discovery.memory 
import MemoryDiscoveryMedium 8 | from zeroservices.exceptions import UnknownNode 9 | from .utils import TestCase, _create_test_service, _async_test 10 | 11 | try: 12 | from unittest.mock import Mock, patch, call 13 | except ImportError: 14 | from mock import Mock, patch, call 15 | 16 | 17 | class BaseServiceTestCase(TestCase): 18 | 19 | def setUp(self): 20 | asyncio.set_event_loop(None) 21 | self.loop = asyncio.new_event_loop() 22 | 23 | self.name1 = "TestService1" 24 | self.node_info1 = {'foo': 'bar'} 25 | self.service1 = _create_test_service(self.name1, self.node_info1, self.loop) 26 | self.node_id1 = self.service1.medium.node_id 27 | 28 | self.name2 = "TestService2" 29 | self.node_info2 = {'foo2': 'babar'} 30 | self.service2 = _create_test_service(self.name2, self.node_info2, self.loop) 31 | self.node_id2 = self.service2.medium.node_id 32 | 33 | def tearDown(self): 34 | self.service1.close() 35 | self.service2.close() 36 | self.loop.stop() 37 | self.loop.close() 38 | self.service1.medium.check_leak() 39 | self.service2.medium.check_leak() 40 | 41 | def test_service_info(self): 42 | expected = {'name': self.name1, 'node_type': 'node'} 43 | expected.update(self.node_info1) 44 | self.assertEqual(self.service1.service_info(), expected) 45 | 46 | expected = {'name': self.name2, 'node_type': 'node'} 47 | expected.update(self.node_info2) 48 | self.assertEqual(self.service2.service_info(), expected) 49 | 50 | @_async_test 51 | def test_register(self): 52 | yield from self.service1.start() 53 | yield from self.service2.start() 54 | 55 | def _expected_infos(service): 56 | service_info = copy(service.service_info()) 57 | service_info['node_id'] = service.medium.node_id 58 | return {service.medium.node_id: service_info} 59 | 60 | self.assertEqual(self.service1.get_directory(), _expected_infos(self.service2)) 61 | self.assertEqual(self.service2.get_directory(), _expected_infos(self.service1)) 62 | 63 | @_async_test 64 | def test_send(self): 65 | yield from 
self.service1.start() 66 | yield from self.service2.start() 67 | 68 | response = {'response': 'Pong'} 69 | self.service2.on_message_mock.return_value = response 70 | 71 | message = {'content': 'Ping'} 72 | result = yield from self.service1.send(self.node_id2, message) 73 | 74 | self.assertEqual(result, response) 75 | self.service2.on_message_mock.assert_called_once_with(message_type='message', **message) 76 | 77 | @_async_test 78 | def test_publish(self): 79 | yield from self.service1.start() 80 | yield from self.service2.start() 81 | 82 | event_type = 'EVENT_TYPE' 83 | event_message = {'foo': 'bar', 'foo2': 'babar'} 84 | yield from self.service1.publish(event_type, event_message) 85 | 86 | self.service2.on_event_mock.assert_called_once_with(event_type, **event_message) 87 | -------------------------------------------------------------------------------- /tests/test_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | try: 4 | from unittest.mock import Mock, call 5 | except ImportError: 6 | from mock import Mock, call 7 | 8 | from copy import copy 9 | from zeroservices import ResourceWorker 10 | from zeroservices.medium.memory import MemoryMedium 11 | from zeroservices.discovery.memory import MemoryDiscoveryMedium 12 | from zeroservices.memory import MemoryCollection 13 | from .utils import TestCase, _create_test_resource_service, _async_test 14 | 15 | 16 | class RuleTestCase(TestCase): 17 | pass 18 | 19 | 20 | class ResourceWorkerUnitTestCase(TestCase): 21 | 22 | def setUp(self): 23 | self.resource_name = 'TestCollection' 24 | 25 | asyncio.set_event_loop(None) 26 | self.loop = asyncio.new_event_loop() 27 | 28 | self.medium1 = MemoryMedium(self.loop, MemoryDiscoveryMedium, node_id='node1') 29 | self.worker1 = ResourceWorker('worker1', self.medium1) 30 | self.mock = Mock() 31 | self.worker1.register(self.mock, 'resource2') 32 | 33 | def tearDown(self): 34 | self.medium1.close() 35 | 
self.worker1.close() 36 | self.loop.stop() 37 | self.loop.close() 38 | self.medium1.check_leak() 39 | self.worker1.medium.check_leak() 40 | 41 | def test_no_rule(self): 42 | ''' Check that it doesn't raise an exception 43 | ''' 44 | yield from self.worker1.on_event('resource1', 45 | **{'resource_name': 'resource1', 46 | 'resource_data': {}, 47 | 'resource_id': 'doesn\'t matter', 48 | 'action': 'create'}) 49 | self.assertEqual(self.mock.call_count, 0) 50 | 51 | 52 | class ResourceWorkerTestCase(TestCase): 53 | 54 | def setUp(self): 55 | self.resource_name = 'TestCollection' 56 | 57 | asyncio.set_event_loop(None) 58 | self.loop = asyncio.new_event_loop() 59 | 60 | self.service1 = _create_test_resource_service('test_service', self.loop) 61 | self.collection1 = MemoryCollection(self.resource_name) 62 | self.service1.register_resource(self.collection1) 63 | 64 | self.medium2 = MemoryMedium(self.loop, MemoryDiscoveryMedium, 'node2') 65 | 66 | self.patch = {'kwarg_1': 42} 67 | 68 | class SampleWorker(ResourceWorker): 69 | 70 | def __init__(self, name, medium, patch): 71 | super(SampleWorker, self).__init__(name, medium) 72 | self.patch = patch 73 | 74 | def sample_job(self, resource_name, resource_data, resource_id, 75 | action): 76 | yield from self.send(collection_name=resource_name, action="patch", 77 | resource_id=resource_id, 78 | patch={'$set': self.patch}) 79 | 80 | self.worker1 = SampleWorker('worker1', self.medium2, self.patch) 81 | 82 | self.callback = Mock() 83 | self.worker1.register(self.worker1.sample_job, self.resource_name, 84 | kwarg_1=1) 85 | 86 | def tearDown(self): 87 | self.service1.close() 88 | self.worker1.close() 89 | self.medium2.close() 90 | self.loop.stop() 91 | self.loop.close() 92 | self.service1.medium.check_leak() 93 | self.worker1.medium.check_leak() 94 | self.medium2.check_leak() 95 | 96 | @_async_test 97 | def test_join(self): 98 | yield from self.service1.start() 99 | yield from self.worker1.start() 100 | 101 | 
self.assertItemsEqual(self.worker1.get_known_nodes(), 102 | [self.service1.medium.node_id]) 103 | 104 | self.assertItemsEqual(self.service1.get_known_nodes(), 105 | []) 106 | self.assertEqual(self.service1.get_known_worker_nodes(), 107 | {self.resource_name: [self.worker1.name]}) 108 | 109 | @_async_test 110 | def test_simple_job(self): 111 | yield from self.service1.start() 112 | yield from self.worker1.start() 113 | 114 | resource_id = 'UUID1' 115 | resource_data = {'kwarg_1': 1, 'kwarg_2': 2} 116 | message_args = {'resource_data': resource_data, 117 | 'resource_id': resource_id} 118 | query = {'action': 'create'} 119 | query.update(message_args) 120 | 121 | # Create the resource 122 | yield from self.collection1.on_message(**query) 123 | 124 | expected_resource = copy(resource_data) 125 | expected_resource.update(self.patch) 126 | 127 | updated_resource_data = yield from self.collection1.on_message(action='get', 128 | resource_id=resource_id) 129 | updated_resource_data = updated_resource_data['resource_data'] 130 | self.assertEqual( 131 | updated_resource_data, 132 | expected_resource) 133 | 134 | @_async_test 135 | def test_no_data(self): 136 | yield from self.service1.start() 137 | 138 | # Create the resource 139 | resource_id = 'UUID1' 140 | resource_data = {'kwarg_1': 1, 'kwarg_2': 2, 'kwarg_4': 4} 141 | message_args = {'resource_data': resource_data, 142 | 'resource_id': resource_id} 143 | query = {'action': 'create'} 144 | query.update(message_args) 145 | 146 | yield from self.collection1.on_message(**query) 147 | 148 | # Start the worker and send it a patch event 149 | yield from self.worker1.start() 150 | 151 | # Update the resource 152 | resource_id = 'UUID1' 153 | 154 | patch = {'kwarg_2': 3} 155 | query = {'$set': patch} 156 | 157 | message = {'action': 'patch', 'resource_id': resource_id, 158 | 'patch': query} 159 | yield from self.collection1.on_message(**message) 160 | 161 | expected_resource = copy(resource_data) 162 | 
expected_resource.update(self.patch) 163 | 164 | updated_resource_data = yield from self.collection1.on_message(action='get', 165 | resource_id=resource_id) 166 | updated_resource_data = updated_resource_data['resource_data'] 167 | self.assertEqual( 168 | updated_resource_data, 169 | expected_resource) 170 | 171 | @_async_test 172 | def test_execute_job_on_startup(self): 173 | # Start the service 174 | yield from self.service1.start() 175 | 176 | # Create the resource 177 | resource_id = 'UUID1' 178 | resource_data = {'kwarg_1': 1, 'kwarg_2': 2} 179 | message_args = {'resource_data': resource_data, 180 | 'resource_id': resource_id} 181 | query = {'action': 'create'} 182 | query.update(message_args) 183 | 184 | yield from self.collection1.on_message(**query) 185 | 186 | # Start the worker after resource creation 187 | yield from self.worker1.start() 188 | 189 | # Check that jobs have been scheduled 190 | self.assertEqual(len(self.medium2.callbacks), 1) 191 | yield from self.medium2.call_callbacks() 192 | 193 | # Resource should have been updated 194 | expected_resource = copy(resource_data) 195 | expected_resource.update(self.patch) 196 | 197 | updated_resource_data = yield from self.collection1.on_message(action='get', 198 | resource_id=resource_id) 199 | updated_resource_data = updated_resource_data['resource_data'] 200 | self.assertEqual( 201 | updated_resource_data, 202 | expected_resource) 203 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | try: 4 | from unittest.mock import Mock, create_autospec 5 | except ImportError: 6 | from mock import Mock, create_autospec 7 | 8 | from uuid import uuid4 9 | from functools import wraps 10 | from copy import copy 11 | from unittest import TestCase as unittestTestCase 12 | 13 | from zeroservices.exceptions import ServiceUnavailable 14 | from zeroservices.resources 
import (ResourceCollection, Resource, 15 | is_callable, ResourceService) 16 | from zeroservices.medium import BaseMedium 17 | from zeroservices.medium.memory import MemoryMedium 18 | from zeroservices.discovery.memory import MemoryDiscoveryMedium 19 | from zeroservices.memory import MemoryCollection, MemoryResource 20 | from zeroservices import BaseService 21 | from zeroservices.query import match 22 | 23 | 24 | class TestCase(unittestTestCase): 25 | 26 | def assertItemsEqual(self, *args): 27 | if hasattr(self, 'assertCountEqual'): 28 | return self.assertCountEqual(*args) 29 | return super(TestCase, self).assertItemsEqual(*args) 30 | 31 | def assertDictIsSubset(self, subset, superset): 32 | for item in subset.items(): 33 | self.assertIn(item, superset.items()) 34 | 35 | 36 | def test_medium(): 37 | return Mock(spec_set=BaseMedium) 38 | 39 | 40 | class TestResource(MemoryResource): 41 | 42 | @is_callable 43 | def custom_action(self, *arhs, **kwargs): 44 | return 42 45 | 46 | 47 | class TestCollection(MemoryCollection): 48 | 49 | resource_class = TestResource 50 | 51 | @is_callable 52 | def custom_action(self, *args, **kwargs): 53 | return 42 54 | 55 | 56 | def sample_collection(sample_resource_name): 57 | return TestCollection(sample_resource_name) 58 | 59 | 60 | class TestService(BaseService): 61 | 62 | def __init__(self, *args, node_infos=None, **kwargs): 63 | super().__init__(*args, **kwargs) 64 | self.on_message_mock = Mock() 65 | self.on_event_mock = Mock() 66 | self.node_infos = node_infos or {} 67 | 68 | def service_info(self): 69 | base_infos = copy(self.node_infos) 70 | base_infos.update(super().service_info()) 71 | return base_infos 72 | 73 | @asyncio.coroutine 74 | def on_message(self, *args, **kwargs): 75 | return self.on_message_mock(*args, **kwargs) 76 | 77 | @asyncio.coroutine 78 | def on_event(self, *args, **kwargs): 79 | return self.on_event_mock(*args, **kwargs) 80 | 81 | 82 | def _create_test_service(name, node_infos, loop): 83 | medium = 
MemoryMedium(loop, MemoryDiscoveryMedium) 84 | service = TestService(name, medium, node_infos=node_infos) 85 | return service 86 | 87 | 88 | class TestResourceService(ResourceService): 89 | 90 | def __init__(self, *args, **kwargs): 91 | super().__init__(*args, **kwargs) 92 | self.on_event_mock = Mock() 93 | 94 | @asyncio.coroutine 95 | def on_event(self, *args, **kwargs): 96 | return self.on_event_mock(*args, **kwargs) 97 | 98 | 99 | def _create_test_resource_service(name, loop): 100 | medium = MemoryMedium(loop, MemoryDiscoveryMedium) 101 | service = TestResourceService(name, medium) 102 | return service 103 | 104 | 105 | def _async_test(f): 106 | @wraps(f) 107 | def wrapper(self, *args, **kwargs): 108 | if not self.loop.is_running(): 109 | coro = asyncio.coroutine(f) 110 | future = coro(self, *args, **kwargs) 111 | self.loop.run_until_complete(asyncio.wait_for(future, 2, loop=self.loop)) 112 | else: 113 | return f(self, *args, **kwargs) 114 | return wrapper 115 | -------------------------------------------------------------------------------- /zeroservices/__init__.py: -------------------------------------------------------------------------------- 1 | from .service import BaseService 2 | from .resources import * 3 | from .medium import * 4 | -------------------------------------------------------------------------------- /zeroservices/backend/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lothiraldan/ZeroServices/c6b0bdf755be6bcc0ff6070aabcfb36f0e4c2f37/zeroservices/backend/__init__.py -------------------------------------------------------------------------------- /zeroservices/backend/mongodb.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import pymongo 3 | from bson import ObjectId 4 | import os 5 | 6 | from copy import copy 7 | 8 | from zeroservices import ResourceCollection, Resource 9 | from zeroservices.resources import 
is_callable 10 | 11 | 12 | class MongoDBResource(Resource): 13 | 14 | def __init__(self, collection, **kwargs): 15 | super(MongoDBResource, self).__init__(**kwargs) 16 | self.collection = collection 17 | self._document = None 18 | 19 | @is_callable 20 | def create(self, resource_data): 21 | document_data = {'_id': self.resource_id} 22 | document_data.update(resource_data) 23 | self.collection.insert(document_data) 24 | 25 | yield from self.publish('create', {'action': 'create', 26 | 'resource_data': resource_data}) 27 | 28 | return {'resource_id': self.resource_id} 29 | 30 | @is_callable 31 | def get(self): 32 | document = self.document 33 | 34 | if not document: 35 | return 'NOK' 36 | 37 | return {'resource_id': str(document.pop('_id')), 38 | 'resource_data': document} 39 | 40 | @is_callable 41 | def patch(self, patch): 42 | new_document = self.collection.find_and_modify({'_id': ObjectId(self.resource_id)}, 43 | patch, new=True) 44 | 45 | yield from self.publish('patch', {'action': 'patch', 'patch': patch}) 46 | 47 | new_document.pop('_id') 48 | return new_document 49 | 50 | @is_callable 51 | def delete(self): 52 | self.collection.remove({'_id': self.resource_id}) 53 | yield from self.publish('delete', {'action': 'delete'}) 54 | return 'OK' 55 | 56 | @is_callable 57 | def add_link(self, relation, target_id, title): 58 | target_relation = target_id[0] 59 | patch = {"$push": {"_links.{}".format(relation): 60 | {"target_id": target_id, "title": title}}, 61 | "$set": {"_links.latest.{}".format(target_relation): 62 | target_id}} 63 | self.collection.find_and_modify({'_id': self.resource_id}, patch, 64 | new=True) 65 | 66 | event = {'action': 'add_link', 'target_id': target_id, 67 | 'title': title, 'relation': relation} 68 | yield from self.publish('add_link', event) 69 | 70 | return "OK" 71 | 72 | @property 73 | def document(self): 74 | if self._document is None: 75 | self._document = self.collection.find_one({'_id': ObjectId(self.resource_id)}) 76 | return 
self._document 77 | 78 | 79 | class MongoDBCollection(ResourceCollection): 80 | 81 | resource_class = MongoDBResource 82 | 83 | def __init__(self, collection_name, database_name): 84 | super(MongoDBCollection, self).__init__(collection_name) 85 | self.database_name = database_name 86 | self.collection_name = collection_name 87 | 88 | mongo_host = os.environ.get('MONGO_HOST', 'localhost') 89 | 90 | self.connection = pymongo.MongoClient(host=mongo_host) 91 | self.database = self.connection[database_name] 92 | self.collection = self.database[collection_name] 93 | 94 | def instantiate(self, **kwargs): 95 | return super(MongoDBCollection, self).instantiate( 96 | collection=self.collection, **kwargs) 97 | 98 | @is_callable 99 | def list(self, where=None): 100 | if where is None: 101 | where = {} 102 | 103 | # Support for fulltext-search 104 | if 'text' in where: 105 | text = where.pop('text') 106 | where['$text'] = {'$search': text} 107 | 108 | result = list() 109 | for document in self.collection.find(where): 110 | result.append({'resource_id': str(document.pop('_id')), 111 | 'resource_data': document}) 112 | return result 113 | 114 | @is_callable 115 | def create(self, resource_data): 116 | document_data = copy(resource_data) 117 | document_id = self.collection.insert(document_data) 118 | # Replace ObjectId by a str 119 | document_data['_id'] = str(document_data['_id']) 120 | 121 | yield from self.publish('create', {'action': 'create', 122 | 'resource_data': document_data, 123 | 'resource_id': str(document_id)}) 124 | 125 | return {'resource_id': str(document_id)} 126 | -------------------------------------------------------------------------------- /zeroservices/discovery/__init__.py: -------------------------------------------------------------------------------- 1 | from .udp import UdpDiscoveryMedium 2 | from .memory import MemoryDiscoveryMedium 3 | 4 | __all__ = [UdpDiscoveryMedium, MemoryDiscoveryMedium] 5 | 
-------------------------------------------------------------------------------- /zeroservices/discovery/memory.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import collections 3 | import socket 4 | from json import dumps, loads 5 | from copy import copy, deepcopy 6 | 7 | from socket import AF_INET, SOCK_STREAM, SOCK_DGRAM, IPPROTO_UDP, SOL_SOCKET, SO_REUSEADDR, IPPROTO_IP, IP_MULTICAST_TTL, IP_ADD_MEMBERSHIP, inet_aton 8 | from asyncio import coroutine, futures 9 | 10 | 11 | class MemoryDiscoveryMedium(object): 12 | 13 | MEDIUMS = set() 14 | 15 | def __init__(self, callback, loop, node_infos): 16 | self.callback = callback 17 | self.loop = loop 18 | self.node_id = node_infos['node_id'] 19 | self.node_infos = deepcopy(node_infos) 20 | 21 | @classmethod 22 | def reset(cls): 23 | cls.MEDIUMS = set() 24 | 25 | @asyncio.coroutine 26 | def start(self): 27 | self.MEDIUMS.add(self) 28 | 29 | def close(self): 30 | if self in self.MEDIUMS: 31 | self.MEDIUMS.remove(self) 32 | 33 | def _receive_registration_infos(self, registrations_infos): 34 | registrations_infos = loads(registrations_infos) 35 | registrations_infos['address'] = '127.0.0.1' 36 | 37 | yield from self.callback('register', registrations_infos) 38 | 39 | @asyncio.coroutine 40 | def send_registration_infos(self): 41 | for medium in self.MEDIUMS: 42 | if medium is not self: 43 | yield from medium._receive_registration_infos(dumps(self.node_infos)) 44 | 45 | @classmethod 46 | def check_leak(cls): 47 | if cls.MEDIUMS: 48 | mediums = cls.MEDIUMS 49 | cls.MEDIUMS = set() 50 | raise Exception(mediums) 51 | -------------------------------------------------------------------------------- /zeroservices/discovery/udp.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import asyncio 3 | import collections 4 | import socket 5 | import json 6 | from copy import copy 7 | 8 | from socket import AF_INET, 
SOCK_STREAM, SOCK_DGRAM, IPPROTO_UDP, SOL_SOCKET, SO_REUSEADDR, IPPROTO_IP, IP_MULTICAST_TTL, IP_ADD_MEMBERSHIP, inet_aton 9 | from asyncio import coroutine, futures 10 | 11 | ANY = "0.0.0.0" 12 | logger = logging.getLogger('UdpDiscoveryMedium') 13 | 14 | 15 | @coroutine 16 | def create_datagram_endpoint(self, protocol_factory, 17 | local_addr=None, remote_addr=None, #*, 18 | family=0, proto=0, flags=0): 19 | """Create datagram connection. 20 | Based on asyncio code with small fix""" 21 | if not (local_addr or remote_addr): 22 | if family == 0: 23 | raise ValueError('unexpected address family') 24 | addr_pairs_info = (((family, proto), (None, None)),) 25 | else: 26 | # join address by (family, protocol) 27 | addr_infos = collections.OrderedDict() 28 | for idx, addr in ((0, local_addr), (1, remote_addr)): 29 | if addr is not None: 30 | assert isinstance(addr, tuple) and len(addr) == 2, ( 31 | '2-tuple is expected') 32 | 33 | infos = yield from self.getaddrinfo( 34 | *addr, family=family, type=socket.SOCK_DGRAM, 35 | proto=proto, flags=flags) 36 | if not infos: 37 | raise OSError('getaddrinfo() returned empty list') 38 | 39 | for fam, _, pro, _, address in infos: 40 | key = (fam, pro) 41 | if key not in addr_infos: 42 | addr_infos[key] = [None, None] 43 | addr_infos[key][idx] = address 44 | 45 | # each addr has to have info for each (family, proto) pair 46 | addr_pairs_info = [ 47 | (key, addr_pair) for key, addr_pair in addr_infos.items() 48 | if not ((local_addr and addr_pair[0] is None) or 49 | (remote_addr and addr_pair[1] is None))] 50 | 51 | if not addr_pairs_info: 52 | raise ValueError('can not get address information') 53 | 54 | exceptions = [] 55 | 56 | for ((family, proto), 57 | (local_address, remote_address)) in addr_pairs_info: 58 | sock = None 59 | r_addr = None 60 | try: 61 | sock = socket.socket( 62 | family=family, type=socket.SOCK_DGRAM, proto=proto) 63 | sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) 64 | # FIX: Mandatory on Mac OS X 65 
| if hasattr(socket, 'SO_REUSEPORT'): 66 | sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) 67 | sock.setblocking(False) 68 | 69 | if local_addr: 70 | sock.bind(local_address) 71 | if remote_addr: 72 | yield from self.sock_connect(sock, remote_address) 73 | r_addr = remote_address 74 | except OSError as exc: 75 | if sock is not None: 76 | sock.close() 77 | exceptions.append(exc) 78 | except: 79 | if sock is not None: 80 | sock.close() 81 | raise 82 | else: 83 | break 84 | else: 85 | raise exceptions[0] 86 | 87 | protocol = protocol_factory() 88 | waiter = futures.Future(loop=self) 89 | transport = self._make_datagram_transport(sock, protocol, r_addr, 90 | waiter) 91 | if self._debug: 92 | if local_addr: 93 | logger.info("Datagram endpoint local_addr=%r remote_addr=%r " 94 | "created: (%r, %r)", 95 | local_addr, remote_addr, transport, protocol) 96 | else: 97 | logger.debug("Datagram endpoint remote_addr=%r created: " 98 | "(%r, %r)", 99 | remote_addr, transport, protocol) 100 | yield from waiter 101 | return transport, protocol 102 | 103 | 104 | @coroutine 105 | def create_udp_multicast_endpoint(loop, address, port, protocol_factory, 106 | ttl=None): 107 | """ Create an udp multicast listening socket trough asyncio 108 | """ 109 | transport, protocol = yield from create_datagram_endpoint( 110 | loop, lambda: protocol_factory, 111 | local_addr=(ANY, port), proto=IPPROTO_UDP) 112 | sock = transport.get_extra_info('socket') 113 | sock.setsockopt(IPPROTO_IP, IP_MULTICAST_TTL, 255) 114 | sock.setsockopt(IPPROTO_IP, IP_ADD_MEMBERSHIP, 115 | inet_aton(address) + inet_aton(ANY)) 116 | 117 | return transport, protocol 118 | 119 | 120 | class UdpMulticastEmitterProtocol(object): 121 | 122 | def __init__(self, registration_address, registration_info, on_close): 123 | self.registration_address = registration_address 124 | self.registration_info = registration_info 125 | self.on_close = on_close 126 | 127 | def connection_made(self, transport): 128 | self.transport = 
transport 129 | # self.send_registration_infos() 130 | 131 | def send_registration_infos(self): 132 | self.transport.sendto(json.dumps(self.registration_info).encode('utf-8'), 133 | self.registration_address) 134 | # loop.call_later(5, self.send_registration_infos) 135 | 136 | def connection_lost(self, exc): 137 | self.on_close.set_result(exc) 138 | 139 | 140 | class UdpMulticastReceiverProtocol(object): 141 | 142 | def __init__(self, callback, loop, node_id, on_close): 143 | self.callback = callback 144 | self.loop = loop 145 | self.node_id = node_id 146 | self.on_close = on_close 147 | 148 | def connection_made(self, transport): 149 | self.transport = transport 150 | 151 | def datagram_received(self, data, addr): 152 | decoded = json.loads(data.decode('utf-8')) 153 | if decoded['node_id'] == self.node_id: 154 | return 155 | decoded['address'] = addr[0] 156 | 157 | asyncio.async(self.callback('register', decoded), loop=self.loop) 158 | 159 | 160 | class UdpDiscoveryMedium(object): 161 | 162 | MCAST_ADDR = "237.252.249.227" 163 | MCAST_PORT = 32000 164 | ANY = "0.0.0.0" 165 | 166 | def __init__(self, callback, loop, node_infos): 167 | self.callback = callback 168 | self.loop = loop 169 | self.node_id = node_infos['node_id'] 170 | self.node_infos = copy(node_infos) 171 | 172 | @asyncio.coroutine 173 | def start(self): 174 | self.receiver_closed = asyncio.Future(loop=self.loop) 175 | self.receiver, _ = yield from create_udp_multicast_endpoint( 176 | self.loop, self.MCAST_ADDR, self.MCAST_PORT, 177 | UdpMulticastReceiverProtocol(self.callback, self.loop, self.node_id, self.receiver_closed), 178 | ttl=255) 179 | 180 | self.emitter_closed = asyncio.Future(loop=self.loop) 181 | self.emitter, self.emitter_t = yield from self.loop.create_datagram_endpoint( 182 | lambda: UdpMulticastEmitterProtocol((self.MCAST_ADDR, self.MCAST_PORT), self.node_infos, self.emitter_closed), 183 | local_addr=(self.ANY, 0), 184 | proto=IPPROTO_UDP 185 | ) 186 | 187 | def close(self): 188 | 
self.receiver.close() 189 | self.emitter.close() 190 | 191 | @asyncio.coroutine 192 | def send_registration_infos(self): 193 | return self.emitter_t.send_registration_infos() 194 | 195 | def check_leak(self): 196 | return 197 | -------------------------------------------------------------------------------- /zeroservices/exceptions.py: -------------------------------------------------------------------------------- 1 | class ServiceUnavailable(Exception): 2 | pass 3 | 4 | class UnknownNode(Exception): 5 | pass 6 | 7 | class UnknownService(Exception): 8 | pass 9 | 10 | class ResourceException(Exception): 11 | 12 | def __init__(self, error_message): 13 | self.error_message = error_message 14 | 15 | def __str__(self): 16 | return self.__repr__() 17 | 18 | def __repr__(self): 19 | return "ResourceException(%s)" % self.error_message 20 | 21 | class ResourceNotFound(Exception): 22 | pass 23 | -------------------------------------------------------------------------------- /zeroservices/medium/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from uuid import uuid4 4 | from abc import ABCMeta, abstractmethod 5 | from ..utils import maybe_asynchronous 6 | 7 | import asyncio 8 | 9 | 10 | class BaseMedium(object): 11 | 12 | __metaclass__ = ABCMeta 13 | node_id = None 14 | 15 | def __init__(self, loop, discovery_class, node_id=None): 16 | # Node id 17 | if node_id is None: 18 | node_id = uuid4().hex 19 | self.node_id = node_id 20 | self.directory = {} 21 | 22 | self.loop = loop 23 | self.discovery = None 24 | self.discovery_class = discovery_class 25 | self.event_listeners = set() 26 | self.server_sockets = set() 27 | 28 | @asyncio.coroutine 29 | def start(self): 30 | self.discovery = self.discovery_class(self.process_message, self.loop, 31 | self.get_node_info()) 32 | 33 | yield from self.discovery.start() 34 | yield from self.discovery.send_registration_infos() 35 | 36 | def close(self): 37 | # If start 
has not been called, do not try to close discovery 38 | if self.discovery: 39 | self.discovery.close() 40 | 41 | @abstractmethod 42 | def register(self): 43 | pass 44 | 45 | @abstractmethod 46 | def subscribe(self, topic): 47 | pass 48 | 49 | def get_node_info(self): 50 | service_info = self.service.service_info() 51 | service_info['node_id'] = self.node_id 52 | return {'node_id': self.node_id, 'service_info': service_info} 53 | 54 | @abstractmethod 55 | def publish(self, event_type, event_data): 56 | pass 57 | 58 | def process_event(self, message_type, event_message): 59 | for event_listener in self.event_listeners: 60 | yield from event_listener(message_type, event_message) 61 | 62 | @abstractmethod 63 | def send(self, node_id, message, message_type="message", wait_response=True): 64 | pass 65 | 66 | @asyncio.coroutine 67 | def process_message(self, message_type, message, sender=None): 68 | self.logger.info("Process [{}] {}".format(message_type, message)) 69 | if message_type == 'register': 70 | service_info = message.pop('service_info') 71 | yield from self.process_registration(message) 72 | return self.service.on_registration_message(service_info) 73 | else: 74 | result = yield from self.on_message_callback(message_type=message_type, **message) 75 | if sender: 76 | yield from self.respond(sender, result) 77 | return ('message', result) 78 | 79 | return result 80 | 81 | @abstractmethod 82 | def connect_to_node(self, node_if): 83 | pass 84 | 85 | def periodic_call(self, callback, delay): 86 | 87 | def periodic_wrapper(): 88 | self.loop.create_task(callback()) 89 | self.loop.call_later(delay, periodic_wrapper) 90 | 91 | return self.loop.call_later(delay, periodic_wrapper) 92 | 93 | def send_registration_answer(self, node_id, node_info=None): 94 | if node_info is None: 95 | node_info = self.get_node_info() 96 | 97 | result = yield from self.send(node_id, node_info, 'register', wait_response=False) 98 | return result 99 | 100 | def set_service(self, service): 101 | 
import asyncio
import json

from ..medium import BaseMedium
from ..resources import (ResourceCollection, Resource,
                         is_callable)
from ..exceptions import ServiceUnavailable
from ..query import match


# Test memory medium
class MemoryMedium(BaseMedium):
    """In-process medium used by the test-suite.

    Every started instance registers itself in the class-level NODES
    registry; messages and events are delivered by direct method calls
    instead of going through a real transport.
    """

    # Shared registry of all live mediums, keyed by node_id.
    NODES = {}

    def __init__(self, loop, discovery_class, node_id=None):
        super().__init__(loop, discovery_class, node_id)

        self.topics = []     # topics this node subscribed to
        self.callbacks = []  # callbacks registered via periodic_call

    @classmethod
    def reset(cls):
        """Forget every registered node (test-isolation helper)."""
        cls.NODES = {}

    def close(self):
        # Deregister; the node may already be gone (e.g. after check_leak).
        try:
            del self.NODES[self.node_id]
        except KeyError:
            pass
        super().close()

    @asyncio.coroutine
    def start(self):
        self.NODES[self.node_id] = self
        yield from super().start()

    def connect_to_node(self, node_id):
        # No connection step is needed for in-memory delivery.
        pass

    def subscribe(self, topic):
        self.topics.append(topic)

    @asyncio.coroutine
    def publish(self, event_type, event_data):
        # Broadcast to every other node; the publisher does not receive
        # its own events (services re-publish to themselves explicitly).
        for node in self.NODES.values():
            if node.node_id == self.node_id:
                continue
            yield from node.process_event(event_type, event_data)

    @asyncio.coroutine
    def send(self, node_id, message, message_type="message", wait_response=True):
        """Deliver *message* to *node_id* and optionally return its answer.

        Raises ServiceUnavailable when the target node is not registered.
        """
        try:
            node = self.NODES[node_id]
        except KeyError:
            raise ServiceUnavailable('Service %s is unavailable.' % node_id)

        # Round-trip through JSON so non-serializable payloads fail here,
        # exactly as they would on a real network medium.
        message = json.dumps(message)

        result = yield from node.process_message(message_type, json.loads(message),
                                                 sender=self.node_id)

        if wait_response:
            assert result[0] == 'message'
            return result[1]

        return

    @asyncio.coroutine
    def respond(self, sender, message, message_type="message"):
        # Responses are returned directly by process_message; nothing to do.
        return

    def send_registration_answer(self, node_id, node_info=None):
        node_info = self.get_node_info()

        node_info['address'] = "127.0.0.1"

        return super(MemoryMedium, self).send_registration_answer(node_id, node_info)

    def periodic_call(self, callback, delay):
        super().periodic_call(callback, delay)
        self.callbacks.append(callback)

    @asyncio.coroutine
    def call_callbacks(self):
        """Run every registered periodic callback once (deterministic clock)."""
        for callback in self.callbacks:
            yield from callback()

    def check_leak(self):
        super().check_leak()
        if self.NODES:
            nodes = self.NODES
            # BUGFIX: reset the *class* attribute. The original assigned to
            # self.NODES, which only created an instance attribute and left
            # the shared registry (and thus the leak) in place for every
            # other instance and subsequent test.
            type(self).NODES = {}
            raise Exception(nodes)
# -*- coding: utf-8 -*-
import asyncio
import aiozmq
import time
import zmq
import sys
import json
import logging
import socket

from asyncio import coroutine
from socket import AF_INET, SOCK_STREAM, SOCK_DGRAM, IPPROTO_UDP, SOL_SOCKET, SO_REUSEADDR, IPPROTO_IP, IP_MULTICAST_TTL, IP_ADD_MEMBERSHIP, inet_aton
from os.path import join
from os import makedirs

from zeroservices.medium import BaseMedium

logging.basicConfig(level=logging.DEBUG)

# NOTE(review): module-level loop kept for backward compatibility; nothing
# in this module reads it.
loop = asyncio.get_event_loop()


class ServerProtocol(object):
    """aiozmq protocol for the ROUTER server socket: decodes incoming
    (sender, type, json-payload) frames and schedules the service callback.
    """

    def __init__(self, callback, loop):
        self.callback = callback
        self.loop = loop

    def connection_made(self, transport):
        self.transport = transport

    def msg_received(self, msg):
        sender, message_type, message = msg
        message_type = message_type.decode('utf-8')
        message = json.loads(message.decode('utf-8'))

        # BUGFIX: asyncio.async() is a syntax error since Python 3.7
        # (`async` became a keyword); ensure_future is the 3.4.4+ spelling.
        asyncio.ensure_future(self.callback(message_type, message, sender=sender),
                              loop=self.loop)


class SubProtocol(object):
    """aiozmq protocol for the SUB socket: splits the single
    "<event_type> <json>" frame and schedules the event callback.
    """

    def __init__(self, callback, loop):
        self.callback = callback
        self.loop = loop

    def connection_made(self, transport):
        self.transport = transport

    def msg_received(self, msg):
        event_type, event_data = msg[0].decode('utf-8').split(' ', maxsplit=1)
        event_data = json.loads(event_data)

        # BUGFIX: asyncio.async -> asyncio.ensure_future (see ServerProtocol).
        asyncio.ensure_future(self.callback(event_type, event_data), loop=self.loop)


class ZeroMQMedium(BaseMedium):
    """Medium implementation on top of ZeroMQ (PUB/SUB for events,
    ROUTER/DEALER for direct messages).
    """

    @asyncio.coroutine
    def start(self):
        """Bind the PUB, ROUTER-server and SUB sockets on random ports."""

        # Pub
        self.pub = yield from aiozmq.create_zmq_stream(
            zmq.PUB,
            bind="tcp://*:*",
            loop=self.loop
        )

        # Server
        self.server, self.server_t = yield from aiozmq.create_zmq_connection(
            lambda: ServerProtocol(self.process_message, self.loop),
            zmq.ROUTER, bind="tcp://*:*",
            loop=self.loop
        )
        # Identify ourselves so peers can address responses by node_id.
        self.server_t.transport.setsockopt(zmq.IDENTITY, self.node_id.encode('utf-8'))

        # Sub
        self.sub, _ = yield from aiozmq.create_zmq_connection(
            lambda: SubProtocol(self.process_event, self.loop),
            zmq.SUB,
            loop=self.loop
        )

        yield from super(ZeroMQMedium, self).start()

    def close(self):
        self.server.close()
        self.pub.close()
        self.sub.close()
        super(ZeroMQMedium, self).close()

    def get_node_info(self):
        """Extend the base info with the dynamically-bound server/pub ports."""
        node_info = super(ZeroMQMedium, self).get_node_info()

        node_info['server_port'] = int(tuple(self.server.bindings())[0].split(':')[-1])
        node_info['pub_port'] = int(tuple(self.pub._transport.bindings())[0].split(':')[-1])

        return node_info

    def connect_to_node(self, node_id):
        """Subscribe our SUB socket to the peer's PUB socket (all topics)."""
        peer_info = self.directory[node_id]
        peer_address = 'tcp://%s:%s' % (peer_info['address'],
                                        peer_info['pub_port'])
        self.logger.debug('Connecting my sub socket to %s' % peer_address)
        self.sub.connect(peer_address)
        self.sub.setsockopt(zmq.SUBSCRIBE, ''.encode('utf-8'))

    @coroutine
    def send(self, node_id, message, message_type="message", wait_response=True):
        """Send *message* to *node_id* over a one-shot DEALER socket and
        optionally wait for the JSON-decoded answer.
        """
        peer_info = self.directory[node_id]

        address = peer_info['address']
        port = peer_info['server_port']

        address = 'tcp://%(address)s:%(port)s' % locals()
        request_socket = yield from aiozmq.create_zmq_stream(
            zmq.DEALER, connect=address, loop=self.loop
        )

        log_info = (message_type, json.dumps(message), address)
        self.logger.info('Send %s/%s to %s' % log_info)
        message = (message_type.encode('utf-8'), json.dumps(message).encode('utf-8'))
        request_socket.write(message)

        if wait_response:
            message_type, message = yield from request_socket.read()
            request_socket.close()
            assert message_type.decode('utf-8') == 'message'
            return json.loads(message.decode('utf-8'))

        # Fire-and-forget: flush before closing so the frame is not dropped.
        yield from request_socket.drain()
        request_socket.close()

    @coroutine
    def publish(self, event_type, event_data):
        """Broadcast "<event_type> <json>" on the PUB socket."""
        self.logger.debug("Publish %s %s" % (event_type, event_data))
        pub_message = '%s %s' % (event_type, json.dumps(event_data))
        pub_message = (pub_message.encode('utf-8'),)
        self.pub.write(pub_message)

        return

    @asyncio.coroutine
    def respond(self, sender, message, message_type="message"):
        """Answer a ROUTER-received message back to *sender*."""
        data = (sender, message_type.encode('utf-8'), json.dumps(message).encode('utf-8'))
        self.server.write(data)

    def send_registration_answer(self, node_id, node_info=None):
        node_info = self.get_node_info()

        # Find my local address as seen from the peer by opening a
        # throwaway TCP connection towards it.
        peer_info = self.directory[node_id]
        s = socket.socket(AF_INET, SOCK_STREAM)
        s.connect((peer_info['address'], peer_info['server_port']))
        node_info['address'] = s.getsockname()[0]
        s.close()

        return super(ZeroMQMedium, self).send_registration_answer(node_id, node_info)

    @asyncio.coroutine
    def add_server_entrypoint(self, path=None):
        """Bind an extra ROUTER server on an ipc:// socket under *path*.

        Returns (socket_path, server). NOTE(review): when path is None
        nothing is bound and None is returned implicitly — confirm callers
        always pass a path.
        """
        if path is not None:
            # Ensure path exists
            try:
                makedirs(path)
            except OSError:
                # Path exists
                pass

            socket_path = 'ipc://%s' % join(path, 'server.sock')
            server, _ = yield from aiozmq.create_zmq_connection(
                lambda: ServerProtocol(self.process_message, self.loop),
                zmq.ROUTER, bind=socket_path,
                loop=self.loop
            )
            # NOTE(review): self.server_sockets is presumably initialised by
            # BaseMedium — confirm.
            self.server_sockets.add(server)
            # BUGFIX: debug print replaced with the module logger.
            self.logger.debug("Server %s", server)

            return socket_path, server
collection 16 | 17 | @is_callable 18 | def create(self, resource_data): 19 | self.collection[self.resource_id] = resource_data 20 | yield from self.publish('create', {'action': 'create', 'resource_data': resource_data}) 21 | return {'resource_id': self.resource_id} 22 | 23 | @is_callable 24 | def get(self): 25 | try: 26 | resource = {'resource_id': self.resource_id, 27 | 'resource_data': self.collection[self.resource_id]} 28 | except KeyError: 29 | return 'NOK' 30 | return resource 31 | 32 | @is_callable 33 | def patch(self, patch): 34 | resource = self.collection[self.resource_id] 35 | 36 | set_keys = patch['$set'] 37 | for key, value in set_keys.items(): 38 | resource[key] = value 39 | 40 | yield from self.publish('patch', {'action': 'patch', 'patch': patch}) 41 | 42 | return resource 43 | 44 | @is_callable 45 | def delete(self): 46 | del self.collection[self.resource_id] 47 | yield from self.publish('delete', {'action': 'delete'}) 48 | return 'OK' 49 | 50 | @is_callable 51 | def add_link(self, relation, target_id, title): 52 | target_relation = target_id[0] 53 | resource = self.collection[self.resource_id] 54 | links = resource.setdefault('_links', {}) 55 | links.setdefault(relation, []).append({'target_id': target_id, 56 | 'title': title}) 57 | links.setdefault('latest', {})[target_relation] = target_id 58 | 59 | event = {'action': 'add_link', 'target_id': target_id, 60 | 'title': title, 'relation': relation} 61 | yield from self.publish('add_link', event) 62 | return 'OK' 63 | 64 | 65 | class MemoryCollection(ResourceCollection): 66 | 67 | resource_class = MemoryResource 68 | 69 | def __init__(self, collection_name): 70 | super(MemoryCollection, self).__init__(collection_name) 71 | self._collection = {} 72 | 73 | def instantiate(self, **kwargs): 74 | return super(MemoryCollection, self).instantiate( 75 | collection=self._collection, **kwargs) 76 | 77 | @is_callable 78 | def list(self, where=None): 79 | resources = [] 80 | for resource_id, resource_data in 
def match(query, resource):
    """Validate a query inspired by MongoDB and TaffyDB query languages.

    Only top-level equality is supported: every key of *query* must be
    present in *resource* with an equal value. An empty query matches
    everything.
    """
    for query_field_name, query_field_value in query.items():
        if resource.get(query_field_name) != query_field_value:
            return False

    return True


def query_incoming(caller, rel, resource_id, outgoing_resource_type,
                   *resource_types):
    """Walk *rel* links backwards through *resource_types* (coroutine:
    drive with ``yield from``). Asserts exactly one match at each hop.

    NOTE(review): with no resource_types the return references an unbound
    name — confirm callers always pass at least one type.
    """
    for resource_type in resource_types:
        query = {"_links.{}".format(rel):
                 {'$elemMatch':
                  {"target_id": (outgoing_resource_type, resource_id)}}}

        caller.logger.info("%s / %s", resource_type, query)

        resource = yield from caller.send(collection=resource_type,
                                          action='list',
                                          where=query)

        if not len(resource) == 1:
            caller.logger.info("Query %s", query)
            caller.logger.info("Resources %s", resource)
            assert len(resource) == 1
        resource = resource[0]
        resource_id = resource['resource_id']
        outgoing_resource_type = resource_type
    return resource


def follow_links(caller, first_resource, *rels):
    """Follow the '_links.latest' chain of *first_resource* through *rels*
    and return the final resource's data.

    BUGFIX: caller.send is a coroutine and must be awaited with
    ``yield from``; the original subscripted the un-awaited coroutine
    object, which always raised TypeError. follow_links is therefore now a
    coroutine like its sibling query_incoming: drive it with ``yield from``.
    """
    resource = first_resource
    for rel in rels:
        rel_links = resource['_links']['latest'][rel]

        caller.logger.info('Rel %s, outgoing resource %s',
                           rel, rel_links)

        resource_type, resource_id = rel_links

        response = yield from caller.send(collection=resource_type,
                                          action='get',
                                          resource_id=resource_id)
        resource = response['resource_data']
        caller.logger.info('Resource %s', resource)

    return resource
import asyncio

from .service import BaseService
from .exceptions import UnknownService, ResourceException
from .query import match
from .utils import accumulate
from abc import ABCMeta, abstractmethod
from uuid import uuid4

import logging


### Utils


def is_callable(method):
    """Mark *method* as remotely invokable and wrap it as a coroutine."""
    method.is_callable = True
    return asyncio.coroutine(method)


class BaseResourceService(BaseService):
    """Service that knows about resource collections, both the ones it
    owns locally and the ones registered by other nodes.
    """

    application = None

    def __init__(self, name, medium):
        self.resources = {}                   # local collections by name
        self.resources_directory = {}         # remote collection -> node_id
        self.resources_worker_directory = {}  # resource type -> workers
        super().__init__(name, medium)

    @property
    def known_resources(self):
        """Every collection name reachable from this node, local or remote."""
        return list(self.resources.keys()) + list(self.resources_directory.keys())


    def save_new_node_info(self, node_info):
        """Record the node and index the collections it advertises."""
        super().save_new_node_info(node_info)

        for resource in node_info.get('resources', ()):
            self.resources_directory[resource] = node_info['node_id']

    @asyncio.coroutine
    def send(self, collection_name, **kwargs):
        """Route a message to *collection_name*: locally when we own it,
        otherwise to the node that registered it.

        Raises UnknownService for unknown collections and ResourceException
        when the target reports a failure; returns the payload's 'data'.
        """
        message = dict(kwargs, collection_name=collection_name)

        if collection_name in self.resources:
            result = yield from self.on_message(**message)
        else:
            node_id = self.resources_directory.get(collection_name)
            if node_id is None:
                raise UnknownService("Unknown service {0}".format(collection_name))

            result = yield from super().send(node_id, message)

        if result['success'] is False:
            raise ResourceException(result.pop("data"))

        return result.pop("data")
class ResourceService(BaseResourceService):
    """Node-type service that owns resource collections and dispatches
    incoming messages to them.
    """

    def service_info(self):
        return {'name': self.name, 'resources': list(self.resources.keys()),
                'node_type': 'node'}

    def on_registration_message_worker(self, node_info):
        """Index a worker node under every resource type we both know."""
        for resource_type in node_info['resources']:
            if resource_type in self.resources.keys():
                resources_workers = self.resources_worker_directory.setdefault(
                    resource_type, {})
                # TODO, change medium node_id ?
                resources_workers[node_info['name']] = node_info

        # NOTE(review): send_registration_answer is awaited on some mediums
        # — confirm whether this call needs scheduling.
        self.medium.send_registration_answer(node_info['node_id'])
        self.on_peer_join(node_info['node_id'])

    @asyncio.coroutine
    def on_message(self, collection_name, message_type=None, *args, **kwargs):
        '''Dispatch a message to the target collection and wrap the outcome
        in a {'success': ..., 'data': ...} envelope.

        message_type is ignored for the moment.
        '''

        # Get collection
        try:
            collection = self.resources[collection_name]
        except KeyError:
            # BUGFIX: the original formatted the unbound name `collection`
            # (NameError in this branch) and returned the text under
            # 'message', while callers (BaseResourceService.send) read
            # 'data' — which raised KeyError instead of reporting the error.
            error_message = 'No collection named %s' % collection_name
            return {'success': False, 'data': error_message}

        self.logger.debug("Collection {0}".format(collection))

        # Try to get a result
        try:
            result = yield from collection.on_message(*args, **kwargs)
        except Exception as e:
            self.logger.exception("Error: {0}".format(str(e)))
            return {'success': False, 'data': str(e)}
        else:
            self.logger.debug("Success: {0}".format(result))
            return {'success': True, 'data': result}

    @asyncio.coroutine
    def publish(self, *args):
        '''Call BaseService.publish and call on_event on self.
        '''
        yield from super(ResourceService, self).publish(*args)

        # Publish to itself
        yield from self.on_event(*args)

    ### Utils
    def register_resource(self, collection):
        """Attach *collection* to this service and start serving it."""
        assert isinstance(collection, ResourceCollection)

        # Add self reference to collection
        collection.service = self

        # Resources collections
        self.resources[collection.resource_name] = collection

    def get_known_worker_nodes(self):
        return {resource_type: list(workers.keys()) for resource_type, workers in
                self.resources_worker_directory.items()}


class RealtimeResourceService(ResourceService):
    '''A subclass resource service compatible with realtime sockjs http
    interface.
    '''

    def on_event(self, message_type, data):
        # Test if someone is connected to the socks endpoint
        # NOTE(review): relies on self.application.clients being set by the
        # HTTP layer — confirm wiring.
        if not self.application.clients:
            return

        self.logger.info("On event %s", locals())
        self.application.clients[0].publishToRoom('*', 'event', data)

        # Fan the event out to every topic prefix: a.b.c -> a, a.b, a.b.c
        topics = accumulate(message_type.split('.'), lambda x, y: '.'.join((x, y)))

        for topic in topics:
            self.logger.info('Publish %s to %s topic', data, topic)
            self.application.clients[0].publishToRoom(topic, 'event', data)
class ResourceCollection(object):
    """Named collection of resources; dispatches actions either to itself
    or to an instantiated resource.
    """

    resource_name = None
    resource_class = None
    service = None

    def __init__(self, resource_name):
        self.resource_name = resource_name
        self.logger = logging.getLogger("{0}.{1}".format(resource_name, 'collection'))

    def on_message(self, action, resource_id=None, **kwargs):
        """Resolve *action* on a resource (when resource_id is given) or on
        the collection, and invoke it. Raises NoActionHandler otherwise.
        """
        if resource_id:
            resource = self.instantiate(resource_id=resource_id)
            self.logger.debug("Resource_id, then using resource {0}".format(resource))
            action_handler = getattr(resource, action, None)
        else:
            self.logger.debug("No resource id, then using collection {0}".format(self))
            action_handler = getattr(self, action, None)

        self.logger.debug("Action handler {0} {1}".format(action_handler, locals()))

        # Only methods explicitly marked with @is_callable may be invoked.
        if action_handler and getattr(action_handler, 'is_callable', False):
            return action_handler(**kwargs)
        raise NoActionHandler('No handler for action {0}'.format(action))

    def instantiate(self, **kwargs):
        """Build a resource_class instance bound to this collection."""
        return self.resource_class(service=self.service,
                                   resource_collection=self, **kwargs)

    def publish(self, topic, message):
        """Tag *message* with our resource name, prefix the topic, forward."""
        message['resource_name'] = self.resource_name
        yield from self.service.publish('.'.join((self.resource_name, topic)),
                                        message)

    @is_callable
    def list(self, where=None):
        pass


class Resource(object):
    """Abstract single resource; concrete backends implement the CRUD and
    linking actions below.
    """

    __metaclass__ = ABCMeta

    def __init__(self, resource_id, service, resource_collection):
        self.resource_id = resource_id
        self.service = service
        self.resource_collection = resource_collection

    @abstractmethod
    @is_callable
    def get(self):
        pass

    @abstractmethod
    @is_callable
    def create(self, resource_data):
        return self

    @abstractmethod
    @is_callable
    def patch(self, patch):
        pass

    @abstractmethod
    @is_callable
    def delete(self):
        pass

    @abstractmethod
    @is_callable
    def add_link(self, relation, target_id, title):
        pass

    def publish(self, topic, message):
        """Tag *message* with our id, suffix the topic, delegate upward."""
        message['resource_id'] = self.resource_id
        yield from self.resource_collection.publish(
            '.'.join((topic, self.resource_id)), message)


class ResourceWorker(BaseResourceService):
    """Worker service reacting to resource events through registered rules."""

    def __init__(self, name, medium):
        # Unique worker identity: several workers of one kind may coexist.
        self.rules = {}
        super().__init__('{:s}-{:s}'.format(name, str(uuid4())), medium)

    @asyncio.coroutine
    def start(self):
        self.medium.periodic_call(self.poll_check, 10)
        yield from super().start()

    @asyncio.coroutine
    def poll_check(self):
        """Periodically re-run every rule against matching existing resources."""
        self.logger.info('Poll check starting')
        for resource_type, rules in self.rules.items():
            for rule in rules:
                matching_resources = yield from self.send(
                    collection_name=resource_type,
                    action="list",
                    where=rule.matcher)
                self.logger.info('Rule %s, resources %s', rule, matching_resources)
                for resource in matching_resources:
                    yield from rule(resource_type, resource['resource_data'],
                                    resource['resource_id'], 'periodic')

    def service_info(self):
        return {'name': self.name, 'resources': list(self.rules.keys()),
                'node_type': 'worker'}

    @asyncio.coroutine
    def on_event(self, message_type, resource_name, action, resource_id,
                 resource_data=None, **kwargs):
        """Run every matching rule for an incoming resource event."""

        resource_rules = self.rules.get(resource_name, ())
        if not resource_rules:
            return

        # Events may omit the data; fetch it unless the resource is gone.
        if not resource_data and action != "delete":
            resource = yield from self.send(collection_name=resource_name,
                                            action="get",
                                            resource_id=resource_id)
            resource_data = resource['resource_data']

        for rule in resource_rules:
            if rule.match(resource_data):
                yield from rule(resource_name, resource_data, resource_id, action)

    def register(self, callback, resource_type, **matcher):
        """Register *callback* for events on *resource_type* matching *matcher*."""
        self.rules.setdefault(resource_type, []).append(Rule(callback, matcher))

        # Register to events matching resource_type
        self.medium.subscribe(resource_type)
class Rule(object):

    """Util class for matching events

    >>> from unittest.mock import Mock, sentinel
    >>> resource = {'foo': 'bar'}
    >>> callback = Mock(return_value=sentinel.RETURN)
    >>> rule = Rule(callback, {'foo': 'bar'})
    >>> rule.match({'foo': 'not_bar'})
    False
    >>> rule.match(resource)
    True
    >>> rule('Resource', resource, 'ResourceID')
    sentinel.RETURN
    >>> rule.callback.assert_called_once_with('Resource', resource, \
'ResourceID')
    """
    # BUGFIX(doc): the doctest imported the third-party `mock` package;
    # unittest.mock has been in the standard library since Python 3.3.

    def __init__(self, callback, matcher):
        self.callback = callback
        self.matcher = matcher

    def match(self, resource):
        """True when *resource* satisfies this rule's matcher query."""
        return match(self.matcher, resource)

    def __call__(self, *args, **kwargs):
        return self.callback(*args, **kwargs)

    def __repr__(self):
        return 'Rule({})'.format(self.__dict__)


#### Exceptions


class NoActionHandler(Exception):
    pass


import asyncio
import logging

from asyncio import coroutine
from copy import copy

from zeroservices.medium.zeromq import ZeroMQMedium
from zeroservices.exceptions import UnknownNode
from zeroservices.validation import REGISTRATION_SCHEMA, MultipleInvalid

logging.basicConfig(level=logging.DEBUG)

DEFAULT_MEDIUM = ZeroMQMedium


class BaseService(object):
    """Base node: keeps a directory of known peers and bridges the medium
    callbacks (registration, events, messages) to overridable hooks.
    """

    medium = None
    name = None

    def __init__(self, name, medium):
        self.name = name
        self.medium = medium
        self.medium.set_service(self)
        self.directory = {}
        self.logger = logging.getLogger(name)
        self.logger.setLevel(logging.DEBUG)

    def service_info(self):
        """Subclass to return informations for registration
        """
        return {'name': self.name, 'node_type': 'node'}

    def on_registration_message(self, node_info):
        """Validate a registration payload and route it by node type."""
        try:
            REGISTRATION_SCHEMA(node_info)
        except MultipleInvalid as e:
            message = "Invalid node_info: {}, raise Exception {}".format(node_info, e)
            logging.exception(message)
            return

        # Ignore our own broadcasts.
        if node_info['node_id'] == self.medium.node_id:
            return

        if node_info['node_type'] == 'node':
            self.on_registration_message_node(node_info)
        elif node_info['node_type'] == 'worker':
            self.on_registration_message_worker(node_info)

    def on_registration_message_node(self, node_info):
        if node_info['node_id'] in self.directory:
            return

        self.save_new_node_info(node_info)
        self.medium.connect_to_node(node_info['node_id'])
        self.medium.send_registration_answer(node_info['node_id'])
        self.on_peer_join(node_info)

    def on_registration_message_worker(self, node_info):
        pass

    def save_new_node_info(self, node_info):
        self.directory[node_info['node_id']] = copy(node_info)

    def get_known_nodes(self):
        return self.directory.keys()

    def on_peer_join(self, node_info):
        pass

    def on_peer_leave(self, node_info):
        pass

    @asyncio.coroutine
    def on_message(self, message_type, *args, **kwargs):
        pass

    @asyncio.coroutine
    def process_event(self, message_type, event_message):
        """Entry point registered on the medium for incoming events."""
        # BUGFIX: this method is registered as the service's own event
        # listener, so `self` IS the service; the original dereferenced
        # the non-existent attribute `self.service` (AttributeError).
        if message_type == 'close':
            return self.on_peer_leave(event_message)
        elif message_type == 'register':
            return self.on_registration_message(event_message)
        else:
            result = yield from self.on_event(message_type, **event_message)
            return result

    @asyncio.coroutine
    def on_event(self, message_type, *args, **kwargs):
        pass

    def send(self, node_id, message, **kwargs):
        return self.medium.send(node_id, message, **kwargs)

    def publish(self, *args, **kwargs):
        return self.medium.publish(*args, **kwargs)

    @coroutine
    def start(self):
        yield from self.medium.start()

    def get_directory(self):
        return self.directory

    def close(self):
        return self.medium.close()
import json

try:
    from urlparse import urlsplit, urlunsplit
except ImportError:
    from urllib.parse import urlsplit, urlunsplit

try:
    # BUGFIX: tolerate a missing requests package at import time so the
    # pure URL helpers stay usable (and testable); any actual HTTP call
    # will then fail with an explicit AttributeError on None.
    import requests
except ImportError:  # pragma: no cover
    requests = None


def url_path_join(*parts):
    """Normalize url parts and join them with a slash (trailing slash kept)."""
    schemes, netlocs, paths, queries, fragments = zip(*(urlsplit(part) for part in parts))
    scheme = first(schemes)
    netloc = first(netlocs)
    path = '/'.join(x.strip('/') for x in paths if x) + '/'
    query = first(queries)
    fragment = first(fragments)
    return urlunsplit((scheme, netloc, path, query, fragment))

def first(sequence, default=''):
    """Return the first truthy element of *sequence*, or *default*."""
    return next((x for x in sequence if x), default)


class BaseHTTPClient(object):
    """Fluent HTTP client: `client['users']['42'].get()` builds the URL
    from the bracketed parts and the attribute names the action.
    """

    def __init__(self, base_url):
        self.base_url = base_url
        self.parts = []

    def hello_world(self):
        """GET the base URL and return the raw (non-JSON) body."""
        return MethodCaller(self, "get", False)()

    def preprocess_request(self):
        """Hook returning extra kwargs for requests (e.g. auth)."""
        return {}

    def __getattr__(self, action):
        # BUGFIX: never turn dunder/private lookups (copy, pickle, repr
        # machinery) into HTTP action callers.
        if action.startswith('_'):
            raise AttributeError(action)
        return MethodCaller(self, action)

    def __getitem__(self, value):
        self.parts.append(value)
        return self


class MethodCaller(object):
    """Deferred HTTP call bound to a client and an action name."""

    # Known actions map to HTTP verbs; anything else becomes a POST with
    # the X-CUSTOM-ACTION header.
    method_map = {'list': 'get', 'get': 'get', 'create': 'post',
                  'delete': 'delete', 'patch': 'patch'}

    def __init__(self, client, action, decode_json=True):
        self.client = client
        self.action = action
        self.decode_json = decode_json
        self.method = self.method_map.get(action)

    def __call__(self, **kwargs):
        url = url_path_join(self.client.base_url, *self.client.parts)
        additionnal = self.client.preprocess_request()

        # Reinitialize client part
        self.client.parts = []

        if not self.method:
            self.method = 'post'
            additionnal.setdefault('headers', {})['X-CUSTOM-ACTION'] = self.action

        response = getattr(requests, self.method)(url, data=json.dumps(kwargs),
                                                  **additionnal)
        response.raise_for_status()
        if self.decode_json:
            return response.json()
        else:
            return response.content.decode('utf-8')


class BasicAuthHTTPClient(BaseHTTPClient):
    """BaseHTTPClient sending HTTP Basic credentials with every request."""

    def __init__(self, base_url, auth_tuple):
        super(BasicAuthHTTPClient, self).__init__(base_url)
        self.auth_tuple = auth_tuple

    def preprocess_request(self):
        return {'auth': self.auth_tuple}
class BasicAuth(object):
    """ Implements Basic AUTH logic. Should be subclassed to implement custom
    authentication checking.
    """

    def check_auth(self, username, password, resource, method):
        """ This function is called to check if a username / password
        combination is valid. Must be overridden with custom logic.

        :param username: username provided with current request.
        :param password: password provided with current request
        :param resource: resource being requested.
        :param method: HTTP method being executed (POST, GET, etc.)
        """
        raise NotImplementedError

    def authorized(self, handler, resource, method):
        """ Validates the the current request is allowed to pass through.

        :param resource: resource being requested.

        NOTE(review): AuthenticationError / ForbiddenError are commented
        out at the top of this module — confirm where they are defined,
        otherwise the failure paths raise NameError.
        """
        auth_header = handler.request.headers.get('Authorization')
        if auth_header is None:
            raise AuthenticationError()
        if not auth_header.startswith('Basic '):
            raise AuthenticationError()

        try:
            auth_decoded = b64decode(auth_header[6:]).decode('utf-8')
            # BUGFIX: split on the *first* colon only. RFC 7617 allows
            # colons in the password, and maxsplit=2 made 'user:a:b'
            # unpack into three values (ValueError), rejecting valid
            # credentials.
            username, password = auth_decoded.split(':', 1)
        except (binascii.Error, UnicodeDecodeError, ValueError, TypeError):
            raise AuthenticationError()


        if self.check_auth(username, password, resource, method):
            return True
        else:
            raise ForbiddenError()
# Handlers
class BaseHandler(object):
    """Shared request-processing logic for the HTTP API handlers."""

    def __init__(self, service):
        self.logger = logging.getLogger('api')
        self.service = service

    def prepare(self):
        # NOTE(review): uses self.path_kwargs / self.application, which look
        # like Tornado-era attributes — confirm this is still wired up
        # under aiohttp.
        resource = self.path_kwargs.get("collection")
        self.application.auth.authorized(self, resource, self.request.method)

    def _process(self, request, collection, action, resource_id=None,
                 success_status_code=200, **kwargs):
        """Build the service payload from the request body and route args,
        forward it, and translate the outcome into an HTTP response
        (404 for unknown services)."""
        payload = {}

        request_body = yield from request.text()

        if request_body:
            try:
                request_body = json.loads(request_body)
                payload.update(request_body)
            except (ValueError, UnicodeDecodeError):
                # Malformed bodies are logged and ignored, not fatal.
                self.logger.warning('Bad body: %s',
                                    request_body,
                                    exc_info=True)

        payload['collection_name'] = collection
        payload['action'] = action

        if resource_id:
            payload['resource_id'] = resource_id
        payload.update(kwargs)

        self.logger.info('Payload %s' % payload)

        try:
            result = yield from self.service.send(**payload)
            self.logger.info('Result is %s' % result)
        except UnknownService as e:
            self.logger.error('Payload error %s' % e)
            err_body = json.dumps({'error': str(e)}).encode('utf-8')
            raise web.HTTPNotFound(content_type="application/json",
                                   body=err_body)

        return web.Response(content_type="application/json",
                            body=json.dumps(result).encode('utf-8'),
                            status=success_status_code)


class MainHandler(BaseHandler):
    """Root endpoint: lists every resource collection known to the service."""

    def main(self, request):
        body = json.dumps({'resources': self.service.known_resources}).encode('utf-8')
        return web.Response(body=body)


@asyncio.coroutine
def options(request):
    # Bare 200 answer for CORS preflight requests.
    return web.Response(body=b" ")
success_status_code=201) 155 | 156 | def custom_action(self, request): 157 | return self._process(request, request.match_info['collection'], 158 | request.match_info['action']) 159 | 160 | def options(self, request): 161 | return web.Response(body=b"") 162 | 163 | 164 | class ResourceHandler(BaseHandler): 165 | 166 | def dispatch(self, request): 167 | return getattr(self, request.method.lower())(request) 168 | 169 | def get(self, request): 170 | return self._process(request, request.match_info['collection'], 171 | 'get', request.match_info['resource_id']) 172 | 173 | def delete(self, request): 174 | return self._process(request, request.match_info['collection'], 175 | 'delete', request.match_info['resource_id'], 176 | success_status_code=204) 177 | 178 | def patch(self, request): 179 | return self._process(request, request.match_info['collection'], 180 | 'patch', request.match_info['resource_id']) 181 | 182 | def options(self, request): 183 | pass 184 | 185 | def custom_action(self, request): 186 | return self._process(request, request.match_info['collection'], 187 | request.match_info['action'], 188 | request.match_info['resource_id']) 189 | 190 | 191 | @asyncio.coroutine 192 | def cors_middleware(app, handler): 193 | @asyncio.coroutine 194 | def middleware(request): 195 | response = yield from handler(request) 196 | 197 | allowed_origins = app.allowed_origins 198 | if allowed_origins != "*": 199 | if len(allowed_origins) in [0, 1]: 200 | allowed_origins = allowed_origins 201 | 202 | response.headers['Access-Control-Allow-Origin'] = app.allowed_origins 203 | return response 204 | return middleware 205 | 206 | 207 | @asyncio.coroutine 208 | def get_http_interface(service, loop, port=8888, auth=None, auth_args=(), 209 | auth_kwargs={}, bind=True, allowed_origins=None): 210 | if allowed_origins is None: 211 | allowed_origins = "" 212 | 213 | # Urls 214 | # sockjs_router = SockJSRouter(SockJSHandler, '/realtime') 215 | 216 | app = web.Application(loop=loop, 
middlewares=[cors_middleware]) 217 | app.allowed_origins = allowed_origins 218 | 219 | # Realtime endpoint 220 | realtime_handler = RealtimeHandler(app, service) 221 | app.router.add_route('*', '/realtime', handler=realtime_handler.handler, 222 | name='realtime') 223 | hdrs.ACCESS_CONTROL_ALLOW_ORIGIN = allowed_origins 224 | 225 | # URLS 226 | main_handler = MainHandler(service) 227 | app.router.add_route('*', '/', main_handler.main, name='main') 228 | 229 | collection_handler = CollectionHandler(service) 230 | app.router.add_route('*', '/{collection}', collection_handler.dispatch, 231 | name='collection') 232 | app.router.add_route('*', '/{collection}/', collection_handler.dispatch, 233 | name='collection_slash') 234 | app.router.add_route('POST', '/{collection}/{action}', 235 | collection_handler.custom_action, 236 | name='collection_custom_action') 237 | app.router.add_route('OPTIONS', '/{collection}/{action}', 238 | options) 239 | 240 | resource_handler = ResourceHandler(service) 241 | app.router.add_route('*', '/{collection}/{resource_id}', 242 | resource_handler.dispatch, 243 | name='resource') 244 | app.router.add_route('*', '/{collection}/{resource_id}/', 245 | resource_handler.dispatch, 246 | name='resource_slash') 247 | app.router.add_route('OPTIONS', '/{collection}/{resource_id}/{action}', 248 | options) 249 | app.router.add_route('*', '/{collection}/{resource_id}/{action}', 250 | resource_handler.custom_action, 251 | name='resource_custom_action') 252 | 253 | handler = app.make_handler() 254 | yield from loop.create_server(handler, '0.0.0.0', port) 255 | 256 | # Set application back-reference in service 257 | service.app = app 258 | 259 | return app 260 | -------------------------------------------------------------------------------- /zeroservices/services/realtime.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import aiohttp 3 | 4 | try: 5 | import json 6 | except ImportError: 7 | import 
from collections import defaultdict, namedtuple
from itertools import accumulate
import logging
import datetime

Parser = None

# Limit import
__all__ = ["Parser"]


class DefaultJsonParser(json.JSONEncoder):
    """ JSON encoder that also knows how to serialise datetime objects. """

    def default(self, obj):
        """ Serialise objects the stock encoder cannot handle. """
        # datetime objects are rendered as ISO-8601 strings.
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()

        # Fall back to the default handler (raises TypeError).
        return json.JSONEncoder.default(self, obj)


# Setting parser to default one
Parser = DefaultJsonParser

# NOTE(review): a large block of commented-out SockJS handler code
# (DefaultSockJSHandler / SockJSHandler) was removed here; it remains
# available in version control if ever needed again.


class Session(object):
    """ One connected websocket client and the topics it subscribed to. """

    __slots__ = ['ws', 'topics']

    def __init__(self, ws, topics=None):
        self.ws = ws
        self.topics = topics or set()


class RealtimeHandler(object):
    """ Websocket endpoint broadcasting service events to subscribers. """

    # Class-level registries shared by every handler instance:
    # topic -> set of Session, plus the set of all live sessions.
    rooms = defaultdict(set)
    sessions = set()

    def __init__(self, app, service):
        self.app = app
        self.service = service
        self.service.medium.add_event_listener(self.publish)

    @asyncio.coroutine
    def publish(self, event_type, event_message):
        """ Fan an incoming service event out to the '*' room and to every
        dotted prefix of the event type ('a.b.c' -> 'a', 'a.b', 'a.b.c').
        """
        self.broadcast('*', 'event', event_message)

        topics = accumulate(event_type.split('.'),
                            lambda x, y: '.'.join((x, y)))

        for topic in topics:
            self.broadcast(topic, 'event', event_message)

    def broadcast(self, topic, msg_type, msg):
        """ Send *msg* to every session subscribed to *topic*
        ('*' meaning every connected session).
        """
        if topic == '*':
            sessions = self.__class__.sessions
        else:
            sessions = self.__class__.rooms[topic]

        message = json.dumps({'type': msg_type, 'data': msg})

        for session in sessions:
            try:
                session.ws.send_str(message)
            except RuntimeError:
                # Socket already closing; skip this client.
                pass

    @asyncio.coroutine
    def handler(self, request):
        """ Websocket entry point: register the session once, then process
        incoming messages until the connection goes away.
        """
        ws = aiohttp.web.WebSocketResponse()
        ws.start(request)

        # One session per CONNECTION — previously a fresh Session was
        # created and registered for every received message, leaking
        # registrations in self.sessions.
        session = Session(ws)
        self.sessions.add(session)

        try:
            while True:
                msg = yield from ws.receive()

                # Leave the loop on close/error instead of spinning forever
                # on a dead socket.
                if msg.tp in (aiohttp.MsgType.close, aiohttp.MsgType.error):
                    break

                self.process(session, msg.tp, msg)
        finally:
            # Drop the session from every registry on disconnect so
            # broadcast() no longer targets it.
            self.sessions.discard(session)
            for topic in session.topics:
                self.__class__.rooms[topic].discard(session)

        return ws

    def process(self, session, msg_type, msg):
        """ Handle a single incoming websocket message for *session*. """
        if msg_type == aiohttp.MsgType.text:
            parsed_msg = json.loads(msg.data)

            # 'join' and 'subscribe' both register the session on topics.
            # Duplicate additions are harmless (both containers are sets),
            # which also fixes the old 'subscribe' branch that crashed on
            # `self.session.topics` (self.session was always None).
            if parsed_msg['type'] in ('join', 'subscribe'):
                for topic in parsed_msg.get('topics', set()):
                    session.topics.add(topic)
                    self.__class__.rooms[topic].add(session)
        elif msg_type == aiohttp.MsgType.close:
            pass
        elif msg_type == aiohttp.MsgType.error:
            pass


# ---------------------------------------------------------------------------
# zeroservices/utils.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
import operator


def extract_links(links, link_type):
    """ Return every link in *links* whose text starts with *link_type*. """
    return [link for link in links if link.startswith(link_type)]
def pop_and_replace_link(links, link_type, replace_by):
    """ Remove the first link starting with *link_type* (if any) and append
    *replace_by*.  Mutates *links* in place and returns None.

    NOTE: *replace_by* is appended even when no matching link was found, so
    the call always leaves *replace_by* in the list.
    """
    for ind, link in enumerate(links):
        if link.startswith(link_type):
            del links[ind]
            break

    links.append(replace_by)


def maybe_asynchronous(f):
    """ Decorator: call *f* synchronously, or — when a ``callback`` keyword
    argument is supplied — pass the result to the callback instead of
    returning it.
    """
    def wrapped(*args, **kwargs):
        # Pop the callback so it is never forwarded to *f*.
        callback = kwargs.pop('callback', None)

        result = f(*args, **kwargs)

        if callback is not None:
            callback(result)
        else:
            return result
    return wrapped


def accumulate(iterable, func=operator.add):
    """ Return running totals (backport of itertools.accumulate). """
    # accumulate([1,2,3,4,5]) --> 1 3 6 10 15
    # accumulate([1,2,3,4,5], operator.mul) --> 1 2 6 24 120
    it = iter(iterable)
    try:
        total = next(it)
    except StopIteration:
        # Empty input yields nothing, matching itertools.accumulate.
        # Letting StopIteration escape a generator is a RuntimeError
        # under PEP 479 (Python 3.7+).
        return
    yield total
    for element in it:
        total = func(total, element)
        yield total


# ---------------------------------------------------------------------------
# zeroservices/validation.py
# ---------------------------------------------------------------------------
import six

from voluptuous import Schema, MultipleInvalid, Required, Any


def _str(value):
    """ Voluptuous validator: raise ValueError unless *value* is a string. """
    if not isinstance(value, six.string_types):
        raise ValueError("{} is not a string".format(value))


# Schema for node registration messages; extra keys are allowed.
REGISTRATION_SCHEMA = Schema({Required('node_type'): _str,
                              Required('node_id'): _str,
                              Required('name'): _str}, extra=True)