├── .coveragerc
├── .gitignore
├── .travis.yml
├── LICENSE
├── MANIFEST.in
├── README.rst
├── asyncflux
│   ├── __init__.py
│   ├── client.py
│   ├── clusteradmin.py
│   ├── database.py
│   ├── errors.py
│   ├── shardspace.py
│   ├── testing.py
│   ├── user.py
│   └── util.py
├── distribute_setup.py
├── docs
│   ├── Makefile
│   ├── conf.py
│   ├── index.rst
│   ├── make.bat
│   ├── modules
│   │   ├── client.rst
│   │   ├── clusteradmins.rst
│   │   ├── database.rst
│   │   ├── index.rst
│   │   ├── testing.rst
│   │   └── util.rst
│   └── releases
│       ├── index.rst
│       └── next.rst
├── setup.py
└── tests
    ├── __init__.py
    ├── asyncflux_test.py
    ├── client_test.py
    ├── clusteradmin_test.py
    ├── database_test.py
    ├── runtests.py
    ├── shardspace_test.py
    ├── user_test.py
    └── util_test.py
/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = asyncflux 3 | 4 | [report] 5 | exclude_lines = 6 | pragma: no cover 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | *.nja 3 | *.pyc 4 | *.swp 5 | *.so 6 | *~ 7 | build/* 8 | dist/* 9 | distribute-* 10 | *.egg/* 11 | *.egg-info/* 12 | docs/_build/* 13 | docs/_templates/* 14 | env/* 15 | .tox/* 16 | .coverage 17 | htmlcov/* 18 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - 2.7 4 | - 3.2 5 | - 3.3 6 | - 3.4 7 | - pypy 8 | install: 9 | - pip install pep8 pyflakes coverage coveralls 10 | - pip install . --use-mirrors 11 | script: coverage run tests/runtests.py 12 | after_success: 13 | - coveralls 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types.
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include LICENSE 3 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Asyncflux 2 | ========= 3 | 4 | Asynchronous client for InfluxDB_ and Tornado_. 5 | 6 | .. 
image:: https://travis-ci.org/puentesarrin/asyncflux.png 7 | :target: https://travis-ci.org/puentesarrin/asyncflux 8 | :alt: Travis CI status 9 | 10 | .. image:: https://coveralls.io/repos/puentesarrin/asyncflux/badge.png 11 | :target: https://coveralls.io/r/puentesarrin/asyncflux 12 | :alt: Coveralls status 13 | 14 | Installation 15 | ============ 16 | 17 | You can use pip_ to install Asyncflux: 18 | 19 | .. code-block:: bash 20 | 21 | $ pip install git+https://github.com/puentesarrin/asyncflux.git 22 | 23 | Documentation 24 | ============= 25 | 26 | Sphinx_ is needed to generate the documentation. Documentation can be generated 27 | by issuing the following commands: 28 | 29 | .. code-block:: bash 30 | 31 | $ cd docs 32 | $ make html 33 | 34 | Or simply: 35 | 36 | .. code-block:: bash 37 | 38 | $ python setup.py doc 39 | 40 | Also, the current documentation can be found at ReadTheDocs_. 41 | 42 | License 43 | ======= 44 | 45 | Asyncflux is available under the |apache-license|_. 46 | 47 | .. _InfluxDB: http://influxdb.org 48 | .. _Tornado: http://tornadoweb.org 49 | .. _pip: http://pypi.python.org/pypi/pip 50 | .. _Sphinx: http://sphinx-doc.org 51 | .. _ReadTheDocs: https://asyncflux.readthedocs.org 52 | .. _apache-license: http://www.apache.org/licenses/LICENSE-2.0.html 53 | .. |apache-license| replace:: Apache License, Version 2.0 54 | -------------------------------------------------------------------------------- /asyncflux/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Asynchronous client for InfluxDB and Tornado.""" 3 | import sys 4 | 5 | __all__ = ('__author__', '__since__', '__version__', 'version', 6 | 'AsyncfluxClient', ) 7 | 8 | version_tuple = (0, 0, '+') 9 | 10 | if sys.version_info[0] >= 3: 11 | basestring = str # pragma: no cover 12 | else: 13 | basestring = basestring # pragma: no cover 14 | 15 | 16 | def get_version_string(): 17 | if isinstance(version_tuple[-1], basestring): 18 | return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1] 19 | return '.'.join(map(str, version_tuple)) 20 | 21 | version = get_version_string() 22 | """Current version of Asyncflux.""" 23 | 24 | __author__ = 'Jorge Puente-Sarrín ' 25 | __since__ = '2014-05-18' 26 | __version__ = version 27 | 28 | 29 | from asyncflux.client import AsyncfluxClient 30 | -------------------------------------------------------------------------------- /asyncflux/client.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Connection to InfluxDB""" 3 | import json 4 | import sys 5 | try: 6 | from urlparse import urlparse 7 | except ImportError: # pragma: no cover 8 | from urllib.parse import urlparse # pragma: no cover 9 | if sys.version_info[0] >= 3: 10 | basestring = str # pragma: no cover 11 | else: 12 | basestring = basestring # pragma: no cover 13 | 14 | from tornado import gen, httpclient, httputil, ioloop 15 | 16 | from asyncflux import clusteradmin, database, shardspace 17 | from asyncflux.errors import AsyncfluxError 18 | from asyncflux.util import asyncflux_coroutine, snake_case_dict 19 | 20 | 21 | class AsyncfluxClient(object): 22 | 23 | HOST = 'localhost' 24 | PORT = 8086 25 | USERNAME = 'root' 26 | PASSWORD = 'root' 27 | 28 | def __init__(self, host=None, port=None, username=None, password=None, 29 | is_secure=False, io_loop=None, **kwargs): 30 | scheme = 'https' if is_secure else 'http' 31 | host = host or self.HOST 32 | port = port or self.PORT 33 | 
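# Fall back to the class-level defaults above (localhost:8086 with root/root
# credentials) when no explicit values are given; a full URL passed as `host`
# is parsed below and may override host, port, username and password.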
username = username or self.USERNAME 34 | password = password or self.PASSWORD 35 | if not isinstance(port, int): 36 | raise TypeError("port must be an instance of int") 37 | 38 | if '://' in host: 39 | if host.startswith(('http://', 'https://')): 40 | result = urlparse(host) 41 | scheme = result.scheme 42 | host = result.hostname 43 | port = result.port or port 44 | username = result.username or username 45 | password = result.password or password 46 | else: 47 | index = host.find("://") 48 | raise ValueError('Invalid URL scheme: %s' % host[:index]) 49 | 50 | self.__scheme = scheme 51 | self.__host = host 52 | self.__port = port 53 | self.__username = username 54 | self.__password = password 55 | 56 | self.__json = kwargs.get('json_module', json) 57 | self.io_loop = io_loop or ioloop.IOLoop.current() 58 | self.http_client = httpclient.AsyncHTTPClient(self.io_loop) 59 | 60 | @property 61 | def host(self): 62 | return self.__host 63 | 64 | @property 65 | def port(self): 66 | return self.__port 67 | 68 | @property 69 | def base_url(self): 70 | return '%s://%s:%s' % (self.__scheme, self.host, self.port, ) 71 | 72 | @property 73 | def username(self): 74 | return self.__username 75 | 76 | @username.setter 77 | def username(self, value): 78 | self.__username = value 79 | 80 | @property 81 | def password(self): 82 | return self.__password 83 | 84 | @password.setter 85 | def password(self, value): 86 | self.__password = value 87 | 88 | def __getattr__(self, name): 89 | return database.Database(self, name) 90 | 91 | def __getitem__(self, name): 92 | return self.__getattr__(name) 93 | 94 | @asyncflux_coroutine 95 | def request(self, path, path_params=None, qs=None, body=None, 96 | method='GET', auth_username=None, auth_password=None): 97 | try: 98 | path_params = path_params or {} 99 | qs = qs or {} 100 | auth_username = auth_username or self.username 101 | auth_password = auth_password or self.password 102 | 103 | url = (self.base_url + path) % path_params 104 | if isinstance(body, dict): 105 | body = self.__json.dumps(body) 106 | response = yield self.http_client.fetch( 107 | httputil.url_concat(url, qs), body=body, method=method, 108 | auth_username=auth_username, auth_password=auth_password) 109 | if hasattr(response, 'body') and response.body: 110 | raise gen.Return(self.__json.loads(response.body)) 111 | except httpclient.HTTPError as e: 112 | raise AsyncfluxError(e.response) 113 | 114 | @asyncflux_coroutine 115 | def ping(self): 116 | status = yield self.request('/ping') 117 | raise gen.Return(status) 118 | 119 | @asyncflux_coroutine 120 | def get_databases(self): 121 | dbs = yield self.request('/db') 122 | databases = [database.Database(self, db['name']) for db in dbs] 123 | raise gen.Return(databases) 124 | 125 | @asyncflux_coroutine 126 | def get_database_names(self): 127 | databases = yield self.request('/db') 128 | raise gen.Return([db['name'] for db in databases]) 129 | 130 | @asyncflux_coroutine 131 | def create_database(self, name_or_database): 132 | name = name_or_database 133 | if isinstance(name, database.Database): 134 | name = name_or_database.name 135 | if not isinstance(name, basestring): 136 | raise TypeError("name_or_database must be an instance of " 137 | "%s or Database" % (basestring.__name__,)) 138 | yield self.request('/db', body={'name': name}, method='POST') 139 | new_database = database.Database(self, name) 140 | raise gen.Return(new_database) 141 | 142 | @asyncflux_coroutine 143 | def delete_database(self, name_or_database): 144 | name = name_or_database 145 | if 
isinstance(name, database.Database): 146 | name = name_or_database.name 147 | if not isinstance(name, basestring): 148 | raise TypeError("name_or_database must be an instance of " 149 | "%s or Database" % (basestring.__name__,)) 150 | yield self.request('/db/%(database)s', {'database': name}, 151 | method='DELETE') 152 | 153 | @asyncflux_coroutine 154 | def get_cluster_admin_names(self): 155 | admins = yield self.request('/cluster_admins') 156 | raise gen.Return([a['name'] for a in admins]) 157 | 158 | @asyncflux_coroutine 159 | def get_cluster_admins(self): 160 | cas = yield self.request('/cluster_admins') 161 | admins = [clusteradmin.ClusterAdmin(self, ca['name']) for ca in cas] 162 | raise gen.Return(admins) 163 | 164 | @asyncflux_coroutine 165 | def create_cluster_admin(self, username, password): 166 | yield self.request('/cluster_admins', method='POST', 167 | body={'name': username, 'password': password}) 168 | new_cluster_admin = clusteradmin.ClusterAdmin(self, username) 169 | raise gen.Return(new_cluster_admin) 170 | 171 | @asyncflux_coroutine 172 | def change_cluster_admin_password(self, username, new_password): 173 | yield self.request('/cluster_admins/%(username)s', 174 | {'username': username}, method='POST', 175 | body={'password': new_password}) 176 | 177 | @asyncflux_coroutine 178 | def delete_cluster_admin(self, username): 179 | yield self.request('/cluster_admins/%(username)s', 180 | {'username': username}, method='DELETE') 181 | 182 | @asyncflux_coroutine 183 | def authenticate_cluster_admin(self, username, password): 184 | try: 185 | yield self.request('/cluster_admins/authenticate', 186 | auth_username=username, auth_password=password) 187 | except AsyncfluxError: 188 | raise gen.Return(False) 189 | raise gen.Return(True) 190 | 191 | @asyncflux_coroutine 192 | def get_shard_spaces(self): 193 | spaces = yield self.request('/cluster/shard_spaces') 194 | shard_spaces = [ 195 | shardspace.ShardSpace(self, **snake_case_dict(s)) for s in spaces 196 | ] 197 | raise gen.Return(shard_spaces) 198 | 199 | def __repr__(self): 200 | return "AsyncfluxClient(%r, %r)" % (self.host, self.port) 201 | -------------------------------------------------------------------------------- /asyncflux/clusteradmin.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Tools for cluster administration""" 3 | from asyncflux.util import asyncflux_coroutine 4 | 5 | 6 | class ClusterAdmin(object): 7 | 8 | def __init__(self, client, name): 9 | self.__client = client 10 | self.__name = name 11 | 12 | @property 13 | def client(self): 14 | return self.__client 15 | 16 | @property 17 | def name(self): 18 | return self.__name 19 | 20 | @asyncflux_coroutine 21 | def change_password(self, new_password): 22 | yield self.client.change_cluster_admin_password(self.name, 23 | new_password) 24 | 25 | @asyncflux_coroutine 26 | def delete(self): 27 | yield self.client.delete_cluster_admin(self.name) 28 | 29 | def __repr__(self): 30 | return 'ClusterAdmin(%r, %r)' % (self.client, self.name) 31 | -------------------------------------------------------------------------------- /asyncflux/database.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Database level operations""" 3 | from tornado import gen 4 | 5 | from asyncflux import user 6 | from asyncflux.errors import AsyncfluxError 7 | from asyncflux.util import asyncflux_coroutine, snake_case_dict 8 | 9 | 10 | class Database(object): 11 | 
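# Usage sketch (illustrative only, not part of the original source; 'metrics'
# is a placeholder database name): a Database is normally obtained from an
# AsyncfluxClient via attribute or item access rather than constructed
# directly, e.g. inside a Tornado coroutine:
#
#     client = AsyncfluxClient()           # defaults: localhost:8086, root/root
#     db = client.metrics                  # AsyncfluxClient.__getattr__ -> Database
#     names = yield db.get_user_names()    # yields the result inside a coroutine
#
# Every public method below is wrapped with asyncflux_coroutine, so it returns
# a Future, or invokes an optional callback with (result, error) when one is
# passed as the `callback` keyword argument.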
12 | def __init__(self, client, name): 13 | self.__client = client 14 | self.__name = name 15 | 16 | @property 17 | def client(self): 18 | return self.__client 19 | 20 | @property 21 | def name(self): 22 | return self.__name 23 | 24 | @asyncflux_coroutine 25 | def delete(self): 26 | yield self.client.delete_database(self.name) 27 | 28 | @asyncflux_coroutine 29 | def get_user_names(self): 30 | users = yield self.client.request('/db/%(database)s/users', 31 | {'database': self.name}) 32 | raise gen.Return([u['name'] for u in users]) 33 | 34 | @asyncflux_coroutine 35 | def get_users(self): 36 | us = yield self.client.request('/db/%(database)s/users', 37 | {'database': self.name}) 38 | users = [user.User(self, **snake_case_dict(u)) for u in us] 39 | raise gen.Return(users) 40 | 41 | @asyncflux_coroutine 42 | def get_user(self, username): 43 | path_params = {'database': self.name, 'username': username} 44 | u = yield self.client.request('/db/%(database)s/users/%(username)s', 45 | path_params) 46 | raise gen.Return(user.User(self, **snake_case_dict(u))) 47 | 48 | def __validate_permission_params(self, read_from=None, write_to=None, 49 | allow_nulls=True): 50 | if allow_nulls: 51 | condition = bool(read_from) != bool(write_to) 52 | else: 53 | condition = not(read_from and write_to) 54 | if condition: 55 | raise ValueError('You have to provide read and write permissions') 56 | 57 | @asyncflux_coroutine 58 | def create_user(self, username, password, is_admin=False, read_from=None, 59 | write_to=None): 60 | self.__validate_permission_params(read_from=read_from, 61 | write_to=write_to) 62 | payload = {'name': username, 'password': password, 'isAdmin': is_admin} 63 | if read_from and write_to: 64 | payload['readFrom'] = read_from 65 | payload['writeTo'] = write_to 66 | yield self.client.request('/db/%(database)s/users', 67 | {'database': self.name}, method='POST', 68 | body=payload) 69 | read_from = read_from or user.User.READ_FROM 70 | write_to = write_to or user.User.WRITE_TO 71 | new_user = user.User(self, username, is_admin=is_admin, 72 | read_from=read_from, write_to=write_to) 73 | raise gen.Return(new_user) 74 | 75 | @asyncflux_coroutine 76 | def update_user(self, username, new_password=None, is_admin=None, 77 | read_from=None, write_to=None): 78 | self.__validate_permission_params(read_from=read_from, 79 | write_to=write_to) 80 | payload = {} 81 | if new_password: 82 | payload['password'] = new_password 83 | if is_admin: 84 | payload['isAdmin'] = is_admin 85 | if read_from and write_to: 86 | payload['readFrom'] = read_from 87 | payload['writeTo'] = write_to 88 | if not payload: 89 | raise ValueError('You have to set at least one argument') 90 | yield self.client.request('/db/%(database)s/users/%(username)s', 91 | {'database': self.name, 'username': username}, 92 | method='POST', body=payload) 93 | 94 | @asyncflux_coroutine 95 | def change_user_password(self, username, new_password): 96 | yield self.update_user(username, new_password=new_password) 97 | 98 | @asyncflux_coroutine 99 | def change_user_privileges(self, username, is_admin, read_from=None, 100 | write_to=None): 101 | self.__validate_permission_params(read_from=read_from, 102 | write_to=write_to) 103 | yield self.update_user(username, is_admin=is_admin, 104 | read_from=read_from, write_to=write_to) 105 | 106 | @asyncflux_coroutine 107 | def change_user_permissions(self, username, read_from, write_to): 108 | self.__validate_permission_params(read_from=read_from, 109 | write_to=write_to, 110 | allow_nulls=False) 111 | yield 
self.update_user(username, read_from=read_from, 112 | write_to=write_to) 113 | 114 | @asyncflux_coroutine 115 | def delete_user(self, username): 116 | yield self.client.request('/db/%(database)s/users/%(username)s', 117 | {'database': self.name, 'username': username}, 118 | method='DELETE') 119 | 120 | @asyncflux_coroutine 121 | def authenticate_user(self, username, password): 122 | try: 123 | yield self.client.request('/db/%(database)s/authenticate', 124 | {'database': self.name}, 125 | auth_username=username, 126 | auth_password=password) 127 | except AsyncfluxError: 128 | raise gen.Return(False) 129 | raise gen.Return(True) 130 | 131 | def __repr__(self): 132 | return "Database(%r, %r)" % (self.client, self.name) 133 | -------------------------------------------------------------------------------- /asyncflux/errors.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | class AsyncfluxError(Exception): 5 | 6 | def __init__(self, http_response): 7 | self.response = http_response 8 | self.message = http_response.body 9 | super(AsyncfluxError, self).__init__(self.message) 10 | -------------------------------------------------------------------------------- /asyncflux/shardspace.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Tools for shard spaces administration""" 3 | from asyncflux.database import Database 4 | 5 | 6 | class ShardSpace(object): 7 | 8 | def __init__(self, client, name, database, regex, retention_policy, 9 | shard_duration, replication_factor, split): 10 | self.__client = client 11 | self.__name = name 12 | if isinstance(database, Database): 13 | self.__database = database 14 | else: 15 | self.__database = Database(client, database) 16 | self.__regex = regex 17 | self.__retention_policy = retention_policy 18 | self.__shard_duration = shard_duration 19 | self.__replication_factor = replication_factor 20 | self.__split = split 21 | 22 | @property 23 | def client(self): 24 | return self.__client 25 | 26 | @property 27 | def name(self): 28 | return self.__name 29 | 30 | @property 31 | def database(self): 32 | return self.__database 33 | 34 | @property 35 | def regex(self): 36 | return self.__regex 37 | 38 | @property 39 | def retention_policy(self): 40 | return self.__retention_policy 41 | 42 | @property 43 | def shard_duration(self): 44 | return self.__shard_duration 45 | 46 | @property 47 | def replication_factor(self): 48 | return self.__replication_factor 49 | 50 | @property 51 | def split(self): 52 | return self.__split 53 | -------------------------------------------------------------------------------- /asyncflux/testing.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Unit testing support for asynchronous code""" 3 | import json 4 | import mock 5 | try: 6 | from StringIO import StringIO 7 | except ImportError: # pragma: no cover 8 | from io import StringIO # pragma: no cover 9 | 10 | from tornado.gen import coroutine, Return 11 | from tornado.httpclient import HTTPError, HTTPRequest, HTTPResponse 12 | from tornado.testing import AsyncTestCase, gen_test 13 | 14 | __all__ = ('AsyncfluxTestCase', 'gen_test', ) 15 | 16 | 17 | class AsyncfluxTestCase(AsyncTestCase): 18 | 19 | def patch_fetch_mock(self, client): 20 | return mock.patch.object(client.http_client, 'fetch') 21 | 22 | def setup_fetch_mock(self, fetch_mock, status_code, **kwargs): 23 | @coroutine 24 | def 
side_effect(request, **_): 25 | if not isinstance(request, HTTPRequest): 26 | request = HTTPRequest(request) 27 | body = kwargs.pop('body', None) 28 | if body: 29 | if isinstance(body, (dict, list)): 30 | body = json.dumps(body) 31 | kwargs['buffer'] = StringIO(body) 32 | response = HTTPResponse(request, status_code, **kwargs) 33 | if status_code < 200 or status_code >= 300: 34 | raise HTTPError(status_code, response=response) 35 | raise Return(response) 36 | 37 | fetch_mock.side_effect = side_effect 38 | 39 | def assert_mock_args(self, fetch_mock, path, method='GET', body=None, 40 | auth_username='root', auth_password='root', *args, 41 | **kwargs): 42 | url = 'http://localhost:8086' + path 43 | fetch_mock.assert_called_once_with(url, method=method, body=body, 44 | auth_username=auth_username, 45 | auth_password=auth_password, 46 | *args, **kwargs) 47 | 48 | def stop_op(self, result, error): 49 | if error: 50 | raise error 51 | super(AsyncfluxTestCase, self).stop(result) 52 | -------------------------------------------------------------------------------- /asyncflux/user.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Tools for database users""" 3 | from asyncflux.util import asyncflux_coroutine 4 | 5 | 6 | class User(object): 7 | 8 | IS_ADMIN = False 9 | READ_FROM = '.*' 10 | WRITE_TO = '.*' 11 | 12 | def __init__(self, database, name, is_admin=None, read_from=None, 13 | write_to=None): 14 | self.__database = database 15 | self.__client = database.client 16 | self.__name = name 17 | self.__is_admin = is_admin or self.IS_ADMIN 18 | self.__read_from = read_from or self.READ_FROM 19 | self.__write_to = write_to or self.WRITE_TO 20 | 21 | @property 22 | def database(self): 23 | return self.__database 24 | 25 | @property 26 | def name(self): 27 | return self.__name 28 | 29 | @property 30 | def is_admin(self): 31 | return self.__is_admin 32 | 33 | @property 34 | def write_to(self): 35 | return self.__write_to 36 | 37 | @property 38 | def read_from(self): 39 | return self.__read_from 40 | 41 | def __update_attributes(self, is_admin=None, read_from=None, 42 | write_to=None): 43 | if is_admin: 44 | self.__is_admin = is_admin 45 | if read_from: 46 | self.__read_from = read_from 47 | if write_to: 48 | self.__write_to = write_to 49 | 50 | @asyncflux_coroutine 51 | def update(self, new_password=None, is_admin=None, read_from=None, 52 | write_to=None): 53 | yield self.database.update_user(self.name, new_password=new_password, 54 | is_admin=is_admin, read_from=read_from, 55 | write_to=write_to) 56 | self.__update_attributes(is_admin=is_admin, read_from=read_from, 57 | write_to=write_to) 58 | 59 | @asyncflux_coroutine 60 | def change_password(self, new_password): 61 | yield self.database.change_user_password(self.name, new_password) 62 | 63 | @asyncflux_coroutine 64 | def change_privileges(self, is_admin, read_from=None, write_to=None): 65 | yield self.database.change_user_privileges(self.name, is_admin, 66 | read_from=read_from, 67 | write_to=write_to) 68 | self.__update_attributes(is_admin=is_admin, read_from=read_from, 69 | write_to=write_to) 70 | 71 | @asyncflux_coroutine 72 | def change_permissions(self, read_from, write_to): 73 | yield self.database.change_user_permissions(self.name, read_from, 74 | write_to) 75 | self.__update_attributes(read_from=read_from, write_to=write_to) 76 | 77 | @asyncflux_coroutine 78 | def delete(self): 79 | yield self.database.delete_user(self.name) 80 | 81 | def __repr__(self): 82 | return 'User(%r, %r)' %
(self.database, self.name) 83 | -------------------------------------------------------------------------------- /asyncflux/util.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """General-purpose utilities""" 3 | import functools 4 | import re 5 | 6 | from tornado import gen 7 | 8 | 9 | def asyncflux_coroutine(f): 10 | """A coroutine that accepts an optional callback. 11 | 12 | Given a callback, the function returns None, and the callback is run 13 | with (result, error). Without a callback the function returns a Future. 14 | """ 15 | coro = gen.coroutine(f) 16 | 17 | @functools.wraps(f) 18 | def wrapper(*args, **kwargs): 19 | callback = kwargs.pop('callback', None) 20 | if callback and not callable(callback): 21 | raise TypeError("callback must be a callable") 22 | future = coro(*args, **kwargs) 23 | if callback: 24 | def _callback(future): 25 | try: 26 | result = future.result() 27 | callback(result, None) 28 | except Exception as e: 29 | callback(None, e) 30 | future.add_done_callback(_callback) 31 | else: 32 | return future 33 | return wrapper 34 | 35 | _SNAKE_RE = re.compile('(?!^)([A-Z]+)') 36 | 37 | 38 | def snake_case(string): 39 | return re.sub(_SNAKE_RE, r'_\1', string).lower() 40 | 41 | 42 | def snake_case_dict(_dict): 43 | raw_dict = _dict.copy() 44 | result = {} 45 | try: 46 | while 1: 47 | key, value = raw_dict.popitem() 48 | result[snake_case(key)] = value 49 | except KeyError: 50 | return result 51 | -------------------------------------------------------------------------------- /distribute_setup.py: -------------------------------------------------------------------------------- 1 | #!python 2 | """Bootstrap distribute installation 3 | 4 | If you want to use setuptools in your package's setup.py, just include this 5 | file in the same directory with it, and add this to the top of your setup.py:: 6 | 7 | from distribute_setup import use_setuptools 8 | use_setuptools() 9 | 10 | If you want to require a specific version of setuptools, set a download 11 | mirror, or use an alternate download directory, you can do so by supplying 12 | the appropriate options to ``use_setuptools()``. 13 | 14 | This file can also be run as a script to install or upgrade setuptools. 
15 | """ 16 | import os 17 | import shutil 18 | import sys 19 | import time 20 | import fnmatch 21 | import tempfile 22 | import tarfile 23 | import optparse 24 | 25 | from distutils import log 26 | 27 | try: 28 | from site import USER_SITE 29 | except ImportError: 30 | USER_SITE = None 31 | 32 | try: 33 | import subprocess 34 | 35 | def _python_cmd(*args): 36 | args = (sys.executable,) + args 37 | return subprocess.call(args) == 0 38 | 39 | except ImportError: 40 | # will be used for python 2.3 41 | def _python_cmd(*args): 42 | args = (sys.executable,) + args 43 | # quoting arguments if windows 44 | if sys.platform == 'win32': 45 | def quote(arg): 46 | if ' ' in arg: 47 | return '"%s"' % arg 48 | return arg 49 | args = [quote(arg) for arg in args] 50 | return os.spawnl(os.P_WAIT, sys.executable, *args) == 0 51 | 52 | DEFAULT_VERSION = "0.6.49" 53 | DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/" 54 | SETUPTOOLS_FAKED_VERSION = "0.6c11" 55 | 56 | SETUPTOOLS_PKG_INFO = """\ 57 | Metadata-Version: 1.0 58 | Name: setuptools 59 | Version: %s 60 | Summary: xxxx 61 | Home-page: xxx 62 | Author: xxx 63 | Author-email: xxx 64 | License: xxx 65 | Description: xxx 66 | """ % SETUPTOOLS_FAKED_VERSION 67 | 68 | 69 | def _install(tarball, install_args=()): 70 | # extracting the tarball 71 | tmpdir = tempfile.mkdtemp() 72 | log.warn('Extracting in %s', tmpdir) 73 | old_wd = os.getcwd() 74 | try: 75 | os.chdir(tmpdir) 76 | tar = tarfile.open(tarball) 77 | _extractall(tar) 78 | tar.close() 79 | 80 | # going in the directory 81 | subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) 82 | os.chdir(subdir) 83 | log.warn('Now working in %s', subdir) 84 | 85 | # installing 86 | log.warn('Installing Distribute') 87 | if not _python_cmd('setup.py', 'install', *install_args): 88 | log.warn('Something went wrong during the installation.') 89 | log.warn('See the error message above.') 90 | # exitcode will be 2 91 | return 2 92 | finally: 93 | os.chdir(old_wd) 94 | shutil.rmtree(tmpdir) 95 | 96 | 97 | def _build_egg(egg, tarball, to_dir): 98 | # extracting the tarball 99 | tmpdir = tempfile.mkdtemp() 100 | log.warn('Extracting in %s', tmpdir) 101 | old_wd = os.getcwd() 102 | try: 103 | os.chdir(tmpdir) 104 | tar = tarfile.open(tarball) 105 | _extractall(tar) 106 | tar.close() 107 | 108 | # going in the directory 109 | subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) 110 | os.chdir(subdir) 111 | log.warn('Now working in %s', subdir) 112 | 113 | # building an egg 114 | log.warn('Building a Distribute egg in %s', to_dir) 115 | _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) 116 | 117 | finally: 118 | os.chdir(old_wd) 119 | shutil.rmtree(tmpdir) 120 | # returning the result 121 | log.warn(egg) 122 | if not os.path.exists(egg): 123 | raise IOError('Could not build the egg.') 124 | 125 | 126 | def _do_download(version, download_base, to_dir, download_delay): 127 | egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg' 128 | % (version, sys.version_info[0], sys.version_info[1])) 129 | if not os.path.exists(egg): 130 | tarball = download_setuptools(version, download_base, 131 | to_dir, download_delay) 132 | _build_egg(egg, tarball, to_dir) 133 | sys.path.insert(0, egg) 134 | import setuptools 135 | setuptools.bootstrap_install_from = egg 136 | 137 | 138 | def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, 139 | to_dir=os.curdir, download_delay=15, no_fake=True): 140 | # making sure we use the absolute path 141 | to_dir = os.path.abspath(to_dir) 142 | was_imported 
= 'pkg_resources' in sys.modules or \ 143 | 'setuptools' in sys.modules 144 | try: 145 | try: 146 | import pkg_resources 147 | 148 | # Setuptools 0.7b and later is a suitable (and preferable) 149 | # substitute for any Distribute version. 150 | try: 151 | pkg_resources.require("setuptools>=0.7b") 152 | return 153 | except (pkg_resources.DistributionNotFound, 154 | pkg_resources.VersionConflict): 155 | pass 156 | 157 | if not hasattr(pkg_resources, '_distribute'): 158 | if not no_fake: 159 | _fake_setuptools() 160 | raise ImportError 161 | except ImportError: 162 | return _do_download(version, download_base, to_dir, download_delay) 163 | try: 164 | pkg_resources.require("distribute>=" + version) 165 | return 166 | except pkg_resources.VersionConflict: 167 | e = sys.exc_info()[1] 168 | if was_imported: 169 | sys.stderr.write( 170 | "The required version of distribute (>=%s) is not available,\n" 171 | "and can't be installed while this script is running. Please\n" 172 | "install a more recent version first, using\n" 173 | "'easy_install -U distribute'." 174 | "\n\n(Currently using %r)\n" % (version, e.args[0])) 175 | sys.exit(2) 176 | else: 177 | del pkg_resources, sys.modules['pkg_resources'] # reload ok 178 | return _do_download(version, download_base, to_dir, 179 | download_delay) 180 | except pkg_resources.DistributionNotFound: 181 | return _do_download(version, download_base, to_dir, 182 | download_delay) 183 | finally: 184 | if not no_fake: 185 | _create_fake_setuptools_pkg_info(to_dir) 186 | 187 | 188 | def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, 189 | to_dir=os.curdir, delay=15): 190 | """Download distribute from a specified location and return its filename 191 | 192 | `version` should be a valid distribute version number that is available 193 | as an egg for download under the `download_base` URL (which should end 194 | with a '/'). `to_dir` is the directory where the egg will be downloaded. 195 | `delay` is the number of seconds to pause before an actual download 196 | attempt. 197 | """ 198 | # making sure we use the absolute path 199 | to_dir = os.path.abspath(to_dir) 200 | try: 201 | from urllib.request import urlopen 202 | except ImportError: 203 | from urllib2 import urlopen 204 | tgz_name = "distribute-%s.tar.gz" % version 205 | url = download_base + tgz_name 206 | saveto = os.path.join(to_dir, tgz_name) 207 | src = dst = None 208 | if not os.path.exists(saveto): # Avoid repeated downloads 209 | try: 210 | log.warn("Downloading %s", url) 211 | src = urlopen(url) 212 | # Read/write all in one block, so we don't create a corrupt file 213 | # if the download is interrupted. 
214 | data = src.read() 215 | dst = open(saveto, "wb") 216 | dst.write(data) 217 | finally: 218 | if src: 219 | src.close() 220 | if dst: 221 | dst.close() 222 | return os.path.realpath(saveto) 223 | 224 | 225 | def _no_sandbox(function): 226 | def __no_sandbox(*args, **kw): 227 | try: 228 | from setuptools.sandbox import DirectorySandbox 229 | if not hasattr(DirectorySandbox, '_old'): 230 | def violation(*args): 231 | pass 232 | DirectorySandbox._old = DirectorySandbox._violation 233 | DirectorySandbox._violation = violation 234 | patched = True 235 | else: 236 | patched = False 237 | except ImportError: 238 | patched = False 239 | 240 | try: 241 | return function(*args, **kw) 242 | finally: 243 | if patched: 244 | DirectorySandbox._violation = DirectorySandbox._old 245 | del DirectorySandbox._old 246 | 247 | return __no_sandbox 248 | 249 | 250 | def _patch_file(path, content): 251 | """Will backup the file then patch it""" 252 | f = open(path) 253 | existing_content = f.read() 254 | f.close() 255 | if existing_content == content: 256 | # already patched 257 | log.warn('Already patched.') 258 | return False 259 | log.warn('Patching...') 260 | _rename_path(path) 261 | f = open(path, 'w') 262 | try: 263 | f.write(content) 264 | finally: 265 | f.close() 266 | return True 267 | 268 | _patch_file = _no_sandbox(_patch_file) 269 | 270 | 271 | def _same_content(path, content): 272 | f = open(path) 273 | existing_content = f.read() 274 | f.close() 275 | return existing_content == content 276 | 277 | 278 | def _rename_path(path): 279 | new_name = path + '.OLD.%s' % time.time() 280 | log.warn('Renaming %s to %s', path, new_name) 281 | os.rename(path, new_name) 282 | return new_name 283 | 284 | 285 | def _remove_flat_installation(placeholder): 286 | if not os.path.isdir(placeholder): 287 | log.warn('Unkown installation at %s', placeholder) 288 | return False 289 | found = False 290 | for file in os.listdir(placeholder): 291 | if fnmatch.fnmatch(file, 'setuptools*.egg-info'): 292 | found = True 293 | break 294 | if not found: 295 | log.warn('Could not locate setuptools*.egg-info') 296 | return 297 | 298 | log.warn('Moving elements out of the way...') 299 | pkg_info = os.path.join(placeholder, file) 300 | if os.path.isdir(pkg_info): 301 | patched = _patch_egg_dir(pkg_info) 302 | else: 303 | patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO) 304 | 305 | if not patched: 306 | log.warn('%s already patched.', pkg_info) 307 | return False 308 | # now let's move the files out of the way 309 | for element in ('setuptools', 'pkg_resources.py', 'site.py'): 310 | element = os.path.join(placeholder, element) 311 | if os.path.exists(element): 312 | _rename_path(element) 313 | else: 314 | log.warn('Could not find the %s element of the ' 315 | 'Setuptools distribution', element) 316 | return True 317 | 318 | _remove_flat_installation = _no_sandbox(_remove_flat_installation) 319 | 320 | 321 | def _after_install(dist): 322 | log.warn('After install bootstrap.') 323 | placeholder = dist.get_command_obj('install').install_purelib 324 | _create_fake_setuptools_pkg_info(placeholder) 325 | 326 | 327 | def _create_fake_setuptools_pkg_info(placeholder): 328 | if not placeholder or not os.path.exists(placeholder): 329 | log.warn('Could not find the install location') 330 | return 331 | pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1]) 332 | setuptools_file = 'setuptools-%s-py%s.egg-info' % \ 333 | (SETUPTOOLS_FAKED_VERSION, pyver) 334 | pkg_info = os.path.join(placeholder, setuptools_file) 335 | if 
os.path.exists(pkg_info): 336 | log.warn('%s already exists', pkg_info) 337 | return 338 | 339 | log.warn('Creating %s', pkg_info) 340 | try: 341 | f = open(pkg_info, 'w') 342 | except EnvironmentError: 343 | log.warn("Don't have permissions to write %s, skipping", pkg_info) 344 | return 345 | try: 346 | f.write(SETUPTOOLS_PKG_INFO) 347 | finally: 348 | f.close() 349 | 350 | pth_file = os.path.join(placeholder, 'setuptools.pth') 351 | log.warn('Creating %s', pth_file) 352 | f = open(pth_file, 'w') 353 | try: 354 | f.write(os.path.join(os.curdir, setuptools_file)) 355 | finally: 356 | f.close() 357 | 358 | _create_fake_setuptools_pkg_info = _no_sandbox( 359 | _create_fake_setuptools_pkg_info 360 | ) 361 | 362 | 363 | def _patch_egg_dir(path): 364 | # let's check if it's already patched 365 | pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') 366 | if os.path.exists(pkg_info): 367 | if _same_content(pkg_info, SETUPTOOLS_PKG_INFO): 368 | log.warn('%s already patched.', pkg_info) 369 | return False 370 | _rename_path(path) 371 | os.mkdir(path) 372 | os.mkdir(os.path.join(path, 'EGG-INFO')) 373 | pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') 374 | f = open(pkg_info, 'w') 375 | try: 376 | f.write(SETUPTOOLS_PKG_INFO) 377 | finally: 378 | f.close() 379 | return True 380 | 381 | _patch_egg_dir = _no_sandbox(_patch_egg_dir) 382 | 383 | 384 | def _before_install(): 385 | log.warn('Before install bootstrap.') 386 | _fake_setuptools() 387 | 388 | 389 | def _under_prefix(location): 390 | if 'install' not in sys.argv: 391 | return True 392 | args = sys.argv[sys.argv.index('install') + 1:] 393 | for index, arg in enumerate(args): 394 | for option in ('--root', '--prefix'): 395 | if arg.startswith('%s=' % option): 396 | top_dir = arg.split('root=')[-1] 397 | return location.startswith(top_dir) 398 | elif arg == option: 399 | if len(args) > index: 400 | top_dir = args[index + 1] 401 | return location.startswith(top_dir) 402 | if arg == '--user' and USER_SITE is not None: 403 | return location.startswith(USER_SITE) 404 | return True 405 | 406 | 407 | def _fake_setuptools(): 408 | log.warn('Scanning installed packages') 409 | try: 410 | import pkg_resources 411 | except ImportError: 412 | # we're cool 413 | log.warn('Setuptools or Distribute does not seem to be installed.') 414 | return 415 | ws = pkg_resources.working_set 416 | try: 417 | setuptools_dist = ws.find( 418 | pkg_resources.Requirement.parse('setuptools', replacement=False) 419 | ) 420 | except TypeError: 421 | # old distribute API 422 | setuptools_dist = ws.find( 423 | pkg_resources.Requirement.parse('setuptools') 424 | ) 425 | 426 | if setuptools_dist is None: 427 | log.warn('No setuptools distribution found') 428 | return 429 | # detecting if it was already faked 430 | setuptools_location = setuptools_dist.location 431 | log.warn('Setuptools installation detected at %s', setuptools_location) 432 | 433 | # if --root or --preix was provided, and if 434 | # setuptools is not located in them, we don't patch it 435 | if not _under_prefix(setuptools_location): 436 | log.warn('Not patching, --root or --prefix is installing Distribute' 437 | ' in another location') 438 | return 439 | 440 | # let's see if its an egg 441 | if not setuptools_location.endswith('.egg'): 442 | log.warn('Non-egg installation') 443 | res = _remove_flat_installation(setuptools_location) 444 | if not res: 445 | return 446 | else: 447 | log.warn('Egg installation') 448 | pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO') 449 | if 
(os.path.exists(pkg_info) and 450 | _same_content(pkg_info, SETUPTOOLS_PKG_INFO)): 451 | log.warn('Already patched.') 452 | return 453 | log.warn('Patching...') 454 | # let's create a fake egg replacing setuptools one 455 | res = _patch_egg_dir(setuptools_location) 456 | if not res: 457 | return 458 | log.warn('Patching complete.') 459 | _relaunch() 460 | 461 | 462 | def _relaunch(): 463 | log.warn('Relaunching...') 464 | # we have to relaunch the process 465 | # pip marker to avoid a relaunch bug 466 | _cmd1 = ['-c', 'install', '--single-version-externally-managed'] 467 | _cmd2 = ['-c', 'install', '--record'] 468 | if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2: 469 | sys.argv[0] = 'setup.py' 470 | args = [sys.executable] + sys.argv 471 | sys.exit(subprocess.call(args)) 472 | 473 | 474 | def _extractall(self, path=".", members=None): 475 | """Extract all members from the archive to the current working 476 | directory and set owner, modification time and permissions on 477 | directories afterwards. `path' specifies a different directory 478 | to extract to. `members' is optional and must be a subset of the 479 | list returned by getmembers(). 480 | """ 481 | import copy 482 | import operator 483 | from tarfile import ExtractError 484 | directories = [] 485 | 486 | if members is None: 487 | members = self 488 | 489 | for tarinfo in members: 490 | if tarinfo.isdir(): 491 | # Extract directories with a safe mode. 492 | directories.append(tarinfo) 493 | tarinfo = copy.copy(tarinfo) 494 | tarinfo.mode = 448 # decimal for oct 0700 495 | self.extract(tarinfo, path) 496 | 497 | # Reverse sort directories. 498 | if sys.version_info < (2, 4): 499 | def sorter(dir1, dir2): 500 | return cmp(dir1.name, dir2.name) 501 | directories.sort(sorter) 502 | directories.reverse() 503 | else: 504 | directories.sort(key=operator.attrgetter('name'), reverse=True) 505 | 506 | # Set correct owner, mtime and filemode on directories. 
507 | for tarinfo in directories: 508 | dirpath = os.path.join(path, tarinfo.name) 509 | try: 510 | self.chown(tarinfo, dirpath) 511 | self.utime(tarinfo, dirpath) 512 | self.chmod(tarinfo, dirpath) 513 | except ExtractError: 514 | e = sys.exc_info()[1] 515 | if self.errorlevel > 1: 516 | raise 517 | else: 518 | self._dbg(1, "tarfile: %s" % e) 519 | 520 | 521 | def _build_install_args(options): 522 | """ 523 | Build the arguments to 'python setup.py install' on the distribute package 524 | """ 525 | install_args = [] 526 | if options.user_install: 527 | if sys.version_info < (2, 6): 528 | log.warn("--user requires Python 2.6 or later") 529 | raise SystemExit(1) 530 | install_args.append('--user') 531 | return install_args 532 | 533 | def _parse_args(): 534 | """ 535 | Parse the command line for options 536 | """ 537 | parser = optparse.OptionParser() 538 | parser.add_option( 539 | '--user', dest='user_install', action='store_true', default=False, 540 | help='install in user site package (requires Python 2.6 or later)') 541 | parser.add_option( 542 | '--download-base', dest='download_base', metavar="URL", 543 | default=DEFAULT_URL, 544 | help='alternative URL from where to download the distribute package') 545 | options, args = parser.parse_args() 546 | # positional arguments are ignored 547 | return options 548 | 549 | def main(version=DEFAULT_VERSION): 550 | """Install or upgrade setuptools and EasyInstall""" 551 | options = _parse_args() 552 | tarball = download_setuptools(download_base=options.download_base) 553 | return _install(tarball, _build_install_args(options)) 554 | 555 | if __name__ == '__main__': 556 | sys.exit(main()) 557 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Asyncflux.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Asyncflux.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Asyncflux" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Asyncflux" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Asyncflux documentation build configuration file, created by 4 | # sphinx-quickstart on Mon May 19 03:36:27 2014. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 
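# Sanity-check sketch (not part of the original configuration): the sys.path
# manipulation below allows autodoc to import the package straight from the
# repository checkout, which can be verified with
#
#     python -c "import asyncflux; print(asyncflux.version)"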
14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | sys.path.insert(0, os.path.abspath('..')) 22 | 23 | import asyncflux 24 | 25 | 26 | # -- General configuration ------------------------------------------------ 27 | 28 | # If your documentation needs a minimal Sphinx version, state it here. 29 | #needs_sphinx = '1.0' 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = [ 35 | 'sphinx.ext.autodoc', 36 | 'sphinx.ext.doctest', 37 | 'sphinx.ext.intersphinx', 38 | 'sphinx.ext.viewcode', 39 | ] 40 | 41 | # Add any paths that contain templates here, relative to this directory. 42 | templates_path = ['_templates'] 43 | 44 | # The suffix of source filenames. 45 | source_suffix = '.rst' 46 | 47 | # The encoding of source files. 48 | #source_encoding = 'utf-8-sig' 49 | 50 | # The master toctree document. 51 | master_doc = 'index' 52 | 53 | # General information about the project. 54 | project = u'Asyncflux' 55 | author = u'Jorge Puente-Sarrín' 56 | copyright = u'2014, ' + author 57 | 58 | # The version info for the project you're documenting, acts as replacement for 59 | # |version| and |release|, also used in various other places throughout the 60 | # built documents. 61 | # 62 | # The short X.Y version. 63 | version = asyncflux.version 64 | # The full version, including alpha/beta/rc tags. 65 | release = asyncflux.version 66 | 67 | # The language for content autogenerated by Sphinx. Refer to documentation 68 | # for a list of supported languages. 69 | #language = None 70 | 71 | # There are two options for replacing |today|: either, you set today to some 72 | # non-false value, then it is used: 73 | #today = '' 74 | # Else, today_fmt is used as the format for a strftime call. 75 | #today_fmt = '%B %d, %Y' 76 | 77 | # List of patterns, relative to source directory, that match files and 78 | # directories to ignore when looking for source files. 79 | exclude_patterns = ['_build'] 80 | 81 | # The reST default role (used for this markup: `text`) to use for all 82 | # documents. 83 | #default_role = None 84 | 85 | # If true, '()' will be appended to :func: etc. cross-reference text. 86 | #add_function_parentheses = True 87 | 88 | # If true, the current module name will be prepended to all description 89 | # unit titles (such as .. function::). 90 | #add_module_names = True 91 | 92 | # If true, sectionauthor and moduleauthor directives will be shown in the 93 | # output. They are ignored by default. 94 | #show_authors = False 95 | 96 | # The name of the Pygments (syntax highlighting) style to use. 97 | pygments_style = 'sphinx' 98 | 99 | # A list of ignored prefixes for module index sorting. 100 | #modindex_common_prefix = [] 101 | 102 | # If true, keep warnings as "system message" paragraphs in the built documents. 103 | #keep_warnings = False 104 | 105 | 106 | # -- Options for HTML output ---------------------------------------------- 107 | 108 | # The theme to use for HTML and HTML Help pages. See the documentation for 109 | # a list of builtin themes. 110 | html_theme = 'default' 111 | 112 | # Theme options are theme-specific and customize the look and feel of a theme 113 | # further. 
For a list of options available for each theme, see the 114 | # documentation. 115 | #html_theme_options = {} 116 | 117 | # Add any paths that contain custom themes here, relative to this directory. 118 | #html_theme_path = [] 119 | 120 | # The name for this set of Sphinx documents. If None, it defaults to 121 | # " v documentation". 122 | #html_title = None 123 | 124 | # A shorter title for the navigation bar. Default is the same as html_title. 125 | #html_short_title = None 126 | 127 | # The name of an image file (relative to this directory) to place at the top 128 | # of the sidebar. 129 | #html_logo = None 130 | 131 | # The name of an image file (within the static path) to use as favicon of the 132 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 133 | # pixels large. 134 | #html_favicon = None 135 | 136 | # Add any paths that contain custom static files (such as style sheets) here, 137 | # relative to this directory. They are copied after the builtin static files, 138 | # so a file named "default.css" will overwrite the builtin "default.css". 139 | html_static_path = ['_static'] 140 | 141 | # Add any extra paths that contain custom files (such as robots.txt or 142 | # .htaccess) here, relative to this directory. These files are copied 143 | # directly to the root of the documentation. 144 | #html_extra_path = [] 145 | 146 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 147 | # using the given strftime format. 148 | #html_last_updated_fmt = '%b %d, %Y' 149 | 150 | # If true, SmartyPants will be used to convert quotes and dashes to 151 | # typographically correct entities. 152 | #html_use_smartypants = True 153 | 154 | # Custom sidebar templates, maps document names to template names. 155 | #html_sidebars = {} 156 | 157 | # Additional templates that should be rendered to pages, maps page names to 158 | # template names. 159 | #html_additional_pages = {} 160 | 161 | # If false, no module index is generated. 162 | #html_domain_indices = True 163 | 164 | # If false, no index is generated. 165 | #html_use_index = True 166 | 167 | # If true, the index is split into individual pages for each letter. 168 | #html_split_index = False 169 | 170 | # If true, links to the reST sources are added to the pages. 171 | #html_show_sourcelink = True 172 | 173 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 174 | #html_show_sphinx = True 175 | 176 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 177 | #html_show_copyright = True 178 | 179 | # If true, an OpenSearch description file will be output, and all pages will 180 | # contain a tag referring to it. The value of this option must be the 181 | # base URL from which the finished HTML is served. 182 | #html_use_opensearch = '' 183 | 184 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 185 | #html_file_suffix = None 186 | 187 | # Output file base name for HTML help builder. 188 | htmlhelp_basename = project + 'doc' 189 | 190 | 191 | # -- Options for LaTeX output --------------------------------------------- 192 | 193 | latex_elements = { 194 | # The paper size ('letterpaper' or 'a4paper'). 195 | #'papersize': 'letterpaper', 196 | 197 | # The font size ('10pt', '11pt' or '12pt'). 198 | #'pointsize': '10pt', 199 | 200 | # Additional stuff for the LaTeX preamble. 201 | #'preamble': '', 202 | } 203 | 204 | # Grouping the document tree into LaTeX files. 
List of tuples 205 | # (source start file, target name, title, 206 | # author, documentclass [howto, manual, or own class]). 207 | latex_documents = [ 208 | ('index', project + '.tex', project + u' Documentation', 209 | author, 'manual'), 210 | ] 211 | 212 | # The name of an image file (relative to this directory) to place at the top of 213 | # the title page. 214 | #latex_logo = None 215 | 216 | # For "manual" documents, if this is true, then toplevel headings are parts, 217 | # not chapters. 218 | #latex_use_parts = False 219 | 220 | # If true, show page references after internal links. 221 | #latex_show_pagerefs = False 222 | 223 | # If true, show URL addresses after external links. 224 | #latex_show_urls = False 225 | 226 | # Documents to append as an appendix to all manuals. 227 | #latex_appendices = [] 228 | 229 | # If false, no module index is generated. 230 | #latex_domain_indices = True 231 | 232 | 233 | # -- Options for manual page output --------------------------------------- 234 | 235 | # One entry per manual page. List of tuples 236 | # (source start file, name, description, authors, manual section). 237 | man_pages = [ 238 | ('index', project.lower(), project + u' Documentation', 239 | [author], 1) 240 | ] 241 | 242 | # If true, show URL addresses after external links. 243 | #man_show_urls = False 244 | 245 | 246 | # -- Options for Texinfo output ------------------------------------------- 247 | 248 | # Grouping the document tree into Texinfo files. List of tuples 249 | # (source start file, target name, title, author, 250 | # dir menu entry, description, category) 251 | texinfo_documents = [ 252 | ('index', project, project + u' Documentation', 253 | author, project, 'One line description of project.', 254 | 'Miscellaneous'), 255 | ] 256 | 257 | # Documents to append as an appendix to all manuals. 258 | #texinfo_appendices = [] 259 | 260 | # If false, no module index is generated. 261 | #texinfo_domain_indices = True 262 | 263 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 264 | #texinfo_show_urls = 'footnote' 265 | 266 | # If true, do not generate a @detailmenu in the "Top" node's menu. 267 | #texinfo_no_detailmenu = False 268 | 269 | 270 | # Example configuration for intersphinx: refer to the Python standard library. 271 | intersphinx_mapping = {'http://docs.python.org/': None} 272 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. Asyncflux documentation master file, created by 2 | sphinx-quickstart on Mon May 19 03:36:27 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Asyncflux 7 | ========= 8 | 9 | Asynchronous client for InfluxDB_ and Tornado_. 10 | 11 | Installation 12 | ============ 13 | 14 | You can use pip_ to install Asyncflux: 15 | 16 | .. code-block:: bash 17 | 18 | $ pip install git+https://github.com/puentesarrin/asyncflux.git 19 | 20 | Documentation 21 | ============= 22 | 23 | Sphinx_ is needed to generate the documentation. Documentation can be generated 24 | by issuing the following commands: 25 | 26 | .. code-block:: bash 27 | 28 | $ cd docs 29 | $ make html 30 | 31 | Or simply: 32 | 33 | .. code-block:: bash 34 | 35 | $ python setup.py doc 36 | 37 | Also, the current documentation can be found at ReadTheDocs_. 38 | 39 | License 40 | ======= 41 | 42 | Asyncflux is available under the |apache-license|_. 
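Example
=======

The snippet below is a minimal usage sketch based on the API exercised by the
test suite (``AsyncfluxClient``, ``create_database``, ``get_database_names``
and ``Database.delete``); it assumes an InfluxDB instance listening on the
default ``localhost:8086`` with the default ``root``/``root`` credentials, and
the database name ``metrics`` is purely illustrative.

.. code-block:: python

   from tornado import gen, ioloop

   from asyncflux import AsyncfluxClient


   @gen.coroutine
   def main():
       client = AsyncfluxClient()  # defaults to http://localhost:8086, root/root
       db = yield client.create_database('metrics')
       names = yield client.get_database_names()
       print(names)
       yield db.delete()  # drop the example database again

   ioloop.IOLoop.current().run_sync(main)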
43 | 44 | Indices and tables 45 | ================== 46 | 47 | * :ref:`genindex` 48 | * :ref:`modindex` 49 | * :ref:`search` 50 | 51 | .. toctree:: 52 | :hidden: 53 | 54 | modules/index 55 | releases/index 56 | 57 | .. _InfluxDB: http://influxdb.org 58 | .. _Tornado: http://tornadoweb.org 59 | .. _pip: http://pypi.python.org/pypi/pip 60 | .. _Sphinx: http://sphinx-doc.org 61 | .. _ReadTheDocs: https://asyncflux.readthedocs.org 62 | .. _apache-license: http://www.apache.org/licenses/LICENSE-2.0.html 63 | .. |apache-license| replace:: Apache License, Version 2.0 64 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 
76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Asyncflux.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Asyncflux.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. 
The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /docs/modules/client.rst: -------------------------------------------------------------------------------- 1 | :mod:`asyncflux.client` -- Connection to InfluxDB 2 | ------------------------------------------------- 3 | 4 | .. automodule:: asyncflux.client 5 | :synopsis: Connection to InfluxDB 6 | :members: 7 | :undoc-members: 8 | :show-inheritance: 9 | -------------------------------------------------------------------------------- /docs/modules/clusteradmins.rst: -------------------------------------------------------------------------------- 1 | :mod:`asyncflux.clusteradmins` -- Tools for cluster administration 2 | ------------------------------------------------------------------ 3 | 4 | .. automodule:: asyncflux.clusteradmins 5 | :synopsis: Tools for cluster administration 6 | :members: 7 | :undoc-members: 8 | :show-inheritance: 9 | -------------------------------------------------------------------------------- /docs/modules/database.rst: -------------------------------------------------------------------------------- 1 | :mod:`asyncflux.database` -- Database level operations 2 | ------------------------------------------------------ 3 | 4 | .. automodule:: asyncflux.database 5 | :synopsis: Database level operations 6 | :members: 7 | :undoc-members: 8 | :show-inheritance: 9 | -------------------------------------------------------------------------------- /docs/modules/index.rst: -------------------------------------------------------------------------------- 1 | Package Documentation 2 | ===================== 3 | 4 | :mod:`asyncflux` Package 5 | ------------------------ 6 | 7 | .. automodule:: asyncflux.__init__ 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | :mod:`asyncflux` Modules 13 | ------------------------ 14 | 15 | .. 
toctree:: 16 | :maxdepth: 4 17 | 18 | client 19 | database 20 | clusteradmins 21 | testing 22 | util 23 | -------------------------------------------------------------------------------- /docs/modules/testing.rst: -------------------------------------------------------------------------------- 1 | :mod:`asyncflux.testing` -- Unit testing support for asynchronous code 2 | ----------------------------------------------------------------------- 3 | 4 | .. automodule:: asyncflux.testing 5 | :synopsis: Unit testing support for asynchronous code 6 | :members: 7 | :undoc-members: 8 | :show-inheritance: 9 | -------------------------------------------------------------------------------- /docs/modules/util.rst: -------------------------------------------------------------------------------- 1 | :mod:`asyncflux.util` -- General-purpose utilities 2 | -------------------------------------------------- 3 | 4 | .. automodule:: asyncflux.util 5 | :synopsis: General-purpose utilities 6 | :members: 7 | :undoc-members: 8 | :show-inheritance: 9 | -------------------------------------------------------------------------------- /docs/releases/index.rst: -------------------------------------------------------------------------------- 1 | Release Notes 2 | ============= 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | 7 | next 8 | -------------------------------------------------------------------------------- /docs/releases/next.rst: -------------------------------------------------------------------------------- 1 | Next Release 2 | ============ 3 | 4 | *Very soon* 5 | 6 | - Initial release. 7 | - Added Sphinx docs and ReadTheDocs_ configuration. 8 | 9 | .. _ReadTheDocs: http://asyncflux.readthedocs.org 10 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 *-* 2 | import os 3 | import subprocess 4 | import sys 5 | 6 | try: 7 | from setuptools import setup 8 | except ImportError: 9 | from distribute_setup import use_setuptools 10 | use_setuptools() 11 | from setuptools import setup 12 | 13 | from distutils.cmd import Command 14 | 15 | 16 | with open('README.rst') as f: 17 | readme_content = f.read() 18 | 19 | 20 | class DocCommand(Command): 21 | 22 | description = "generate or test documentation" 23 | user_options = [("test", "t", 24 | "run doctests instead of generating documentation")] 25 | boolean_options = ["test"] 26 | 27 | def initialize_options(self): 28 | self.test = False 29 | 30 | def finalize_options(self): 31 | pass 32 | 33 | def run(self): 34 | if self.test: 35 | path = "docs/_build/doctest" 36 | mode = "doctest" 37 | else: 38 | path = "docs/_build/%s" % __version__ 39 | mode = "html" 40 | try: 41 | os.makedirs(path) 42 | except: 43 | pass 44 | status = subprocess.call(["sphinx-build", "-E", 45 | "-b", mode, "docs", path]) 46 | if status: 47 | raise RuntimeError("documentation step '%s' failed" % (mode,)) 48 | sys.stdout.write("\nDocumentation step '%s' performed, results here:\n" 49 | " %s/\n" % (mode, path)) 50 | 51 | setup( 52 | name='asyncflux', 53 | version='0.0+', 54 | url='https://github.com/puentesarrin/asyncflux', 55 | description='Asynchronous client for InfluxDB and Tornado.', 56 | long_description=readme_content, 57 | author='Jorge Puente-Sarrín', 58 | author_email='puentesarrin@gmail.com', 59 | packages=['asyncflux'], 60 | keywords=['asyncflux', 'tornado', 'influxdb', 'influx', 'async'], 61 | install_requires=['tornado>=3.0'], 62 | license='Apache 
License, Version 2.0', 63 | classifiers=[ 64 | 'Development Status :: 4 - Beta', 65 | 'Intended Audience :: Developers', 66 | 'License :: OSI Approved :: Apache Software License', 67 | 'Operating System :: OS Independent', 68 | 'Programming Language :: Python', 69 | 'Programming Language :: Python :: 2', 70 | 'Programming Language :: Python :: 2.6', 71 | 'Programming Language :: Python :: 2.7', 72 | 'Programming Language :: Python :: 3', 73 | 'Programming Language :: Python :: 3.2', 74 | 'Programming Language :: Python :: 3.3', 75 | 'Programming Language :: Python :: 3.4', 76 | 'Programming Language :: Python :: Implementation :: CPython', 77 | 'Programming Language :: Python :: Implementation :: PyPy'], 78 | test_suite='tests.runtests', 79 | cmdclass={"doc": DocCommand} 80 | ) 81 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | -------------------------------------------------------------------------------- /tests/asyncflux_test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Test the asyncflux module itself.""" 3 | import unittest 4 | import asyncflux 5 | 6 | 7 | class TestAsyncflux(unittest.TestCase): 8 | 9 | def test_asyncflux_client_alias(self): 10 | # Testing that asyncflux module imports client.AsyncfluxClient 11 | c = asyncflux.AsyncfluxClient() 12 | self.assertIsNotNone(c) 13 | 14 | def test_version_string(self): 15 | asyncflux.version_tuple = (0, 0, 0) 16 | self.assertEqual(asyncflux.get_version_string(), '0.0.0') 17 | asyncflux.version_tuple = (1, 0, 0) 18 | self.assertEqual(asyncflux.get_version_string(), '1.0.0') 19 | asyncflux.version_tuple = (5, 0, '+') 20 | self.assertEqual(asyncflux.get_version_string(), '5.0+') 21 | asyncflux.version_tuple = (0, 4, 'b') 22 | self.assertEqual(asyncflux.get_version_string(), '0.4b') 23 | -------------------------------------------------------------------------------- /tests/client_test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | 4 | from asyncflux import AsyncfluxClient 5 | from asyncflux.clusteradmin import ClusterAdmin 6 | from asyncflux.database import Database 7 | from asyncflux.testing import AsyncfluxTestCase, gen_test 8 | from asyncflux.errors import AsyncfluxError 9 | 10 | 11 | class AsyncfluxClientTestCase(AsyncfluxTestCase): 12 | 13 | def test_class_instantiation(self): 14 | client = AsyncfluxClient() 15 | self.assertEqual(client.host, 'localhost') 16 | self.assertEqual(client.port, 8086) 17 | self.assertEqual(client.base_url, 'http://localhost:8086') 18 | self.assertEqual(client.username, 'root') 19 | self.assertEqual(client.password, 'root') 20 | 21 | client = AsyncfluxClient('anotherhost') 22 | self.assertEqual(client.host, 'anotherhost') 23 | self.assertEqual(client.port, 8086) 24 | self.assertEqual(client.base_url, 'http://anotherhost:8086') 25 | self.assertEqual(client.username, 'root') 26 | self.assertEqual(client.password, 'root') 27 | 28 | client = AsyncfluxClient(port=8089) 29 | self.assertEqual(client.host, 'localhost') 30 | self.assertEqual(client.port, 8089) 31 | self.assertEqual(client.base_url, 'http://localhost:8089') 32 | self.assertEqual(client.username, 'root') 33 | self.assertEqual(client.password, 'root') 34 | 35 | client = AsyncfluxClient('http://localhost') 36 | 
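        # When a URL string is passed, the scheme, host, port and (optionally)
        # user:pass credentials are parsed from it, as the assertions below
        # and the remaining cases in this test illustrate.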
self.assertEqual(client.host, 'localhost') 37 | self.assertEqual(client.port, 8086) 38 | self.assertEqual(client.base_url, 'http://localhost:8086') 39 | self.assertEqual(client.username, 'root') 40 | self.assertEqual(client.password, 'root') 41 | 42 | client = AsyncfluxClient('http://localhost', 8089) 43 | self.assertEqual(client.host, 'localhost') 44 | self.assertEqual(client.port, 8089) 45 | self.assertEqual(client.base_url, 'http://localhost:8089') 46 | self.assertEqual(client.username, 'root') 47 | self.assertEqual(client.password, 'root') 48 | 49 | client = AsyncfluxClient('http://remotehost:8089') 50 | self.assertEqual(client.host, 'remotehost') 51 | self.assertEqual(client.port, 8089) 52 | self.assertEqual(client.base_url, 'http://remotehost:8089') 53 | self.assertEqual(client.username, 'root') 54 | self.assertEqual(client.password, 'root') 55 | 56 | client = AsyncfluxClient('http://remotehost:8086', port=8089) 57 | self.assertEqual(client.host, 'remotehost') 58 | self.assertEqual(client.port, 8086) 59 | self.assertEqual(client.base_url, 'http://remotehost:8086') 60 | self.assertEqual(client.username, 'root') 61 | self.assertEqual(client.password, 'root') 62 | 63 | client = AsyncfluxClient('http://user:pass@remotehost:8089') 64 | self.assertEqual(client.host, 'remotehost') 65 | self.assertEqual(client.port, 8089) 66 | self.assertEqual(client.base_url, 'http://remotehost:8089') 67 | self.assertEqual(client.username, 'user') 68 | self.assertEqual(client.password, 'pass') 69 | 70 | client = AsyncfluxClient('https://user:pass@remotehost:8089') 71 | self.assertEqual(client.host, 'remotehost') 72 | self.assertEqual(client.port, 8089) 73 | self.assertEqual(client.base_url, 'https://remotehost:8089') 74 | self.assertEqual(client.username, 'user') 75 | self.assertEqual(client.password, 'pass') 76 | 77 | client = AsyncfluxClient(username='me') 78 | self.assertEqual(client.host, 'localhost') 79 | self.assertEqual(client.port, 8086) 80 | self.assertEqual(client.base_url, 'http://localhost:8086') 81 | self.assertEqual(client.username, 'me') 82 | self.assertEqual(client.password, 'root') 83 | 84 | client = AsyncfluxClient(password='mysecurepassword') 85 | self.assertEqual(client.host, 'localhost') 86 | self.assertEqual(client.port, 8086) 87 | self.assertEqual(client.base_url, 'http://localhost:8086') 88 | self.assertEqual(client.username, 'root') 89 | self.assertEqual(client.password, 'mysecurepassword') 90 | 91 | client = AsyncfluxClient(is_secure=True) 92 | self.assertEqual(client.host, 'localhost') 93 | self.assertEqual(client.port, 8086) 94 | self.assertEqual(client.base_url, 'https://localhost:8086') 95 | self.assertEqual(client.username, 'root') 96 | self.assertEqual(client.password, 'root') 97 | 98 | self.assertRaisesRegexp(ValueError, 'Invalid URL scheme: ftp', 99 | AsyncfluxClient, 'ftp://localhost:23') 100 | self.assertRaisesRegexp(TypeError, 'port must be an instance of int', 101 | AsyncfluxClient, port='bar') 102 | 103 | def test_credential_properties_setters(self): 104 | client = AsyncfluxClient(username='foo', password='bar') 105 | username = 'new_username' 106 | password = 'new_password' 107 | client.username = username 108 | client.password = password 109 | 110 | self.assertEqual(client.username, username) 111 | self.assertEqual(client.password, password) 112 | 113 | def test_class_attributes_and_items(self): 114 | client = AsyncfluxClient() 115 | databases = ['foo', 'bar', 'fubar'] 116 | 117 | for db_name in databases: 118 | database = getattr(client, db_name) 119 | 
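            # Attribute access (client.foo) and item access (client['foo'])
            # both resolve to Database instances bound to this client.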
self.assertIsInstance(database, Database) 120 | self.assertEqual(database.client, client) 121 | self.assertEqual(database.name, db_name) 122 | 123 | for db_name in databases: 124 | database = client[db_name] 125 | self.assertIsInstance(database, Database) 126 | self.assertEqual(database.client, client) 127 | self.assertEqual(database.name, db_name) 128 | 129 | @gen_test 130 | def test_ping(self): 131 | client = AsyncfluxClient() 132 | response_body = {'status': 'ok'} 133 | 134 | with self.patch_fetch_mock(client) as m: 135 | self.setup_fetch_mock(m, 200, body=response_body) 136 | response = yield client.ping() 137 | self.assertEqual(response, response_body) 138 | 139 | self.assert_mock_args(m, '/ping') 140 | 141 | @gen_test 142 | def test_get_database_names(self): 143 | client = AsyncfluxClient() 144 | databases = [{'name': 'foo'}, {'name': 'bar'}] 145 | db_names = [db['name'] for db in databases] 146 | 147 | with self.patch_fetch_mock(client) as m: 148 | self.setup_fetch_mock(m, 200, body=databases) 149 | response = yield client.get_database_names() 150 | self.assertEqual(response, db_names) 151 | 152 | self.assert_mock_args(m, '/db') 153 | 154 | @gen_test 155 | def test_get_databases(self): 156 | client = AsyncfluxClient() 157 | databases = [{'name': 'foo'}, {'name': 'bar'}] 158 | db_names = [db['name'] for db in databases] 159 | 160 | with self.patch_fetch_mock(client) as m: 161 | self.setup_fetch_mock(m, 200, body=databases) 162 | response = yield client.get_databases() 163 | self.assertEqual(len(response), len(databases)) 164 | for r in response: 165 | self.assertIsInstance(r, Database) 166 | self.assertIn(r.name, db_names) 167 | 168 | self.assert_mock_args(m, '/db') 169 | 170 | def test_create_database_cps(self): 171 | client = AsyncfluxClient() 172 | db_name = 'foo' 173 | 174 | # Using a string 175 | with self.patch_fetch_mock(client) as m: 176 | self.setup_fetch_mock(m, 201) 177 | client.create_database(db_name, callback=self.stop_op) 178 | response = self.wait() 179 | self.assertIsInstance(response, Database) 180 | self.assertEqual(response.name, db_name) 181 | 182 | self.assert_mock_args(m, '/db', method='POST', 183 | body=json.dumps({'name': db_name})) 184 | 185 | # Existing database 186 | response_body = 'database %s exists' % db_name 187 | with self.patch_fetch_mock(client) as m: 188 | self.setup_fetch_mock(m, 409, body=response_body) 189 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 190 | client.create_database(db_name, callback=self.stop_op) 191 | response = self.wait() 192 | self.assertEqual(response, response_body) 193 | 194 | self.assert_mock_args(m, '/db', method='POST', 195 | body=json.dumps({'name': db_name})) 196 | 197 | @gen_test 198 | def test_create_database(self): 199 | client = AsyncfluxClient() 200 | db_name = 'foo' 201 | 202 | # Using a string 203 | with self.patch_fetch_mock(client) as m: 204 | self.setup_fetch_mock(m, 201) 205 | response = yield client.create_database(db_name) 206 | self.assertIsInstance(response, Database) 207 | self.assertEqual(response.name, db_name) 208 | 209 | self.assert_mock_args(m, '/db', method='POST', 210 | body=json.dumps({'name': db_name})) 211 | 212 | # Using an instance of Database 213 | db = Database(client, db_name) 214 | with self.patch_fetch_mock(client) as m: 215 | self.setup_fetch_mock(m, 201) 216 | response = yield client.create_database(db) 217 | self.assertIsInstance(response, Database) 218 | self.assertEqual(response.name, db_name) 219 | 220 | self.assert_mock_args(m, '/db', method='POST', 221 | 
body=json.dumps({'name': db_name})) 222 | 223 | # Using an unsupported type 224 | with self.patch_fetch_mock(client) as m: 225 | re_exc_msg = r'^name_or_database must be an instance' 226 | with self.assertRaisesRegexp(TypeError, re_exc_msg): 227 | yield client.create_database(None) 228 | self.assertFalse(m.called) 229 | 230 | # Existing database 231 | response_body = 'database %s exists' % db_name 232 | with self.patch_fetch_mock(client) as m: 233 | self.setup_fetch_mock(m, 409, body=response_body) 234 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 235 | response = yield client.create_database(db_name) 236 | self.assertEqual(response, response_body) 237 | 238 | self.assert_mock_args(m, '/db', method='POST', 239 | body=json.dumps({'name': db_name})) 240 | 241 | @gen_test 242 | def test_delete_database(self): 243 | client = AsyncfluxClient() 244 | db_name = 'foo' 245 | 246 | # Using a string 247 | with self.patch_fetch_mock(client) as m: 248 | self.setup_fetch_mock(m, 204) 249 | response = yield client.delete_database(db_name) 250 | self.assertIsNone(response) 251 | 252 | self.assert_mock_args(m, '/db/%s' % db_name, method='DELETE') 253 | 254 | # Using an instance of Database 255 | db = Database(client, db_name) 256 | with self.patch_fetch_mock(client) as m: 257 | self.setup_fetch_mock(m, 204) 258 | response = yield client.delete_database(db) 259 | self.assertIsNone(response) 260 | 261 | self.assert_mock_args(m, '/db/%s' % db_name, method='DELETE') 262 | 263 | # Using an unsupported type 264 | with self.patch_fetch_mock(client) as m: 265 | re_exc_msg = r'^name_or_database must be an instance' 266 | with self.assertRaisesRegexp(TypeError, re_exc_msg): 267 | yield client.delete_database(None) 268 | self.assertFalse(m.called) 269 | 270 | # Non-existing database 271 | response_body = "Database %s doesn't exist" % db_name 272 | with self.patch_fetch_mock(client) as m: 273 | self.setup_fetch_mock(m, 400, body=response_body) 274 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 275 | yield client.delete_database(db_name) 276 | 277 | self.assert_mock_args(m, '/db/%s' % db_name, method='DELETE') 278 | 279 | @gen_test 280 | def test_get_cluster_admin_names(self): 281 | client = AsyncfluxClient() 282 | admins = [{'name': 'foo'}, {'name': 'bar'}] 283 | admin_names = [a['name'] for a in admins] 284 | 285 | with self.patch_fetch_mock(client) as m: 286 | self.setup_fetch_mock(m, 200, body=admins) 287 | response = yield client.get_cluster_admin_names() 288 | self.assertListEqual(response, admin_names) 289 | 290 | self.assert_mock_args(m, '/cluster_admins') 291 | 292 | @gen_test 293 | def test_get_cluster_admins(self): 294 | client = AsyncfluxClient() 295 | admins = [{'name': 'foo'}, {'name': 'bar'}] 296 | admin_names = [a['name'] for a in admins] 297 | 298 | with self.patch_fetch_mock(client) as m: 299 | self.setup_fetch_mock(m, 200, body=admins) 300 | response = yield client.get_cluster_admins() 301 | self.assertEqual(len(response), len(admin_names)) 302 | for r in response: 303 | self.assertIsInstance(r, ClusterAdmin) 304 | self.assertIn(r.name, admin_names) 305 | 306 | self.assert_mock_args(m, '/cluster_admins') 307 | 308 | @gen_test 309 | def test_create_cluster_admin(self): 310 | client = AsyncfluxClient() 311 | username = 'foo' 312 | password = 'fubar' 313 | 314 | with self.patch_fetch_mock(client) as m: 315 | self.setup_fetch_mock(m, 200) 316 | response = yield client.create_cluster_admin(username, password) 317 | self.assertIsInstance(response, ClusterAdmin) 318 | 
self.assertEqual(response.name, username) 319 | 320 | self.assert_mock_args(m, '/cluster_admins', method='POST', 321 | body=json.dumps({'name': username, 322 | 'password': password})) 323 | 324 | # Existing cluster admin 325 | response_body = 'User %s already exists' % username 326 | with self.patch_fetch_mock(client) as m: 327 | self.setup_fetch_mock(m, 400, body=response_body) 328 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 329 | yield client.create_cluster_admin(username, password) 330 | 331 | self.assert_mock_args(m, '/cluster_admins', method='POST', 332 | body=json.dumps({'name': username, 333 | 'password': password})) 334 | 335 | # Invalid password 336 | password = 'bar' 337 | response_body = ('Password must be more than 4 and less than 56 ' 338 | 'characters') 339 | with self.patch_fetch_mock(client) as m: 340 | self.setup_fetch_mock(m, 400, body=response_body) 341 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 342 | yield client.create_cluster_admin(username, password) 343 | 344 | self.assert_mock_args(m, '/cluster_admins', method='POST', 345 | body=json.dumps({'name': username, 346 | 'password': password})) 347 | 348 | @gen_test 349 | def test_change_cluster_admin_password(self): 350 | client = AsyncfluxClient() 351 | username = 'foo' 352 | password = 'fubar' 353 | 354 | with self.patch_fetch_mock(client) as m: 355 | self.setup_fetch_mock(m, 200) 356 | response = yield client.change_cluster_admin_password(username, 357 | password) 358 | self.assertIsNone(response) 359 | 360 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 361 | method='POST', 362 | body=json.dumps({'password': password})) 363 | 364 | # Non-existing cluster admin 365 | response_body = 'Invalid user name %s' % username 366 | with self.patch_fetch_mock(client) as m: 367 | self.setup_fetch_mock(m, 400, body=response_body) 368 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 369 | yield client.change_cluster_admin_password(username, password) 370 | 371 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 372 | method='POST', 373 | body=json.dumps({'password': password})) 374 | 375 | # Invalid password 376 | password = 'bar' 377 | response_body = ('Password must be more than 4 and less than 56 ' 378 | 'characters') 379 | with self.patch_fetch_mock(client) as m: 380 | self.setup_fetch_mock(m, 400, body=response_body) 381 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 382 | yield client.change_cluster_admin_password(username, password) 383 | 384 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 385 | method='POST', 386 | body=json.dumps({'password': password})) 387 | 388 | @gen_test 389 | def test_delete_cluster_admin(self): 390 | client = AsyncfluxClient() 391 | username = 'foo' 392 | 393 | with self.patch_fetch_mock(client) as m: 394 | self.setup_fetch_mock(m, 200) 395 | response = yield client.delete_cluster_admin(username) 396 | self.assertIsNone(response) 397 | 398 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 399 | method='DELETE') 400 | 401 | # Non-existing cluster admin 402 | response_body = "User %s doesn't exists" % username 403 | with self.patch_fetch_mock(client) as m: 404 | self.setup_fetch_mock(m, 400, body=response_body) 405 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 406 | yield client.delete_cluster_admin(username) 407 | 408 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 409 | method='DELETE') 410 | 411 | @gen_test 412 | def test_authenticate_cluster_admin(self): 413 | 
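        # Authentication goes through /cluster_admins/authenticate using the
        # request's auth_username/auth_password; a 200 response maps to True
        # and a 401 to False, as the two scenarios below show.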
client = AsyncfluxClient() 414 | username = 'foo' 415 | password = 'bar' 416 | 417 | with self.patch_fetch_mock(client) as m: 418 | self.setup_fetch_mock(m, 200) 419 | response = yield client.authenticate_cluster_admin(username, 420 | password) 421 | self.assertTrue(response) 422 | 423 | self.assert_mock_args(m, '/cluster_admins/authenticate', 424 | auth_username=username, 425 | auth_password=password) 426 | 427 | # Invalid credentials 428 | response_body = 'Invalid username/password' 429 | with self.patch_fetch_mock(client) as m: 430 | self.setup_fetch_mock(m, 401, body=response_body) 431 | response = yield client.authenticate_cluster_admin(username, 432 | password) 433 | self.assertFalse(response) 434 | 435 | self.assert_mock_args(m, '/cluster_admins/authenticate', 436 | auth_username=username, 437 | auth_password=password) 438 | 439 | def test_repr(self): 440 | host = 'localhost' 441 | port = 8086 442 | self.assertEqual(repr(AsyncfluxClient(str(host), port)), 443 | "AsyncfluxClient('%s', %d)" % (host, port)) 444 | -------------------------------------------------------------------------------- /tests/clusteradmin_test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | 4 | from asyncflux import AsyncfluxClient 5 | from asyncflux.clusteradmin import ClusterAdmin 6 | from asyncflux.testing import AsyncfluxTestCase, gen_test 7 | from asyncflux.errors import AsyncfluxError 8 | 9 | 10 | class ClusterAdminTestCase(AsyncfluxTestCase): 11 | 12 | @gen_test 13 | def test_change_password(self): 14 | client = AsyncfluxClient() 15 | username = 'foo' 16 | password = 'fubar' 17 | cluster_admin = ClusterAdmin(client, username) 18 | 19 | with self.patch_fetch_mock(client) as m: 20 | self.setup_fetch_mock(m, 200) 21 | response = yield cluster_admin.change_password(password) 22 | self.assertIsNone(response) 23 | 24 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 25 | method='POST', 26 | body=json.dumps({'password': password})) 27 | 28 | # Non-existing cluster admin 29 | response_body = 'Invalid user name %s' % username 30 | with self.patch_fetch_mock(client) as m: 31 | self.setup_fetch_mock(m, 400, body=response_body) 32 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 33 | yield cluster_admin.change_password(password) 34 | 35 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 36 | method='POST', 37 | body=json.dumps({'password': password})) 38 | 39 | # Invalid password 40 | password = 'bar' 41 | response_body = ('Password must be more than 4 and less than 56 ' 42 | 'characters') 43 | with self.patch_fetch_mock(client) as m: 44 | self.setup_fetch_mock(m, 400, body=response_body) 45 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 46 | yield cluster_admin.change_password(password) 47 | 48 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 49 | method='POST', 50 | body=json.dumps({'password': password})) 51 | 52 | @gen_test 53 | def test_delete(self): 54 | client = AsyncfluxClient() 55 | username = 'foo' 56 | cluster_admin = ClusterAdmin(client, username) 57 | 58 | with self.patch_fetch_mock(client) as m: 59 | self.setup_fetch_mock(m, 200) 60 | response = yield cluster_admin.delete() 61 | self.assertIsNone(response) 62 | 63 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 64 | method='DELETE') 65 | 66 | # Non-existing cluster admin 67 | response_body = "User %s doesn't exists" % username 68 | with self.patch_fetch_mock(client) as m: 69 | 
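            # The patched fetch returns a canned 400 response, so no running
            # InfluxDB server is required; the client surfaces the error body
            # as an AsyncfluxError, which is asserted below.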
self.setup_fetch_mock(m, 400, body=response_body) 70 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 71 | yield cluster_admin.delete() 72 | 73 | self.assert_mock_args(m, '/cluster_admins/%s' % username, 74 | method='DELETE') 75 | 76 | def test_repr(self): 77 | host = 'localhost' 78 | port = 8086 79 | username = 'foo' 80 | client = AsyncfluxClient(host, port) 81 | self.assertEqual(repr(ClusterAdmin(client, username)), 82 | ("ClusterAdmin(AsyncfluxClient('%s', %d), %r)" % 83 | (host, port, username))) 84 | -------------------------------------------------------------------------------- /tests/database_test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | 4 | from asyncflux import AsyncfluxClient 5 | from asyncflux.database import Database 6 | from asyncflux.errors import AsyncfluxError 7 | from asyncflux.testing import AsyncfluxTestCase, gen_test 8 | from asyncflux.user import User 9 | 10 | 11 | class DatabaseTestCase(AsyncfluxTestCase): 12 | 13 | @gen_test 14 | def test_delete(self): 15 | client = AsyncfluxClient() 16 | db_name = 'foo' 17 | db = Database(client, db_name) 18 | 19 | with self.patch_fetch_mock(client) as m: 20 | self.setup_fetch_mock(m, 204) 21 | response = yield db.delete() 22 | self.assertIsNone(response) 23 | 24 | self.assert_mock_args(m, '/db/%s' % db_name, method='DELETE') 25 | 26 | # Non-existing databse 27 | response_body = "Database %s doesn't exist" % db_name 28 | with self.patch_fetch_mock(client) as m: 29 | self.setup_fetch_mock(m, 400, body=response_body) 30 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 31 | yield db.delete() 32 | 33 | self.assert_mock_args(m, '/db/%s' % db_name, method='DELETE') 34 | 35 | @gen_test 36 | def test_get_user_names(self): 37 | client = AsyncfluxClient() 38 | db_name = 'foo' 39 | db = client[db_name] 40 | users = [{'name': 'foo', 'isAdmin': False, 'writeTo': '.*', 41 | 'readFrom': '.*'}, 42 | {'name': 'bar', 'isAdmin': False, 'writeTo': '.*', 43 | 'readFrom': '.*'}] 44 | user_names = [u['name'] for u in users] 45 | 46 | with self.patch_fetch_mock(client) as m: 47 | self.setup_fetch_mock(m, 200, body=users) 48 | response = yield db.get_user_names() 49 | self.assertListEqual(response, user_names) 50 | 51 | self.assert_mock_args(m, '/db/%s/users' % db_name) 52 | 53 | @gen_test 54 | def test_get_users(self): 55 | client = AsyncfluxClient() 56 | db_name = 'foo' 57 | db = client[db_name] 58 | users = [{'name': 'foo', 'isAdmin': False, 'writeTo': '.*', 59 | 'readFrom': '.*'}, 60 | {'name': 'bar', 'isAdmin': False, 'writeTo': '.*', 61 | 'readFrom': '.*'}] 62 | user_names = [u['name'] for u in users] 63 | 64 | with self.patch_fetch_mock(client) as m: 65 | self.setup_fetch_mock(m, 200, body=users) 66 | response = yield db.get_users() 67 | self.assertEqual(len(response), len(user_names)) 68 | for r in response: 69 | self.assertIsInstance(r, User) 70 | self.assertIn(r.name, user_names) 71 | 72 | self.assert_mock_args(m, '/db/%s/users' % db_name) 73 | 74 | @gen_test 75 | def test_get_user(self): 76 | client = AsyncfluxClient() 77 | db_name = 'foo' 78 | db = client[db_name] 79 | username = 'foo' 80 | user = {'name': username, 'isAdmin': False, 'writeTo': '^$', 81 | 'readFrom': '^$'} 82 | 83 | with self.patch_fetch_mock(client) as m: 84 | self.setup_fetch_mock(m, 200, body=user) 85 | response = yield db.get_user('foo') 86 | self.assertIsInstance(response, User) 87 | self.assertEqual(response.name, user['name']) 88 | 
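            # The InfluxDB payload uses camelCase keys (isAdmin, writeTo,
            # readFrom); the User object exposes them as is_admin, write_to
            # and read_from, as checked next.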
self.assertEqual(response.is_admin, user['isAdmin']) 89 | self.assertEqual(response.write_to, user['writeTo']) 90 | self.assertEqual(response.read_from, user['readFrom']) 91 | 92 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username)) 93 | 94 | # Non-existing database user 95 | response_body = 'Invalid username %s' % username 96 | with self.patch_fetch_mock(client) as m: 97 | self.setup_fetch_mock(m, 400, body=response_body) 98 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 99 | yield db.get_user(username) 100 | 101 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username)) 102 | 103 | @gen_test 104 | def test_create_user(self): 105 | client = AsyncfluxClient() 106 | db_name = 'foo' 107 | db = client[db_name] 108 | username = 'foo' 109 | password = 'fubar' 110 | is_admin = True 111 | read_from = '.*' 112 | write_to = '.*' 113 | 114 | payload = {'name': username, 'password': password, 'isAdmin': is_admin, 115 | 'readFrom': read_from, 'writeTo': write_to} 116 | with self.patch_fetch_mock(client) as m: 117 | self.setup_fetch_mock(m, 200) 118 | response = yield db.create_user(username, password, 119 | is_admin=is_admin, 120 | read_from=read_from, 121 | write_to=write_to) 122 | self.assertIsInstance(response, User) 123 | self.assertEqual(response.name, username) 124 | 125 | self.assert_mock_args(m, '/db/%s/users' % db_name, method='POST', 126 | body=json.dumps(payload)) 127 | 128 | # Without permissions 129 | payload = {'name': username, 'password': password, 'isAdmin': False} 130 | with self.patch_fetch_mock(client) as m: 131 | self.setup_fetch_mock(m, 200) 132 | response = yield db.create_user(username, password) 133 | self.assertIsInstance(response, User) 134 | self.assertEqual(response.name, username) 135 | self.assertEqual(response.is_admin, False) 136 | self.assertEqual(response.read_from, '.*') 137 | self.assertEqual(response.write_to, '.*') 138 | 139 | self.assert_mock_args(m, '/db/%s/users' % db_name, method='POST', 140 | body=json.dumps(payload)) 141 | 142 | # Invalid permission argument values 143 | exc_msg = 'You have to provide read and write permissions' 144 | with self.assertRaisesRegexp(ValueError, exc_msg): 145 | yield db.create_user(username, password, is_admin=is_admin, 146 | read_from=read_from) 147 | 148 | with self.assertRaisesRegexp(ValueError, exc_msg): 149 | yield db.create_user(username, password, is_admin=is_admin, 150 | write_to=write_to) 151 | 152 | # Existing database user 153 | payload = {'name': username, 'password': password, 'isAdmin': is_admin, 154 | 'readFrom': read_from, 'writeTo': write_to} 155 | response_body = 'User %s already exists' % username 156 | with self.patch_fetch_mock(client) as m: 157 | self.setup_fetch_mock(m, 400, body=response_body) 158 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 159 | yield db.create_user(username, password, is_admin=is_admin, 160 | read_from=read_from, write_to=write_to) 161 | 162 | self.assert_mock_args(m, '/db/%s/users' % db_name, method='POST', 163 | body=json.dumps(payload)) 164 | 165 | # Invalid password 166 | password = 'bar' 167 | payload = {'name': username, 'password': password, 'isAdmin': is_admin, 168 | 'readFrom': read_from, 'writeTo': write_to} 169 | response_body = ('Password must be more than 4 and less than 56 ' 170 | 'characters') 171 | with self.patch_fetch_mock(client) as m: 172 | self.setup_fetch_mock(m, 400, body=response_body) 173 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 174 | yield db.create_user(username, password, 
is_admin=is_admin, 175 | read_from=read_from, write_to=write_to) 176 | 177 | self.assert_mock_args(m, '/db/%s/users' % db_name, method='POST', 178 | body=json.dumps(payload)) 179 | 180 | # Non-default permissions 181 | read_from = '^$' 182 | write_to = '^$' 183 | payload = {'name': username, 'password': password, 'isAdmin': is_admin, 184 | 'readFrom': read_from, 'writeTo': write_to} 185 | with self.patch_fetch_mock(client) as m: 186 | self.setup_fetch_mock(m, 200) 187 | response = yield db.create_user(username, password, 188 | is_admin=is_admin, 189 | read_from=read_from, 190 | write_to=write_to) 191 | self.assertIsInstance(response, User) 192 | self.assertEqual(response.name, username) 193 | 194 | self.assert_mock_args(m, '/db/%s/users' % db_name, method='POST', 195 | body=json.dumps(payload)) 196 | 197 | @gen_test 198 | def test_update_user(self): 199 | client = AsyncfluxClient() 200 | db_name = 'foo' 201 | db = client[db_name] 202 | username = 'foo' 203 | password = 'fubar' 204 | is_admin = True 205 | read_from = '^$' 206 | write_to = '^$' 207 | 208 | # Update password 209 | payload = {'password': password} 210 | with self.patch_fetch_mock(client) as m: 211 | self.setup_fetch_mock(m, 200) 212 | response = yield db.update_user(username, password) 213 | self.assertIsNone(response) 214 | 215 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 216 | method='POST', body=json.dumps(payload)) 217 | 218 | # Update isAdmin value 219 | payload = {'isAdmin': is_admin} 220 | with self.patch_fetch_mock(client) as m: 221 | self.setup_fetch_mock(m, 200) 222 | response = yield db.update_user(username, is_admin=is_admin) 223 | self.assertIsNone(response) 224 | 225 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 226 | method='POST', body=json.dumps(payload)) 227 | 228 | # Update permissions 229 | payload = {'readFrom': read_from, 'writeTo': write_to} 230 | with self.patch_fetch_mock(client) as m: 231 | self.setup_fetch_mock(m, 200) 232 | response = yield db.update_user(username, read_from=read_from, 233 | write_to=write_to) 234 | self.assertIsNone(response) 235 | 236 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 237 | method='POST', body=json.dumps(payload)) 238 | 239 | # Invalid permission argument values 240 | exc_msg = 'You have to provide read and write permissions' 241 | with self.assertRaisesRegexp(ValueError, exc_msg): 242 | yield db.update_user(username, password, is_admin=is_admin, 243 | read_from=read_from) 244 | 245 | # Without any arguments 246 | exc_msg = 'You have to set at least one argument' 247 | with self.assertRaisesRegexp(ValueError, exc_msg): 248 | yield db.update_user(username) 249 | 250 | @gen_test 251 | def test_change_user_password(self): 252 | client = AsyncfluxClient() 253 | db_name = 'foo' 254 | db = client[db_name] 255 | username = 'foo' 256 | password = 'fubar' 257 | 258 | payload = {'password': password} 259 | with self.patch_fetch_mock(client) as m: 260 | self.setup_fetch_mock(m, 200) 261 | response = yield db.change_user_password(username, password) 262 | self.assertIsNone(response) 263 | 264 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 265 | method='POST', body=json.dumps(payload)) 266 | 267 | # Non-existing user 268 | response_body = "Invalid username %s" % username 269 | with self.patch_fetch_mock(client) as m: 270 | self.setup_fetch_mock(m, 400, body=response_body) 271 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 272 | yield db.change_user_password(username, password) 
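                # The call above raises, but the request was still issued, so
                # its URL, method and body are verified right after this block.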
273 | 274 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 275 | method='POST', body=json.dumps(payload)) 276 | 277 | # Invalid password 278 | password = 'bar' 279 | payload = {'password': password} 280 | response_body = ('Password must be more than 4 and less than 56 ' 281 | 'characters') 282 | with self.patch_fetch_mock(client) as m: 283 | self.setup_fetch_mock(m, 400, body=response_body) 284 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 285 | yield db.change_user_password(username, password) 286 | 287 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 288 | method='POST', body=json.dumps(payload)) 289 | 290 | @gen_test 291 | def test_change_user_privileges(self): 292 | client = AsyncfluxClient() 293 | db_name = 'foo' 294 | db = client[db_name] 295 | username = 'foo' 296 | is_admin = True 297 | read_from = '^$' 298 | write_to = '^$' 299 | 300 | # Update permissions 301 | payload = {'isAdmin': is_admin, 'readFrom': read_from, 302 | 'writeTo': write_to} 303 | with self.patch_fetch_mock(client) as m: 304 | self.setup_fetch_mock(m, 200) 305 | response = yield db.change_user_privileges(username, is_admin, 306 | read_from=read_from, 307 | write_to=write_to) 308 | self.assertIsNone(response) 309 | 310 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 311 | method='POST', body=json.dumps(payload)) 312 | 313 | payload = {'isAdmin': is_admin} 314 | with self.patch_fetch_mock(client) as m: 315 | self.setup_fetch_mock(m, 200) 316 | response = yield db.change_user_privileges(username, is_admin, None, None) 317 | self.assertIsNone(response) 318 | 319 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 320 | method='POST', body=json.dumps(payload)) 321 | 322 | # Non-existing user 323 | payload = {'isAdmin': is_admin, 'readFrom': read_from, 324 | 'writeTo': write_to} 325 | response_body = "Invalid username %s" % username 326 | with self.patch_fetch_mock(client) as m: 327 | self.setup_fetch_mock(m, 400, body=response_body) 328 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 329 | yield db.change_user_privileges(username, is_admin, 330 | read_from=read_from, 331 | write_to=write_to) 332 | 333 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 334 | method='POST', body=json.dumps(payload)) 335 | 336 | # Invalid permission argument values 337 | exc_msg = 'You have to provide read and write permissions' 338 | with self.assertRaisesRegexp(ValueError, exc_msg): 339 | yield db.change_user_privileges(username, is_admin, read_from, None) 340 | 341 | with self.assertRaisesRegexp(ValueError, exc_msg): 342 | yield db.change_user_privileges(username, is_admin, None, write_to) 343 | 344 | @gen_test 345 | def test_change_user_permissions(self): 346 | client = AsyncfluxClient() 347 | db_name = 'foo' 348 | db = client[db_name] 349 | username = 'foo' 350 | read_from = '^$' 351 | write_to = '^$' 352 | 353 | # Update permissions 354 | payload = {'readFrom': read_from, 'writeTo': write_to} 355 | with self.patch_fetch_mock(client) as m: 356 | self.setup_fetch_mock(m, 200) 357 | response = yield db.change_user_permissions(username, 358 | read_from=read_from, 359 | write_to=write_to) 360 | self.assertIsNone(response) 361 | 362 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 363 | method='POST', body=json.dumps(payload)) 364 | 365 | # Non-existing user 366 | response_body = "Invalid username %s" % username 367 | with self.patch_fetch_mock(client) as m: 368 | self.setup_fetch_mock(m, 400, 
body=response_body) 369 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 370 | yield db.change_user_permissions(username, read_from=read_from, 371 | write_to=write_to) 372 | 373 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 374 | method='POST', body=json.dumps(payload)) 375 | 376 | # Invalid permission argument values 377 | exc_msg = 'You have to provide read and write permissions' 378 | with self.assertRaisesRegexp(ValueError, exc_msg): 379 | yield db.change_user_permissions(username, None, None) 380 | 381 | with self.assertRaisesRegexp(ValueError, exc_msg): 382 | yield db.change_user_permissions(username, read_from, None) 383 | 384 | with self.assertRaisesRegexp(ValueError, exc_msg): 385 | yield db.change_user_permissions(username, None, write_to) 386 | 387 | @gen_test 388 | def test_delete_user(self): 389 | client = AsyncfluxClient() 390 | db_name = 'foo' 391 | db = client[db_name] 392 | username = 'foo' 393 | 394 | with self.patch_fetch_mock(client) as m: 395 | self.setup_fetch_mock(m, 200) 396 | response = yield db.delete_user(username) 397 | self.assertIsNone(response) 398 | 399 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 400 | method='DELETE') 401 | 402 | # Non-existing user 403 | response_body = "User %s doesn't exists" % username 404 | with self.patch_fetch_mock(client) as m: 405 | self.setup_fetch_mock(m, 400, body=response_body) 406 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 407 | yield db.delete_user(username) 408 | 409 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 410 | method='DELETE') 411 | 412 | @gen_test 413 | def test_authenticate_user(self): 414 | client = AsyncfluxClient() 415 | db_name = 'foo' 416 | db = client[db_name] 417 | username = 'foo' 418 | password = 'bar' 419 | 420 | with self.patch_fetch_mock(client) as m: 421 | self.setup_fetch_mock(m, 200) 422 | response = yield db.authenticate_user(username, password) 423 | self.assertTrue(response) 424 | 425 | self.assert_mock_args(m, '/db/%s/authenticate' % db_name, 426 | auth_username=username, 427 | auth_password=password) 428 | 429 | # Invalid credentials 430 | response_body = 'Invalid username/password' 431 | with self.patch_fetch_mock(client) as m: 432 | self.setup_fetch_mock(m, 401, body=response_body) 433 | response = yield db.authenticate_user(username, password) 434 | self.assertFalse(response) 435 | 436 | self.assert_mock_args(m, '/db/%s/authenticate' % db_name, 437 | auth_username=username, 438 | auth_password=password) 439 | 440 | def test_repr(self): 441 | host = 'localhost' 442 | port = 8086 443 | client = AsyncfluxClient(host, port) 444 | db_name = 'db' 445 | self.assertEqual(repr(Database(client, db_name)), 446 | ("Database(AsyncfluxClient('%s', %d), '%s')" % 447 | (host, port, db_name))) 448 | -------------------------------------------------------------------------------- /tests/runtests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import sys 4 | from unittest import defaultTestLoader, TextTestRunner, TestSuite 5 | 6 | TESTS = ('asyncflux_test', 'client_test', 'clusteradmin_test', 'database_test', 7 | 'shardspace_test', 'user_test', 'util_test', ) 8 | 9 | 10 | def make_suite(prefix='', extra=(), force_all=False): 11 | tests = TESTS + extra 12 | test_names = list(prefix + x for x in tests) 13 | suite = TestSuite() 14 | suite.addTest(defaultTestLoader.loadTestsFromNames(test_names)) 15 | return suite 16 | 17 | 18 | def 
additional_tests(): 19 | """ 20 | This is called automatically by setup.py test 21 | """ 22 | return make_suite('tests.') 23 | 24 | 25 | def main(): 26 | my_dir = os.path.dirname(os.path.abspath(__file__)) 27 | sys.path.insert(0, os.path.abspath(os.path.join(my_dir, '..'))) 28 | 29 | from optparse import OptionParser 30 | parser = OptionParser() 31 | parser.add_option('--with-pep8', action='store_true', dest='with_pep8', 32 | default=True) 33 | parser.add_option('--with-pyflakes', action='store_true', 34 | dest='with_pyflakes', default=True) 35 | parser.add_option('--force-all', action='store_true', dest='force_all', 36 | default=False) 37 | parser.add_option('-v', '--verbose', action='count', dest='verbosity', 38 | default=0) 39 | parser.add_option('-q', '--quiet', action='count', dest='quietness', 40 | default=0) 41 | options, extra_args = parser.parse_args() 42 | has_pep8 = False 43 | try: 44 | import pep8 45 | has_pep8 = True 46 | except ImportError: 47 | if options.with_pep8: 48 | sys.stderr.write('# Could not find pep8 library.') 49 | sys.exit(1) 50 | 51 | if has_pep8: 52 | guide_main = pep8.StyleGuide( 53 | ignore=['E402'], 54 | paths=['asyncflux/'], 55 | exclude=[], 56 | max_line_length=80, 57 | ) 58 | guide_tests = pep8.StyleGuide( 59 | ignore=['E221'], 60 | paths=['tests/'], 61 | max_line_length=80, 62 | ) 63 | for guide in (guide_main, guide_tests): 64 | report = guide.check_files() 65 | if report.total_errors: 66 | sys.exit(1) 67 | 68 | if options.with_pyflakes: 69 | try: 70 | import pyflakes 71 | assert pyflakes # silence pyflakes 72 | except ImportError: 73 | sys.stderr.write('# Could not find pyflakes library.\n') 74 | sys.exit(1) 75 | 76 | from pyflakes import api, reporter 77 | warnings = api.checkRecursive(['asyncflux', 'tests'], 78 | reporter._makeDefaultReporter()) 79 | if warnings > 0: 80 | sys.exit(1) 81 | 82 | suite = make_suite('', tuple(extra_args), options.force_all) 83 | 84 | runner = TextTestRunner(verbosity=options.verbosity - options.quietness + 1) 85 | result = runner.run(suite) 86 | sys.exit(not result.wasSuccessful()) 87 | 88 | if __name__ == '__main__': 89 | main() 90 | -------------------------------------------------------------------------------- /tests/shardspace_test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from asyncflux import AsyncfluxClient 3 | from asyncflux.database import Database 4 | from asyncflux.shardspace import ShardSpace 5 | from asyncflux.testing import AsyncfluxTestCase, gen_test 6 | 7 | 8 | class ShardSpaceTestCase(AsyncfluxTestCase): 9 | 10 | def test_class_instantiation(self): 11 | client = AsyncfluxClient() 12 | 13 | name = 'default' 14 | database = 'foo' 15 | regex = '/.*/' 16 | retention_policy = 'inf' 17 | shard_duration = '7d' 18 | replication_factor = 5 19 | split = 3 20 | shard_space = ShardSpace(client, name=name, database=database, 21 | regex=regex, retention_policy=retention_policy, 22 | shard_duration=shard_duration, 23 | replication_factor=replication_factor, 24 | split=split) 25 | self.assertIsInstance(shard_space.database, Database) 26 | self.assertEqual(shard_space.database.name, database) 27 | 28 | database = Database(client, 'foo') 29 | shard_space = ShardSpace(client, name=name, database=database, 30 | regex=regex, retention_policy=retention_policy, 31 | shard_duration=shard_duration, 32 | replication_factor=replication_factor, 33 | split=split) 34 | self.assertIsInstance(shard_space.database, Database) 35 | 
self.assertEqual(shard_space.database.name, database.name) 36 | 37 | @gen_test 38 | def test_get_shard_spaces(self): 39 | client = AsyncfluxClient() 40 | shard_spaces = [{'name': 'default', 'database': 'foo', 'regex': '/.*/', 41 | 'retentionPolicy': 'inf', 'shardDuration': '7d', 42 | 'replicationFactor': 1, 'split': 1}, 43 | {'name': 'second', 'database': 'bar', 'regex': '/.*/', 44 | 'retentionPolicy': '365d', 'shardDuration': '15m', 45 | 'replicationFactor': 5, 'split': 5}, 46 | {'name': 'default', 'database': 'bar', 'regex': '/.*/', 47 | 'retentionPolicy': 'inf', 'shardDuration': '7d', 48 | 'replicationFactor': 1, 'split': 1}] 49 | 50 | with self.patch_fetch_mock(client) as m: 51 | self.setup_fetch_mock(m, 200, body=shard_spaces) 52 | response = yield client.get_shard_spaces() 53 | for actual, expected in zip(response, shard_spaces): 54 | self.assertIsInstance(actual, ShardSpace) 55 | self.assertEqual(actual.client, client) 56 | self.assertEqual(actual.name, expected['name']) 57 | self.assertIsInstance(actual.database, Database) 58 | self.assertEqual(actual.database.name, expected['database']) 59 | self.assertEqual(actual.regex, expected['regex']) 60 | self.assertEqual(actual.retention_policy, 61 | expected['retentionPolicy']) 62 | self.assertEqual(actual.shard_duration, 63 | expected['shardDuration']) 64 | self.assertEqual(actual.replication_factor, 65 | expected['replicationFactor']) 66 | self.assertEqual(actual.split, expected['split']) 67 | 68 | self.assert_mock_args(m, '/cluster/shard_spaces') 69 | -------------------------------------------------------------------------------- /tests/user_test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | 4 | from asyncflux import AsyncfluxClient 5 | from asyncflux.testing import AsyncfluxTestCase, gen_test 6 | from asyncflux.user import User 7 | from asyncflux.errors import AsyncfluxError 8 | 9 | 10 | class UserTestCase(AsyncfluxTestCase): 11 | 12 | @gen_test 13 | def test_update(self): 14 | client = AsyncfluxClient() 15 | db_name = 'foo' 16 | db = client[db_name] 17 | username = 'foo' 18 | user = User(db, username) 19 | password = 'fubar' 20 | is_admin = True 21 | read_from = '^$' 22 | write_to = '^$' 23 | 24 | # Update password 25 | payload = {'password': password} 26 | with self.patch_fetch_mock(client) as m: 27 | self.setup_fetch_mock(m, 200) 28 | response = yield user.update(new_password=password) 29 | self.assertIsNone(response) 30 | 31 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 32 | method='POST', body=json.dumps(payload)) 33 | 34 | # Update isAdmin value 35 | payload = {'isAdmin': is_admin} 36 | with self.patch_fetch_mock(client) as m: 37 | self.setup_fetch_mock(m, 200) 38 | response = yield user.update(is_admin=is_admin) 39 | self.assertIsNone(response) 40 | self.assertEqual(user.is_admin, is_admin) 41 | 42 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 43 | method='POST', body=json.dumps(payload)) 44 | 45 | # Update permissions 46 | payload = {'readFrom': read_from, 'writeTo': write_to} 47 | with self.patch_fetch_mock(client) as m: 48 | self.setup_fetch_mock(m, 200) 49 | response = yield user.update(read_from=read_from, 50 | write_to=write_to) 51 | self.assertIsNone(response) 52 | self.assertEqual(user.read_from, read_from) 53 | self.assertEqual(user.write_to, write_to) 54 | 55 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 56 | method='POST', body=json.dumps(payload)) 57 | 58 | # 
Invalid permission argument values 59 | exc_msg = 'You have to provide read and write permissions' 60 | with self.assertRaisesRegexp(ValueError, exc_msg): 61 | yield user.update(password, is_admin=is_admin, read_from=read_from) 62 | 63 | # Without any arguments 64 | exc_msg = 'You have to set at least one argument' 65 | with self.assertRaisesRegexp(ValueError, exc_msg): 66 | yield user.update() 67 | 68 | @gen_test 69 | def test_change_password(self): 70 | client = AsyncfluxClient() 71 | db_name = 'foo' 72 | db = client[db_name] 73 | username = 'foo' 74 | password = 'fubar' 75 | user = User(db, username) 76 | 77 | with self.patch_fetch_mock(client) as m: 78 | self.setup_fetch_mock(m, 200) 79 | response = yield user.change_password(password) 80 | self.assertIsNone(response) 81 | 82 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 83 | method='POST', 84 | body=json.dumps({'password': password})) 85 | 86 | # Non-existing user 87 | response_body = 'Invalid username %s' % username 88 | with self.patch_fetch_mock(client) as m: 89 | self.setup_fetch_mock(m, 400, body=response_body) 90 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 91 | yield user.change_password(password) 92 | 93 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 94 | method='POST', 95 | body=json.dumps({'password': password})) 96 | 97 | # Invalid password 98 | password = 'bar' 99 | response_body = ('Password must be more than 4 and less than 56 ' 100 | 'characters') 101 | with self.patch_fetch_mock(client) as m: 102 | self.setup_fetch_mock(m, 400, body=response_body) 103 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 104 | yield user.change_password(password) 105 | 106 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 107 | method='POST', 108 | body=json.dumps({'password': password})) 109 | 110 | @gen_test 111 | def test_change_privileges(self): 112 | client = AsyncfluxClient() 113 | db_name = 'foo' 114 | db = client[db_name] 115 | username = 'foo' 116 | user = User(db, username) 117 | is_admin = True 118 | read_from = '^$' 119 | write_to = '^$' 120 | 121 | # Update permissions 122 | payload = {'isAdmin': is_admin, 'readFrom': read_from, 123 | 'writeTo': write_to} 124 | with self.patch_fetch_mock(client) as m: 125 | self.setup_fetch_mock(m, 200) 126 | response = yield user.change_privileges(is_admin, 127 | read_from=read_from, 128 | write_to=write_to) 129 | self.assertIsNone(response) 130 | self.assertEqual(user.is_admin, is_admin) 131 | self.assertEqual(user.read_from, read_from) 132 | self.assertEqual(user.write_to, write_to) 133 | 134 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 135 | method='POST', body=json.dumps(payload)) 136 | 137 | payload = {'isAdmin': is_admin} 138 | with self.patch_fetch_mock(client) as m: 139 | self.setup_fetch_mock(m, 200) 140 | response = yield user.change_privileges(is_admin, None, None) 141 | self.assertIsNone(response) 142 | self.assertEqual(user.is_admin, is_admin) 143 | 144 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 145 | method='POST', body=json.dumps(payload)) 146 | 147 | # Non-existing user 148 | payload = {'isAdmin': is_admin, 'readFrom': read_from, 149 | 'writeTo': write_to} 150 | response_body = "Invalid username %s" % username 151 | with self.patch_fetch_mock(client) as m: 152 | self.setup_fetch_mock(m, 400, body=response_body) 153 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 154 | yield user.change_privileges(is_admin, read_from=read_from, 155 | 
write_to=write_to) 156 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 157 | method='POST', body=json.dumps(payload)) 158 | 159 | # Invalid permission argument values 160 | exc_msg = 'You have to provide read and write permissions' 161 | with self.assertRaisesRegexp(ValueError, exc_msg): 162 | yield user.change_privileges(is_admin, read_from, None) 163 | 164 | with self.assertRaisesRegexp(ValueError, exc_msg): 165 | yield user.change_privileges(is_admin, None, write_to) 166 | 167 | @gen_test 168 | def test_change_permissions(self): 169 | client = AsyncfluxClient() 170 | db_name = 'foo' 171 | db = client[db_name] 172 | username = 'foo' 173 | user = User(db, username) 174 | read_from = '^$' 175 | write_to = '^$' 176 | 177 | # Update permissions 178 | payload = {'readFrom': read_from, 'writeTo': write_to} 179 | with self.patch_fetch_mock(client) as m: 180 | self.setup_fetch_mock(m, 200) 181 | response = yield user.change_permissions(read_from=read_from, 182 | write_to=write_to) 183 | self.assertIsNone(response) 184 | self.assertEqual(user.read_from, read_from) 185 | self.assertEqual(user.write_to, write_to) 186 | 187 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 188 | method='POST', body=json.dumps(payload)) 189 | 190 | # Non-existing user 191 | response_body = "Invalid username %s" % username 192 | with self.patch_fetch_mock(client) as m: 193 | self.setup_fetch_mock(m, 400, body=response_body) 194 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 195 | yield user.change_permissions(read_from=read_from, 196 | write_to=write_to) 197 | 198 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 199 | method='POST', body=json.dumps(payload)) 200 | 201 | # Invalid permission argument values 202 | exc_msg = 'You have to provide read and write permissions' 203 | with self.assertRaisesRegexp(ValueError, exc_msg): 204 | yield user.change_permissions(None, None) 205 | 206 | with self.assertRaisesRegexp(ValueError, exc_msg): 207 | yield user.change_permissions(read_from, None) 208 | 209 | with self.assertRaisesRegexp(ValueError, exc_msg): 210 | yield user.change_permissions(None, write_to) 211 | 212 | @gen_test 213 | def test_delete(self): 214 | client = AsyncfluxClient() 215 | db_name = 'foo' 216 | db = client[db_name] 217 | username = 'foo' 218 | user = User(db, username) 219 | 220 | with self.patch_fetch_mock(client) as m: 221 | self.setup_fetch_mock(m, 200) 222 | response = yield user.delete() 223 | self.assertIsNone(response) 224 | 225 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 226 | method='DELETE') 227 | 228 | # Non-existing user 229 | response_body = "User %s doesn't exist" % username 230 | with self.patch_fetch_mock(client) as m: 231 | self.setup_fetch_mock(m, 400, body=response_body) 232 | with self.assertRaisesRegexp(AsyncfluxError, response_body): 233 | yield user.delete() 234 | 235 | self.assert_mock_args(m, '/db/%s/users/%s' % (db_name, username), 236 | method='DELETE') 237 | 238 | def test_repr(self): 239 | host = 'localhost' 240 | port = 8086 241 | db_name = 'foo' 242 | db = AsyncfluxClient(host, port)[db_name] 243 | username = 'foo' 244 | format_repr = "User(Database(AsyncfluxClient('%s', %d), '%s'), '%s')" 245 | self.assertEqual(repr(User(db, username)), 246 | (format_repr % (host, port, db_name, username))) 247 | -------------------------------------------------------------------------------- /tests/util_test.py: -------------------------------------------------------------------------------- 
1 | # -*- coding: utf-8 -*- 2 | from asyncflux import AsyncfluxClient 3 | from asyncflux.testing import AsyncfluxTestCase 4 | from asyncflux.util import snake_case, snake_case_dict 5 | 6 | 7 | class TestAsyncfluxCoroutine(AsyncfluxTestCase): 8 | 9 | def test_non_callable(self): 10 | client = AsyncfluxClient() 11 | with self.assertRaisesRegexp(TypeError, 'callback must be a callable'): 12 | client.get_databases(callback='this is not a callable') 13 | 14 | 15 | class TestSnakeCase(AsyncfluxTestCase): 16 | 17 | def test_snake_case(self): 18 | values = [ 19 | ('CamelCase', 'camel_case'), 20 | ('isAdmin', 'is_admin'), 21 | ('writeTo', 'write_to'), 22 | ('readFrom', 'read_from') 23 | ] 24 | for raw_string, snake in values: 25 | self.assertEqual(snake_case(raw_string), snake) 26 | 27 | def test_snake_case_dict(self): 28 | raw_dict = { 29 | 'name': 'foo', 30 | 'isAdmin': False, 31 | 'writeTo': '.*', 32 | 'readFrom': '.*' 33 | } 34 | snake_dict = { 35 | 'name': 'foo', 36 | 'is_admin': False, 37 | 'write_to': '.*', 38 | 'read_from': '.*' 39 | } 40 | self.assertDictEqual(snake_case_dict(raw_dict), snake_dict) 41 | --------------------------------------------------------------------------------
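The conversions asserted in tests/util_test.py above pin down the behavior expected of asyncflux.util.snake_case and snake_case_dict: camelCase keys returned by the InfluxDB HTTP API ('isAdmin', 'readFrom', 'writeTo') become snake_case names on the Python side. Below is a minimal sketch that satisfies exactly those test cases; it assumes a regex-based implementation, and the shipped asyncflux/util.py may well differ (it also contains the coroutine/callback machinery exercised by TestAsyncfluxCoroutine, which is not sketched here).

# Sketch only: written to match the expectations in tests/util_test.py,
# not copied from asyncflux/util.py.
import re

# Any character followed by a capitalised word, e.g. "lC" in 'CamelCase'.
_FIRST_CAP_RE = re.compile(r'(.)([A-Z][a-z]+)')
# A lower-case letter or digit followed by an upper-case letter; catches
# boundaries the first pattern misses, such as a trailing acronym.
_ALL_CAP_RE = re.compile(r'([a-z0-9])([A-Z])')


def snake_case(camel_string):
    """Convert 'CamelCase'/'isAdmin'-style names to 'camel_case'/'is_admin'."""
    partial = _FIRST_CAP_RE.sub(r'\1_\2', camel_string)
    return _ALL_CAP_RE.sub(r'\1_\2', partial).lower()


def snake_case_dict(camel_dict):
    """Return a copy of camel_dict with every key run through snake_case()."""
    return dict((snake_case(key), value) for key, value in camel_dict.items())

With these definitions, snake_case('writeTo') returns 'write_to' and snake_case_dict({'readFrom': '.*'}) returns {'read_from': '.*'}, which is what the assertions in TestSnakeCase check.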