├── .gitignore ├── .travis.yml ├── CHANGELOG.rst ├── CONTRIBUTING.md ├── DESCRIPTION.rst ├── LICENSE ├── MANIFEST.in ├── README.rst ├── examples ├── createSocialGraph.py ├── debiangraph.py ├── fetchDebianDependencyGraph.py └── json_schema_validation_in_db.py ├── pyArango ├── ISSUES.txt ├── __init__.py ├── action.py ├── admin.py ├── ca_certificate.py ├── collection.py ├── connection.py ├── consts.py ├── database.py ├── doc │ ├── Makefile │ ├── make.bat │ └── source │ │ ├── action.rst │ │ ├── admin.rst │ │ ├── ca_certificate.rst │ │ ├── collection.rst │ │ ├── conf.py │ │ ├── connection.rst │ │ ├── database.rst │ │ ├── document.rst │ │ ├── exceptions.rst │ │ ├── foxx.rst │ │ ├── gevent_session.rst │ │ ├── graph.rst │ │ ├── index.rst │ │ ├── indexes.rst │ │ ├── jwauth.rst │ │ ├── query.rst │ │ ├── tasks.rst │ │ ├── users.rst │ │ └── validation.rst ├── document.py ├── foxx.py ├── gevent_session.py ├── graph.py ├── index.py ├── jwauth.py ├── query.py ├── tasks.py ├── tests │ ├── __init__.py │ ├── doc_save_benchmark.py │ ├── doc_save_bulk_benchmark.py │ ├── setup_arangodb.sh │ ├── tests.py │ └── validators_tests.py ├── theExceptions.py ├── users.py └── validation.py ├── run_tests.sh ├── setup.cfg └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # PyInstaller 26 | # Usually these files are written by a python script from a template 27 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
28 | *.manifest 29 | *.spec 30 | 31 | # Installer logs 32 | pip-log.txt 33 | pip-delete-this-directory.txt 34 | 35 | # Unit test / coverage reports 36 | htmlcov/ 37 | .tox/ 38 | .coverage 39 | .cache 40 | nosetests.xml 41 | coverage.xml 42 | 43 | # Translations 44 | *.mo 45 | *.pot 46 | 47 | # Django stuff: 48 | *.log 49 | 50 | # Sphinx documentation 51 | docs/_build/ 52 | 53 | # PyBuilder 54 | target/ 55 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | language: python 3 | 4 | services: 5 | - docker 6 | 7 | python: 8 | - "2.7" 9 | - "3.6" 10 | 11 | addons: 12 | apt: 13 | update: true 14 | 15 | before_script: 16 | - chmod 777 ./pyArango/tests/setup_arangodb.sh 17 | - ./pyArango/tests/setup_arangodb.sh 18 | 19 | install: 20 | - pip install --upgrade pip 21 | - pip install coverage gevent setuptools enum34 22 | - python setup.py install 23 | 24 | script: coverage run -m unittest discover pyArango/tests/ 25 | 26 | after_success: bash <(curl -s https://codecov.io/bash) 27 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | 2.1.1 2 | ===== 3 | * Added missing fields value settings on getitem 4 | 5 | ===== 6 | 7 | 2.1.0 8 | ===== 9 | * Added getitem for documents at the database level 10 | * Added fill_default() on documents to replace None values by schema defaults 11 | * fill_default() is automatically called on save 12 | ===== 13 | 14 | 2.0.3 15 | ===== 16 | * Added support for authentication via client-side certificates 17 | 18 | 2.0.2 19 | ===== 20 | * Fixed contains functions 21 | * Added UniqueConstrainViolation exception, inherits from CreationError 22 | 23 | 2.0.1 24 | ===== 25 | 26 | * Fixed max retries for write conflicts 27 | * Added parameter ``pool_maxsize`` on class 
``Connection`` to allow the user to configure the http pool size. 28 | ======= 29 | 30 | 2.0 31 | ===== 32 | 33 | * changed the default value of reject_zero in NotNull from True to False 34 | * added to_default function to reset a document to its default values 35 | * fixed bug in default documents where default values could be overwritten 36 | * default value for fields is now None 37 | 38 | 1.3.5 39 | ===== 40 | 41 | * restoreIndex and restoreIndexes in collection will restore previously deleted indexes 42 | * added max_conflict_retries to handle arango's 1200 43 | * added single session so AikidoSession.Holders can share a single request session 44 | * added task deletion to tests reset 45 | * added drop() to tasks to remove all tasks in one command 46 | * better documentation of connection class 47 | * False is not considered a Null value while validating 48 | * Removed redundant document creation functions 49 | * More explicit validation error with field name 50 | 51 | 1.3.4 52 | ===== 53 | * Bugfix: Query iterator now returns all elements instead of a premature empty list 54 | * Bugfix: Collection naming when using the arango's name argument 55 | * New: Schema validation example 56 | * New: Satellite graphs 57 | 58 | 1.3.3 59 | ===== 60 | 61 | * SSL certificate support 62 | * More doc 63 | * Fixed on_load schema validation 64 | * Gevent, monkey patching breaks python's multi-processing. Removed grequests as the default, back to requests. 65 | * Removed grequests and gevent as hard dependencies. Added explicit error messages to prompt users to install them if needed. 66 | * Jwauth is now in its own file 67 | * Generic rest call to database support (action) for connection, database. 
68 | * Foxx support 69 | * Tasks create, delete, fetch support 70 | 71 | 1.3.2 72 | ===== 73 | 74 | * Validation bug fixes 75 | * New Numeric, Int, Bool, String, Enumeration, Range validators 76 | * Fields can have default values 77 | * When creating a new document, Collection will serve one populated with defaults 78 | * stastd support thx to: @dothebart 79 | * properties definition in schema 80 | * AQL errors now come with prints and line numbers for everyone's convenience 81 | * Bulk save for Document objects and dicts 82 | 83 | 1.3.1 84 | ===== 85 | 86 | * Will die gracefully if server response is empty 87 | * getStore and getPatches shorthands added to Document 88 | 89 | 1.3.0 90 | ===== 91 | 92 | * Fixed nested store patch update 93 | * REFACT: New DocumentStore class for taking care of storing document in a hierarchy of stores (nested objects) and validate them 94 | * Minor bug fixes 95 | 96 | 1.2.9 97 | ===== 98 | 99 | * Added bulk import to connection 100 | * Added bindvars to explain 101 | 102 | 1.2.8 103 | ===== 104 | 105 | * BugFix: recursive field validation 106 | * BugFix: fullCount option now works 107 | * Length validator will raise a ValidationError if value has no length 108 | * users can now specify custom json encoders 109 | 110 | 1.2.7 111 | ===== 112 | 113 | * Fixed connection reuse 114 | 115 | 1.2.6 116 | ===== 117 | 118 | * Fixed Cache 119 | 120 | * Cache now exposes document store and attributes transparently 121 | 122 | 1.2.5 123 | ===== 124 | 125 | * Added getter for users 126 | 127 | * Edges back compatibility with 2.8 solved "_from" "_to" are no longer foreign fields, ._from ._to work again 128 | 129 | * Calls to json() now print the request's content upon failure. 
130 | 131 | 132 | 1.2.4 133 | ===== 134 | 135 | * missing import in collections.py added 136 | 137 | 1.2.3 138 | ===== 139 | 140 | * Some more meaningful error messages 141 | 142 | 1.2.2 143 | ====== 144 | 145 | * Cross python support for iterators 146 | 147 | 1.2.1 148 | ====== 149 | 150 | * Cross python support for metaclasses 151 | 152 | 1.2.0 153 | ====== 154 | 155 | * Support for python 3, does not support python 2.7 yet. 156 | * Test root password and username can be defined in environment variables. 157 | 158 | 1.1.0 159 | ====== 160 | 161 | * Support for ArangoDB 3.X, pyArango no longer supports 2.X versions 162 | * Support for authentication 163 | * User support added 164 | * Added AikidoSession to seamlessly manage request sessions 165 | * AikidoSession stores basic stats about the requests 166 | * AikidoSession detects 401 errors and notifies the user that authentication is required 167 | * AikidoSession detects connection errors and notifies the user that arango is probably not running 168 | * save() and patch() functions now empty _patchStore if successful 169 | * Added free keyword arguments for the creation of AQL Queries 170 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | pyArango contributor guidelines 2 | =============================== 3 | 4 | Thank you for your interest, pyArango is now a community project and everybody is welcome to join in and contribute. 5 | We do not have a specific schedule for releases and pyArango releases tend to be in sync with ArangoDB releases. 6 | 7 | Our guidelines are simple: 8 | 9 | * Write beautiful code. 10 | * The master branch is the stable branch. Only critical pull requests are directly merged into this branch. 11 | * Send all non-critical pull requests to the dev branch. 12 | * If you add a new feature please provide a test for it. 
Otherwise your pull request might be rejected. 13 | * Any pull request that improves code coverage is highly appreciated. 14 | * Function names and arguments follow the naming used in ArangoDB's API documentation (hence the camel case). 15 | * Update the CHANGELOG.rst. We use a very simple nomenclature. A bullet point for each item. Bug fixes descriptions are prefixed with *bugfix:*, new features with *new:*, removed features with *removed:*. Anything else is not prefixed. 16 | 17 | -------------------------------------------------------------------------------- /DESCRIPTION.rst: -------------------------------------------------------------------------------- 1 | Python Object Wrapper for ArangoDB_ with built-in validation 2 | ============================================================= 3 | 4 | pyArango aims to be an easy to use driver for ArangoDB with built in validation. Collections are treated as types that apply to the documents within. You can be 100% permissive or enforce schemas and validate fields on set, on save or on both. 5 | 6 | pyArango supports graphs, indexes and probably everything that arangodb_ can do. 7 | 8 | pyArango is developed by `Tariq Daouda`_, the full source code is available from github_. 9 | 10 | .. _Tariq Daouda: http://bioinfo.iric.ca/~daoudat/ 11 | .. _github: https://github.com/tariqdaouda/pyArango 12 | .. _arangodb: http://www.arangodb.com 13 | .. _ArangoDB: http://www.arangodb.com 14 | 15 | For the latest news about pyArango, you can follow me on twitter `@tariqdaouda`_. 16 | If you have any issues with it, please file a github issue. 17 | 18 | .. 
_@tariqdaouda: https://www.twitter.com/tariqdaouda 19 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2014 Tariq Daouda 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.rst 2 | include LICENSE 3 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | pyArango 2 | ======== 3 | 4 | .. image:: https://pepy.tech/badge/pyarango 5 | :alt: downloads 6 | :target: https://pepy.tech/project/pyarango 7 | 8 | .. image:: https://pepy.tech/badge/pyarango/month 9 | :alt: downloads_month 10 | :target: https://pepy.tech/project/pyarango/month 11 | 12 | .. image:: https://pepy.tech/badge/pyarango/week 13 | :alt: downloads_week 14 | :target: https://pepy.tech/project/pyarango/week 15 | 16 | .. image:: https://travis-ci.com/Alexsaphir/pyArango.svg?branch=master 17 | :target: https://travis-ci.com/github/Alexsaphir/pyArango 18 | .. image:: https://img.shields.io/badge/python-2.7%2C%203.5-blue.svg 19 | .. 
image:: https://img.shields.io/badge/arangodb-3.0-blue.svg 20 | 21 | NoSQL is really cool, but in this harsh world it is impossible to live without field validation. 22 | 23 | **WARNING**: The last versions of pyArango are only compatible with ArangoDB 3.X. For the old version checkout the branch ArangoDBV2_ 24 | 25 | .. _ArangoDBV2: https://github.com/tariqdaouda/pyArango/tree/ArangoDBV2 26 | 27 | Key Features 28 | ------------ 29 | pyArango is geared toward the developer. It's here to help to you develop really cool apps using ArangoDB, really fast. 30 | 31 | - Light and simple interface 32 | - Built-in validation of fields on setting or on saving 33 | - Support for all index types 34 | - Supports graphs, traversals and all types of queries 35 | - Caching of documents with Insertions and Lookups in O(1) 36 | 37 | Collections are treated as types that apply to the documents within. That means you can define 38 | a Collection and then create instances of this Collection in several databases. The same goes for graphs. 39 | 40 | In other words, you can have two databases, **cache_db** and **real_db**, each of them with an instance of a 41 | **Users** Collection. You can then be assured that documents of both collections will be subjected to the same 42 | validation rules. Ain't that cool? 43 | 44 | You can be 100% permissive or enforce schemas and validate fields on set, on save or both. 45 | 46 | Installation 47 | ------------ 48 | 49 | Supports python 2.7 and 3.5. 50 | 51 | From PyPi: 52 | 53 | .. code:: shell 54 | 55 | pip install pyArango 56 | 57 | For the latest version: 58 | 59 | .. code:: shell 60 | 61 | git clone https://github.com/tariqdaouda/pyArango.git 62 | cd pyArango 63 | python setup.py develop 64 | 65 | Full documentation 66 | ------------------- 67 | 68 | This is the quickstart guide; you can find the full documentation here_. 69 | 70 | .. 
_here: https://pyarango.readthedocs.io/en/stable/ 71 | 72 | Initialization and document saving 73 | ------------------------------------- 74 | 75 | .. code:: python 76 | 77 | from pyArango.connection import * 78 | 79 | conn = Connection() 80 | 81 | conn.createDatabase(name="test_db") 82 | db = conn["test_db"] # all databases are loaded automatically into the connection and are accessible in this fashion 83 | collection = db.createCollection(name="users") # all collections are also loaded automatically 84 | 85 | # collection.delete() # self explanatory 86 | 87 | for i in xrange(100): 88 | doc = collection.createDocument() 89 | doc["name"] = "Tesla-%d" % i 90 | doc["number"] = i 91 | doc["species"] = "human" 92 | doc.save() 93 | 94 | doc = collection.createDocument() 95 | doc["name"] = "Tesla-101" 96 | doc["number"] = 101 97 | doc["species"] = "human" 98 | 99 | doc["name"] = "Simba" 100 | # doc.save() # overwrites the document 101 | doc.patch() # updates the modified field 102 | doc.delete() 103 | 104 | Queries : AQL 105 | ------------- 106 | 107 | .. code:: python 108 | 109 | aql = "FOR c IN users FILTER c.name == @name LIMIT 10 RETURN c" 110 | bindVars = {'name': 'Tesla-3'} 111 | # by setting rawResults to True you'll get dictionaries instead of Document objects, useful if you want to result to set of fields for example 112 | queryResult = db.AQLQuery(aql, rawResults=False, batchSize=1, bindVars=bindVars) 113 | document = queryResult[0] 114 | 115 | Queries : Simple queries by example 116 | ------------------------------------- 117 | PyArango supports all types of simple queries (see collection.py for the full list). Here's an example query: 118 | 119 | .. code:: python 120 | 121 | example = {'species': "human"} 122 | query = collection.fetchByExample(example, batchSize=20, count=True) 123 | print query.count # print the total number or documents 124 | 125 | Queries : Batches 126 | ------------------ 127 | 128 | .. 
code:: python 129 | 130 | for e in query : 131 | print e['name'] 132 | 133 | Defining a Collection and field/schema Validation 134 | ------------------------------------------------- 135 | 136 | PyArango allows you to implement your own field validation. 137 | Validators are simple objects deriving from classes that inherit 138 | from **Validator** and implement a **validate()** method: 139 | 140 | .. code:: python 141 | 142 | import pyArango.collection as COL 143 | import pyArango.validation as VAL 144 | from pyArango.theExceptions import ValidationError 145 | import types 146 | 147 | class String_val(VAL.Validator): 148 | def validate(self, value): 149 | if type(value) is not types.StringType : 150 | raise ValidationError("Field value must be a string") 151 | return True 152 | 153 | class Humans(COL.Collection): 154 | 155 | _validation = { 156 | 'on_save': False, 157 | 'on_set': False, 158 | 'allow_foreign_fields': True # allow fields that are not part of the schema 159 | } 160 | 161 | _fields = { 162 | 'name': COL.Field(validators=[VAL.NotNull(), String_val()]), 163 | 'anything': COL.Field(), 164 | 'species': COL.Field(validators=[VAL.NotNull(), VAL.Length(5, 15), String_val()]) 165 | } 166 | 167 | collection = db.createCollection('Humans') 168 | 169 | 170 | In addition, you can also define collection properties_ (creation arguments for ArangoDB) right inside the definition: 171 | 172 | .. 
code:: python 173 | 174 | class Humans(COL.Collection): 175 | 176 | _properties = { 177 | "keyOptions" : { 178 | "allowUserKeys": False, 179 | "type": "autoincrement", 180 | "increment": 1, 181 | "offset": 0, 182 | } 183 | } 184 | 185 | _validation = { 186 | 'on_save': False, 187 | 'on_set': False, 188 | 'allow_foreign_fields': True # allow fields that are not part of the schema 189 | } 190 | 191 | _fields = { 192 | 'name': COL.Field(validators=[VAL.NotNull(), String_val()]), 193 | 'anything': COL.Field(), 194 | 'species': COL.Field(validators=[VAL.NotNull(), VAL.Length(5, 15), String_val()]) 195 | } 196 | 197 | .. _properties: https://docs.arangodb.com/3.1/HTTP/Collection/Creating.html 198 | 199 | A note on inheritance 200 | ---------------------- 201 | 202 | There is no inheritance of the "_validation" and "_fields" dictionaries. 203 | If a class does not fully define its own, the defaults will be automatically assigned to any missing value. 204 | 205 | Creating Edges 206 | ---------------- 207 | 208 | .. code:: python 209 | 210 | from pyArango.collection import Edges 211 | 212 | class Connections(Edges): 213 | 214 | _validation = { 215 | 'on_save': False, 216 | 'on_set': False, 217 | 'allow_foreign_fields': True # allow fields that are not part of the schema 218 | } 219 | 220 | _fields = { 221 | 'length': Field(NotNull=True), 222 | } 223 | 224 | Linking Documents with Edges 225 | ----------------------------- 226 | 227 | .. code:: python 228 | 229 | from pyArango.collection import * 230 | 231 | class Things(Collection): 232 | .... 233 | 234 | class Connections(Edges): 235 | .... 236 | 237 | .... 
238 | a = myThings.createDocument() 239 | b = myThings.createDocument() 240 | 241 | conn = myConnections.createEdge() 242 | 243 | conn.links(a, b) 244 | conn["someField"] = 35 245 | conn.save() # once an edge links documents, save() and patch() can be used as with any other Document object 246 | 247 | 248 | Getting Edges linked to a vertex 249 | -------------------------------- 250 | 251 | You can do it either from a Document or an Edges collection: 252 | 253 | .. code:: python 254 | 255 | # in edges 256 | myDocument.getInEdges(myConnections) 257 | myConnections.getInEdges(myDocument) 258 | 259 | # out edges 260 | myDocument.getOutEdges(myConnections) 261 | myConnections.getOutEdges(myDocument) 262 | 263 | # both 264 | myDocument.getEdges(myConnections) 265 | myConnections.getEdges(myDocument) 266 | 267 | # you can also ask for the raw json with 268 | myDocument.getInEdges(myConnections, rawResults=True) 269 | # otherwise Document objects are returned in a list 270 | 271 | Creating a Graph 272 | ----------------- 273 | 274 | By using the graph interface you ensure for example that, whenever you delete a document, all the edges linking 275 | to that document are also deleted: 276 | 277 | .. 
code:: python 278 | 279 | from pyArango.collection import Collection, Field 280 | from pyArango.graph import Graph, EdgeDefinition 281 | 282 | class Humans(Collection): 283 | _fields = { 284 | "name": Field() 285 | } 286 | 287 | class Friend(Edges): # theGraphtheGraph 288 | _fields = { 289 | "lifetime": Field() 290 | } 291 | 292 | # Here's how you define a graph 293 | class MyGraph(Graph) : 294 | _edgeDefinitions = [EdgeDefinition("Friend", fromCollections=["Humans"], toCollections=["Humans"])] 295 | _orphanedCollections = [] 296 | 297 | # create the collections (do this only if they don't already exist in the database) 298 | self.db.createCollection("Humans") 299 | self.db.createCollection("Friend") 300 | # same for the graph 301 | theGraph = self.db.createGraph("MyGraph") 302 | 303 | # creating some documents 304 | h1 = theGraph.createVertex('Humans', {"name": "simba"}) 305 | h2 = theGraph.createVertex('Humans', {"name": "simba2"}) 306 | 307 | # linking them 308 | theGraph.link('Friend', h1, h2, {"lifetime": "eternal"}) 309 | 310 | # deleting one of them along with the edge 311 | theGraph.deleteVertex(h2) 312 | 313 | Creating a Satellite Graph 314 | ----------------- 315 | 316 | If you want to benefit from the advantages of satellite graphs, you can also create them of course. 317 | Please read the official ArangoDB Documentation for further technical information. 318 | 319 | .. 
code:: python 320 | 321 | from pyArango.connection import * 322 | from pyArango.collection import Collection, Edges, Field 323 | from pyArango.graph import Graph, EdgeDefinition 324 | 325 | databaseName = "satellite_graph_db" 326 | 327 | conn = Connection() 328 | 329 | # Cleanup (if needed) 330 | try: 331 | conn.createDatabase(name=databaseName) 332 | except Exception: 333 | pass 334 | 335 | # Select our "satellite_graph_db" database 336 | db = conn[databaseName] # all databases are loaded automatically into the connection and are accessible in this fashion 337 | 338 | # Define our vertex to use 339 | class Humans(Collection): 340 | _fields = { 341 | "name": Field() 342 | } 343 | 344 | # Define our edge to use 345 | class Friend(Edges): 346 | _fields = { 347 | "lifetime": Field() 348 | } 349 | 350 | # Here's how you define a Satellite Graph 351 | class MySatelliteGraph(Graph) : 352 | _edgeDefinitions = [EdgeDefinition("Friend", fromCollections=["Humans"], toCollections=["Humans"])] 353 | _orphanedCollections = [] 354 | 355 | theSatelliteGraph = db.createSatelliteGraph("MySatelliteGraph") 356 | 357 | Document Cache 358 | -------------- 359 | 360 | pyArango collections have a caching system for documents that performs insertions and retrievals in O(1): 361 | 362 | .. 
code:: python 363 | 364 | # create a cache a of 1500 documents for collection humans 365 | humans.activateCache(1500) 366 | 367 | # disable the cache 368 | humans.deactivateCache() 369 | 370 | Statsd Reporting 371 | ---------------- 372 | 373 | pyArango can optionally report query times to a statsd server for statistical evaluation: 374 | 375 | import statsd 376 | from pyArango.connection import Connection 377 | statsdclient = statsd.StatsClient(os.environ.get('STATSD_HOST'), int(os.environ.get('STATSD_PORT'))) 378 | conn = Connection('http://127.0.0.1:8529', 'root', 'opensesame', statsdClient = statsdclient, reportFileName = '/tmp/queries.log') 379 | 380 | It's intended to be used in a two phase way: (we assume you're using bind values - right?) 381 | - First run, which will trigger all usecases. You create the connection by specifying statsdHost, statsdPort and reportFileName. 382 | reportFilename will be filled with your queries paired with your hash identifiers. It's reported to statsd as 'pyArango_'. 383 | Later on you can use this digest to identify your queries to the gauges. 384 | - On subsequent runs you only specify statsdHost and statsdPort; only the request times are reported to statsd. 385 | 386 | Examples 387 | ======== 388 | More examples can be found in the examples directory. 389 | To try them out change the connection strings according to your local setup. 390 | 391 | Debian Dependency Graph 392 | ----------------------- 393 | If you are on a Debian / Ubuntu you can install packages with automatic dependency resolution. 394 | In the end this is a graph. This example parses Debian package files using the `deb_pkg_tools`, 395 | and will then create vertices and edges from packages and their relations. 396 | 397 | Use `examples/debiangraph.py` to install it, or `examples/fetchDebianDependencyGraph.py` to browse 398 | it as an ascii tree. 
399 | 400 | ArangoDB Social Graph 401 | --------------------- 402 | You can create the `ArangoDB SocialGraph `_ using `examples/createSocialGraph.py`. 403 | It resemples `The original ArangoDB Javascript implementation: `_ in python. 404 | -------------------------------------------------------------------------------- /examples/createSocialGraph.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | import sys 3 | from pyArango.connection import * 4 | from pyArango.graph import * 5 | from pyArango.collection import * 6 | 7 | 8 | class Social(object): 9 | class male(Collection): 10 | _fields = { 11 | "name" : Field() 12 | } 13 | 14 | class female(Collection): 15 | _fields = { 16 | "name" : Field() 17 | } 18 | 19 | class relation(Edges): 20 | _fields = { 21 | "number" : Field() 22 | } 23 | 24 | class social(Graph): 25 | 26 | _edgeDefinitions = (EdgeDefinition ('relation', 27 | fromCollections = ["female", "male"], 28 | toCollections = ["female", "male"]),) 29 | _orphanedCollections = [] 30 | 31 | 32 | def __init__(self): 33 | self.conn = Connection(username="USERNAME", password="SECRET") 34 | 35 | self.db = self.conn["_system"] 36 | if self.db.hasGraph('social'): 37 | raise Exception("The social graph was already provisioned! 
remove it first") 38 | 39 | self.female = self.db.createCollection(className='Collection', name='female') 40 | self.male = self.db.createCollection(className='Collection', name='male') 41 | 42 | self.relation = self.db.createCollection(className='Edges', name='relation') 43 | 44 | g = self.db.createGraph("social") 45 | 46 | a = g.createVertex('female', {"name": 'Alice', "_key": 'alice'}); 47 | b = g.createVertex('male', {"name": 'Bob', "_key": 'bob'}); 48 | c = g.createVertex('male', {"name": 'Charly', "_key": 'charly'}); 49 | d = g.createVertex('female', {"name": 'Diana', "_key": 'diana'}); 50 | a.save() 51 | b.save() 52 | c.save() 53 | d.save() 54 | 55 | g.link('relation', a, b, {"type": 'married', "_key": 'aliceAndBob'}) 56 | g.link('relation', a, c, {"type": 'friend', "_key": 'aliceAndCharly'}) 57 | g.link('relation', c, d, {"type": 'married', "_key": 'charlyAndDiana'}) 58 | g.link('relation', b, d, {"type": 'friend', "_key": 'bobAndDiana'}) 59 | 60 | 61 | Social() 62 | -------------------------------------------------------------------------------- /examples/debiangraph.py: -------------------------------------------------------------------------------- 1 | #/usr/bin/env python3 2 | """ 3 | example to build a file with all packages known to your system for a debian jessie: 4 | 5 | for i in $(ls /var/lib/apt/lists/*debian_dists_jessie_* |\ 6 | grep -v i386 |grep -v Release); do 7 | cat $i >> /tmp/allpackages; 8 | echo >> /tmp/allpackages; 9 | done 10 | 11 | All debian based distros have a set of files in /var/lib/apt/lists. 
def getEdgeCol(name):
    """Return the edge collection named *name*, creating it on first use.

    Collections are memoized in the module-level ``edgeCols`` dict so that
    each relationship type (Depends, Recommends, ...) maps to exactly one
    ArangoDB edge collection.
    """
    if name not in edgeCols:
        if not db.hasCollection(name):
            edgeCols[name] = db.createCollection(name=name, className='Edges')
        else:
            edgeCols[name] = db.collections[name]
    return edgeCols[name]


def saveGraphDefinition():
    """Register the named graph in ArangoDB's ``_graphs`` system collection.

    One edge definition is added per edge collection created during the
    import; every definition links ``packages`` to ``packages``.
    """
    graph_collection = db.collections["_graphs"]
    # fixed local name (was misspelled "graph_defintion")
    graph_definition = {
        "_key": "debian_dependency_graph",
        "edgeDefinitions": [],
        "orphanCollections": [],
    }
    for collection in edgeCols.keys():
        graph_definition["edgeDefinitions"].append(
            {"collection": collection,
             "from": ["packages"],
             "to": ["packages"]})
    graph_collection.createDocument(graph_definition).save()
def VersionedDependencyToDict(oneDep, hasAlternatives):
    """Serialize a versioned relationship (e.g. ``dpkg (>= 1.19)``) to a dict."""
    return {
        'name': oneDep.name,
        'version': oneDep.version,
        'operator': oneDep.operator,
        'hasAlternatives': hasAlternatives
    }


def DependencyToDict(oneDep, hasAlternatives):
    """Serialize a plain (unversioned) relationship to a dict."""
    return {
        'name': oneDep.name,
        'hasAlternatives': hasAlternatives
    }


def DependencySetToDict(dep, hasAlternatives):
    """Recursively serialize a deb_pkg_tools RelationshipSet.

    Alternative relationships ("a | b") recurse with hasAlternatives=True;
    unknown relationship types are reported and skipped.
    """
    depset = []
    for oneDep in dep.relationships:
        if isinstance(oneDep, deb_pkg_tools.deps.VersionedRelationship):
            depset.append(VersionedDependencyToDict(oneDep, hasAlternatives))
        elif isinstance(oneDep, deb_pkg_tools.deps.AlternativeRelationship):
            depset.append(DependencySetToDict(oneDep, True))
        elif isinstance(oneDep, deb_pkg_tools.deps.Relationship):
            depset.append(DependencyToDict(oneDep, hasAlternatives))
        else:
            # fixed typo in the diagnostic message (was "relationshitp")
            print("Unknown relationship: " + repr(oneDep))
    return depset


def PackageToDict(pkg):
    """Convert a parsed control-field mapping into a JSON-serializable dict.

    Relationship fields become arrays of relation dicts; everything else is
    copied verbatim. The package name doubles as the document ``_key``.
    """
    ret = {}
    for attribute in pkg.keys():
        if isinstance(pkg[attribute], deb_pkg_tools.deps.RelationshipSet):
            # relationship field becomes an array of relations:
            ret[attribute] = DependencySetToDict(pkg[attribute], False)
        else:
            # regular string field:
            ret[attribute] = pkg[attribute]
    ret["_key"] = ret["Package"]
    return ret
#!/usr/bin/python
import sys
from pyArango.connection import *
from pyArango.graph import *
from asciitree import *

conn = Connection(username="USERNAME", password="SECRET")

# NOTE(review): debiangraph.py imports into a database named "testdb";
# confirm this name matches your import setup before running.
db = conn["ddependencyGrahp"]

if not db.hasGraph('debian_dependency_graph'):
    raise Exception("didn't find the debian dependency graph, please import first!")

ddGraph = db.graphs['debian_dependency_graph']

graphQuery = '''
FOR package, depends, path IN
 1..2 ANY
 @startPackage Depends RETURN path
'''

# robustness: fail with a usage message instead of an IndexError
if len(sys.argv) < 2:
    sys.exit("usage: %s <package-name>" % sys.argv[0])
startNode = sys.argv[1]

bindVars = {"startPackage": "packages/" + startNode}

queryResult = db.AQLQuery(graphQuery, bindVars=bindVars, rawResults=True)


# sub iterateable object to build up the tree for draw_tree:
class Node(object):
    """Named tree node; asciitree's draw_tree walks .children and str()."""

    def __init__(self, name, children):
        self.name = name
        self.children = children

    def getChild(self, searchName):
        """Return the direct child named *searchName*, or None."""
        for child in self.children:
            if child.name == searchName:
                return child
        return None

    def __str__(self):
        return self.name


def iteratePath(path, depth, currentNode):
    """Fold one edge path into the tree, creating missing nodes on the way."""
    pname = path[depth]['name']
    subNode = currentNode.getChild(pname)
    if subNode is None:
        subNode = Node(pname, [])
        currentNode.children.append(subNode)
    if len(path) > depth + 1:
        iteratePath(path, depth + 1, subNode)


# Now we fold the paths substructure into the tree:
rootNode = Node(startNode, [])
for path in queryResult:
    p = path['edges']
    iteratePath(p, 0, rootNode)

# fixed: the original used a Python 2 print *statement*, a SyntaxError on
# the Python 3 versions this project supports.
print(draw_tree(rootNode))
"""Action Base Classes to do actions on to db."""

class ConnectionAction:
    """Thin HTTP helper that issues requests against a connection's endpoint."""

    def __init__(self, connection):
        """Remember the connection whose session performs the requests."""
        self.connection = connection

    @property
    def session(self):
        """Session of the connection."""
        return self.connection.session

    @property
    def end_point_url(self):
        """End point url for connection."""
        return self.connection.getEndpointURL()

    def _action_url(self, url):
        """Absolute URL for *url*, relative to this action's endpoint."""
        return '%s%s' % (self.end_point_url, url)

    def get(self, url, **kwargs):
        """HTTP GET Method."""
        return self.session.get(self._action_url(url), **kwargs)

    def post(self, url, data=None, json=None, **kwargs):
        """HTTP POST Method."""
        return self.session.post(self._action_url(url), data, json, **kwargs)

    def put(self, url, data=None, **kwargs):
        """HTTP PUT Method."""
        return self.session.put(self._action_url(url), data, **kwargs)

    def head(self, url, **kwargs):
        """HTTP HEAD Method."""
        return self.session.head(self._action_url(url), **kwargs)

    def options(self, url, **kwargs):
        """HTTP OPTIONS Method."""
        return self.session.options(self._action_url(url), **kwargs)

    def patch(self, url, data=None, **kwargs):
        """HTTP PATCH Method."""
        return self.session.patch(self._action_url(url), data, **kwargs)

    def delete(self, url, **kwargs):
        """HTTP DELETE Method."""
        return self.session.delete(self._action_url(url), **kwargs)


class DatabaseAction(ConnectionAction):
    """Same HTTP helpers, but scoped to a single database's URL prefix."""

    def __init__(self, database):
        """Remember the database whose connection performs the requests."""
        self.database = database

    @property
    def session(self):
        """Session of the connection."""
        return self.database.connection.session

    @property
    def end_point_url(self):
        """End point url for database."""
        return '%s/_db/%s' % (
            self.database.connection.getEndpointURL(), self.database.name
        )
class Admin(object):
    """administrative tasks with arangodb"""

    def __init__(self, connection):
        self.connection = connection

    def status(self):
        """ fetches the server status."""
        url = "%s/_admin/status" % self.connection.getEndpointURL()
        response = self.connection.session.get(url)
        payload = response.json()
        # non-2xx/3xx answers carry an ArangoDB error document
        if response.status_code >= 400:
            raise ArangoError(payload['errorMessage'], payload)
        return payload

    def is_cluster(self):
        """True when the connected server reports the cluster COORDINATOR role."""
        return self.status()['serverInfo']['role'] == 'COORDINATOR'
class CA_Certificate(object):
    """A CA certificate.
    If encoded is True the certificate will be automatically base64 decoded"""

    def __init__(self, certificate, encoded):
        super(CA_Certificate, self).__init__()
        self.certificate = certificate
        if encoded:
            self.certificate = base64.b64decode(self.certificate)
        # path of the materialized certificate file, once created
        self.tmp_file = None

    def get_file_path(self):
        """saves the certificate into a tmp file and returns the file path"""
        if self.tmp_file is not None:
            return self.tmp_file
        # BUG FIX: mkstemp() returns an *open* OS-level file descriptor; the
        # original discarded it and re-opened the path, leaking one fd per
        # certificate. Also dropped text=True, which conflicted with "wb".
        fd, self.tmp_file = tempfile.mkstemp()
        data = self.certificate
        if isinstance(data, str):
            # robustness: accept str certificates (PEM is ASCII text)
            data = data.encode("utf-8")
        with os.fdopen(fd, "wb") as f:
            f.write(data)
        return self.tmp_file

    def clean(self):
        """erases the tmp_file containing the certificate"""
        if self.tmp_file is not None:
            os.remove(self.tmp_file)
            self.tmp_file = None
class AikidoSession:
    """Magical Aikido being that you probably do not need to access directly
    that deflects every http request to requests in the most graceful way.
    It will also save basic stats on requests in it's attribute '.log'.
    """

    class Holder(object):
        """Callable wrapper around one requests function (get/post/...).
        Injects auth, TLS verification and timeout, and retries requests
        that fail with ArangoDB's 1200 write-write conflict error."""

        def __init__(self, fct, auth, max_conflict_retries=5, verify=True, timeout=30):
            self.fct = fct
            self.auth = auth
            self.max_conflict_retries = max_conflict_retries
            # BUG FIX: the original condition ended with
            # `not not isinstance(verify, str)`, which *rejected* plain string
            # CA-bundle paths even though the error message lists str as valid.
            if not isinstance(verify, (bool, str)) and not isinstance(verify, CA_Certificate):
                raise ValueError("'verify' argument can only be of type: bool, CA_Certificate or str ")
            self.verify = verify
            self.timeout = timeout

        def __call__(self, *args, **kwargs):
            if self.auth:
                kwargs["auth"] = self.auth
            if isinstance(self.verify, CA_Certificate):
                kwargs["verify"] = self.verify.get_file_path()
            else:
                kwargs["verify"] = self.verify

            kwargs["timeout"] = self.timeout

            try:
                retry = 0
                # loop always runs at least once, so `ret` is always bound
                # (the original left it unbound when max_conflict_retries <= 0)
                while True:
                    ret = self.fct(*args, **kwargs)
                    # 1200 is ArangoDB's write-write conflict error number
                    do_retry = ret.status_code == 1200
                    try:
                        data = ret.json()
                        do_retry = do_retry or ("errorNum" in data and data["errorNum"] == 1200)
                    except JSONDecodeError:
                        pass
                    retry += 1
                    if not do_retry or retry >= self.max_conflict_retries:
                        break
            except Exception:
                print("===\nUnable to establish connection, perhaps arango is not running.\n===")
                raise

            if len(ret.content) < 1:
                raise ConnectionError("Empty server response", ret.url, ret.status_code, ret.content)
            elif ret.status_code == 401:
                raise ConnectionError("Unauthorized access, you must supply a (username, password) with the correct credentials", ret.url, ret.status_code, ret.content)

            ret.json = JsonHook(ret)
            return ret

    def __init__(
        self,
        username,
        password,
        verify=True,
        cert=None,
        max_conflict_retries=5,
        max_retries=5,
        single_session=True,
        log_requests=False,
        pool_maxsize=10,
        timeout=30,
    ):
        if username:
            self.auth = (username, password)
        else:
            self.auth = None
        self.pool_maxsize = pool_maxsize
        self.verify = verify
        self.cert = cert
        self.max_retries = max_retries
        self.log_requests = log_requests
        self.max_conflict_retries = max_conflict_retries
        self.timeout = timeout

        self.session = None
        if single_session:
            self.session = self._make_session()

        if log_requests:
            self.log = {}
            self.log["nb_request"] = 0
            self.log["requests"] = {}

    def _make_session(self):
        """Build a requests.Session with pooled retry adapters and an
        optional client-side certificate."""
        session = requests.Session()
        kwargs = {
            'max_retries': self.max_retries,
            'pool_connections': self.pool_maxsize,
            'pool_maxsize': self.pool_maxsize,
            #'pool_block': True # We don't want to lose connections
        }
        http = requests.adapters.HTTPAdapter(**kwargs)
        https = requests.adapters.HTTPAdapter(**kwargs)
        session.mount('http://', http)
        session.mount('https://', https)
        if self.cert:
            session.cert = self.cert

        return session

    def __getattr__(self, request_function_name):
        # resolve the verb (get/post/...) on the shared or a fresh session
        if self.session is not None:
            session = self.session
        else:
            session = self._make_session()

        try:
            request_function = getattr(session, request_function_name)
        except AttributeError:
            raise AttributeError("Attribute '%s' not found (no Aikido move available)" % request_function_name)

        auth = object.__getattribute__(self, "auth")
        verify = object.__getattribute__(self, "verify")
        timeout = object.__getattribute__(self, "timeout")
        if self.log_requests:
            log = object.__getattribute__(self, "log")
            log["nb_request"] += 1
            name = request_function.__name__
            # BUG FIX: the original did `log["requests"][name] += 1`, which
            # raised KeyError on the first use of every HTTP verb.
            log["requests"][name] = log["requests"].get(name, 0) + 1

        return AikidoSession.Holder(request_function, auth, max_conflict_retries=self.max_conflict_retries, verify=verify, timeout=timeout)

    def disconnect(self):
        """Nothing to tear down: requests sessions close with the process."""
        pass
class Connection(object):
    """This is the entry point in pyArango and directly handles databases.
    @param arangoURL: can be either a string url or a list of string urls to different coordinators
    @param use_grequests: allows for running concurent requets.

    Parameters
    ----------
    arangoURL: list or str
        list of urls or url for connecting to the db
    username: str
        for credentials
    password: str
        for credentials
    verify: bool
        check the validity of the CA certificate
    verbose: bool
        flag for additional prints during run
    statsdClient: instance
        statsd instance
    reportFileName: str
        where to save statsd report
    loadBalancing: str
        type of load balancing between collections
    use_grequests: bool
        parallelise requests using gevents. Use with care as gevents monkey patches python, this could have unintended consequences on other packages
    use_jwt_authentication: bool
        use JWT authentication
    use_lock_for_reseting_jwt: bool
        use lock for reseting gevents authentication
    max_retries: int
        max number of retries for a request
    max_conflict_retries: int
        max number of requests for a conflict error (1200 arangodb error). Does not work with gevents (grequests)
    pool_maxsize: int
        max number of open connections. (Not intended for grequest)
    timeout: int
        number of seconds to wait on a hanging connection before giving up
    """

    # NOTE: attribute name kept (with its historical misspelling) for backward compatibility
    LOAD_BLANCING_METHODS = {'round-robin', 'random'}

    def __init__(
        self,
        arangoURL='http://127.0.0.1:8529',
        username=None,
        password=None,
        verify=True,
        cert=None,
        verbose=False,
        statsdClient=None,
        reportFileName=None,
        loadBalancing="round-robin",
        use_grequests=False,
        use_jwt_authentication=False,
        use_lock_for_reseting_jwt=True,
        max_retries=5,
        max_conflict_retries=5,
        pool_maxsize=10,
        timeout=30
    ):
        if loadBalancing not in Connection.LOAD_BLANCING_METHODS:
            raise ValueError("loadBalancing should be one of : %s, got %s" % (Connection.LOAD_BLANCING_METHODS, loadBalancing) )

        self.pool_maxsize = pool_maxsize
        self.loadBalancing = loadBalancing
        self.currentURLId = 0
        self.username = username
        self.use_grequests = use_grequests
        self.use_jwt_authentication = use_jwt_authentication
        self.use_lock_for_reseting_jwt = use_lock_for_reseting_jwt
        self.max_retries = max_retries
        self.max_conflict_retries = max_conflict_retries
        self.action = ConnectionAction(self)
        self.timeout = timeout

        self.databases = {}
        self.verbose = verbose

        # accept a single url as well as a list of coordinator urls
        if isinstance(arangoURL, str):
            self.arangoURL = [arangoURL]
        else:
            self.arangoURL = arangoURL

        # normalize: endpoint urls carry no trailing slash
        for i, url in enumerate(self.arangoURL):
            if url[-1] == "/":
                self.arangoURL[i] = url[:-1]

        self.identifier = None
        self.startTime = None
        self.session = None
        self.resetSession(username, password, verify, cert)

        self.users = Users(self)

        if reportFileName is not None:
            self.reportFile = open(reportFileName, 'a')
        else:
            self.reportFile = None

        self.statsdc = statsdClient
        self.reload()

    def getEndpointURL(self):
        """return an endpoint url applying load balacing strategy"""
        if self.loadBalancing == "round-robin":
            url = self.arangoURL[self.currentURLId]
            self.currentURLId = (self.currentURLId + 1) % len(self.arangoURL)
            return url
        elif self.loadBalancing == "random":
            import random
            return random.choice(self.arangoURL)

    def getURL(self):
        """return an URL for the connection"""
        return '%s/_api' % self.getEndpointURL()

    def getDatabasesURL(self):
        """return an URL to the databases"""
        if not self.session.auth:
            return '%s/database/user' % self.getURL()
        else:
            return '%s/user/%s/database' % (self.getURL(), self.username)

    def updateEndpoints(self, coordinatorURL = None):
        """updates the list of available endpoints from the server"""
        raise NotImplementedError("Not done yet.")

    def disconnectSession(self):
        """disconnect the current session, if any"""
        if self.session:
            self.session.disconnect()

    def getVersion(self):
        """fetches the arangodb server version"""
        r = self.session.get(self.getURL() + "/version")
        data = r.json()
        if r.status_code == 200 and "error" not in data:
            return data
        else:
            raise CreationError(data["errorMessage"], data)

    def create_aikido_session(
        self,
        username,
        password,
        verify,
        cert
    ) -> "AikidoSession":
        """build the default (requests based) session"""
        return AikidoSession(
            username=username,
            password=password,
            verify=verify,
            cert=cert,
            single_session=True,
            max_conflict_retries=self.max_conflict_retries,
            max_retries=self.max_retries,
            log_requests=False,
            pool_maxsize=self.pool_maxsize,
            timeout=self.timeout
        )

    def create_grequest_session(
        self,
        username,
        password,
        verify
    ):
        """build a gevent/grequests based session (import deferred: gevent monkey-patches python)"""
        from .gevent_session import AikidoSession_GRequests
        return AikidoSession_GRequests(
            username, password, self.arangoURL,
            self.use_jwt_authentication,
            self.use_lock_for_reseting_jwt,
            self.max_retries,
            verify
        )

    def resetSession(self, username=None, password=None, verify=True, cert=None):
        """resets the session"""
        self.disconnectSession()
        if self.use_grequests:
            if cert is not None:
                raise NotImplementedError('client-side certificates not supported in conjunction with grequests yet')
            self.session = self.create_grequest_session(
                username,
                password,
                verify
            )
        else:
            self.session = self.create_aikido_session(
                username,
                password,
                verify,
                cert
            )

    def reload(self):
        """Reloads the database list.
        Because loading a database triggers the loading of all collections and graphs within,
        only handles are loaded when this function is called. The full databases are loaded on demand when accessed
        """
        r = self.session.get(self.getDatabasesURL())

        data = r.json()
        if r.status_code == 200 and not data["error"]:
            self.databases = {}
            for dbName in data["result"]:
                if dbName not in self.databases:
                    self.databases[dbName] = DBHandle(self, dbName)
        else:
            raise ConnectionError(data["errorMessage"], self.getDatabasesURL(), r.status_code, r.content)

    def createDatabase(self, name, **dbArgs):
        "use dbArgs for arguments other than name. for a full list of arguments please have a look at arangoDB's doc"
        dbArgs['name'] = name
        payload = json_mod.dumps(dbArgs, default=str)
        url = self.getURL() + "/database"
        r = self.session.post(url, data = payload)
        data = r.json()
        if r.status_code == 201 and not data["error"]:
            db = Database(self, name)
            self.databases[name] = db
            return self.databases[name]
        else:
            raise CreationError(data["errorMessage"], r.content)

    def hasDatabase(self, name):
        """returns true/false wether the connection has a database by the name of 'name'"""
        return name in self.databases

    def __contains__(self, name):
        """Alias for hasDatabase"""
        return self.hasDatabase(name)

    def __getitem__(self, dbName):
        """Collection[dbName] returns a database by the name of 'dbName', raises a KeyError if not found"""
        try:
            return self.databases[dbName]
        except KeyError:
            # the database may have been created since the last reload
            self.reload()
            try:
                return self.databases[dbName]
            except KeyError:
                raise KeyError("Can't find any database named : %s" % dbName)

    def reportStart(self, name):
        """record the start of a query for statsd reporting"""
        if self.statsdc is not None:
            self.identifier = str(uuid.uuid5(uuid.NAMESPACE_DNS, name))[-6:]
            if self.reportFile is not None:
                self.reportFile.write("[%s]: %s\n" % (self.identifier, name))
                self.reportFile.flush()
            self.startTime = datetime.now()

    def reportItem(self):
        """report the elapsed time since reportStart() to statsd"""
        if self.statsdc is not None:
            diff = datetime.now() - self.startTime
            # BUG FIX: total_seconds() already includes the microseconds
            # fraction; the original added diff.microseconds again, thereby
            # double-counting them in the reported timing.
            microsecs = diff.total_seconds() * (1000 ** 2)
            self.statsdc.timing("pyArango_" + self.identifier, int(microsecs))
| COLLECTION_UNLOADED_STATUS = 2 6 | COLLECTION_LOADED_STATUS = 3 7 | COLLECTION_LOADING_STATUS = 4 8 | COLLECTION_DELETED_STATUS = 5 -------------------------------------------------------------------------------- /pyArango/database.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import types 4 | 5 | from . import collection as COL 6 | from . import consts as CONST 7 | from . import graph as GR 8 | 9 | from .action import DatabaseAction 10 | from .document import Document 11 | from .foxx import Foxx 12 | from .tasks import Tasks 13 | from .graph import Graph 14 | from .query import AQLQuery 15 | from .theExceptions import CreationError, UpdateError, AQLQueryError, TransactionError, AQLFetchError 16 | 17 | __all__ = ["Database", "DBHandle"] 18 | 19 | class Database(object): 20 | """Databases are meant to be instanciated by connections""" 21 | 22 | def __init__(self, connection, name): 23 | 24 | self.name = name 25 | self.connection = connection 26 | self.action = DatabaseAction(self) 27 | self.collections = {} 28 | self.graphs = {} 29 | self.foxx = Foxx(self) 30 | self.tasks = Tasks(self) 31 | 32 | self.reload() 33 | 34 | def getURL(self): 35 | return '%s/_db/%s/_api' % (self.connection.getEndpointURL(), self.name) 36 | 37 | def getCollectionsURL(self): 38 | return '%s/collection' % (self.getURL()) 39 | 40 | def getCursorsURL(self): 41 | return '%s/cursor' % (self.getURL()) 42 | 43 | def getExplainURL(self): 44 | return '%s/explain' % (self.getURL()) 45 | 46 | def getGraphsURL(self): 47 | return "%s/gharial" % self.getURL() 48 | 49 | def getTransactionURL(self): 50 | return "%s/transaction" % self.getURL() 51 | 52 | def reloadCollections(self): 53 | "reloads the collection list." 
54 | r = self.connection.session.get(self.getCollectionsURL()) 55 | data = r.json() 56 | if r.status_code == 200: 57 | self.collections = {} 58 | 59 | for colData in data["result"]: 60 | colName = colData['name'] 61 | if colData['isSystem']: 62 | colObj = COL.SystemCollection(self, colData) 63 | else: 64 | try: 65 | colClass = COL.getCollectionClass(colName) 66 | colObj = colClass(self, colData) 67 | except KeyError: 68 | if colData["type"] == CONST.COLLECTION_EDGE_TYPE: 69 | colObj = COL.Edges(self, colData) 70 | elif colData["type"] == CONST.COLLECTION_DOCUMENT_TYPE: 71 | colObj = COL.Collection(self, colData) 72 | else: 73 | print(("Warning!! Collection of unknown type: %d, trying to load it as Collection nonetheless." % colData["type"])) 74 | colObj = COL.Collection(self, colData) 75 | 76 | self.collections[colName] = colObj 77 | else: 78 | raise UpdateError(data["errorMessage"], data) 79 | 80 | def reloadGraphs(self): 81 | "reloads the graph list" 82 | r = self.connection.session.get(self.getGraphsURL()) 83 | data = r.json() 84 | if r.status_code == 200: 85 | self.graphs = {} 86 | for graphData in data["graphs"]: 87 | try: 88 | self.graphs[graphData["_key"]] = GR.getGraphClass(graphData["_key"])(self, graphData) 89 | except KeyError: 90 | self.graphs[graphData["_key"]] = Graph(self, graphData) 91 | else: 92 | raise UpdateError(data["errorMessage"], data) 93 | 94 | def reload(self): 95 | "reloads collections and graphs" 96 | self.reloadCollections() 97 | self.reloadGraphs() 98 | self.foxx.reload() 99 | 100 | def createCollection(self, className = 'Collection', **colProperties): 101 | """Creates a collection and returns it. 102 | ClassName the name of a class inheriting from Collection or Egdes, it can also be set to 'Collection' or 'Edges' in order to create untyped collections of documents or edges. 103 | Use colProperties to put things such as 'waitForSync = True' (see ArangoDB's doc 104 | for a full list of possible arugments). 
If a '_properties' dictionary is defined in the collection schema, arguments to this function overide it""" 105 | 106 | colClass = COL.getCollectionClass(className) 107 | 108 | if len(colProperties) > 0: 109 | colProperties = dict(colProperties) 110 | else: 111 | try: 112 | colProperties = dict(colClass._properties) 113 | except AttributeError: 114 | colProperties = {} 115 | 116 | if className != 'Collection' and className != 'Edges' and 'name' not in colProperties: 117 | colProperties['name'] = className 118 | else: 119 | if 'name' not in colProperties: 120 | raise ValueError("a 'name' argument mush be supplied if you want to create a generic collection") 121 | 122 | if colProperties['name'] in self.collections: 123 | raise CreationError("Database %s already has a collection named %s" % (self.name, colProperties['name']) ) 124 | 125 | if issubclass(colClass, COL.Edges) or colClass.__class__ is COL.Edges: 126 | colProperties["type"] = CONST.COLLECTION_EDGE_TYPE 127 | else: 128 | colProperties["type"] = CONST.COLLECTION_DOCUMENT_TYPE 129 | 130 | payload = json.dumps(colProperties, default=str) 131 | req = self.connection.session.post(self.getCollectionsURL(), data = payload) 132 | data = req.json() 133 | 134 | if req.status_code == 200 and not data["error"]: 135 | col = colClass(self, data) 136 | self.collections[col.name] = col 137 | return self.collections[col.name] 138 | else: 139 | raise CreationError(data["errorMessage"], data) 140 | 141 | def fetchDocument(self, _id): 142 | "fetchs a document using it's _id" 143 | sid = _id.split("/") 144 | return self[sid[0]][sid[1]] 145 | 146 | def createGraph(self, name, createCollections = True, isSmart = False, numberOfShards = None, smartGraphAttribute = None, replicationFactor = None, writeConcern = None): 147 | """Creates a graph and returns it. 'name' must be the name of a class inheriting from Graph. 148 | Checks will be performed to make sure that every collection mentionned in the edges definition exist. 
Raises a ValueError in case of 149 | a non-existing collection.""" 150 | 151 | def _checkCollectionList(lst): 152 | for colName in lst: 153 | if not COL.isCollection(colName): 154 | raise ValueError("'%s' is not a defined Collection" % colName) 155 | 156 | graphClass = GR.getGraphClass(name) 157 | 158 | ed = [] 159 | for e in graphClass._edgeDefinitions: 160 | if not COL.isEdgeCollection(e.edgesCollection): 161 | raise ValueError("'%s' is not a defined Edge Collection" % e.edgesCollection) 162 | _checkCollectionList(e.fromCollections) 163 | _checkCollectionList(e.toCollections) 164 | 165 | ed.append(e.toJson()) 166 | 167 | _checkCollectionList(graphClass._orphanedCollections) 168 | 169 | options = {} 170 | if numberOfShards: 171 | options['numberOfShards'] = numberOfShards 172 | if smartGraphAttribute: 173 | options['smartGraphAttribute'] = smartGraphAttribute 174 | if replicationFactor: 175 | options['replicationFactor'] = replicationFactor 176 | if writeConcern: 177 | options['writeConcern'] = writeConcern 178 | 179 | payload = { 180 | "name": name, 181 | "edgeDefinitions": ed, 182 | "orphanCollections": graphClass._orphanedCollections 183 | } 184 | 185 | if isSmart: 186 | payload['isSmart'] = isSmart 187 | 188 | if options: 189 | payload['options'] = options 190 | 191 | payload = json.dumps(payload) 192 | 193 | r = self.connection.session.post(self.getGraphsURL(), data = payload) 194 | data = r.json() 195 | 196 | if r.status_code == 201 or r.status_code == 202: 197 | self.graphs[name] = graphClass(self, data["graph"]) 198 | else: 199 | raise CreationError(data["errorMessage"], data) 200 | return self.graphs[name] 201 | 202 | def createSatelliteGraph(self, name, createCollections = True): 203 | return self.createGraph(name, createCollections, False, None, None, "satellite", None); 204 | 205 | def hasCollection(self, name): 206 | """returns true if the databse has a collection by the name of 'name'""" 207 | return name in self.collections 208 | 209 | def 
hasGraph(self, name): 210 | """returns true if the databse has a graph by the name of 'name'""" 211 | return name in self.graphs 212 | 213 | def dropAllCollections(self): 214 | """drops all public collections (graphs included) from the database""" 215 | for graph_name in self.graphs: 216 | self.graphs[graph_name].delete() 217 | for collection_name in self.collections: 218 | # Collections whose name starts with '_' are system collections 219 | if not collection_name.startswith('_'): 220 | self[collection_name].delete() 221 | return 222 | 223 | def AQLQuery(self, query, batchSize = 100, rawResults = False, bindVars = None, options = None, count = False, fullCount = False, 224 | json_encoder = None, **moreArgs): 225 | """Set rawResults = True if you want the query to return dictionnaries instead of Document objects. 226 | You can use **moreArgs to pass more arguments supported by the api, such as ttl=60 (time to live)""" 227 | if bindVars is None: 228 | bindVars = {} 229 | if options is None: 230 | options = {} 231 | 232 | return AQLQuery(self, query, rawResults = rawResults, batchSize = batchSize, bindVars = bindVars, options = options, count = count, fullCount = fullCount, 233 | json_encoder = json_encoder, **moreArgs) 234 | 235 | def __get_logger(self, logger, log_level): 236 | if logger is None: 237 | return None 238 | return getattr(logger, logging.getLevelName(log_level).lower()) 239 | 240 | def fetch_element( 241 | self, aql_query, bind_vars=None, dont_raise_error_if_empty=False, 242 | default_output=None, logger=None, log_level=logging.DEBUG 243 | ): 244 | """Fetch element by running a query. 245 | 246 | Parameters 247 | ---------- 248 | aql_query : str 249 | aql query string. 250 | bind_vars : dict, optional 251 | dictonary of bind variables (the default is None) 252 | dont_raise_error_if_empty: bool, optional 253 | do not raise error if the returned is empty. 
(the default is False) 254 | default_output: dict, optional 255 | default output if no value is returned. (the default is None) 256 | logger : Logger, optional 257 | logger to log the query and result. 258 | (the default is None means don't log) 259 | log_level: Logger.loglevel, optional 260 | level of the log. (the default is logging.DEBUG) 261 | 262 | Raises 263 | ------ 264 | AQLFetchError 265 | When unable to fetch results or more than one 1 results returned. 266 | 267 | Returns 268 | ------- 269 | any 270 | an element returned by query. 271 | 272 | """ 273 | log = self.__get_logger(logger, log_level) 274 | if log is not None: 275 | log(aql_query) 276 | if bind_vars is None: 277 | bind_vars = {} 278 | response = self.AQLQuery( 279 | aql_query, bindVars=bind_vars, rawResults=True 280 | ).response 281 | if log is not None: 282 | log(response["result"]) 283 | num_results = len(response["result"]) 284 | if num_results == 1: 285 | return response["result"][0] 286 | if dont_raise_error_if_empty and num_results == 0: 287 | return default_output 288 | raise AQLFetchError( 289 | "No results matched for query." if num_results == 0 290 | else "More than one results received" 291 | ) 292 | 293 | def fetch_list( 294 | self, aql_query, bind_vars=None, batch_size=200, 295 | dont_raise_error_if_empty=False, logger=None, 296 | log_level=logging.DEBUG 297 | ): 298 | """Fetch list of elements by running a query and merging all the batches. 299 | 300 | Parameters 301 | ---------- 302 | aql_query : str 303 | aql query string. 304 | bind_vars : dict, optional 305 | dictonary of bind variables (the default is None) 306 | batch_size : int, optional 307 | fetching batch size (the default is 200) 308 | dont_raise_error_if_empty: bool, optional 309 | do not raise error if the returned is empty. (the default is False) 310 | logger : Logger, optional 311 | logger to log the query and result. 
312 | (the default is None means don't log) 313 | log_level: Logger.loglevel, optional 314 | level of the log. (the default is logging.DEBUG) 315 | 316 | Raises 317 | ------ 318 | AQLFetchError 319 | When unable to fetch results 320 | 321 | Returns 322 | ------- 323 | list(any) 324 | a list returned by query. 325 | 326 | """ 327 | try: 328 | log = self.__get_logger(logger, log_level) 329 | if log is not None: 330 | log(aql_query) 331 | query = self.AQLQuery( 332 | aql_query, batchSize=batch_size, rawResults=True, 333 | bindVars=(bind_vars if bind_vars is not None else {}) 334 | ) 335 | batch_index = 0 336 | result = [] 337 | while True: 338 | if len(query.response['result']) == 0: 339 | break 340 | result.extend(query.response['result']) 341 | batch_index += 1 342 | query.nextBatch() 343 | except StopIteration: 344 | if log is not None: 345 | log(result) 346 | if len(result) != 0: 347 | return result 348 | except: 349 | raise 350 | if batch_index == 0 and dont_raise_error_if_empty: 351 | return [] 352 | raise AQLFetchError( 353 | "No results matched for query in fetching the batch index: %s." % ( 354 | batch_index 355 | ) 356 | ) 357 | 358 | def fetch_list_as_batches( 359 | self, aql_query, bind_vars=None, batch_size=200, 360 | dont_raise_error_if_empty=False, logger=None, 361 | log_level=logging.DEBUG 362 | ): 363 | """Fetch list of elements as batches by running the query. 364 | 365 | Generator which yeilds each batch as result. 366 | 367 | Parameters 368 | ---------- 369 | aql_query : str 370 | aql query string. 371 | bind_vars : dict, optional 372 | dictonary of bind variables (the default is None) 373 | batch_size : int, optional 374 | fetching batch size (the default is 200) 375 | dont_raise_error_if_empty: bool, optional 376 | do not raise error if the returned is empty. (the default is False) 377 | logger : Logger, optional 378 | logger to log the query and result. 
379 | (the default is None means don't log) 380 | log_level: Logger.loglevel, optional 381 | level of the log. (the default is logging.DEBUG) 382 | 383 | Raises 384 | ------ 385 | AQLFetchError 386 | When unable to fetch results 387 | 388 | Returns 389 | ------- 390 | list(any) 391 | a list returned by query. 392 | 393 | """ 394 | try: 395 | log = self.__get_logger(logger, log_level) 396 | if log is not None: 397 | log(aql_query) 398 | query = self.AQLQuery( 399 | aql_query, batchSize=batch_size, rawResults=True, 400 | bindVars=(bind_vars if bind_vars is not None else {}) 401 | ) 402 | batch_index = 0 403 | while True: 404 | if len(query.response['result']) == 0: 405 | break 406 | if log is not None: 407 | log( 408 | "batch_result for index '%s': %s", 409 | batch_index, query.response['result'] 410 | ) 411 | yield query.response['result'] 412 | batch_index += 1 413 | query.nextBatch() 414 | except StopIteration: 415 | return 416 | except: 417 | raise 418 | if batch_index == 0 and dont_raise_error_if_empty: 419 | return 420 | raise AQLFetchError( 421 | "No results matched for query in fetching the batch index: %s." % ( 422 | batch_index 423 | ) 424 | ) 425 | 426 | def no_fetch_run( 427 | self, aql_query, bind_vars=None, logger=None, 428 | log_level=logging.DEBUG 429 | ): 430 | """Run query which doesn't have a return. 431 | 432 | Parameters 433 | ---------- 434 | aql_query : str 435 | aql query string. 436 | bind_vars : dict, optional 437 | dictonary of bind variables (the default is None) 438 | logger : Logger, optional 439 | logger to log the query and result. 440 | (the default is None means don't log) 441 | log_level: Logger.loglevel, optional 442 | level of the log. (the default is logging.DEBUG) 443 | 444 | Raises 445 | ------ 446 | AQLFetchError 447 | When able to fetch results. 
448 | 449 | """ 450 | log = self.__get_logger(logger, log_level) 451 | if log is not None: 452 | log(aql_query) 453 | response = self.AQLQuery( 454 | aql_query, rawResults=True, 455 | bindVars=(bind_vars if bind_vars is not None else {}) 456 | ).response 457 | if log is not None: 458 | log(response["result"]) 459 | if len(response["result"]) == 0: 460 | return 461 | raise AQLFetchError("No results should be returned for the query.") 462 | 463 | def explainAQLQuery(self, query, bindVars = None, allPlans = False): 464 | """Returns an explanation of the query. Setting allPlans to True will result in ArangoDB returning all possible plans. False returns only the optimal plan""" 465 | if bindVars is None: 466 | bindVars = {} 467 | 468 | payload = {'query' : query, 'bindVars' : bindVars, 'allPlans' : allPlans} 469 | request = self.connection.session.post(self.getExplainURL(), data = json.dumps(payload, default=str)) 470 | return request.json() 471 | 472 | def validateAQLQuery(self, query, bindVars = None, options = None): 473 | "returns the server answer is the query is valid. 
Raises an AQLQueryError if not" 474 | if bindVars is None: 475 | bindVars = {} 476 | if options is None: 477 | options = {} 478 | payload = {'query' : query, 'bindVars' : bindVars, 'options' : options} 479 | r = self.connection.session.post(self.getCursorsURL(), data = json.dumps(payload, default=str)) 480 | data = r.json() 481 | if r.status_code == 201 and not data["error"]: 482 | return data 483 | else: 484 | raise AQLQueryError(data["errorMessage"], query, data) 485 | 486 | def transaction(self, collections, action, waitForSync = False, lockTimeout = None, params = None): 487 | """Execute a server-side transaction""" 488 | payload = { 489 | "collections": collections, 490 | "action": action, 491 | "waitForSync": waitForSync} 492 | if lockTimeout is not None: 493 | payload["lockTimeout"] = lockTimeout 494 | if params is not None: 495 | payload["params"] = params 496 | 497 | self.connection.reportStart(action) 498 | 499 | r = self.connection.session.post(self.getTransactionURL(), data = json.dumps(payload, default=str)) 500 | 501 | self.connection.reportItem() 502 | 503 | data = r.json() 504 | 505 | if (r.status_code == 200 or r.status_code == 201 or r.status_code == 202) and not data.get("error"): 506 | return data 507 | else: 508 | raise TransactionError(data["errorMessage"], action, data) 509 | 510 | def __repr__(self): 511 | return "ArangoDB database: %s" % self.name 512 | 513 | # def __contains__(self, name): 514 | # """if name in database""" 515 | # return self.hasCollection(name) or self.hasGraph(name) 516 | 517 | def __contains__(self, name_or_id): 518 | """allows to check if name_or_id:str is the id of an existing document""" 519 | splid = name_or_id.split('/') 520 | if len(splid) == 2: 521 | col, key = splid 522 | try: 523 | return key in self[col] 524 | except KeyError: 525 | return False 526 | else: 527 | return self.hasCollection(name_or_id) or self.hasGraph(name_or_id) 528 | 529 | def __getitem__(self, col_or_doc_id): 530 | """use 
database[col_or_doc_id] to get a collection from the database""" 531 | try: 532 | col_name, doc_key = col_or_doc_id.split('/') 533 | return self.collections[col_name][doc_key] 534 | except ValueError: 535 | try: 536 | return self.collections[col_or_doc_id] 537 | except KeyError: 538 | self.reload() 539 | try: 540 | return self.collections[col_or_doc_id] 541 | except KeyError: 542 | raise KeyError("Can't find any collection named : %s" % col_or_doc_id) 543 | 544 | class DBHandle(Database): 545 | "As the loading of a Database also triggers the loading of collections and graphs within. Only handles are loaded first. The full database are loaded on demand in a fully transparent manner." 546 | def __init__(self, connection, name): 547 | self.connection = connection 548 | self.name = name 549 | 550 | def __getattr__(self, k): 551 | name = Database.__getattribute__(self, 'name') 552 | connection = Database.__getattribute__(self, 'connection') 553 | Database.__init__(self, connection, name) 554 | return Database.__getattribute__(self, k) 555 | -------------------------------------------------------------------------------- /pyArango/doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 
16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | 
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pyArango.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyArango.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/pyArango" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pyArango" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 
152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /pyArango/doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source 10 | set I18NSPHINXOPTS=%SPHINXOPTS% source 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. 
htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 
76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pyArango.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pyArango.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 
137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 
205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /pyArango/doc/source/action.rst: -------------------------------------------------------------------------------- 1 | Action 2 | ---------- 3 | .. automodule:: pyArango.action 4 | :members: 5 | -------------------------------------------------------------------------------- /pyArango/doc/source/admin.rst: -------------------------------------------------------------------------------- 1 | Admin 2 | ---------- 3 | .. automodule:: pyArango.admin 4 | :members: 5 | -------------------------------------------------------------------------------- /pyArango/doc/source/ca_certificate.rst: -------------------------------------------------------------------------------- 1 | CA Certificate 2 | ---------- 3 | .. 
automodule:: pyArango.ca_certificate 4 | :members: 5 | -------------------------------------------------------------------------------- /pyArango/doc/source/collection.rst: -------------------------------------------------------------------------------- 1 | Collection 2 | ---------- 3 | .. automodule:: pyArango.collection 4 | :members: -------------------------------------------------------------------------------- /pyArango/doc/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # pyArango documentation build configuration file, created by 4 | # sphinx-quickstart on Sat Feb 7 19:33:06 2015. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | import sphinx_rtd_theme 19 | 20 | # If extensions (or modules to document with autodoc) are in another directory, 21 | # add these directories to sys.path here. If the directory is relative to the 22 | # documentation root, use os.path.abspath to make it absolute, like shown here. 23 | sys.path.insert(0, os.path.abspath('.')) 24 | 25 | # -- General configuration ------------------------------------------------ 26 | 27 | # If your documentation needs a minimal Sphinx version, state it here. 28 | #needs_sphinx = '1.0' 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = [ 34 | 'sphinx.ext.autodoc', 35 | 'sphinx.ext.todo', 36 | 'sphinx.ext.coverage', 37 | 'sphinx.ext.viewcode', 38 | ] 39 | 40 | # Add any paths that contain templates here, relative to this directory. 
41 | templates_path = ['_templates'] 42 | 43 | # The suffix of source filenames. 44 | source_suffix = '.rst' 45 | 46 | # The encoding of source files. 47 | #source_encoding = 'utf-8-sig' 48 | 49 | # The master toctree document. 50 | master_doc = 'index' 51 | 52 | # General information about the project. 53 | project = u'pyArango' 54 | copyright = u'2015, Tariq Daouda' 55 | 56 | # The version info for the project you're documenting, acts as replacement for 57 | # |version| and |release|, also used in various other places throughout the 58 | # built documents. 59 | # 60 | # The short X.Y version. 61 | version = '1' 62 | # The full version, including alpha/beta/rc tags. 63 | release = '1.x.x' 64 | 65 | # The language for content autogenerated by Sphinx. Refer to documentation 66 | # for a list of supported languages. 67 | #language = None 68 | 69 | # There are two options for replacing |today|: either, you set today to some 70 | # non-false value, then it is used: 71 | #today = '' 72 | # Else, today_fmt is used as the format for a strftime call. 73 | #today_fmt = '%B %d, %Y' 74 | 75 | # List of patterns, relative to source directory, that match files and 76 | # directories to ignore when looking for source files. 77 | exclude_patterns = [] 78 | 79 | # The reST default role (used for this markup: `text`) to use for all 80 | # documents. 81 | #default_role = None 82 | 83 | # If true, '()' will be appended to :func: etc. cross-reference text. 84 | #add_function_parentheses = True 85 | 86 | # If true, the current module name will be prepended to all description 87 | # unit titles (such as .. function::). 88 | #add_module_names = True 89 | 90 | # If true, sectionauthor and moduleauthor directives will be shown in the 91 | # output. They are ignored by default. 92 | #show_authors = False 93 | 94 | # The name of the Pygments (syntax highlighting) style to use. 95 | pygments_style = 'sphinx' 96 | 97 | # A list of ignored prefixes for module index sorting. 
98 | #modindex_common_prefix = [] 99 | 100 | # If true, keep warnings as "system message" paragraphs in the built documents. 101 | #keep_warnings = False 102 | 103 | 104 | # -- Options for HTML output ---------------------------------------------- 105 | 106 | # The theme to use for HTML and HTML Help pages. See the documentation for 107 | # a list of builtin themes. 108 | # html_theme = 'default' 109 | html_theme = "sphinx_rtd_theme" 110 | 111 | # Theme options are theme-specific and customize the look and feel of a theme 112 | # further. For a list of options available for each theme, see the 113 | # documentation. 114 | #html_theme_options = {} 115 | 116 | # Add any paths that contain custom themes here, relative to this directory. 117 | #html_theme_path = [] 118 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 119 | 120 | # The name for this set of Sphinx documents. If None, it defaults to 121 | # " v documentation". 122 | #html_title = None 123 | 124 | # A shorter title for the navigation bar. Default is the same as html_title. 125 | #html_short_title = None 126 | 127 | # The name of an image file (relative to this directory) to place at the top 128 | # of the sidebar. 129 | #html_logo = None 130 | 131 | # The name of an image file (within the static path) to use as favicon of the 132 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 133 | # pixels large. 134 | #html_favicon = None 135 | 136 | # Add any paths that contain custom static files (such as style sheets) here, 137 | # relative to this directory. They are copied after the builtin static files, 138 | # so a file named "default.css" will overwrite the builtin "default.css". 139 | html_static_path = ['_static'] 140 | 141 | # Add any extra paths that contain custom files (such as robots.txt or 142 | # .htaccess) here, relative to this directory. These files are copied 143 | # directly to the root of the documentation. 
144 | #html_extra_path = [] 145 | 146 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 147 | # using the given strftime format. 148 | #html_last_updated_fmt = '%b %d, %Y' 149 | 150 | # If true, SmartyPants will be used to convert quotes and dashes to 151 | # typographically correct entities. 152 | #html_use_smartypants = True 153 | 154 | # Custom sidebar templates, maps document names to template names. 155 | #html_sidebars = {} 156 | 157 | # Additional templates that should be rendered to pages, maps page names to 158 | # template names. 159 | #html_additional_pages = {} 160 | 161 | # If false, no module index is generated. 162 | #html_domain_indices = True 163 | 164 | # If false, no index is generated. 165 | #html_use_index = True 166 | 167 | # If true, the index is split into individual pages for each letter. 168 | #html_split_index = False 169 | 170 | # If true, links to the reST sources are added to the pages. 171 | #html_show_sourcelink = True 172 | 173 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 174 | #html_show_sphinx = True 175 | 176 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 177 | #html_show_copyright = True 178 | 179 | # If true, an OpenSearch description file will be output, and all pages will 180 | # contain a tag referring to it. The value of this option must be the 181 | # base URL from which the finished HTML is served. 182 | #html_use_opensearch = '' 183 | 184 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 185 | #html_file_suffix = None 186 | 187 | # Output file base name for HTML help builder. 188 | htmlhelp_basename = 'pyArangodoc' 189 | 190 | 191 | # -- Options for LaTeX output --------------------------------------------- 192 | 193 | latex_elements = { 194 | # The paper size ('letterpaper' or 'a4paper'). 195 | #'papersize': 'letterpaper', 196 | 197 | # The font size ('10pt', '11pt' or '12pt'). 
198 | #'pointsize': '10pt', 199 | 200 | # Additional stuff for the LaTeX preamble. 201 | #'preamble': '', 202 | } 203 | 204 | # Grouping the document tree into LaTeX files. List of tuples 205 | # (source start file, target name, title, 206 | # author, documentclass [howto, manual, or own class]). 207 | latex_documents = [ 208 | ('index', 'pyArango.tex', u'pyArango Documentation', 209 | u'Tariq Daouda', 'manual'), 210 | ] 211 | 212 | # The name of an image file (relative to this directory) to place at the top of 213 | # the title page. 214 | #latex_logo = None 215 | 216 | # For "manual" documents, if this is true, then toplevel headings are parts, 217 | # not chapters. 218 | #latex_use_parts = False 219 | 220 | # If true, show page references after internal links. 221 | #latex_show_pagerefs = False 222 | 223 | # If true, show URL addresses after external links. 224 | #latex_show_urls = False 225 | 226 | # Documents to append as an appendix to all manuals. 227 | #latex_appendices = [] 228 | 229 | # If false, no module index is generated. 230 | #latex_domain_indices = True 231 | 232 | 233 | # -- Options for manual page output --------------------------------------- 234 | 235 | # One entry per manual page. List of tuples 236 | # (source start file, name, description, authors, manual section). 237 | man_pages = [ 238 | ('index', 'pyarango', u'pyArango Documentation', 239 | [u'Tariq Daouda'], 1) 240 | ] 241 | 242 | # If true, show URL addresses after external links. 243 | #man_show_urls = False 244 | 245 | 246 | # -- Options for Texinfo output ------------------------------------------- 247 | 248 | # Grouping the document tree into Texinfo files. 
List of tuples 249 | # (source start file, target name, title, author, 250 | # dir menu entry, description, category) 251 | texinfo_documents = [ 252 | ('index', 'pyArango', u'pyArango Documentation', 253 | u'Tariq Daouda', 'pyArango', 'One line description of project.', 254 | 'Miscellaneous'), 255 | ] 256 | 257 | # Documents to append as an appendix to all manuals. 258 | #texinfo_appendices = [] 259 | 260 | # If false, no module index is generated. 261 | #texinfo_domain_indices = True 262 | 263 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 264 | #texinfo_show_urls = 'footnote' 265 | 266 | # If true, do not generate a @detailmenu in the "Top" node's menu. 267 | #texinfo_no_detailmenu = False 268 | 269 | #Document both the class and the __init__() 270 | autoclass_content = 'both' 271 | -------------------------------------------------------------------------------- /pyArango/doc/source/connection.rst: -------------------------------------------------------------------------------- 1 | Connection 2 | ---------- 3 | .. automodule:: pyArango.connection 4 | :members: 5 | -------------------------------------------------------------------------------- /pyArango/doc/source/database.rst: -------------------------------------------------------------------------------- 1 | Database 2 | ---------- 3 | .. automodule:: pyArango.database 4 | :members: -------------------------------------------------------------------------------- /pyArango/doc/source/document.rst: -------------------------------------------------------------------------------- 1 | Document 2 | ---------- 3 | .. automodule:: pyArango.document 4 | :members: -------------------------------------------------------------------------------- /pyArango/doc/source/exceptions.rst: -------------------------------------------------------------------------------- 1 | Exceptions 2 | ---------- 3 | .. 
automodule:: pyArango.theExceptions 4 | :members: -------------------------------------------------------------------------------- /pyArango/doc/source/foxx.rst: -------------------------------------------------------------------------------- 1 | Foxx 2 | ---------- 3 | .. automodule:: pyArango.foxx 4 | :members: 5 | -------------------------------------------------------------------------------- /pyArango/doc/source/gevent_session.rst: -------------------------------------------------------------------------------- 1 | Gevent 2 | ---------- 3 | .. automodule:: pyArango.gevent_session 4 | :members: 5 | -------------------------------------------------------------------------------- /pyArango/doc/source/graph.rst: -------------------------------------------------------------------------------- 1 | Graph 2 | ---------- 3 | .. automodule:: pyArango.graph 4 | :members: -------------------------------------------------------------------------------- /pyArango/doc/source/index.rst: -------------------------------------------------------------------------------- 1 | .. pyArango documentation master file, created by 2 | sphinx-quickstart on Sat Feb 7 19:33:06 2015. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to pyArango's documentation! 7 | ==================================== 8 | 9 | .. image:: https://travis-ci.org/tariqdaouda/pyArango.svg?branch=1.2.2 10 | :target: https://travis-ci.org/tariqdaouda/pyArango 11 | .. image:: https://img.shields.io/badge/python-2.7%2C%203.5-blue.svg 12 | .. image:: https://img.shields.io/badge/arangodb-3.6-blue.svg 13 | 14 | pyArango is a python driver for the NoSQL amazing database ArangoDB_ first written by `Tariq Daouda`_. As of January 2019 pyArango was handed over to the ArangoDB-Community that now ensures the developement and maintenance. It has a very light interface and built in validation. 
pyArango is distributed under the ApacheV2 Licence and the full source code can be found on github_. 15 | 16 | Key Features 17 | ------------ 18 | pyArango is geared toward the developer. It's here to help to you develop really cool apps using ArangoDB, really fast. 19 | 20 | - Light and Simple interface 21 | - Built-in Validation of fields on setting or on saving 22 | - Support for all index types 23 | - Supports graphs, traversals and all types of queries 24 | - Caching of documents with Insertions and Lookups in O(1) 25 | 26 | Collections are treated as types that apply to the documents within. That means that you can define 27 | a Collection and then create instances of this Collection in several databases. The same goes for graphs 28 | 29 | In other words, you can have two databases **cache_db** and **real_db** each of them with an instance of a 30 | **Users** Collection. You can then be assured that documents of both collections will be subjected to the same 31 | validation rules. Ain't that cool? 32 | 33 | You can be 100% permissive or enforce schemas and validate fields, on set, on save or both. 34 | 35 | .. _ArangoDB: http://www.arangodb.com 36 | .. _Tariq Daouda: http://www.tariqdaouda.com 37 | .. _github: https://github.com/tariqdaouda/pyArango 38 | 39 | Installation: 40 | ------------- 41 | 42 | From PyPi: 43 | 44 | .. code:: 45 | 46 | pip install pyArango 47 | 48 | For the latest version: 49 | 50 | .. code:: 51 | 52 | git clone https://github.com/tariqdaouda/pyArango.git 53 | cd pyArango 54 | python setup.py develop 55 | 56 | Quickstart: 57 | ----------- 58 | 59 | pyArango's github has list of examples to get you started here_. 60 | 61 | .. _here: https://github.com/tariqdaouda/pyArango 62 | 63 | Contents: 64 | --------- 65 | 66 | .. 
toctree:: 67 | :maxdepth: 3 68 | 69 | connection 70 | database 71 | collection 72 | indexes 73 | document 74 | users 75 | query 76 | graph 77 | exceptions 78 | validation 79 | admin 80 | ca_certificate 81 | foxx 82 | jwauth 83 | tasks 84 | gevent_session 85 | 86 | Indices and tables 87 | ================== 88 | 89 | * :ref:`genindex` 90 | * :ref:`modindex` 91 | * :ref:`search` 92 | 93 | -------------------------------------------------------------------------------- /pyArango/doc/source/indexes.rst: -------------------------------------------------------------------------------- 1 | Indexes 2 | ---------- 3 | .. automodule:: pyArango.index 4 | :members: -------------------------------------------------------------------------------- /pyArango/doc/source/jwauth.rst: -------------------------------------------------------------------------------- 1 | JW Auth 2 | ---------- 3 | .. automodule:: pyArango.jwauth 4 | :members: 5 | -------------------------------------------------------------------------------- /pyArango/doc/source/query.rst: -------------------------------------------------------------------------------- 1 | Query 2 | ---------- 3 | .. automodule:: pyArango.query 4 | :members: -------------------------------------------------------------------------------- /pyArango/doc/source/tasks.rst: -------------------------------------------------------------------------------- 1 | Tasks 2 | ---------- 3 | .. automodule:: pyArango.tasks 4 | :members: 5 | -------------------------------------------------------------------------------- /pyArango/doc/source/users.rst: -------------------------------------------------------------------------------- 1 | Users 2 | ---------- 3 | .. automodule:: pyArango.users 4 | :members: -------------------------------------------------------------------------------- /pyArango/doc/source/validation.rst: -------------------------------------------------------------------------------- 1 | Validation 2 | ---------- 3 | .. 
automodule:: pyArango.validation 4 | :members: -------------------------------------------------------------------------------- /pyArango/document.py: -------------------------------------------------------------------------------- 1 | import json, types 2 | from .theExceptions import (CreationError, UniqueConstrainViolation, DeletionError, UpdateError, ValidationError, SchemaViolation, InvalidDocument, ArangoError) 3 | 4 | __all__ = ["DocumentStore", "Document", "Edge"] 5 | 6 | class DocumentStore(object): 7 | """Store all the data of a document in hierarchy of stores and handles validation. 8 | Does not store private information, these are in the document.""" 9 | 10 | def __init__(self, collection, validators=None, initDct=None, patch=False, subStore=False, validateInit=False): 11 | if validators is None: 12 | validators = {} 13 | if initDct is None: 14 | initDct = {} 15 | 16 | self.store = {} 17 | self.patchStore = {} 18 | self.collection = collection 19 | self.validators = validators 20 | self.validateInit = validateInit 21 | self.isSubStore = subStore 22 | self.subStores = {} 23 | self.patching = patch 24 | 25 | if not self.validateInit : 26 | self.mustValidate = False 27 | self.set(initDct) 28 | 29 | for v in self.collection._validation.values(): 30 | if v: 31 | self.mustValidate = True 32 | break 33 | 34 | if self.validateInit: 35 | self.set(initDct) 36 | 37 | self.patching = True 38 | 39 | def resetPatch(self): 40 | """reset patches""" 41 | self.patchStore = {} 42 | 43 | def getPatches(self): 44 | """get patches as a dictionary""" 45 | if not self.mustValidate: 46 | return self.getStore() 47 | 48 | res = {} 49 | res.update(self.patchStore) 50 | for k, v in self.subStores.items(): 51 | res[k] = v.getPatches() 52 | 53 | return res 54 | 55 | def getStore(self): 56 | """get the inner store as dictionary""" 57 | res = {} 58 | res.update(self.store) 59 | for k, v in self.subStores.items(): 60 | res[k] = v.getStore() 61 | return res 62 | 63 | def 
validateField(self, field): 64 | """Validatie a field""" 65 | if field not in self.validators and not self.collection._validation['allow_foreign_fields']: 66 | raise SchemaViolation(self.collection.__class__, field) 67 | 68 | if field in self.store: 69 | if isinstance(self.store[field], DocumentStore): 70 | return self[field].validate() 71 | 72 | if field in self.patchStore: 73 | try: 74 | return self.validators[field].validate(self.patchStore[field]) 75 | except ValidationError as e: 76 | raise ValidationError( "'%s' -> %s" % ( field, str(e)) ) 77 | else: 78 | try: 79 | return self.validators[field].validate(self.store[field]) 80 | except ValidationError as e: 81 | raise ValidationError( "'%s' -> %s" % ( field, str(e)) ) 82 | except AttributeError: 83 | if isinstance(self.validators[field], dict) and not isinstance(self.store[field], dict): 84 | raise ValueError("Validator expected a sub document for field '%s', got '%s' instead" % (field, self.store[field]) ) 85 | else: 86 | raise 87 | return True 88 | 89 | def validate(self): 90 | """Validate the whole document""" 91 | if not self.mustValidate: 92 | return True 93 | 94 | res = {} 95 | for field in self.validators.keys(): 96 | try: 97 | if isinstance(self.validators[field], dict) and field not in self.store: 98 | self.store[field] = DocumentStore(self.collection, validators = self.validators[field], initDct = {}, subStore=True, validateInit=self.validateInit) 99 | self.validateField(field) 100 | except InvalidDocument as e: 101 | res.update(e.errors) 102 | except (ValidationError, SchemaViolation) as e: 103 | res[field] = str(e) 104 | 105 | if len(res) > 0: 106 | raise InvalidDocument(res) 107 | 108 | return True 109 | 110 | def set(self, dct): 111 | """Set the values to a dict. 
Any missing value will be filled by it's default""" 112 | for field, value in dct.items(): 113 | if field not in self.collection.arangoPrivates: 114 | if isinstance(value, dict): 115 | if field in self.validators and isinstance(self.validators[field], dict): 116 | vals = self.validators[field] 117 | else: 118 | vals = {} 119 | self[field] = DocumentStore(self.collection, validators = vals, initDct = value, patch = self.patching, subStore=True, validateInit=self.validateInit) 120 | self.subStores[field] = self.store[field] 121 | else: 122 | self[field] = value 123 | 124 | def fill_default(self): 125 | """replace all None values with defaults""" 126 | for field, value in self.validators.items(): 127 | if isinstance(value, dict): 128 | self[field].fill_default() 129 | elif self[field] is None: 130 | self[field] = value.default 131 | 132 | def __dir__(self): 133 | return dir(self.getStore()) 134 | 135 | def __len__(self): 136 | return len(self.store) 137 | 138 | def __dict__(self): 139 | return dict(self.store) + dict(self.patchStore) 140 | 141 | def __contains__(self, field): 142 | return field in self.store 143 | 144 | def __getitem__(self, field): 145 | """Get an element from the store""" 146 | if self.mustValidate and (field in self.validators) and isinstance(self.validators[field], dict) and (field not in self.store) : 147 | self.store[field] = DocumentStore(self.collection, validators = self.validators[field], initDct = {}, patch = self.patching, subStore=True, validateInit=self.validateInit) 148 | self.subStores[field] = self.store[field] 149 | self.patchStore[field] = self.store[field] 150 | 151 | if self.collection._validation['allow_foreign_fields'] or self.collection.hasField(field): 152 | return self.store.get(field) 153 | 154 | if not field in self.validators: 155 | raise SchemaViolation(self.collection.__class__, field) 156 | 157 | try: 158 | return self.store[field] 159 | except KeyError: 160 | self.store[field] = self.validators[field].default 161 | 
return self.store[field] 162 | 163 | def __setitem__(self, field, value): 164 | """Set an element in the store""" 165 | if self.mustValidate and (not self.collection._validation['allow_foreign_fields']) and (field not in self.validators) and (field not in self.collection.arangoPrivates): 166 | raise SchemaViolation(self.collection.__class__, field) 167 | 168 | if field in self.collection.arangoPrivates: 169 | raise ValueError("DocumentStore cannot contain private field (got %s)" % field) 170 | 171 | if isinstance(value, dict): 172 | if field in self.validators and isinstance(self.validators[field], dict): 173 | vals = self.validators[field] 174 | else: 175 | vals = {} 176 | self.store[field] = DocumentStore(self.collection, validators = vals, initDct = value, patch = self.patching, subStore=True, validateInit=self.validateInit) 177 | 178 | self.subStores[field] = self.store[field] 179 | else: 180 | self.store[field] = value 181 | 182 | if self.patching: 183 | self.patchStore[field] = self.store[field] 184 | 185 | if self.mustValidate and self.collection._validation['on_set']: 186 | self.validateField(field) 187 | 188 | def __delitem__(self, k): 189 | """removes an element from the store""" 190 | try: 191 | del(self.store[k]) 192 | except: 193 | pass 194 | 195 | try: 196 | del(self.patchStore[k]) 197 | except: 198 | pass 199 | 200 | try: 201 | del(self.subStores[k]) 202 | except: 203 | pass 204 | 205 | def __contains__(self, k): 206 | """returns true or false weither the store has a key k""" 207 | return (k in self.store) or (k in self.validators) 208 | 209 | def __repr__(self): 210 | return "" % repr(self.store) 211 | 212 | class Document(object): 213 | """The class that represents a document. 
Documents are meant to be instanciated by collections""" 214 | 215 | def __init__(self, collection, jsonFieldInit = None, on_load_validation=False) : 216 | if jsonFieldInit is None : 217 | jsonFieldInit = {} 218 | self.privates = ["_id", "_key", "_rev"] 219 | self.reset(collection, jsonFieldInit, on_load_validation=on_load_validation) 220 | self.typeName = "ArangoDoc" 221 | # self._store = None 222 | 223 | def reset(self, collection, jsonFieldInit = None, on_load_validation=False) : 224 | """replaces the current values in the document by those in jsonFieldInit""" 225 | if not jsonFieldInit: 226 | jsonFieldInit = {} 227 | for k in self.privates: 228 | setattr(self, k, None) 229 | 230 | self.collection = collection 231 | self.connection = self.collection.connection 232 | 233 | self.setPrivates(jsonFieldInit) 234 | self._store = DocumentStore(self.collection, validators=self.collection._fields, initDct=jsonFieldInit, validateInit=on_load_validation) 235 | if self.collection._validation['on_load']: 236 | self.validate() 237 | 238 | self.modified = True 239 | 240 | def to_default(self): 241 | """reset the document to the default values""" 242 | self.reset(self.collection, self.collection.getDefaultDocument()) 243 | 244 | def fill_default(self): 245 | """reset the document to the default values""" 246 | self._store.fill_default() 247 | 248 | def validate(self): 249 | """validate the document""" 250 | self._store.validate() 251 | for pField in self.collection.arangoPrivates: 252 | self.collection.validatePrivate(pField, getattr(self, pField)) 253 | 254 | def setPrivates(self, fieldDict): 255 | """will set self._id, self._rev and self._key field.""" 256 | for priv in self.privates: 257 | if priv in fieldDict: 258 | setattr(self, priv, fieldDict[priv]) 259 | # else: 260 | # setattr(self, priv, None) 261 | # if priv not in ["_from", "_to"]: 262 | 263 | def getURL(self): 264 | if self._id is None: 265 | return AttributeError("An unsaved document cannot have an URL") 266 | 
return "%s/%s" % (self.collection.getDocumentsURL(), self._id) 267 | 268 | def set(self, fieldDict): 269 | """set the document with a dictionary""" 270 | self.setPrivates(fieldDict) 271 | self._store.set(fieldDict) 272 | 273 | def save(self, waitForSync = False, **docArgs): 274 | """Saves the document to the database by either performing a POST (for a new document) or a PUT (complete document overwrite). 275 | If you want to only update the modified fields use the .patch() function. 276 | Use docArgs to put things such as 'waitForSync = True' (for a full list cf ArangoDB's doc). 277 | It will only trigger a saving of the document if it has been modified since the last save. If you want to force the saving you can use forceSave()""" 278 | self._store.fill_default() 279 | payload = self._store.getStore() 280 | # print(payload) 281 | self._save(payload, waitForSync = False, **docArgs) 282 | 283 | def _save(self, payload, waitForSync = False, **docArgs): 284 | 285 | if self.modified: 286 | 287 | params = dict(docArgs) 288 | params.update({'collection': self.collection.name, "waitForSync" : waitForSync }) 289 | 290 | if self.collection._validation['on_save']: 291 | self.validate() 292 | if self.collection._isBulkInProgress: 293 | if self._key is not None: 294 | payload["_key"] = self._key 295 | self.collection._saveBatch(self, params) 296 | return self._store.resetPatch() 297 | if self._id is None: 298 | if self._key is not None: 299 | payload["_key"] = self._key 300 | payload = json.dumps(payload, default=str) 301 | r = self.connection.session.post(self.collection.getDocumentsURL(), params = params, data = payload) 302 | update = False 303 | data = r.json() 304 | self.setPrivates(data) 305 | else: 306 | payload = json.dumps(payload, default=str) 307 | r = self.connection.session.put(self.getURL(), params = params, data = payload) 308 | update = True 309 | data = r.json() 310 | 311 | 312 | if (r.status_code == 201 or r.status_code == 202) and "error" not in data: 313 | 
# /pyArango/document.py (continued — tail of Document._save; the start of the
# method lies outside this chunk)
            if update:
                # on update ArangoDB only returns the new revision
                self._rev = data['_rev']
            else:
                self.set(data)
        else:
            if update:
                raise UpdateError(data['errorMessage'], data)
            else:
                # ArangoDB errorNum 1210: unique constraint violated -> dedicated exception
                if data["errorNum"] == 1210:
                    raise UniqueConstrainViolation(data['errorMessage'], data)
                else:
                    raise CreationError(data['errorMessage'], data)

        self.modified = False

        self._store.resetPatch()

    def forceSave(self, **docArgs):
        """Save even if the document has not been modified since the last save."""
        self.modified = True
        self.save(**docArgs)

    def saveCopy(self):
        """Save a copy of the object and become that copy. Returns a tuple (old _key, new _key)."""
        old_key = self._key
        self.reset(self.collection)
        self.save()
        return (old_key, self._key)

    def patch(self, keepNull = True, **docArgs):
        """Saves the document by only updating the modified fields.
        The default behaviour concerning the keepNull parameter is the opposite of ArangoDB's default: Null values won't be ignored.
        Use docArgs for things such as waitForSync = True."""

        if self._id is None:
            raise ValueError("Cannot patch a document that was not previously saved")

        params = dict(docArgs)
        params.update({'collection': self.collection.name, 'keepNull' : keepNull})

        if self.collection._isBulkInProgress:
            # inside a bulk operation: queue the patch instead of sending it now
            self.collection._patchBatch(self, params )
            return self._store.resetPatch()

        payload = self._store.getPatches()

        if self.collection._validation['on_save']:
            self.validate()

        # nothing to send if no field was modified
        if len(payload) > 0:
            payload = json.dumps(payload, default=str)

            r = self.connection.session.patch(self.getURL(), params = params, data = payload)
            data = r.json()
            if (r.status_code == 201 or r.status_code == 202) and "error" not in data:
                self._rev = data['_rev']
            else:
                raise UpdateError(data['errorMessage'], data)

            self.modified = False

        self._store.resetPatch()

    def delete(self):
        """Delete the document from the database."""
        if self._id is None:
            raise DeletionError("Can't delete a document that was not saved")

        if self.collection._isBulkInProgress:
            # inside a bulk operation: queue the deletion instead of sending it now
            params = {'collection': self.collection.name}
            self.collection._deleteBatch(self, params)
            self.modified = True
            return

        r = self.connection.session.delete(self.getURL())
        data = r.json()

        if (r.status_code != 200 and r.status_code != 202) or 'error' in data:
            raise DeletionError(data['errorMessage'], data)
        self.reset(self.collection)

        self.modified = True

    def getInEdges(self, edges, rawResults = False):
        """An alias for getEdges() that returns only the in Edges."""
        return self.getEdges(edges, inEdges = True, outEdges = False, rawResults = rawResults)

    def getOutEdges(self, edges, rawResults = False):
        """An alias for getEdges() that returns only the out Edges."""
        return self.getEdges(edges, inEdges = False, outEdges = True, rawResults = rawResults)

    def getEdges(self, edges, inEdges = True, outEdges = True, rawResults = False):
        """Return in, out, or both edges linked to self belonging to the collection 'edges'.
        If rawResults is True, ArangoDB results will be returned as fetched; if False, a list of Edge objects is returned."""
        try:
            return edges.getEdges(self, inEdges, outEdges, rawResults)
        except AttributeError:
            raise AttributeError("%s does not seem to be a valid Edges object" % edges)

    def getResponsibleShard(self):
        """If we're working with an ArangoDB cluster, fetch the shard where this document lives."""

        result = self.connection.session.put("%s/responsibleShard" % self.collection.getURL(), data = json.dumps(self.getStore()))
        if result.status_code == 200:
            return result.json()["shardId"]
        raise ArangoError(result.json()['errorMessage'], result.json())

    def getStore(self):
        """Return the store in a dict format, including any private fields that are set."""
        store = self._store.getStore()
        for priv in self.privates:
            v = getattr(self, priv)
            if v:
                store[priv] = v
        return store

    def getPatches(self):
        """Return the pending patches in a dict format."""
        return self._store.getPatches()

    def __dir__(self):
        if not self._store:
            return []
        return dir(self._store)

    def __len__(self):
        if not self._store:
            return 0

        return self._store.__len__()

    def __dict__(self):
        # NOTE(review): defining __dict__ as a method shadows the normal
        # instance attribute dict; callers must invoke doc.__dict__() explicitly
        # and vars(doc) will not behave as usual — confirm this is intended.
        if not self._store:
            return {}
        return dict(self._store)

    def __contains__(self, field):
        if not self._store:
            return False
        return field in self._store

    def __getitem__(self, k):
        """Get an element from the document; ArangoDB private fields are read from attributes."""
        if k in self.collection.arangoPrivates:
            return getattr(self, k)
        return self._store[k]

    def __getattr__(self, k):
        # NOTE(review): an unknown field raises KeyError here (from the store
        # lookup) instead of AttributeError, which can surprise hasattr()/copy()
        # — confirm callers rely on this before changing.
        if not self._store:
            return None
        return self._store[k]

    def __setitem__(self, k, v):
        """Set an element in the document; ArangoDB private fields are set as attributes."""
        if k in self.collection.arangoPrivates:
            setattr(self, k, v)
        else:

            self.modified = True
            self._store[k] = v

    def __delitem__(self, k):
        """Remove an element from the document."""
        self.modified = True
        del(self._store[k])

    def __str__(self):
        return repr(self)

    def __repr__(self):
        privStr = []
        for p in self.collection.arangoPrivates:
            privStr.append("%s: %s" % (p, getattr(self, p)) )

        privStr = ', '.join(privStr)
        return "%s '%s': %s" % (self.typeName, privStr, repr(self._store))

class Edge(Document):
    """An Edge document."""

    def __init__(self, edgeCollection, jsonFieldInit = None, on_load_validation=False) :
        if not jsonFieldInit:
            jsonFieldInit = {}

        self.typeName = "ArangoEdge"
        # edges carry _from/_to in addition to the regular private fields
        self.privates = ["_id", "_key", "_rev", "_from", "_to"]
        self.reset(edgeCollection, jsonFieldInit, on_load_validation=on_load_validation)

    def reset(self, edgeCollection, jsonFieldInit = None, on_load_validation=False) :
        if jsonFieldInit is None:
            jsonFieldInit = {}
        Document.reset(self, edgeCollection, jsonFieldInit, on_load_validation=on_load_validation)

    def links(self, fromVertice, toVertice, **edgeArgs):
        """
        An alias to save that updates the _from and _to attributes.
        fromVertice and toVertice can be either strings or documents. If they are unsaved documents, they will be automatically saved.
        """
        if isinstance(fromVertice, Document) or isinstance(getattr(fromVertice, 'document', None), Document):
            if not fromVertice._id:
                fromVertice.save()
            self._from = fromVertice._id
        elif (type(fromVertice) is bytes) or (type(fromVertice) is str):
            self._from = fromVertice
        elif not self._from:
            raise CreationError('fromVertice %s is invalid!' % str(fromVertice))

        if isinstance(toVertice, Document) or isinstance(getattr(toVertice, 'document', None), Document):
            if not toVertice._id:
                toVertice.save()
            self._to = toVertice._id
        elif (type(toVertice) is bytes) or (type(toVertice) is str):
            self._to = toVertice
        elif not self._to:
            raise CreationError('toVertice %s is invalid!' % str(toVertice))

        self.save(**edgeArgs)

    def save(self, **edgeArgs):
        """Works like Document's save except that you must specify '_from' and '_to' vertices before.
        There's also a links() function especially for first saves."""

        if not getattr(self, "_from") or not getattr(self, "_to"):
            raise AttributeError("You must specify '_from' and '_to' attributes before saving. You can also use the function 'links()'")

        payload = self._store.getStore()
        payload["_from"] = self._from
        payload["_to"] = self._to
        Document._save(self, payload, **edgeArgs)

    # def __getattr__(self, k):
    #     if k == "_from" or k == "_to":
    #         return self._store[k]
    #     else:
    #         return Document.__getattr__(self, k)

# --------------------------------------------------------------------------
# /pyArango/foxx.py
# --------------------------------------------------------------------------
"""All foxx related methods."""
from .action import DatabaseAction


class Foxx:
    """A generic foxx function executor."""

    def __init__(self, database):
        """Initialise database and its services."""
        self.database = database
        self.services = []
        self.mounts = {}

    def service(self, mount):
        """Return a service that only routes after the mount.

        Parameters
        ----------
        mount : str
            mount point.
21 | 22 | Returns 23 | ------- 24 | FoxxService 25 | A mounted service 26 | 27 | """ 28 | if mount not in self.mounts: 29 | self.reload() 30 | if mount not in self.mounts: 31 | raise ValueError("Unable to find the mount: '%s'", mount) 32 | return FoxxService(self.database, mount) 33 | 34 | def get_available_services(self): 35 | response = self.database.action.get('/_api/foxx', params={'excludeSystem': False}) 36 | response.raise_for_status() 37 | return response.json() 38 | 39 | def reload(self): 40 | self.services = self.get_available_services() 41 | self.mounts = {service['mount'] for service in self.services} 42 | 43 | 44 | 45 | class FoxxService(DatabaseAction): 46 | """A foxx mount function executor.""" 47 | 48 | def __init__(self, database, mount): 49 | """Initialise mount and database.""" 50 | self.database = database 51 | self.mount = mount 52 | 53 | @property 54 | def end_point_url(self): 55 | """End point url for foxx service.""" 56 | return '%s/_db/%s%s' % ( 57 | self.database.connection.getEndpointURL(), self.database.name, 58 | self.mount 59 | ) 60 | -------------------------------------------------------------------------------- /pyArango/gevent_session.py: -------------------------------------------------------------------------------- 1 | """Gevent Session.""" 2 | 3 | try: 4 | import grequests 5 | import gevent 6 | from gevent.threading import Lock 7 | except ModuleNotFoundError as e: 8 | print("grequests is not installed, try pip install grequests") 9 | raise e 10 | 11 | try: 12 | import gevent 13 | from gevent.threading import Lock 14 | except ModuleNotFoundError as e: 15 | print("gevent is not installed, try pip install gevent") 16 | raise e 17 | 18 | import logging 19 | import requests 20 | from requests import exceptions as requests_exceptions 21 | 22 | from .jwauth import JWTAuth 23 | from .ca_certificate import CA_Certificate 24 | 25 | class AikidoSession_GRequests(object): 26 | """A version of Aikido that uses grequests.""" 27 | 28 | def 
__init__( 29 | self, username, password, urls, use_jwt_authentication=False, 30 | use_lock_for_reseting_jwt=True, max_retries=5, verify=None 31 | ): 32 | self.max_retries = max_retries 33 | self.use_jwt_authentication = use_jwt_authentication 34 | if username: 35 | if self.use_jwt_authentication: 36 | self.auth = JWTAuth( 37 | username, password, urls, 38 | use_lock_for_reseting_jwt, max_retries 39 | ) 40 | else: 41 | self.auth = (username, password) 42 | 43 | if (verify is not None) and not isinstance(verify, bool) and not isinstance(verify, CA_Certificate) and not isinstance(verify, str) : 44 | raise ValueError("'verify' argument can only be of type: bool, CA_Certificate or str or None") 45 | self.verify = verify 46 | else: 47 | self.auth = None 48 | 49 | def __reset_auth(self): 50 | if not self.use_jwt_authentication: 51 | return 52 | if self.auth.lock_for_reseting_jwt is not None: 53 | self.auth.lock_for_reseting_jwt.acquire() 54 | self.auth.reset_token() 55 | if self.auth.lock_for_reseting_jwt is not None: 56 | self.auth.lock_for_reseting_jwt.release() 57 | 58 | def _run(self, req): 59 | """Run the request.""" 60 | if not self.use_jwt_authentication and self.verify is not None: 61 | if isinstance(self.verify, CA_Certificate): 62 | req.kwargs['verify'] = self.verify.get_file_path() 63 | else : 64 | req.kwargs['verify'] = self.verify 65 | for _ in range(self.max_retries): 66 | gevent.joinall([gevent.spawn(req.send)]) 67 | if self.use_jwt_authentication: 68 | if hasattr(req, 'exception'): 69 | logging.critical("%s is raised, will try to reset the auth and request again.", req.exception) 70 | self.__reset_auth() 71 | elif req.response.status_code == 401: 72 | logging.critical("Invalid authentication token provided, will try to reset the auth and request again.") 73 | self.__reset_auth() 74 | else: 75 | return req.response 76 | else: 77 | if hasattr(req, 'exception'): 78 | logging.critical("%s is raised, will try to request again", req.exception) 79 | elif 
req.response.status_code == 401: 80 | logging.critical("Unauthorized access, you must supply a (username, password) with the correct credentials") 81 | else: 82 | return req.response 83 | logging.critical("Tried to send the request max number of times.") 84 | return req.response 85 | 86 | def post(self, url, data=None, json=None, **kwargs): 87 | """HTTP POST Method.""" 88 | if data is not None: 89 | kwargs['data'] = data 90 | if json is not None: 91 | kwargs['json'] = json 92 | 93 | kwargs['auth'] = self.auth 94 | 95 | req = grequests.post(url, **kwargs) 96 | return self._run(req) 97 | 98 | def get(self, url, **kwargs): 99 | """HTTP GET Method.""" 100 | kwargs['auth'] = self.auth 101 | req = grequests.get(url, **kwargs) 102 | return self._run(req) 103 | 104 | def put(self, url, data=None, **kwargs): 105 | """HTTP PUT Method.""" 106 | if data is not None: 107 | kwargs['data'] = data 108 | kwargs['auth'] = self.auth 109 | req = grequests.put(url, **kwargs) 110 | return self._run(req) 111 | 112 | def head(self, url, **kwargs): 113 | """HTTP HEAD Method.""" 114 | kwargs['auth'] = self.auth 115 | req = grequests.head(url, **kwargs) 116 | return self._run(req) 117 | 118 | def options(self, url, **kwargs): 119 | """HTTP OPTIONS Method.""" 120 | kwargs['auth'] = self.auth 121 | req = grequests.options(url, **kwargs) 122 | return self._run(req) 123 | 124 | def patch(self, url, data=None, **kwargs): 125 | """HTTP PATCH Method.""" 126 | if data is not None: 127 | kwargs['data'] = data 128 | kwargs['auth'] = self.auth 129 | req = grequests.patch(url, **kwargs) 130 | return self._run(req) 131 | 132 | def delete(self, url, **kwargs): 133 | """HTTP DELETE Method.""" 134 | kwargs['auth'] = self.auth 135 | req = grequests.delete(url, **kwargs) 136 | return self._run(req) 137 | 138 | def disconnect(self): 139 | pass 140 | -------------------------------------------------------------------------------- /pyArango/graph.py: 
# --------------------------------------------------------------------------
# /pyArango/graph.py
# --------------------------------------------------------------------------
import json
from future.utils import with_metaclass

from .theExceptions import (CreationError, DeletionError, UpdateError, TraversalError)
from . import collection as COL
from . import document as DOC

__all__ = ["Graph", "getGraphClass", "isGraph", "getGraphClasses", "Graph_metaclass", "EdgeDefinition"]

class Graph_metaclass(type):
    """Keeps track of all graph classes and does basic validation on fields."""

    graphClasses = {}

    def __new__(cls, name, bases, attrs):
        clsObj = type.__new__(cls, name, bases, attrs)
        # the abstract base 'Graph' itself is neither validated nor registered
        if name != 'Graph':
            try:
                if len(attrs['_edgeDefinitions']) < 1:
                    raise CreationError("Graph class '%s' has no edge definition" % name)
            except KeyError:
                raise CreationError("Graph class '%s' has no field _edgeDefinition" % name)
            Graph_metaclass.graphClasses[name] = clsObj
        return clsObj

    @classmethod
    def getGraphClass(cls, name):
        """Return a graph class by its name."""
        try:
            return cls.graphClasses[name]
        except KeyError:
            raise KeyError("There's no child of Graph by the name of: %s" % name)

    @classmethod
    def isGraph(cls, name):
        """Return True/False depending on whether there is a graph class called name."""
        return name in cls.graphClasses

def getGraphClass(name):
    """Alias for Graph_metaclass.getGraphClass()."""
    return Graph_metaclass.getGraphClass(name)

def isGraph(name):
    """Alias for Graph_metaclass.isGraph()."""
    return Graph_metaclass.isGraph(name)

def getGraphClasses():
    """Return a dictionary of all defined graph classes."""
    return Graph_metaclass.graphClasses

class EdgeDefinition(object):
    """An edge definition for a graph."""

    def __init__(self, edgesCollection, fromCollections, toCollections):
        self.name = edgesCollection
        self.edgesCollection = edgesCollection
        self.fromCollections = fromCollections
        self.toCollections = toCollections

    def toJson(self):
        """Serialize to the dict shape ArangoDB expects for an edge definition."""
        return { 'collection' : self.edgesCollection, 'from' : self.fromCollections, 'to' : self.toCollections }

    def __str__(self):
        return str(self.toJson())

    def __repr__(self):
        return str(self)

class Graph(with_metaclass(Graph_metaclass, object)):
    """The class from which all your graph types must derive."""

    _edgeDefinitions = []
    _orphanedCollections = []

    def __init__(self, database, jsonInit):
        self.database = database
        self.connection = self.database.connection
        # BUG FIX: the original had two 'except KeyError' clauses on a single
        # try; the second was unreachable, so a jsonInit missing both '_key'
        # and 'name' leaked a raw KeyError instead of the intended message.
        try:
            self._key = jsonInit["_key"]
        except KeyError:
            try:
                self._key = jsonInit["name"]
            except KeyError:
                raise KeyError("'jsonInit' must have a field '_key' or a field 'name'")

        self.name = self._key
        self._rev = jsonInit["_rev"]
        self._id = jsonInit["_id"]

        # merge orphan collections reported by the server into the class-level
        # list. NOTE(review): this mutates a class attribute shared across
        # instances — confirm that is intended.
        orfs = set(self._orphanedCollections)
        for o in jsonInit["orphanCollections"]:
            if o not in orfs:
                self._orphanedCollections.append(o)
                if self.connection.verbose:
                    print("Orphan collection %s is not in graph definition. Added it" % o)

        self.definitions = {}
        for ed in self._edgeDefinitions:
            # BUG FIX: the original stored the collection *name* as the value
            # here instead of the EdgeDefinition object (it also kept an unused
            # 'edNames' set). The object is what the rest of the class expects.
            self.definitions[ed.edgesCollection] = ed

        # add edge definitions known to the server but not declared on the class
        for ed in jsonInit["edgeDefinitions"]:
            if ed["collection"] not in self.definitions:
                self.definitions[ed["collection"]] = EdgeDefinition(ed["collection"], fromCollections = ed["from"], toCollections = ed["to"])
                if self.connection.verbose:
                    print("Edge definition %s is not in graph definition. Added it" % ed)

        for de in self._edgeDefinitions:
            if de.edgesCollection not in self.database.collections and not COL.isEdgeCollection(de.edgesCollection):
                raise KeyError("'%s' is not a valid edge collection" % de.edgesCollection)
            self.definitions[de.edgesCollection] = de

    def getURL(self):
        return "%s/%s" % (self.database.getGraphsURL(), self._key)

    def createVertex(self, collectionName, docAttributes, waitForSync = False):
        """Add a vertex to the graph and return it."""
        url = "%s/vertex/%s" % (self.getURL(), collectionName)

        # validate against the collection's schema before hitting the server
        store = DOC.DocumentStore(self.database[collectionName], validators=self.database[collectionName]._fields, initDct=docAttributes)
        store.validate()

        r = self.connection.session.post(url, data = json.dumps(docAttributes, default=str), params = {'waitForSync' : waitForSync})

        data = r.json()
        if r.status_code == 201 or r.status_code == 202:
            return self.database[collectionName][data["vertex"]["_key"]]

        raise CreationError("Unable to create vertice, %s" % data["errorMessage"], data)

    def deleteVertex(self, document, waitForSync = False):
        """Delete a vertex from the graph as well as all linked edges."""
        url = "%s/vertex/%s" % (self.getURL(), document._id)

        r = self.connection.session.delete(url, params = {'waitForSync' : waitForSync})
        data = r.json()
        if r.status_code == 200 or r.status_code == 202:
            return True

        raise DeletionError("Unable to delete vertice, %s" % document._id, data)

    def createEdge(self, collectionName, _fromId, _toId, edgeAttributes, waitForSync = False):
        """Create an edge between two documents."""

        if not _fromId:
            raise ValueError("Invalid _fromId: %s" % _fromId)

        if not _toId:
            raise ValueError("Invalid _toId: %s" % _toId)

        if collectionName not in self.definitions:
            raise KeyError("'%s' is not among the edge definitions" % collectionName)

        url = "%s/edge/%s" % (self.getURL(), collectionName)
        self.database[collectionName].validatePrivate("_from", _fromId)
        self.database[collectionName].validatePrivate("_to", _toId)

        ed = self.database[collectionName].createEdge()
        ed.set(edgeAttributes)
        ed.validate()

        payload = ed.getStore()
        payload.update({'_from' : _fromId, '_to' : _toId})

        r = self.connection.session.post(url, data = json.dumps(payload, default=str), params = {'waitForSync' : waitForSync})
        data = r.json()
        if r.status_code == 201 or r.status_code == 202:
            return self.database[collectionName][data["edge"]["_key"]]
        raise CreationError("Unable to create edge, %s" % r.json()["errorMessage"], data)

    def link(self, definition, doc1, doc2, edgeAttributes, waitForSync = False):
        """A shorthand for createEdge that takes two documents as input."""
        # NOTE(review): 'type(x) is DOC.Document' excludes Document subclasses
        # — confirm whether subclasses should also be auto-saved here.
        if type(doc1) is DOC.Document:
            if not doc1._id:
                doc1.save()
            doc1_id = doc1._id
        else:
            doc1_id = doc1

        if type(doc2) is DOC.Document:
            if not doc2._id:
                doc2.save()
            doc2_id = doc2._id
        else:
            doc2_id = doc2

        return self.createEdge(definition, doc1_id, doc2_id, edgeAttributes, waitForSync)

    def unlink(self, definition, doc1, doc2):
        """Delete all links between doc1 and doc2."""
        links = self.database[definition].fetchByExample( {"_from": doc1._id,"_to" : doc2._id}, batchSize = 100)
        for l in links:
            self.deleteEdge(l)

    def deleteEdge(self, edge, waitForSync = False):
        """Remove an edge from the graph."""
        url = "%s/edge/%s" % (self.getURL(), edge._id)
        r = self.connection.session.delete(url, params = {'waitForSync' : waitForSync})
        if r.status_code == 200 or r.status_code == 202:
            return True
        raise DeletionError("Unable to delete edge, %s" % edge._id, r.json())

    def delete(self):
        """Delete the graph."""
        r = self.connection.session.delete(self.getURL())
        data = r.json()
        if r.status_code < 200 or r.status_code > 202 or data["error"]:
            raise DeletionError(data["errorMessage"], data)

    def traverse(self, startVertex, **kwargs):
        """Traversal! See: https://docs.arangodb.com/HttpTraversal/README.html for a full list of the possible kwargs.
        The function must have as argument either: direction = "outbound"/"any"/"inbound" or expander = "custom JS (see arangodb's doc)".
        The function can't have both 'direction' and 'expander' as arguments.
        """

        url = "%s/traversal" % self.database.getURL()
        if type(startVertex) is DOC.Document:
            startVertex_id = startVertex._id
        else:
            startVertex_id = startVertex

        payload = {"startVertex": startVertex_id, "graphName" : self.name}
        if "expander" in kwargs:
            if "direction" in kwargs:
                raise ValueError("""The function can't have both 'direction' and 'expander' as arguments""")
        elif "direction" not in kwargs:
            raise ValueError("""The function must have as argument either: direction = "outbout"/"any"/"inbound" or expander = "custom JS (see arangodb's doc)" """)

        payload.update(kwargs)

        r = self.connection.session.post(url, data = json.dumps(payload, default=str))
        data = r.json()
        if r.status_code < 200 or r.status_code > 202 or data["error"]:
            raise TraversalError(data["errorMessage"], data)

        return data["result"]

    def __str__(self):
        return "ArangoGraph: %s" % self.name

# --------------------------------------------------------------------------
# /pyArango/index.py
import json
from .theExceptions import (CreationError, DeletionError, UpdateError)

class Index(object):
    """An index on a collection's fields. Indexes are meant to be created by the ensureXXX functions of Collections.
    Indexes have a .infos dictionary that stores all the infos about the index."""

    def __init__(self, collection, infos = None, creationData = None):
        self.collection = collection
        self.connection = self.collection.database.connection
        self.infos = None
        self.active = False

        if infos:
            # wrap an index that already exists on the server
            self.infos = infos
        elif creationData:
            self._create(creationData)

    def getURL(self):
        """Return the URL of this index, or None if it has not been created yet."""
        if self.infos:
            return "%s/%s" % (self.getIndexesURL(), self.infos["id"])
        return None

    def getIndexesURL(self):
        """Return the URL of the database's index endpoint."""
        return "%s/index" % self.collection.database.getURL()

    def _create(self, postData, force=False):
        """Create an index of any type according to postData.

        Does nothing if the index already exists and is active, unless force is True.
        Raises CreationError on server error."""
        if self.infos is None or not self.active or force:
            r = self.connection.session.post(self.getIndexesURL(), params = {"collection" : self.collection.name}, data = json.dumps(postData, default=str))
            data = r.json()
            if (r.status_code >= 400) or data['error']:
                raise CreationError(data['errorMessage'], data)
            self.infos = data
            self.active = True

    def restore(self):
        """Restore an index that has been previously deleted."""
        self._create(self.infos, force=True)

    def delete(self):
        """Delete the index. Raises DeletionError on server error."""
        r = self.connection.session.delete(self.getURL())
        data = r.json()
        if (r.status_code != 200 and r.status_code != 202) or data['error']:
            raise DeletionError(data['errorMessage'], data)
        self.active = False

    def __repr__(self):
        # BUG FIX: the original was '"" % self.infos["type"]', which raises
        # TypeError ("not all arguments converted during string formatting");
        # format the index type into the repr instead.
        return "<Index of type %s>" % self.infos["type"]

# --------------------------------------------------------------------------
# /pyArango/jwauth.py
# --------------------------------------------------------------------------
# /pyArango/jwauth.py
# --------------------------------------------------------------------------
from base64 import b64decode
import time
import json as json_mod
import logging
# BUG FIX: Lock was used in __init__ but never imported in this module,
# so use_lock_for_reseting_jwt=True raised NameError.
from threading import Lock


import requests
from requests import exceptions as requests_exceptions


class JWTAuth(requests.auth.AuthBase):
    """A requests auth hook that logs in against /_open/auth and injects the JWT bearer token."""

    # Renew half a day (in seconds) before the actual expiration.
    REAUTH_TIME_INTERVEL = 43200

    def __init__(
        self, username, password, urls, use_lock_for_reseting_jwt=False,
        max_retries=5
    ):
        self.username = username
        self.password = password
        self.urls = urls
        self.lock_for_reseting_jwt = Lock() if use_lock_for_reseting_jwt else None
        self.__init_request_session(max_retries)
        self.__set_token()

    def __init_request_session(self, max_retries):
        """Build a requests session with retrying HTTP(S) adapters."""
        self.max_retries = max_retries
        self.session = requests.Session()
        http = requests.adapters.HTTPAdapter(max_retries=max_retries)
        https = requests.adapters.HTTPAdapter(max_retries=max_retries)
        self.session.mount('http://', http)
        self.session.mount('https://', https)

    def __parse_token(self):
        """Decode the JWT payload (second dot-separated segment) into a dict."""
        decoded_token = b64decode(self.token.split('.')[1].encode())
        return json_mod.loads(decoded_token.decode())

    def __get_auth_token(self):
        """Try each connection url in turn and return the JWT, or None on auth failure."""
        request_data = '{"username":"%s","password":"%s"}' % (self.username, self.password)
        for connection_url in self.urls:
            try:
                response = self.session.post('%s/_open/auth' % connection_url, data=request_data)
                if response.ok:
                    json_data = response.content
                    if json_data:
                        data_dict = json_mod.loads(json_data.decode("utf-8"))
                        return data_dict.get('jwt')
            except requests_exceptions.ConnectionError:
                if connection_url is not self.urls[-1]:
                    logging.critical("Unable to connect to %s trying another", connection_url)
                else:
                    logging.critical("Unable to connect to any of the urls: %s", self.urls)
                    raise

    def __set_token(self):
        self.token = self.__get_auth_token()
        self.parsed_token = \
            self.__parse_token() if self.token is not None else {}
        self.token_last_updated = time.time()

    def reset_token(self):
        """Fetch a fresh JWT from the server."""
        logging.warning("Reseting the token.")
        self.__set_token()

    def is_token_expired(self):
        """True when the token is within REAUTH_TIME_INTERVEL seconds of expiring."""
        return (
            self.parsed_token.get("exp", 0) - time.time() <
            JWTAuth.REAUTH_TIME_INTERVEL
        )

    def __call__(self, req):
        # Implement JWT authentication: refresh the token if it is about to
        # expire (double-checked under the optional lock), then attach it.
        if self.is_token_expired():
            if self.lock_for_reseting_jwt is not None:
                self.lock_for_reseting_jwt.acquire()
            if self.is_token_expired():
                self.reset_token()
            if self.lock_for_reseting_jwt is not None:
                self.lock_for_reseting_jwt.release()
        req.headers['Authorization'] = 'Bearer %s' % self.token
        return req

# --------------------------------------------------------------------------
# /pyArango/query.py
# --------------------------------------------------------------------------
import json

from future.utils import implements_iterator

from .document import Document, Edge
from .theExceptions import QueryError, AQLQueryError, SimpleQueryError, CreationError, CursorError
from . import consts as CONST
import consts as CONST 8 | 9 | __all__ = ["Query", "AQLQuery", "SimpleQuery", "Cursor", "RawCursor"] 10 | 11 | @implements_iterator 12 | class RawCursor(object): 13 | "a raw interface to cursors that returns json" 14 | def __init__(self, database, cursorId): 15 | self.database = database 16 | self.connection = self.database.connection 17 | self.id = cursorId 18 | 19 | def getURL(self): 20 | return "%s/%s" % (self.database.getCursorsURL(), self.id) 21 | 22 | def __next__(self): 23 | "returns the next batch" 24 | r = self.connection.session.put(self.getURL()) 25 | data = r.json() 26 | if r.status_code in [400, 404]: 27 | raise CursorError(data["errorMessage"], self.id, data) 28 | return r.json() 29 | 30 | @implements_iterator 31 | class Query(object): 32 | "This class is abstract and should not be instanciated. All query classes derive from it" 33 | 34 | def __init__(self, request, database, rawResults): 35 | "If rawResults = True, the results will be returned as dictionaries instead of Document objects." 
36 | 37 | self.rawResults = rawResults 38 | self.response = request.json() 39 | if self.response.get("error") and self.response["errorMessage"] != "no match": 40 | raise QueryError(self.response["errorMessage"], self.response) 41 | 42 | self.request = request 43 | self.database = database 44 | self.connection = self.database.connection 45 | self.currI = 0 46 | if request.status_code == 201 or request.status_code == 200 or request.status_code == 202: 47 | self.batchNumber = 1 48 | try : #if there's only one element 49 | self.response = {"result" : [self.response["document"]], 'hasMore' : False} 50 | del(self.response["document"]) 51 | except KeyError: 52 | pass 53 | 54 | if "hasMore" in self.response and self.response["hasMore"]: 55 | cursor_id = self.response.get("id","") 56 | self.cursor = RawCursor(self.database, cursor_id) 57 | else: 58 | self.cursor = None 59 | elif request.status_code == 404: 60 | self.batchNumber = 0 61 | self.result = [] 62 | else: 63 | self._raiseInitFailed(request) 64 | 65 | def _raiseInitFailed(self, request): 66 | "must be implemented in child, this called if the __init__ fails" 67 | raise NotImplementedError("Must be implemented in child") 68 | 69 | def _developDoc(self, i): 70 | """private function that transforms a json returned by ArangoDB into a pyArango Document or Edge""" 71 | docJson = self.result[i] 72 | try: 73 | collection = self.database[docJson["_id"].split("/")[0]] 74 | except KeyError: 75 | raise CreationError("result %d is not a valid Document. Try setting rawResults to True" % i) 76 | 77 | if collection.type == CONST.COLLECTION_EDGE_TYPE: 78 | self.result[i] = Edge(collection, docJson) 79 | else: 80 | self.result[i] = Document(collection, docJson) 81 | 82 | def nextBatch(self): 83 | "become the next batch. 
raises a StopIteration if there is None" 84 | self.batchNumber += 1 85 | self.currI = 0 86 | try: 87 | if not self.response["hasMore"] or self.cursor is None: 88 | raise StopIteration("That was the last batch") 89 | except KeyError: 90 | raise AQLQueryError(self.response["errorMessage"], self.query, self.response) 91 | 92 | self.response = next(self.cursor) 93 | 94 | def delete(self): 95 | "kills the cursor" 96 | self.connection.session.delete(self.cursor) 97 | 98 | def __next__(self): 99 | """returns the next element of the query result. Automatomatically calls for new batches if needed""" 100 | try: 101 | v = self[self.currI] 102 | except IndexError: 103 | self.nextBatch() 104 | v = self[self.currI] 105 | self.currI += 1 106 | return v 107 | 108 | def __iter__(self): 109 | """Returns an itererator so you can do:: 110 | 111 | for doc in query : print doc 112 | """ 113 | return self 114 | 115 | def __getitem__(self, i): 116 | "returns a ith result of the query. Raises IndexError if we reached the end of the current batch." 
117 | if not self.rawResults and (not isinstance(self.result[i], (Edge, Document))): 118 | self._developDoc(i) 119 | return self.result[i] 120 | 121 | def __len__(self): 122 | """Returns the number of elements in the query results""" 123 | return len(self.result) 124 | 125 | def __getattr__(self, k): 126 | try: 127 | resp = object.__getattribute__(self, "response") 128 | return resp[k] 129 | except (KeyError, AttributeError): 130 | raise AttributeError("There's no attribute %s" %(k)) 131 | 132 | def __str__(self): 133 | return str(self.result) 134 | 135 | class AQLQuery(Query): 136 | "AQL queries are attached to and instanciated by a database" 137 | def __init__(self, database, query, batchSize, bindVars, options, count, fullCount, rawResults = True, 138 | json_encoder = None, **moreArgs): 139 | # fullCount is passed in the options dict per https://docs.arangodb.com/3.1/HTTP/AqlQueryCursor/AccessingCursors.html 140 | options["fullCount"] = fullCount 141 | payload = {'query' : query, 'batchSize' : batchSize, 'bindVars' : bindVars, 'options' : options, 'count' : count} 142 | payload.update(moreArgs) 143 | 144 | self.query = query 145 | self.database = database 146 | self.connection = self.database.connection 147 | self.connection.reportStart(query) 148 | request = self.connection.session.post(database.getCursorsURL(), data = json.dumps(payload, cls=json_encoder, default=str)) 149 | self.connection.reportItem() 150 | 151 | try: 152 | Query.__init__(self, request, database, rawResults) 153 | except QueryError as e: 154 | raise AQLQueryError( message = e.message, query = self.query, errors = e.errors) 155 | 156 | def explain(self, bindVars = None, allPlans = False): 157 | """Returns an explanation of the query. Setting allPlans to True will result in ArangoDB returning all possible plans. 
        False returns only the optimal plan"""
        if bindVars is None:
            bindVars = {}
        return self.database.explainAQLQuery(self.query, bindVars, allPlans)

    def _raiseInitFailed(self, request):
        # Called by Query.__init__ when the server rejects the cursor request.
        data = request.json()
        raise AQLQueryError(data["errorMessage"], self.query, data)


class Cursor(Query):
    "Cursor queries are attached to and instanciated by a database, use them to continue from where you left"
    def __init__(self, database, cursorId, rawResults):
        # NOTE(review): deliberately does NOT call Query.__init__ — this resumes
        # an already-open server-side cursor instead of issuing a new request.
        self.rawResults = rawResults
        self._developed = set()
        self.batchNumber = 1
        self.cursor = RawCursor(database, cursorId)
        self.response = next(self.cursor)

    def _raiseInitFailed(self, request):
        data = request.json()
        # NOTE(review): self.id is resolved through Query.__getattr__ from the
        # response payload — confirm the response always carries an 'id' here.
        raise CursorError(data["errorMessage"], self.id, data)


class SimpleQuery(Query):
    "Simple queries are attached to and instanciated by a collection"
    def __init__(self, collection, queryType, rawResults, json_encoder = None,
                 **queryArgs):
        # Runs a 'simple query' via PUT <db-url>/simple/<queryType>.
        self.collection = collection
        self.connection = self.collection.database.connection

        payload = {'collection' : collection.name}
        payload.update(queryArgs)
        payload = json.dumps(payload, cls=json_encoder, default=str)
        URL = "%s/simple/%s" % (collection.database.getURL(), queryType)
        request = self.connection.session.put(URL, data = payload)

        Query.__init__(self, request, collection.database, rawResults)

    def _raiseInitFailed(self, request):
        data = request.json()
        raise SimpleQueryError(data["errorMessage"], data)

    def _developDoc(self, i):
        # Wrap the raw dict at index i into an Edge or a Document,
        # depending on the collection type.
        docJson = self.result[i]
        if self.collection.type == CONST.COLLECTION_EDGE_TYPE:
            self.result[i] = Edge(self.collection, docJson)
        else:
            self.result[i] = Document(self.collection, docJson)
# /pyArango/tasks.py
"""All Task related methods."""


class Tasks:
    """Manage ArangoDB server tasks (the /_api/tasks endpoints) for a database.

    All requests go through ``self.database.action``; HTTP failures raise via
    ``response.raise_for_status()``.
    """

    URL = '/_api/tasks'

    def __init__(self, database):
        """Store the database whose tasks are managed."""
        self.database = database

    def __call__(self):
        """Return all the active tasks in the db (same as ``fetch()``)."""
        return self.fetch()

    def drop(self):
        """Delete all tasks."""
        for task in self.fetch():
            self.delete(task["id"])

    def fetch(self, task_id=None):
        """Fetch the task for the given task_id. If task_id is None, return all tasks."""
        if task_id is not None:
            url = '{tasks_url}/{task_id}'.format(
                tasks_url=self.URL, task_id=task_id
            )
        else:
            url = self.URL

        response = self.database.action.get(url)
        response.raise_for_status()
        return response.json()

    def create(
        self, name, command, params=None,
        period=None, offset=None, task_id=None
    ):
        """Create a (possibly periodic) task running ``command``.

        Parameters
        ----------
        name: human-readable task name.
        command: JavaScript source the server will execute.
        params: parameters passed to the command (may be None).
        period: seconds between runs; if None the task runs once.
        offset: seconds of initial delay.
        task_id: explicit task id to register the task under.
        """
        task = {'name': name, 'command': command, 'params': params}
        if period is not None:
            task['period'] = period
        if offset is not None:
            task['offset'] = offset

        if task_id is not None:
            task['id'] = task_id
            url = '{tasks_url}/{task_id}'.format(
                tasks_url=self.URL, task_id=task_id
            )
        else:
            url = self.URL

        # NOTE(review): ArangoDB documents PUT /_api/tasks/{id} for creating a
        # task with an explicit id — confirm whether POST works for that case.
        response = self.database.action.post(url, json=task)
        response.raise_for_status()
        return response.json()

    def delete(self, task_id):
        """Delete the task for the given task_id."""
        url = '{tasks_url}/{task_id}'.format(
            tasks_url=self.URL, task_id=task_id
        )
        response = self.database.action.delete(url)
        response.raise_for_status()
        return response.json()
-------------------------------------------------------------------------------- /pyArango/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ArangoDB-Community/pyArango/251e74aa161c34877c5244ac99787820da302292/pyArango/tests/__init__.py -------------------------------------------------------------------------------- /pyArango/tests/doc_save_benchmark.py: -------------------------------------------------------------------------------- 1 | import unittest, copy 2 | import os 3 | import time 4 | 5 | from pyArango.connection import * 6 | from pyArango.database import * 7 | from pyArango.collection import * 8 | 9 | # A little script to test performaces 10 | 11 | def createUsers(collection, i): 12 | doc = collection.createDocument() 13 | doc["name"] = "Tesla-%d" % i 14 | doc["number"] = i 15 | doc["species"] = "human" 16 | doc.save() 17 | 18 | conn = Connection(username="root", password="root") 19 | # conn = Connection(username=None, password=None) 20 | 21 | print ("Creating db...") 22 | try: 23 | db = conn.createDatabase(name = "test_db_2") 24 | except: 25 | print ("DB already exists") 26 | db = conn["test_db_2"] 27 | 28 | try: 29 | collection = db.createCollection(name = "users") 30 | except: 31 | print ("Collection already exists") 32 | 33 | collection = db["users"] 34 | collection.truncate() 35 | 36 | startTime = time.time() 37 | nbUsers = 1000000 38 | 39 | for i in range(nbUsers): 40 | if i % 1000 == 0: 41 | print ("->", i, "saved") 42 | try: 43 | createUsers(collection, i) 44 | except Exception as e: 45 | print ("died at", i) 46 | raise e 47 | 48 | took = time.time() - startTime 49 | print ("avg, 1sc => ", float(nbUsers)/took, "saves") 50 | 51 | print ("Cleaning up...") 52 | 53 | db["users"].delete() 54 | collection.truncate() 55 | print ("Done...") -------------------------------------------------------------------------------- /pyArango/tests/doc_save_bulk_benchmark.py: 
-------------------------------------------------------------------------------- 1 | import unittest, copy 2 | import os 3 | import time 4 | import random 5 | 6 | from pyArango.connection import * 7 | from pyArango.database import * 8 | from pyArango.collection import * 9 | 10 | from pyArango.collection import BulkOperation as BulkOperation 11 | 12 | # A little script to test performaces 13 | 14 | allUsers = [] 15 | 16 | def createUsers(collection, i): 17 | global allUsers 18 | doc = collection.createDocument() 19 | doc["name"] = "Tesla-%d" % i 20 | doc["number"] = i 21 | doc["species"] = "human" 22 | allUsers.append(doc) 23 | doc.save() 24 | 25 | allLinks = [] 26 | def linkUsers(collection, userA, userB, count): 27 | global allLinks 28 | doc = collection.createEdge() 29 | doc["count"] = count 30 | doc.links(userA, userB) 31 | allLinks.append(doc) 32 | 33 | conn = Connection(username="root", password="") 34 | # conn = Connection(username=None, password=None) 35 | 36 | print ("Creating db...") 37 | try: 38 | db = conn.createDatabase(name = "test_db_2") 39 | except: 40 | print ("DB already exists") 41 | db = conn["test_db_2"] 42 | 43 | try: 44 | collection = db.createCollection(name = "users") 45 | except: 46 | print ("Collection already exists") 47 | 48 | try: 49 | relcol = db.createCollection(className = 'Edges', name = "relations") 50 | except: 51 | print ("Relations Collection already exists") 52 | 53 | collection = db["users"] 54 | collection.truncate() 55 | 56 | relcol = db["relations"] 57 | relcol.truncate() 58 | 59 | startTime = time.time() 60 | nbUsers = 100000 61 | batchSize = 500 62 | 63 | print("Saving Users: ") 64 | with BulkOperation(collection, batchSize=batchSize) as col: 65 | for i in range(nbUsers): 66 | if i % 1000 == 0: 67 | print ("->", i, "saved") 68 | try: 69 | createUsers(col, i) 70 | except Exception as e: 71 | print ("died at", i) 72 | raise e 73 | 74 | i = 0 75 | with BulkOperation(relcol, batchSize=batchSize) as col: 76 | for userA in 
allUsers: 77 | i += 1 78 | try: 79 | otherUser = random.choice(allUsers) 80 | linkUsers(col, userA, otherUser, i) 81 | except Exception as e: 82 | print ("died at", userA) 83 | raise e 84 | 85 | print("Modifying relations: \n") 86 | with BulkOperation(relcol, batchSize=batchSize) as col: 87 | for link in allLinks: 88 | try: 89 | link.set({'modified': 'true'}) 90 | link.patch() 91 | except Exception as e: 92 | print ("died at", link) 93 | raise e 94 | 95 | print("Modifying Users: \n") 96 | with BulkOperation(collection, batchSize=batchSize) as col: 97 | for user in allUsers: 98 | try: 99 | user.set({'modified': 'true'}) 100 | user.patch() 101 | except Exception as e: 102 | print ("died at", link) 103 | raise e 104 | 105 | print("Deleting relations: \n") 106 | with BulkOperation(relcol, batchSize=batchSize) as col: 107 | for link in allLinks: 108 | try: 109 | link.delete() 110 | except Exception as e: 111 | print ("died at", link) 112 | raise e 113 | 114 | print("Deleting Users: \n") 115 | with BulkOperation(collection, batchSize=batchSize) as col: 116 | for user in allUsers: 117 | try: 118 | user.delete() 119 | except Exception as e: 120 | print ("died at", link) 121 | raise e 122 | 123 | 124 | took = time.time() - startTime 125 | print ("avg, 1sc => ", float(nbUsers)/took, "saves") 126 | 127 | print ("Cleaning up...") 128 | 129 | db["users"].delete() 130 | collection.truncate() 131 | print ("Done...") 132 | -------------------------------------------------------------------------------- /pyArango/tests/setup_arangodb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker pull arangodb/arangodb-preview:devel-nightly 4 | docker run -d -e ARANGO_ROOT_PASSWORD="root" -p 8529:8529 arangodb/arangodb-preview:devel-nightly 5 | 6 | sleep 2 7 | 8 | n=0 9 | # timeout value for startup 10 | timeout=60 11 | while [[ (-z `curl -H 'Authorization: Basic cm9vdDp0ZXN0' -s 'http://127.0.0.1:8529/_api/version' `) && (n -lt 
timeout) ]] ; do 12 | echo -n "." 13 | sleep 1s 14 | n=$[$n+1] 15 | done 16 | 17 | if [[ n -eq timeout ]]; 18 | then 19 | echo "Could not start ArangoDB. Timeout reached." 20 | exit 1 21 | fi 22 | 23 | echo "ArangoDB is up" 24 | -------------------------------------------------------------------------------- /pyArango/tests/validators_tests.py: -------------------------------------------------------------------------------- 1 | import unittest, copy 2 | from pyArango.validation import * 3 | from pyArango.theExceptions import ValidationError 4 | 5 | class ValidatorTests(unittest.TestCase): 6 | 7 | def setUp(self): 8 | pass 9 | 10 | def tearDown(self): 11 | pass 12 | 13 | def test_notNull(self): 14 | v = NotNull() 15 | self.assertTrue(v.validate(33)) 16 | self.assertRaises(ValidationError, v.validate, None) 17 | 18 | def test_email(self): 19 | v = Email() 20 | self.assertTrue(v.validate('nicholas.tesla@simba.com')) 21 | self.assertRaises(ValidationError, v.validate, 'nicholas.tesla @simba.com') 22 | self.assertRaises(ValidationError, v.validate, 'nicholas.tesla&@simba.com') 23 | self.assertRaises(ValidationError, v.validate, 'nicholas.tesla @simba.com') 24 | self.assertRaises(ValidationError, v.validate, 'nicholas.tesla') 25 | self.assertRaises(ValidationError, v.validate, 'nicholas.tesla@.com') 26 | self.assertRaises(ValidationError, v.validate, 'nicholas.tesla@com') 27 | 28 | def test_length(self): 29 | v = Length(2, 5) 30 | self.assertTrue(v.validate("12")) 31 | self.assertRaises(ValidationError, v.validate, '1') 32 | self.assertRaises(ValidationError, v.validate, '123456') 33 | 34 | if __name__ == "__main__": 35 | unittest.main() 36 | -------------------------------------------------------------------------------- /pyArango/theExceptions.py: -------------------------------------------------------------------------------- 1 | class pyArangoException(Exception): 2 | """The calss from witch all Exceptions inherit""" 3 | def __init__(self, message, errors = None): 4 
        Exception.__init__(self, message)
        if errors is None:
            errors = {}
        self.message = message
        # errors holds the raw error payload (usually the server's JSON response).
        self.errors = errors

    def __str__(self):
        return self.message + ". Errors: " + str(self.errors)


class ConnectionError(pyArangoException):
    """Something went wrong with the connection"""
    # NOTE(review): shadows the Python builtin ConnectionError.
    def __init__(self, message, URL, statusCode = "", errors = None):
        if errors is None:
            errors = {}
        mes = "%s. URL: %s, status: %s" % (message, URL, statusCode)
        pyArangoException.__init__(self, mes, errors)


class ArangoError(pyArangoException):
    """a generic arangodb error object"""
    def __init__(self, errorObject):
        # errorObject is the error dict returned by the server.
        self.errorNum = errorObject['errorNum']
        pyArangoException.__init__(self, errorObject['errorMessage'], errorObject)


class CreationError(pyArangoException):
    """Something went wrong when creating something"""
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class UniqueConstrainViolation(CreationError):
    """Violation of a unique key"""
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        CreationError.__init__(self, message, errors)


class IndexError(pyArangoException):
    """wasn't able to get the index"""
    # NOTE(review): shadows the Python builtin IndexError.
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class UpdateError(pyArangoException):
    """Something went wrong when updating something"""
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class DeletionError(pyArangoException):
    """Something went wrong when deleting something"""
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class TraversalError(pyArangoException):
    """Something went wrong when doing a graph traversal"""
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class ValidationError(pyArangoException):
    """Something went wrong when validating something"""
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class SchemaViolation(pyArangoException):
    """Raised when someone tries to add a new field to an object belonging to a Collection with enforced schema"""
    def __init__(self, collection, field, errors = None):
        if errors is None:
            errors = {}
        message = "Collection '%s' does not have a field '%s' in it's schema" % (collection.__name__, field)
        pyArangoException.__init__(self, message, errors)


class InvalidDocument(pyArangoException):
    """Raised when a Document does not respect schema/validation defined in its collection"""
    def __init__(self, errors):
        message = "Unsuccesful validation"
        # Pre-rendered "field -> error" lines (also rebuilt live in __str__).
        self.strErrors = []
        for k, v in errors.items():
            self.strErrors.append("%s -> %s" % (k, v))
        self.strErrors = '\n\t'.join(self.strErrors)

        pyArangoException.__init__(self, message, errors)

    def __str__(self):
        strErrors = []
        for k, v in self.errors.items():
            strErrors.append("%s -> %s" % (k, v))
        strErrors = '\n\t'.join(strErrors)
        return self.message + ":\n\t" + strErrors


class SimpleQueryError(pyArangoException):
    """Something went wrong with a simple query"""
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class BulkOperationError(pyArangoException):
    """Something went wrong in one of the bulk operations. This error contains more errors"""
    def __init__(self, message):
        self._errors = []
        self._errmsgs = []
        self._documents = []
        pyArangoException.__init__(self, "Batch error - + " + message)

    def addBulkError(self, error, document):
        # Record one (error, document) pair from the failed batch.
        self._errors.append(error)
        self._errmsgs.append(str(error))
        self._documents.append(document)

    def __str__(self):
        strErrors = []
        i = 0
        for errMsg in self._errmsgs:
            err = ""
            docstr = ""
            try:
                err = errMsg
            except:
                pass
            try:
                # _documents may be shorter than _errmsgs; fall back to "".
                docstr = self._documents[i]
            except:
                pass
            strErrors.append("\t<%s> -> %s" % (err, docstr))
            i += 1
        strErrors = '\n\t'.join(strErrors)
        return self.message + ":\n\t" + strErrors


class QueryError(pyArangoException):
    """Something went wrong with an aql query"""
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class AQLQueryError(pyArangoException):
    """Something went wrong with an aql query; the message includes the numbered query text"""
    def __init__(self, message, query, errors = None):
        if errors is None:
            errors = {}
        # Prefix each query line with its 1-based number for readable errors.
        lq = []
        for i, ll in enumerate(query.split("\n")):
            lq.append("%s: %s" % (i+1, ll))
        lq = '\n'.join(lq)

        message = "Error in:\n%s.\n->%s" % (lq, message)
        pyArangoException.__init__(self, message, errors)


class CursorError(pyArangoException):
    """Something went wrong when trying to fetch data with a cursor"""
    def __init__(self, message, cursorId, errors = None):
        if errors is None:
            errors = {}
        message = "Unable to retreive data for cursor %s: %s" % (cursorId, message)
        pyArangoException.__init__(self, message, errors)


class TransactionError(pyArangoException):
    """Something went wrong with a transaction"""
    def __init__(self, message, action, errors = None):
        if errors is None:
            errors = {}
        message = "Error in: %s.\n->%s" % (action, message)
        pyArangoException.__init__(self, message, errors)


class AbstractInstanciationError(Exception):
    """Raised when someone tries to instanciate an abstract class"""
    def __init__(self, cls):
        self.cls = cls
        self.message = "%s is abstract and is not supposed to be instanciated. Collections my inherit from it" % self.cls.__name__
        Exception.__init__(self, self.message)

    def __str__(self):
        return self.message


class ExportError(pyArangoException):
    """ Something went wrong using the export cursor """
    def __init__(self, message, errors = None ):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class DocumentNotFoundError(pyArangoException):
    """Raised when a document could not be found"""
    def __init__(self, message, errors = None):
        if errors is None:
            errors = {}
        pyArangoException.__init__(self, message, errors)


class AQLFetchError(Exception):
    """Raised error when fetching the data."""

    def __init__(self, err_message):
        """Error when unable to fetch.

        Parameters
        ----------
        err_message : str
            error message.
210 | 211 | """ 212 | Exception.__init__(self, err_message) 213 | 214 | -------------------------------------------------------------------------------- /pyArango/users.py: -------------------------------------------------------------------------------- 1 | from .theExceptions import ConnectionError, CreationError, DeletionError, UpdateError 2 | 3 | class User(object): 4 | """This class represents a user""" 5 | def __init__(self, users, jsonData = None): 6 | if jsonData is None: 7 | jsonData = {} 8 | self._store = {} 9 | self.users = users 10 | self.connection = self.users.connection 11 | 12 | self._store = { 13 | "username": None, 14 | "active": True, 15 | "extra": None, 16 | "changePassword": None, 17 | "password": None, 18 | } 19 | 20 | self.isSet = False 21 | if len(jsonData) > 0: 22 | self._set(jsonData) 23 | 24 | def _set(self, jsonData): 25 | """Initialize all fields at once. If no password is specified, it will be set as an empty string""" 26 | 27 | self["username"] = jsonData["user"] 28 | self["active"] = jsonData["active"] 29 | self["extra"] = jsonData["extra"] 30 | try: 31 | self["changePassword"] = jsonData["changePassword"] 32 | except Exception as e: 33 | pass 34 | # self["changePassword"] = "" 35 | 36 | try: 37 | self["password"] = jsonData["passwd"] 38 | except KeyError: 39 | self["password"] = "" 40 | 41 | self.isSet = True 42 | 43 | def getURL(self): 44 | return "%s/user/%s" % (self.connection.getURL(), self["username"]) 45 | 46 | def save(self): 47 | """Save/updates the user""" 48 | 49 | import json 50 | 51 | payload = {} 52 | payload.update(self._store) 53 | payload["user"] = payload["username"] 54 | payload["passwd"] = payload["password"] 55 | del(payload["username"]) 56 | del(payload["password"]) 57 | 58 | payload = json.dumps(payload, default=str) 59 | if not self.isSet: 60 | if "username" not in self._store or "password" not in self._store: 61 | raise KeyError("You must define self['name'] and self['password'] to be able to create a new 
user") 62 | 63 | r = self.connection.session.post(self.users.getURL(), data = payload) 64 | data = r.json() 65 | if r.status_code == 201: 66 | self._set(data) 67 | else: 68 | raise CreationError("Unable to create new user", data) 69 | else: 70 | r = self.connection.session.put(self.getURL(), data = payload) 71 | data = r.json() 72 | if r.status_code == 200: 73 | self._set(data) 74 | else: 75 | raise UpdateError("Unable to update user, status: %s" %r.status_code, data) 76 | 77 | def setPermissions(self, dbName, access): 78 | """Grant revoke rights on a database, 'access' is supposed to be boolean. ArangoDB grants/revokes both read and write rights at the same time""" 79 | import json 80 | 81 | if not self.isSet: 82 | raise CreationError("Please save user first", None, None) 83 | 84 | rights = [] 85 | if access: 86 | rights.append("rw") 87 | 88 | rights = ''.join(rights) 89 | 90 | if not self.connection.hasDatabase(dbName): 91 | raise KeyError("Unknown database: %s" % dbName) 92 | 93 | url = "%s/database/%s" % (self.getURL(), dbName) 94 | r = self.connection.session.put(url, data = json.dumps({"grant": rights}, default=str)) 95 | if r.status_code < 200 or r.status_code > 202: 96 | raise CreationError("Unable to grant rights", r.content) 97 | 98 | def delete(self): 99 | """Permanently remove the user""" 100 | if not self.isSet: 101 | raise CreationError("Please save user first", None, None) 102 | 103 | r = self.connection.session.delete(self.getURL()) 104 | if r.status_code < 200 or r.status_code > 202: 105 | raise DeletionError("Unable to delete user, url: %s, status: %s" %(r.url, r.status_code), r.content ) 106 | self.isSet = False 107 | 108 | def __repr__(self): 109 | return "ArangoUser: %s" % (self._store) 110 | 111 | def __setitem__(self, k, v): 112 | if k not in list(self._store.keys()): 113 | raise KeyError("The only keys available for user are: %s" % (list(self._store.keys()))) 114 | self._store[k] = v 115 | 116 | def __getitem__(self, k): 117 | return 
self._store[k] 118 | 119 | class Users(object): 120 | """This one manages users.""" 121 | def __init__(self, connection): 122 | self.connection = connection 123 | 124 | def getURL(self): 125 | return "%s/user" % (self.connection.getURL()) 126 | 127 | def createUser(self, username, password): 128 | u = User(self) 129 | u["username"] = username 130 | u["password"] = password 131 | return u 132 | 133 | def fetchAllUsers(self, rawResults = False): 134 | """Returns all available users. if rawResults, the result will be a list of python dicts instead of User objects""" 135 | r = self.connection.session.get(self.getURL()) 136 | if r.status_code == 200: 137 | data = r.json() 138 | if rawResults: 139 | return data["result"] 140 | else: 141 | res = [] 142 | for resu in data["result"]: 143 | u = User(self, resu) 144 | res.append(u) 145 | return res 146 | else: 147 | raise ConnectionError("Unable to get user list", r.url, r.status_code) 148 | 149 | def fetchUser(self, username, rawResults = False): 150 | """Returns a single user. if rawResults, the result will be a list of python dicts instead of User objects""" 151 | url = "%s/%s" % (self.getURL(), username) 152 | 153 | r = self.connection.session.get(url) 154 | if r.status_code == 200: 155 | data = r.json() 156 | if rawResults: 157 | return data["result"] 158 | else: 159 | u = User(self, data) 160 | return u 161 | else: 162 | raise KeyError("Unable to get user: %s" % username) 163 | 164 | def __getitem__(self, k): 165 | return self.fetchUser(k) 166 | -------------------------------------------------------------------------------- /pyArango/validation.py: -------------------------------------------------------------------------------- 1 | from .theExceptions import ValidationError 2 | 3 | class Validator(object): 4 | """All validators must inherit from this class""" 5 | def __init__(self, *args, **kwrags): 6 | pass 7 | 8 | def validate(self, value): 9 | """The only function that a validator must implement. 
class Validator(object):
    """All validators must inherit from this class."""
    def __init__(self, *args, **kwrags):
        pass

    def validate(self, value):
        """The only function that a validator must implement.
        Must return True if everything went well, or raise a ValidationError otherwise."""
        # BUG FIX: 'NotImplemented' is a constant and is not callable; calling it
        # raised a TypeError instead of the intended NotImplementedError.
        raise NotImplementedError("Should be implemented in child")

    def __str__(self):
        """This function should be redefined in child to give a quick overview of the validator."""
        return self.__class__.__name__


class NotNull(Validator):
    """Checks that the Field has a non null value. False is not considered a Null Value."""
    def __init__(self, reject_zero=False, reject_empty_string=True):
        self.reject_zero = reject_zero
        self.reject_empty_string = reject_empty_string

    def validate(self, value):
        # The type check keeps False (== 0) from being rejected by reject_zero.
        if value is None or ((value == 0 and type(value) != bool) and self.reject_zero) or (value == "" and self.reject_empty_string):
            raise ValidationError("Field can't have a null value, got: '%s'" % value)
        return True


class Email(Validator):
    """Checks if the field contains an email address."""
    def validate(self, value):
        import re
        # NOTE(review): the [A-z] ranges also match the characters between 'Z'
        # and 'a' (e.g. '_', '^'); kept as-is to preserve accepted inputs.
        pat = r'^[A-z0-9._-]+@[A-z0-9.-]+\.[A-z]{2,4}$'
        if re.match(pat, value) is None:
            raise ValidationError("The email address: %s is invalid" % value)
        return True


class Numeric(Validator):
    """Checks if the value is numerical."""
    def validate(self, value):
        try:
            float(value)
        except (TypeError, ValueError):
            # BUG FIX: bare 'except:' narrowed to the failures float() can raise.
            raise ValidationError("%s is not valid numerical value" % value)
        return True


class Int(Validator):
    """The value must be an integer."""
    def validate(self, value):
        if not isinstance(value, int):
            raise ValidationError("%s is not a valid integer" % value)
        return True


class Bool(Validator):
    """The value must be a boolean."""
    def validate(self, value):
        if not isinstance(value, bool):
            raise ValidationError("%s is not a valid boolean" % value)
        return True


class String(Validator):
    """The value must be a string."""
    def validate(self, value):
        # BUG FIX: the old check also referenced the Python 2-only name
        # 'unicode', which raised a NameError on Python 3 for any non-str value.
        if not isinstance(value, str):
            raise ValidationError("%s is not a valid string" % value)
        return True


class Enumeration(Validator):
    """The value must be among the allowed ones."""
    def __init__(self, allowed):
        self.allowed = set(allowed)

    def validate(self, value):
        if value not in self.allowed:
            raise ValidationError("%s is not among the allowed values %s" % (value, self.allowed))
        return True


class Range(Validator):
    """The value must be in the [lower, upper] range."""
    def __init__(self, lower, upper):
        self.lower = lower
        self.upper = upper

    def validate(self, value):
        if value < self.lower or value > self.upper:
            raise ValidationError("%s is not in [%s, %s]" % (value, self.lower, self.upper))
        # BUG FIX: was missing; every validator returns True on success.
        return True

    def __str__(self):
        # BUG FIX: referenced nonexistent attributes minLen/maxLen (copy-paste from Length).
        return "%s[%s, %s]" % (self.__class__.__name__, self.lower, self.upper)


class Length(Validator):
    """Validates that the value length is between given bounds."""
    def __init__(self, minLen, maxLen):
        self.minLen = minLen
        self.maxLen = maxLen

    def validate(self, value):
        try:
            length = len(value)
        except TypeError:
            raise ValidationError("Field '%s' of type '%s' has no length" % (value, type(value)))

        # Use the length computed above instead of recomputing len(value) three times.
        if self.minLen <= length <= self.maxLen:
            return True
        raise ValidationError("Field must have a length in ['%s';'%s'] got: '%s'" % (self.minLen, self.maxLen, length))

    def __str__(self):
        return "%s[%s, %s]" % (self.__class__.__name__, self.minLen, self.maxLen)
--------------------------------------------------------------------------------
/run_tests.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Test driver: parses the standard ArangoDB test-runner flags, exports the
# relevant environment variables, builds the package and runs the test suite.
PYTHON=python3
while [ $# -gt 0 ]; do
    case "$1" in
        --instanceUrl)
            shift
            export ARANGODB_URL=$1
            shift
            ;;
        --instanceEndpoint)
            shift
            shift
            # don't care
            ;;
        --auth)
            shift
            shift
            # don't care
            ;;
        --username)
            shift
            export ARANGODB_ROOT_USERNAME=$1
            shift
            ;;
        --password)
            shift
            export ARANGODB_ROOT_PASSWORD=$1
            shift
            ;;
        --enterprise)
            shift
            # don't care
            ;;
        --no-enterprise)
            shift
            # don't care
            ;;
        --host)
            shift
            shift
            # don't care...
            ;;
        --port)
            shift
            shift
            # don't care...
            ;;
        --deployment-mode)
            shift
            shift
            # don't care...
            ;;
        --testsuite)
            shift
            shift
            # TODO: howto pass testsuite filters?
            ;;
        --filter)
            shift
            shift
            # TODO: howto pass testcase filters?
            ;;
        --python-exe)
            shift
            export PYTHON=$1
            shift
            ;;
        *)
            echo "What? my mother was a saint! $1"
            shift
    esac
done

$PYTHON setup.py build

# Run the tests against the freshly built package, not the source tree.
export PYTHONPATH=$(pwd)/build/lib

exec $PYTHON pyArango/tests/tests.py 2>&1
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------

[bdist_wheel]
universal = 1

--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, find_packages
from codecs import open
from os import path

here = path.abspath(path.dirname(__file__))

# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='pyArango',

    version='2.1.1',

    description='An easy to use python driver for ArangoDB with built-in validation',
    long_description=long_description,

    url='https://github.com/tariqdaouda/pyArango',

    author='Tariq Daouda',
    author_email='tariq.daouda@umontreal.ca',

    license='ApacheV2',

    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 5 - Production/Stable',
        # 'Development Status :: 4 - Beta',

        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Topic :: Software Development :: Libraries',
        'Topic :: Database',
        'Topic :: Database :: Database Engines/Servers',

        'License :: OSI Approved :: Apache Software License',

        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
    ],

    # NOTE(review): 'datetime' is part of the standard library — confirm that
    # depending on a third-party 'datetime' distribution is intentional.
    install_requires=['requests>=2.7.0', 'future', 'datetime'],

    keywords='database ORM nosql arangodb driver validation',

    packages=find_packages(),

    entry_points={
        'console_scripts': [
            'sample=sample:main',
        ],
    },
)