├── .bandit ├── .coveragerc ├── .editorconfig ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── PULL_REQUEST_TEMPLATE.md ├── .gitignore ├── .travis.yml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── pytest.ini ├── requirements.txt ├── rethinkdb ├── __init__.py ├── __main__.py ├── _dump.py ├── _export.py ├── _import.py ├── _index_rebuild.py ├── _restore.py ├── ast.py ├── asyncio_net │ ├── __init__.py │ └── net_asyncio.py ├── backports │ ├── __init__.py │ └── ssl_match_hostname │ │ ├── LICENSE.txt │ │ ├── README.txt │ │ └── __init__.py ├── docs.py ├── errors.py ├── gevent_net │ ├── __init__.py │ └── net_gevent.py ├── handshake.py ├── helpers.py ├── logger.py ├── net.py ├── query.py ├── tornado_net │ ├── __init__.py │ └── net_tornado.py ├── trio_net │ ├── __init__.py │ └── net_trio.py ├── twisted_net │ ├── __init__.py │ └── net_twisted.py ├── utils_common.py └── version.py ├── scripts ├── convert_protofile.py ├── install-db.sh ├── prepare_remote_test.py ├── upload-coverage.sh └── upload-pypi.sh ├── setup.cfg ├── setup.py └── tests ├── __init__.py ├── conftest.py ├── helpers.py ├── integration ├── __init__.py ├── test_asyncio.py ├── test_connect.py ├── test_cursor.py ├── test_data_write.py ├── test_database.py ├── test_date_and_time.py ├── test_index.py ├── test_ping.py ├── test_repl.py ├── test_table.py ├── test_tornado.py ├── test_trio.py └── test_write_hooks.py ├── test_date_and_time.py ├── test_handshake.py ├── test_helpers.py ├── test_logger.py ├── test_net.py └── test_utils_common.py /.bandit: -------------------------------------------------------------------------------- 1 | [bandit] 2 | exclude: /tests 3 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | include = rethinkdb/* 3 | 4 | [report] 5 | exclude_lines = 6 | 
pragma: no cover 7 | 8 | def __unicode__ 9 | def __repr__ 10 | 11 | omit = 12 | rethinkdb/version.py 13 | 14 | show_missing = True 15 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # Copyright 2018 RethinkDB 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | root = true 16 | 17 | [*] 18 | indent_style = space 19 | indent_size = 4 20 | trim_trailing_whitespace = true 21 | insert_final_newline = true 22 | charset = utf-8 23 | end_of_line = lf 24 | 25 | [LICENSE] 26 | insert_final_newline = false 27 | 28 | [Makefile] 29 | indent_style = tab 30 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: rethinkdb 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | otechie: # 
Replace with a single Otechie username 12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug, not qualified 6 | 7 | --- 8 | 9 | **Describe the bug** 10 | A clear and concise description of what the bug is. 11 | 12 | **To Reproduce** 13 | Steps to reproduce the behavior: 14 | 1. TODO 15 | 16 | **Expected behavior** 17 | A clear and concise description of what you expected to happen. 18 | 19 | **Screenshots** 20 | If applicable, add screenshots to help explain your problem. 21 | 22 | **System info** 23 | - OS: [e.g. macOS Mojave 10.14.3] 24 | - RethinkDB Version: [e.g. 2.4.0] 25 | 26 | **Additional context** 27 | Add any other context about the problem here. 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement, not qualified, question 6 | 7 | --- 8 | 9 | **Is your feature request related to a problem? Please describe.** 10 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 11 | 12 | **Describe the solution you'd like** 13 | A clear and concise description of what you want to happen. 14 | 15 | **Describe alternatives you've considered** 16 | A clear and concise description of any alternative solutions or features you've considered. 17 | 18 | **Additional context** 19 | Add any other context or screenshots about the feature request here. 
20 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | **Reason for the change** 2 | If applicable, link the related issue/bug report or write down the motivation in a few sentences. 3 | 4 | **Description** 5 | A clear and concise description of what you changed and why. 6 | 7 | **Code examples** 8 | If applicable, add code examples to help explain your changes. 9 | 10 | **Checklist** 11 | - [ ] I have read and agreed to the [RethinkDB Contributor License Agreement](http://rethinkdb.com/community/cla/) 12 | 13 | **References** 14 | Anything else related to the change e.g. documentation, RFCs, etc. 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # pyenv 55 | .python-version 56 | 57 | # Environments 58 | *.pid 59 | .env 60 | .venv 61 | env/ 62 | venv/ 63 | ENV/ 64 | env.bak/ 65 | venv.bak/ 66 | virtualenv/ 67 | 68 | # RethinkDB 69 | rethinkdb/ql2_pb2.py 70 | rethinkdb/*.proto 71 | rethinkdb_data/ 72 | rebirthdb_data/ 73 | 74 | # Editors 75 | .vscode/ 76 | .idea/ 77 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | cache: pip 2 | dist: xenial 3 | language: python 4 | sudo: required 5 | 6 | python: 7 | - "2.7" 8 | - "3.5" 9 | - "3.6" 10 | - "3.7" 11 | - "3.8" 12 | 13 | allow_failure: 14 | - python: "3.8" 15 | 16 | install: 17 | - pip install -r requirements.txt 18 | - pip freeze 19 | 20 | before_script: 21 | - make prepare 22 | - make install-db 23 | 24 | script: 25 | - make test-ci 26 | 27 | after_success: 28 | - make upload-coverage 29 | 30 | deploy: 31 | provider: script 32 | script: make upload-pypi 33 | on: 34 | python: 3.8 35 | tags: true 36 | 37 | notifications: 38 | email: false 39 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex 
characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behaviour that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behaviour by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 26 | * Trolling, insulting/derogatory comments, and personal or political attacks 27 | * Public or private harassment 28 | * Publishing others' private information, such as a physical or electronic address, without explicit permission 29 | * Other conduct which could reasonably be considered inappropriate in a professional setting 30 | 31 | ## Our Responsibilities 32 | 33 | Project maintainers are responsible for clarifying the standards of acceptable 34 | behaviour and are expected to take appropriate and fair corrective action in 35 | response to any instances of unacceptable behaviour. 36 | 37 | Project maintainers have the right and responsibility to remove, edit, or 38 | reject comments, commits, code, wiki edits, issues, and other contributions 39 | that are not aligned to this Code of Conduct, or to ban temporarily or 40 | permanently any contributor for other behaviors that they deem inappropriate, 41 | threatening, offensive, or harmful. 42 | 43 | ## Scope 44 | 45 | This Code of Conduct applies both within project spaces and in public spaces 46 | when an individual is representing the project or its community. 
Examples of 47 | representing a project or community include using an official project e-mail 48 | address, posting via an official social media account, or acting as an appointed 49 | representative at an online or offline event. Representation of a project may be 50 | further defined and clarified by project maintainers. 51 | 52 | ## Enforcement 53 | 54 | Instances of abusive, harassing, or otherwise unacceptable behaviour may be 55 | reported by contacting the project team at open@rethinkdb.com. All 56 | complaints will be reviewed and investigated and will result in a response that 57 | is deemed necessary and appropriate to the circumstances. The project team is 58 | obligated to maintain confidentiality with regard to the reporter of an incident. 59 | Further details of specific enforcement policies may be posted separately. 60 | 61 | Project maintainers who do not follow or enforce the Code of Conduct in good 62 | faith may face temporary or permanent repercussions as determined by other 63 | members of the project's leadership. 64 | 65 | ## Attribution 66 | 67 | This Code of Conduct is adapted from the Contributor Covenant, version 1.4, 68 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 69 | 70 | For answers to common questions about this code of conduct, see 71 | https://www.contributor-covenant.org/faq 72 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Contributions are welcome, and they are greatly appreciated! Every little bit helps! You can contribute in many ways, not limited to this document. 4 | 5 | ## Types of Contributions 6 | 7 | ### Report Bugs 8 | 9 | First of all, please check that the bug is not reported yet. If that's already reported then upvote the existing bug instead of opening a new bug report. 
10 | 11 | Report bugs at https://github.com/rethinkdb/rethinkdb-python/issues. If you are reporting a bug, please include: 12 | 13 | - Your operating system name and version. 14 | - Any details about your local setup that might be helpful in troubleshooting. 15 | - Detailed steps to reproduce the bug. 16 | 17 | ### Fix Bugs 18 | 19 | Look through the GitHub issues for bugs. Anything tagged with "bug", "good first issue" and "help wanted" is open to whoever wants to implement it. 20 | 21 | ### Implement Features 22 | 23 | Look through the GitHub issues for features. Anything tagged with "enhancement", "good first issue" and "help wanted" is open to whoever wants to implement it. In case you added a new Rule or Precondition, do not forget to add them to the docs as well. 24 | 25 | ### Write Documentation 26 | 27 | RethinkDB could always use more documentation, whether as part of the official docs, in docstrings, or even on the web in blog posts, articles, and such. To extend the documentation on the website, visit the [www](https://github.com/rethinkdb/www) repo. For extending the docs, you can check the [docs](https://github.com/rethinkdb/docs) repo. 28 | 29 | ### Submit A Feature 30 | 31 | First of all, please check that the feature request is not reported yet. If that's already reported then upvote the existing request instead of opening a new one. 32 | 33 | If you are proposing a feature: 34 | 35 | - Check if there is an opened feature request for the same idea. 36 | - Explain in detail how it would work. 37 | - Keep the scope as narrow as possible, to make it easier to implement. 38 | - Remember that this is an open-source project, and that contributions are welcome :) 39 | 40 | ## Pull Request Guidelines 41 | 42 | Before you submit a pull request, check that it meets these guidelines: 43 | 44 | 1. The pull request should include tests (if applicable) 45 | 2. If the pull request adds functionality, the docs should be updated too. 
46 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include *.txt 3 | include Makefile 4 | include pytest.ini 5 | include .coveragerc 6 | recursive-include scripts *.py 7 | recursive-include scripts *.sh 8 | recursive-include tests *.py 9 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # Copyright 2018 RethinkDB 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | .PHONY: default help test-unit test-integration test-remote upload-coverage upload-pypi clean prepare 16 | 17 | PACKAGE_NAME = rethinkdb 18 | 19 | PROTO_FILE_NAME = ql2.proto 20 | PROTO_FILE_URL = https://raw.githubusercontent.com/rethinkdb/rethinkdb/next/src/rdb_protocol/${PROTO_FILE_NAME} 21 | TARGET_PROTO_FILE = ${PACKAGE_NAME}/${PROTO_FILE_NAME} 22 | 23 | FILE_CONVERTER_NAME = ./scripts/convert_protofile.py 24 | REMOTE_TEST_SETUP_NAME = ./scripts/prepare_remote_test.py 25 | 26 | CONVERTED_PROTO_FILE_NAME = ql2_pb2.py 27 | TARGET_CONVERTED_PROTO_FILE = ${PACKAGE_NAME}/${CONVERTED_PROTO_FILE_NAME} 28 | 29 | 30 | default: help 31 | 32 | help: 33 | @echo "Usage:" 34 | @echo 35 | @echo " make help Print this help message" 36 | @echo " make test-unit Run unit tests" 37 | @echo " make test-integration Run integration tests" 38 | @echo " make test-integration-2.4 Run integration tests" 39 | @echo " make test-remote Run tests on digital ocean" 40 | @echo " make upload-coverage Upload unit test coverage" 41 | @echo " make upload-pypi Release ${PACKAGE_NAME} package to PyPi" 42 | @echo " make clean Cleanup source directory" 43 | @echo " make prepare Prepare ${PACKAGE_NAME} for build" 44 | 45 | test-unit: 46 | pytest -v -m unit 47 | 48 | test-integration: 49 | @rethinkdb& 50 | pytest -v -m integration 51 | @killall rethinkdb 52 | 53 | test-ci: 54 | @rethinkdb& 55 | pytest -v --cov rethinkdb --cov-report xml 56 | @killall rethinkdb 57 | 58 | test-remote: 59 | python ${REMOTE_TEST_SETUP_NAME} pytest -m integration 60 | 61 | install-db: 62 | @sh scripts/install-db.sh 63 | 64 | upload-coverage: 65 | @sh scripts/upload-coverage.sh 66 | 67 | upload-pypi: prepare 68 | @sh scripts/upload-pypi.sh 69 | 70 | clean: 71 | @rm -rf \ 72 | ${TARGET_PROTO_FILE} \ 73 | ${TARGET_CONVERTED_PROTO_FILE} \ 74 | .pytest_cache \ 75 | .eggs \ 76 | .dist \ 77 | *.egg-info 78 | 
79 | prepare: 80 | curl -qo ${TARGET_PROTO_FILE} ${PROTO_FILE_URL} 81 | python ${FILE_CONVERTER_NAME} -l python -i ${TARGET_PROTO_FILE} -o ${TARGET_CONVERTED_PROTO_FILE} 82 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # RethinkDB Python driver 2 | [![PyPI version](https://badge.fury.io/py/rethinkdb.svg)](https://badge.fury.io/py/rethinkdb) [![Build Status](https://travis-ci.org/rethinkdb/rethinkdb-python.svg?branch=master)](https://travis-ci.org/rethinkdb/rethinkdb-python) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/2b5231a6f90a4a1ba2fc795f8466bbe4)](https://www.codacy.com/app/rethinkdb/rethinkdb-python?utm_source=github.com&utm_medium=referral&utm_content=rethinkdb/rethinkdb-python&utm_campaign=Badge_Grade) [![Codacy Badge](https://api.codacy.com/project/badge/Coverage/2b5231a6f90a4a1ba2fc795f8466bbe4)](https://www.codacy.com/app/rethinkdb/rethinkdb-python?utm_source=github.com&utm_medium=referral&utm_content=rethinkdb/rethinkdb-python&utm_campaign=Badge_Coverage) 3 | 4 | ## Overview 5 | 6 | ### What is RethinkDB? 7 | RethinkDB is the first open-source scalable database built for realtime applications. It exposes a new database access model -- instead of polling for changes, the developer can tell the database to continuously push updated query results to applications in realtime. RethinkDB allows developers to build scalable realtime apps in a fraction of the time with less effort. 8 | 9 | ## Installation 10 | ```bash 11 | $ pip install rethinkdb 12 | ``` 13 | *Note: this package is the extracted driver of RethinkDB's original python driver.* 14 | 15 | ## Quickstart 16 | The main difference with the previous driver (except the name of the package) is we are **not** importing RethinkDB as `r`. 
If you would like to use `RethinkDB`'s python driver as a drop in replacement, you should do the following: 17 | 18 | ```python 19 | from rethinkdb import r 20 | 21 | connection = r.connect(db='test') 22 | ``` 23 | 24 | ## Blocking and Non-blocking I/O 25 | This driver supports blocking I/O (i.e. standard Python sockets) as well as 26 | non-blocking I/O through multiple async frameworks: 27 | 28 | * [Asyncio](https://docs.python.org/3/library/asyncio.html) 29 | * [Gevent](http://www.gevent.org/) 30 | * [Tornado](https://www.tornadoweb.org/en/stable/) 31 | * [Trio](https://trio.readthedocs.io/en/latest/) 32 | * [Twisted](https://twistedmatrix.com/trac/) 33 | 34 | The following examples demonstrate how to use the driver in each mode. 35 | 36 | ### Default mode (blocking I/O) 37 | The driver's default mode of operation is to use blocking I/O, i.e. standard Python 38 | sockets. This example shows how to create a table, populate with data, and get every 39 | document. 40 | 41 | ```python 42 | from rethinkdb import r 43 | 44 | connection = r.connect(db='test') 45 | 46 | r.table_create('marvel').run(connection) 47 | 48 | marvel_heroes = r.table('marvel') 49 | marvel_heroes.insert({ 50 | 'id': 1, 51 | 'name': 'Iron Man', 52 | 'first_appearance': 'Tales of Suspense #39' 53 | }).run(connection) 54 | 55 | for hero in marvel_heroes.run(connection): 56 | print(hero['name']) 57 | ``` 58 | 59 | ### Asyncio mode 60 | Asyncio mode is compatible with Python ≥ 3.5. 61 | 62 | ```python 63 | import asyncio 64 | from rethinkdb import r 65 | 66 | async def main(): 67 | async with await r.connect(db='test') as connection: 68 | await r.table_create('marvel').run(connection) 69 | 70 | marvel_heroes = r.table('marvel') 71 | await marvel_heroes.insert({ 72 | 'id': 1, 73 | 'name': 'Iron Man', 74 | 'first_appearance': 'Tales of Suspense #39' 75 | }).run(connection) 76 | 77 | # "async for" is supported in Python ≥ 3.6. In earlier versions, you should 78 | # call "await cursor.next()" in a loop. 
79 | cursor = await marvel_heroes.run(connection) 80 | async for hero in cursor: 81 | print(hero['name']) 82 | # The `with` block performs `await connection.close(noreply_wait=False)`. 83 | 84 | r.set_loop_type('asyncio') 85 | 86 | # "asyncio.run" was added in Python 3.7. In earlier versions, you 87 | # might try asyncio.get_event_loop().run_until_complete(main()). 88 | asyncio.run(main()) 89 | ``` 90 | 91 | ### Gevent mode 92 | 93 | ```python 94 | import gevent 95 | from rethinkdb import r 96 | 97 | def main(): 98 | r.set_loop_type('gevent') 99 | connection = r.connect(db='test') 100 | 101 | r.table_create('marvel').run(connection) 102 | 103 | marvel_heroes = r.table('marvel') 104 | marvel_heroes.insert({ 105 | 'id': 1, 106 | 'name': 'Iron Man', 107 | 'first_appearance': 'Tales of Suspense #39' 108 | }).run(connection) 109 | 110 | for hero in marvel_heroes.run(connection): 111 | print(hero['name']) 112 | 113 | gevent.joinall([gevent.spawn(main)]) 114 | ``` 115 | 116 | ### Tornado mode 117 | Tornado mode is compatible with Tornado < 5.0.0. Tornado 5 is not supported. 
118 | 119 | ```python 120 | from rethinkdb import r 121 | from tornado import gen 122 | from tornado.ioloop import IOLoop 123 | 124 | @gen.coroutine 125 | def main(): 126 | r.set_loop_type('tornado') 127 | connection = yield r.connect(db='test') 128 | 129 | yield r.table_create('marvel').run(connection) 130 | 131 | marvel_heroes = r.table('marvel') 132 | yield marvel_heroes.insert({ 133 | 'id': 1, 134 | 'name': 'Iron Man', 135 | 'first_appearance': 'Tales of Suspense #39' 136 | }).run(connection) 137 | 138 | cursor = yield marvel_heroes.run(connection) 139 | while (yield cursor.fetch_next()): 140 | hero = yield cursor.next() 141 | print(hero['name']) 142 | 143 | IOLoop.current().run_sync(main) 144 | ``` 145 | 146 | ### Trio mode 147 | 148 | ```python 149 | from rethinkdb import r 150 | import trio 151 | 152 | async def main(): 153 | r.set_loop_type('trio') 154 | async with trio.open_nursery() as nursery: 155 | async with r.open(db='test', nursery=nursery) as conn: 156 | await r.table_create('marvel').run(conn) 157 | marvel_heroes = r.table('marvel') 158 | await marvel_heroes.insert({ 159 | 'id': 1, 160 | 'name': 'Iron Man', 161 | 'first_appearance': 'Tales of Suspense #39' 162 | }).run(conn) 163 | 164 | # "async for" is supported in Python ≥ 3.6. In earlier versions, you should 165 | # call "await cursor.next()" in a loop. 166 | cursor = await marvel_heroes.run(conn) 167 | async with cursor: 168 | async for hero in cursor: 169 | print(hero['name']) 170 | 171 | trio.run(main) 172 | ``` 173 | 174 | The Trio mode also supports a database connection pool. You can modify the example above 175 | as follows: 176 | 177 | ```python 178 | db_pool = r.ConnectionPool(db='test', nursery=nursery) 179 | async with db_pool.connection() as conn: 180 | ... 
181 | await db_pool.close() 182 | ``` 183 | 184 | ### Twisted mode 185 | 186 | ```python 187 | from rethinkdb import r 188 | from twisted.internet import reactor, defer 189 | 190 | @defer.inlineCallbacks 191 | def main(): 192 | r.set_loop_type('twisted') 193 | connection = yield r.connect(db='test') 194 | 195 | yield r.table_create('marvel').run(connection) 196 | 197 | marvel_heroes = r.table('marvel') 198 | yield marvel_heroes.insert({ 199 | 'id': 1, 200 | 'name': 'Iron Man', 201 | 'first_appearance': 'Tales of Suspense #39' 202 | }).run(connection) 203 | 204 | cursor = yield marvel_heroes.run(connection) 205 | while (yield cursor.fetch_next()): 206 | hero = yield cursor.next() 207 | print(hero['name']) 208 | 209 | main().addCallback(lambda d: print("stopping") or reactor.stop()) 210 | reactor.run() 211 | ``` 212 | 213 | ## Misc 214 | To help the migration from rethinkdb<2.4 we introduced a shortcut which can easily replace the old `import rethinkdb as r` import with `from rethinkdb import r`. 215 | 216 | ## Run tests 217 | In the `Makefile` you can find three different test commands: `test-unit`, `test-integration` and `test-remote`. As RethinkDB has dropped the support of Windows, we would like to ensure that those of us who are using Windows for development can still contribute. Because of this, we support running integration tests against Digital Ocean Droplets as well. 218 | 219 | Before you run any test, make sure that you install the requirements. 
*To run integration tests locally, make sure you have RethinkDB installed*
257 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | python_files = test_*.py 3 | markers = 4 | unit: Run unit tests 5 | integration: Run integration tests 6 | trio: Run trio related tests 7 | tornado: Run tornado related tests 8 | asyncio: Run asyncio related tests -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | async-generator==1.10; python_version>="3.6" 2 | coverage<=4.5.4; python_version<"3.5" 3 | coverage==5.5; python_version>="3.5" 4 | codacy-coverage==1.3.11 5 | looseversion==1.3.0 6 | mock==3.0.5 7 | pytest-cov==2.10.1 8 | pytest-tornasync==0.6.0.post2; python_version >= '3.5' 9 | pytest-trio==0.6.0; python_version>="3.6" 10 | pytest==4.6.6; python_version<"3.5" 11 | pytest==6.1.2; python_version>="3.5" 12 | six==1.15.0 13 | tornado==5.1.1; python_version<"3.6" 14 | tornado==6.0.4; python_version>="3.6" 15 | trio==0.16.0; python_version>="3.6" 16 | outcome==1.1.0; python_version>="3.6" 17 | outcome==1.0.1; python_version<="3.5" 18 | attrs==20.3.0; python_version>="3.5" 19 | -------------------------------------------------------------------------------- /rethinkdb/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 RethinkDB 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the 'License'); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an 'AS IS' BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
class RethinkDB(builtins.object):
    """Facade object exposing the whole ReQL driver API.

    Re-exports every public name from ``net``, ``query``, ``ast`` and
    ``errors`` as instance attributes, so an instance behaves like the
    pre-2.4 ``import rethinkdb as r`` module object (``r.table(...)``,
    ``r.connect(...)``, and so on).
    """

    def __init__(self):
        super(RethinkDB, self).__init__()

        # Imported lazily here (not at module top level) to avoid circular
        # imports: these submodules import from the rethinkdb package.
        from rethinkdb import (
            _dump,
            _export,
            _import,
            _index_rebuild,
            _restore,
            ast,
            query,
            net,
        )

        self._dump = _dump
        self._export = _export
        self._import = _import
        self._index_rebuild = _index_rebuild
        self._restore = _restore

        # Re-export internal modules for backward compatibility
        self.ast = ast
        self.errors = errors
        self.net = net
        self.query = query

        # Give connections a back-reference to this driver object.
        net.Connection._r = self

        # Flatten each submodule's public API onto this instance.
        for module in (self.net, self.query, self.ast, self.errors):
            for function_name in module.__all__:
                setattr(self, function_name, getattr(module, function_name))

        self.set_loop_type(None)

    def set_loop_type(self, library=None):
        """Select the I/O framework used by subsequently created connections.

        :param library: one of ``"asyncio"``, ``"gevent"``, ``"tornado"``,
            ``"trio"``, ``"twisted"`` or ``None``.  ``None`` (or any value
            that leaves no connection type configured) selects the blocking
            ``DefaultConnection``.

        Framework-specific modules are imported lazily so their third-party
        dependencies are only required when actually used.
        """
        # elif chain: at most one branch can match, so there is no need to
        # evaluate every comparison as the original sequential ifs did.
        if library == "asyncio":
            from rethinkdb.asyncio_net import net_asyncio

            self.connection_type = net_asyncio.Connection
        elif library == "gevent":
            from rethinkdb.gevent_net import net_gevent

            self.connection_type = net_gevent.Connection
        elif library == "tornado":
            from rethinkdb.tornado_net import net_tornado

            self.connection_type = net_tornado.Connection
        elif library == "trio":
            from rethinkdb.trio_net import net_trio

            self.connection_type = net_trio.Connection
        elif library == "twisted":
            from rethinkdb.twisted_net import net_twisted

            self.connection_type = net_twisted.Connection

        # getattr guard: with an unrecognized library name and no prior
        # call, `self.connection_type` would not exist yet and the bare
        # attribute access would raise AttributeError.
        if library is None or getattr(self, "connection_type", None) is None:
            self.connection_type = self.net.DefaultConnection

    def connect(self, *args, **kwargs):
        """Open a connection using the currently selected connection type."""
        return self.make_connection(self.connection_type, *args, **kwargs)


r = RethinkDB()
def startInterpreter(argv=None, prog=None):
    """Start an interactive Python shell (repl) with the driver imported.

    The driver is exposed as both `r` and `rethinkdb`; when a connection
    can be opened it is exposed as `conn` and registered via `repl()` so
    queries can be `run()` without an explicit connection argument.
    """
    repl_variables = {"r": net.Connection._r, "rethinkdb": net.Connection._r}
    banner = "The RethinkDB driver has been imported as `r`."

    # -- get host/port setup

    # - parse command line
    parser = utils_common.CommonOptionsParser(
        prog=prog,
        description="An interactive Python shell (repl) with the RethinkDB driver imported",
    )
    # connect=False: the connection is opened manually below so a failure
    # can be reported in the banner instead of aborting the repl.
    options, args = parser.parse_args(
        argv if argv is not None else sys.argv[1:], connect=False
    )

    if args:
        parser.error(
            "No positional arguments supported. Unrecognized option(s): %s" % args
        )

    # -- open connection

    try:
        repl_variables["conn"] = options.retryQuery.conn()
        repl_variables["conn"].repl()
        banner += """
A connection to %s:%d has been established as `conn`
and can be used by calling `run()` on a query without any arguments.""" % (
            options.hostname,
            options.driver_port,
        )
    except errors.ReqlDriverError as e:
        # A failed connection is not fatal: start the repl anyway and warn.
        banner += "\nWarning: %s" % str(e)
        if options.debug:
            banner += "\n" + traceback.format_exc()

    # -- start interpreter

    code.interact(banner=banner + "\n==========", local=repl_variables)


if __name__ == "__main__":
    if __package__ is None:
        __package__ = "rethinkdb"

    # -- figure out which mode we are in
    modes = ["dump", "export", "import", "index_rebuild", "repl", "restore"]

    if len(sys.argv) < 2 or sys.argv[1] not in modes:
        sys.exit(
            "ERROR: Must be called with one of the following verbs: %s"
            % ", ".join(modes)
        )

    verb = sys.argv[1]
    prog = "python -m rethinkdb"
    if sys.version_info < (2, 7) or (
        sys.version_info >= (3, 0) and sys.version_info < (3, 4)
    ):
        prog += ".__main__"  # Python versions 2.6, 3.0, 3.1 and 3.3 do not support running packages
    prog += " " + verb
    # Arguments after the verb are passed through to the sub-command parser.
    argv = sys.argv[2:]

    # Sub-command modules are imported lazily so only the requested one loads.
    if verb == "dump":
        from . import _dump

        exit(_dump.main(argv, prog=prog))
    elif verb == "export":
        from . import _export

        exit(_export.main(argv, prog=prog))
    elif verb == "import":
        from . import _import

        exit(_import.main(argv, prog=prog))
    elif verb == "index_rebuild":
        from . import _index_rebuild

        exit(_index_rebuild.main(argv, prog=prog))
    elif verb == "repl":
        startInterpreter(argv, prog=prog)
    elif verb == "restore":
        from . import _restore

        exit(_restore.main(argv, prog=prog))
def parse_options(argv, prog=None):
    """Parse the command line for `rethinkdb dump`.

    :param argv: list of command-line arguments (without the program name)
    :param prog: program name to show in usage/help output
    :returns: the parsed options, with ``dump_name`` and a normalized
        ``out_file`` (``sys.stdout`` when ``-f -`` was given) added
    :raises SystemExit: via ``parser.error`` on invalid arguments
    """
    parser = utils_common.CommonOptionsParser(
        usage=usage, epilog=help_epilog, prog=prog
    )

    parser.add_option(
        "-f",
        "--file",
        dest="out_file",
        metavar="FILE",
        default=None,
        help="file to write archive to (defaults to rethinkdb_dump_DATE_TIME.tar.gz);\nif FILE is -, use standard "
        "output (note that intermediate files will still be written to the --temp-dir directory)",
    )
    parser.add_option(
        "-e",
        "--export",
        dest="db_tables",
        metavar="DB|DB.TABLE",
        default=[],
        type="db_table",
        help="limit dump to the given database or table (may be specified multiple times)",
        action="append",
    )

    parser.add_option(
        "--temp-dir",
        dest="temp_dir",
        metavar="directory",
        default=None,
        help="the directory to use for intermediary results",
    )
    parser.add_option(
        "--overwrite-file",
        dest="overwrite",
        default=False,
        help="overwrite -f/--file if it exists",
        action="store_true",
    )
    parser.add_option(
        "--clients",
        dest="clients",
        metavar="NUM",
        default=3,
        help="number of tables to export simultaneously (default: 3)",
        type="pos_int",
    )
    parser.add_option(
        "--read-outdated",
        dest="outdated",
        default=False,
        help="use outdated read mode",
        action="store_true",
    )

    options, args = parser.parse_args(argv)

    # Check validity of arguments
    if len(args) != 0:
        # FIX: parser.error() prints the message and exits (SystemExit);
        # the previous `raise parser.error(...)` would have attempted to
        # raise its None return value and is inconsistent with every other
        # call site in this module.
        parser.error(
            "No positional arguments supported. Unrecognized option(s): %s" % args
        )

    # Add dump name; Windows filenames cannot contain colons.
    if platform.system() == "Windows" or platform.system().lower().startswith("cygwin"):
        options.dump_name = "rethinkdb_dump_%s" % datetime.datetime.today().strftime(
            "%Y-%m-%dT%H-%M-%S"
        )  # no colons in name
    else:
        options.dump_name = "rethinkdb_dump_%s" % datetime.datetime.today().strftime(
            "%Y-%m-%dT%H:%M:%S"
        )

    # Verify valid output file
    if options.out_file == "-":
        options.out_file = sys.stdout
        options.quiet = True  # progress output would corrupt the archive stream
    elif options.out_file is None:
        options.out_file = os.path.realpath("%s.tar.gz" % options.dump_name)
    else:
        options.out_file = os.path.realpath(options.out_file)

    if options.out_file is not sys.stdout:
        if os.path.exists(options.out_file) and not options.overwrite:
            parser.error("Output file already exists: %s" % options.out_file)
        if os.path.exists(options.out_file) and not os.path.isfile(options.out_file):
            parser.error(
                "There is a non-file at the -f/--file location: %s" % options.out_file
            )

    # Verify valid client count (belt-and-braces: "pos_int" should already
    # guarantee this, but fail loudly rather than hang later)
    if options.clients < 1:
        raise RuntimeError(
            "Error: invalid number of clients (%d), must be greater than zero"
            % options.clients
        )

    # Make sure the temporary directory exists and is accessible
    if options.temp_dir is not None:
        if not os.path.exists(options.temp_dir):
            try:
                os.makedirs(options.temp_dir)
            except OSError:
                parser.error(
                    "Could not create temporary directory: %s" % options.temp_dir
                )
        if not os.path.isdir(options.temp_dir):
            parser.error(
                "Temporary directory doesn't exist or is not a directory: %s"
                % options.temp_dir
            )
        if not os.access(options.temp_dir, os.W_OK):
            parser.error("Temporary directory inaccessible: %s" % options.temp_dir)

    return options
def main(argv=None, prog=None):
    """Entry point for `rethinkdb dump`.

    Exports the selected tables to a temporary directory, packs that
    directory into a gzipped tar archive, and removes the temporary
    directory again.  Returns a process exit code: 0 on success, 1 on
    any failure.
    """
    options = parse_options(argv or sys.argv[1:], prog=prog)
    try:
        if not options.quiet:
            # Print a warning about the capabilities of dump, so no one is confused (hopefully)
            print(
                """\
NOTE: 'rethinkdb-dump' saves data, secondary indexes, and write hooks, but does *not* save
cluster metadata. You will need to recreate your cluster setup yourself after
you run 'rethinkdb-restore'."""
            )

        try:
            start_time = time.time()
            archive = None

            # -- _export options - need to be kept in sync with _export

            options.directory = os.path.realpath(tempfile.mkdtemp(dir=options.temp_dir))
            options.fields = None
            options.delimiter = None
            options.format = "json"

            # -- export to a directory

            if not options.quiet:
                print("  Exporting to temporary directory...")

            try:
                _export.run(options)
            except Exception as exc:
                default_logger.exception(exc)

                if options.debug:
                    sys.stderr.write("\n%s\n" % traceback.format_exc())

                # Re-raise with a friendlier message; the outer handler
                # turns it into an exit code.
                raise Exception("Error: export failed, %s" % exc)

            # -- zip directory

            if not options.quiet:
                print("  Zipping export directory...")

            try:
                # -f - writes the archive straight to the stdout stream.
                if hasattr(options.out_file, "read"):
                    archive = tarfile.open(fileobj=options.out_file, mode="w:gz")
                else:
                    archive = tarfile.open(name=options.out_file, mode="w:gz")
                for curr, _, files in os.walk(os.path.realpath(options.directory)):
                    for data_file in files:
                        full_path = os.path.join(options.directory, curr, data_file)
                        # Store entries under the dump_name prefix so the
                        # archive unpacks into a single directory.
                        archive_path = os.path.join(
                            options.dump_name,
                            os.path.relpath(full_path, options.directory),
                        )
                        archive.add(full_path, arcname=archive_path)
                        # Delete each file as soon as it is archived to keep
                        # peak disk usage down.
                        os.unlink(full_path)
            finally:
                if archive:
                    archive.close()

            # --

            if not options.quiet:
                print(
                    "Done (%.2f seconds): %s"
                    % (
                        time.time() - start_time,
                        options.out_file.name
                        if hasattr(options.out_file, "name")
                        else options.out_file,
                    )
                )
        except KeyboardInterrupt:
            time.sleep(0.2)
            raise RuntimeError("Interrupted")
        finally:
            # Always clean up the temporary export directory.
            if os.path.exists(options.directory):
                shutil.rmtree(options.directory)

    except Exception as ex:
        if options.debug:
            traceback.print_exc()
        print(ex, file=sys.stderr)
        return 1
    return 0
def parse_options(argv, prog=None):
    """Build the option parser for `rethinkdb index-rebuild` and parse argv.

    :param argv: command-line arguments (without the program name)
    :param prog: program name to show in usage/help output
    :returns: the parsed options; positional arguments are rejected
    """
    parser = utils_common.CommonOptionsParser(
        usage=usage, epilog=help_epilog, prog=prog
    )

    # -r/--rebuild may be given several times; each value is a DB or
    # DB.TABLE parsed by the custom "db_table" option type.
    parser.add_option(
        "-r",
        "--rebuild",
        dest="db_table",
        metavar="DB|DB.TABLE",
        action="append",
        type="db_table",
        default=[],
        help="databases or tables to rebuild indexes on (default: all, may be specified multiple times)",
    )
    # -n limits how many indexes are rebuilt at the same time.
    parser.add_option(
        "-n",
        dest="concurrent",
        metavar="NUM",
        type="pos_int",
        default=1,
        help="concurrent indexes to rebuild (default: 1)",
    )
    # --force rebuilds every index, not only the outdated ones.
    parser.add_option(
        "--force",
        dest="force",
        action="store_true",
        default=False,
        help="rebuild non-outdated indexes",
    )

    options, args = parser.parse_args(argv)

    # This command takes no positional arguments at all.
    if args:
        parser.error(
            "Error: No positional arguments supported. Unrecognized option '%s'"
            % args[0]
        )

    return options
def rebuild_indexes(options):
    """Rebuild outdated (or, with --force, all) secondary indexes.

    For every selected index a temporary copy prefixed with
    ``TMP_INDEX_PREFIX`` is created from the original index function; once
    it reports ready it is renamed over the original (overwrite=True).
    At most ``options.concurrent`` indexes are built simultaneously, and a
    progress bar is shown unless ``options.quiet`` is set.
    """
    # flesh out options.db_table
    if not options.db_table:
        options.db_table = [
            utils_common.DbTable(x["db"], x["name"])
            for x in options.retryQuery(
                "all tables",
                query.db("rethinkdb").table("table_config").pluck(["db", "name"]),
            )
        ]
    else:
        for db_table in options.db_table[:]:  # work from a copy
            if not db_table[1]:
                # A bare database entry: expand it to all of its tables,
                # then drop the database-only entry itself.
                options.db_table += [
                    utils_common.DbTable(db_table[0], x)
                    for x in options.retryQuery(
                        "table list of %s" % db_table[0],
                        query.db(db_table[0]).table_list(),
                    )
                ]
                # BUG FIX: `del options.db_table[db_table]` used the tuple
                # itself as a list index (TypeError); remove by value.
                options.db_table.remove(db_table)

    # wipe out any leftover temporary indexes from interrupted runs
    for db, table in options.db_table:
        for index in options.retryQuery(
            "list indexes on %s.%s" % (db, table),
            query.db(db).table(table).index_list(),
        ):
            if index.startswith(TMP_INDEX_PREFIX):
                # BUG FIX: index_list() yields plain index-name strings;
                # the original subscripted them like dicts (index["db"],
                # index["table"], index["name"]), which raised TypeError.
                options.retryQuery(
                    "drop index: %s.%s:%s" % (db, table, index),
                    query.db(db).table(table).index_drop(index),
                )

    # get the list of indexes to rebuild
    indexes_to_build = []
    for db, table in options.db_table:
        indexes = None
        if not options.force:
            indexes = options.retryQuery(
                "get outdated indexes from %s.%s" % (db, table),
                query.db(db)
                .table(table)
                .index_status()
                .filter({"outdated": True})
                .get_field("index"),
            )
        else:
            indexes = options.retryQuery(
                "get all indexes from %s.%s" % (db, table),
                query.db(db).table(table).index_status().get_field("index"),
            )
        for index in indexes:
            indexes_to_build.append({"db": db, "table": table, "name": index})

    # rebuild selected indexes

    total_indexes = len(indexes_to_build)
    indexes_completed = 0
    progress_ratio = 0.0
    highest_progress = 0.0
    indexes_in_progress = []

    if not options.quiet:
        print(
            "Rebuilding %d index%s: %s"
            % (
                total_indexes,
                "es" if total_indexes > 1 else "",
                ", ".join(
                    ["`%(db)s.%(table)s:%(name)s`" % i for i in indexes_to_build]
                ),
            )
        )

    while len(indexes_to_build) > 0 or len(indexes_in_progress) > 0:
        # Make sure we're running the right number of concurrent index rebuilds
        while (
            len(indexes_to_build) > 0 and len(indexes_in_progress) < options.concurrent
        ):
            index = indexes_to_build.pop()
            indexes_in_progress.append(index)
            index["temp_name"] = TMP_INDEX_PREFIX + index["name"]
            index["progress"] = 0
            index["ready"] = False

            existing_indexes = dict(
                (x["index"], x["function"])
                for x in options.retryQuery(
                    "existing indexes",
                    query.db(index["db"])
                    .table(index["table"])
                    .index_status()
                    .pluck("index", "function"),
                )
            )

            if index["name"] not in existing_indexes:
                raise AssertionError(
                    "{index_name} is not part of existing indexes {indexes}".format(
                        index_name=index["name"], indexes=", ".join(existing_indexes)
                    )
                )

            # Create the temp index from the original's binary function
            # representation, unless a previous run already created it.
            if index["temp_name"] not in existing_indexes:
                options.retryQuery(
                    "create temp index: %(db)s.%(table)s:%(name)s" % index,
                    query.db(index["db"])
                    .table(index["table"])
                    .index_create(index["temp_name"], existing_indexes[index["name"]]),
                )

        # Report progress
        highest_progress = max(highest_progress, progress_ratio)
        if not options.quiet:
            utils_common.print_progress(highest_progress)

        # Check the status of indexes in progress
        progress_ratio = 0.0
        for index in indexes_in_progress:
            status = options.retryQuery(
                "progress `%(db)s.%(table)s` index `%(name)s`" % index,
                query.db(index["db"])
                .table(index["table"])
                .index_status(index["temp_name"])
                .nth(0),
            )
            if status["ready"]:
                # Atomically replace the outdated index with the rebuilt one.
                index["ready"] = True
                options.retryQuery(
                    "rename `%(db)s.%(table)s` index `%(name)s`" % index,
                    query.db(index["db"])
                    .table(index["table"])
                    .index_rename(index["temp_name"], index["name"], overwrite=True),
                )
            else:
                progress_ratio += status.get("progress", 0) / total_indexes

        indexes_in_progress = [
            index for index in indexes_in_progress if not index["ready"]
        ]
        indexes_completed = (
            total_indexes - len(indexes_to_build) - len(indexes_in_progress)
        )
        progress_ratio += float(indexes_completed) / total_indexes

        if len(indexes_in_progress) == options.concurrent or (
            len(indexes_in_progress) > 0 and len(indexes_to_build) == 0
        ):
            # Short sleep to keep from killing the CPU
            time.sleep(0.1)

    # Make sure the progress bar says we're done and get past the progress bar line
    if not options.quiet:
        utils_common.print_progress(1.0)
        print("")
def main(argv=None, prog=None):
    """Entry point for `rethinkdb index-rebuild`.

    Parses the command line, rebuilds the selected indexes, and returns a
    process exit code: 0 on success, 1 on any failure.
    """
    opts = parse_options(argv or sys.argv[1:], prog=prog)
    started = time.time()

    try:
        rebuild_indexes(opts)
    except Exception as error:
        # Report the failure (with a traceback when --debug is set) and
        # signal it through the exit code.
        if opts.debug:
            traceback.print_exc()
        if not opts.quiet:
            print(error, file=sys.stderr)
        return 1

    if not opts.quiet:
        elapsed = time.time() - started
        print("Done (%d seconds)" % elapsed)
    return 0
def parse_options(argv, prog=None):
    """Build and run the option parser for `rethinkdb restore`.

    Returns the validated options object; invalid input goes through
    parser.error(), which prints a message and exits.
    """
    opt_parser = utils_common.CommonOptionsParser(
        usage=usage, epilog=help_epilog, prog=prog
    )

    opt_parser.add_option(
        "-i",
        "--import",
        dest="db_tables",
        metavar="DB|DB.TABLE",
        default=[],
        help="limit restore to the given database or table (may be specified multiple times)",
        action="append",
        type="db_table",
    )

    opt_parser.add_option(
        "--temp-dir",
        dest="temp_dir",
        metavar="DIR",
        default=None,
        help="directory to use for intermediary results",
    )
    opt_parser.add_option(
        "--clients",
        dest="clients",
        metavar="CLIENTS",
        default=8,
        help="client connections to use (default: 8)",
        type="pos_int",
    )
    opt_parser.add_option(
        "--hard-durability",
        dest="durability",
        action="store_const",
        default="soft",
        help="use hard durability writes (slower, uses less memory)",
        const="hard",
    )
    opt_parser.add_option(
        "--force",
        dest="force",
        action="store_true",
        default=False,
        help="import data even if a table already exists",
    )
    opt_parser.add_option(
        "--no-secondary-indexes",
        dest="indexes",
        action="store_false",
        default=None,
        help="do not create secondary indexes for the restored tables",
    )

    # Hidden tuning knobs shared with `rethinkdb import`.
    opt_parser.add_option(
        "--writers-per-table",
        dest="writers",
        default=multiprocessing.cpu_count(),
        help=optparse.SUPPRESS_HELP,
        type="pos_int",
    )
    opt_parser.add_option(
        "--batch-size",
        dest="batch_size",
        default=utils_common.default_batch_size,
        help=optparse.SUPPRESS_HELP,
        type="pos_int",
    )

    # Replication settings
    replication_group = optparse.OptionGroup(opt_parser, "Replication Options")
    replication_group.add_option(
        "--shards",
        dest="create_args",
        metavar="SHARDS",
        help="shards to setup on created tables (default: 1)",
        type="pos_int",
        action="add_key",
    )
    replication_group.add_option(
        "--replicas",
        dest="create_args",
        metavar="REPLICAS",
        help="replicas to setup on created tables (default: 1)",
        type="pos_int",
        action="add_key",
    )
    opt_parser.add_option_group(replication_group)

    options, args = opt_parser.parse_args(argv)

    # -- Validate the positional archive argument

    if not args:
        opt_parser.error(
            "Archive to import not specified. Provide an archive file created by rethinkdb-dump."
        )
    elif len(args) > 1:
        opt_parser.error("Only one positional argument supported")

    options.in_file = args[0]
    if options.in_file == "-":
        # "-" means read the archive from standard input.
        options.in_file = sys.stdin
    elif not os.path.isfile(options.in_file):
        opt_parser.error("Archive file does not exist: %s" % options.in_file)
    else:
        options.in_file = os.path.realpath(options.in_file)

    # -- Validate the temporary directory, when one was given

    if options.temp_dir:
        if not os.path.isdir(options.temp_dir):
            opt_parser.error(
                "Temporary directory doesn't exist or is not a directory: %s"
                % options.temp_dir
            )
        if not os.access(options.temp_dir, os.W_OK):
            opt_parser.error("Temporary directory inaccessible: %s" % options.temp_dir)

    # -- Default the replication settings

    if options.create_args is None:
        options.create_args = {}

    return options
def do_unzip(temp_dir, options):
    """Extract the usable .json/.csv/.info members of the archive into temp_dir.

    Returns the list of member names that were skipped (path-traversal
    attempts or unsupported file types).  Raises RuntimeError when the
    archive layout is unexpected or no usable files were present.
    """
    wanted = set(options.db_tables)
    top_level = None
    files_ignored = []
    files_found = False
    archive = None

    # tarfile.open takes either name= or fileobj= depending on whether we
    # were handed a path or an already-open stream (e.g. stdin).
    open_kwargs = {"mode": "r|*"}
    source_key = "fileobj" if hasattr(options.in_file, "read") else "name"
    open_kwargs[source_key] = options.in_file

    try:
        archive = tarfile.open(**open_kwargs)
        for member in archive:
            if not member.isfile():
                continue  # skip everything but files

            # Normalize the member path before any checks.
            relpath = os.path.relpath(
                os.path.realpath(member.name.strip().lstrip(os.sep))
            )

            # Refuse anything that would escape the extraction folder.
            if relpath.startswith(os.path.pardir):
                files_ignored.append(member.name)
                continue

            # Only the file types the importer understands are extracted.
            if os.path.splitext(relpath)[1] not in (".json", ".csv", ".info"):
                files_ignored.append(member.name)
                continue

            # The archive must look like <top>/<db>/<table>.<ext>
            try:
                top, db, file_name = relpath.split(os.sep)
            except ValueError:
                raise RuntimeError(
                    "Error: Archive file has an unexpected directory structure: %s"
                    % member.name
                )

            if not top_level:
                top_level = top
            elif top != top_level:
                raise RuntimeError(
                    "Error: Archive file has an unexpected directory structure (%s vs %s)"
                    % (top, top_level)
                )

            # Honor any --import db/table filters.
            table = os.path.splitext(file_name)[0]
            if wanted and (db, table) not in wanted and (db, None) not in wanted:
                continue  # skip without comment

            # Stream the member out to the filesystem in 128 KiB chunks.
            files_found = True
            dest_path = os.path.join(temp_dir, db, file_name)
            dest_dir = os.path.dirname(dest_path)
            if not os.path.exists(dest_dir):
                os.makedirs(dest_dir)

            extracted = archive.extractfile(member)
            try:
                with open(dest_path, "wb") as dest:
                    while True:
                        chunk = extracted.read(1024 * 128)
                        if not chunk:
                            break
                        dest.write(chunk)
            finally:
                extracted.close()

            if not os.path.isfile(dest_path):
                raise AssertionError(
                    "Was not able to write {destination_path}".format(
                        destination_path=dest_path
                    )
                )

    finally:
        if archive:
            archive.close()

    if not files_found:
        raise RuntimeError("Error: Archive file had no files")

    # - send the ignored list back to our caller
    return files_ignored
def do_restore(options):
    """Extract the archive into a scratch directory and run the importer on it.

    Raises RuntimeError on extraction or import failure; the scratch
    directory is always removed, even on error.
    """
    # All intermediate files live in a throwaway directory.
    temp_dir = tempfile.mkdtemp(dir=options.temp_dir)

    try:
        # - extract the archive
        if not options.quiet:
            print("Extracting archive file...")
        start_time = time.time()

        do_unzip(temp_dir, options)

        if not options.quiet:
            print(" Done (%d seconds)" % (time.time() - start_time))

        # - default _import options: re-point a copy of the shared options at
        # the extracted directory so the import machinery can be reused as-is.
        options = copy.copy(options)
        options.fields = None
        options.directory = temp_dir
        options.file = None

        sources = _import.parse_sources(options)

        # - run the import
        if not options.quiet:
            print("Importing from directory...")

        try:
            _import.import_tables(options, sources)
        except RuntimeError as ex:
            if options.debug:
                traceback.print_exc()
            if str(ex) == "Warnings occurred during import":
                raise RuntimeError(
                    "Warning: import did not create some secondary indexes."
                )
            # Strip any leading "Error: " so we don't double it up below.
            error_string = str(ex)
            if error_string.startswith("Error: "):
                error_string = error_string[len("Error: "):]
            raise RuntimeError("Error: import failed: %s" % error_string)
        # 'Done' message will be printed by the import script
    finally:
        shutil.rmtree(temp_dir)


def main(argv=None, prog=None):
    """Entry point for `rethinkdb restore`; returns a process exit code."""
    if argv is None:
        argv = sys.argv[1:]
    options = parse_options(argv, prog=prog)

    try:
        do_restore(options)
    except RuntimeError as ex:
        print(ex, file=sys.stderr)
        return 1
    return 0


if __name__ == "__main__":
    # BUG FIX: was `exit(main())` — the `exit` builtin is injected by the
    # `site` module and is absent under `python -S`; `sys.exit` is the
    # documented entry-point idiom and matches _index_rebuild.py.
    sys.exit(main())
-------------------------------------------------------------------------------- /rethinkdb/backports/ssl_match_hostname/LICENSE.txt: -------------------------------------------------------------------------------- 1 | Python License (Python-2.0) 2 | 3 | Python License, Version 2 (Python-2.0) 4 | 5 | PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 6 | -------------------------------------------- 7 | 8 | 1. This LICENSE AGREEMENT is between the Python Software Foundation 9 | ("PSF"), and the Individual or Organization ("Licensee") accessing and 10 | otherwise using this software ("Python") in source or binary form and 11 | its associated documentation. 12 | 13 | 2. Subject to the terms and conditions of this License Agreement, PSF 14 | hereby grants Licensee a nonexclusive, royalty-free, world-wide 15 | license to reproduce, analyze, test, perform and/or display publicly, 16 | prepare derivative works, distribute, and otherwise use Python 17 | alone or in any derivative version, provided, however, that PSF's 18 | License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 19 | 2001-2013 Python Software Foundation; All Rights Reserved" are retained in 20 | Python alone or in any derivative version prepared by Licensee. 21 | 22 | 3. In the event Licensee prepares a derivative work that is based on 23 | or incorporates Python or any part thereof, and wants to make 24 | the derivative work available to others as provided herein, then 25 | Licensee hereby agrees to include in any such work a brief summary of 26 | the changes made to Python. 27 | 28 | 4. PSF is making Python available to Licensee on an "AS IS" 29 | basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR 30 | IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND 31 | DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS 32 | FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT 33 | INFRINGE ANY THIRD PARTY RIGHTS. 34 | 35 | 5. 
and verify that it matches the hostname
that you are trying to reach.
20 | Simply make this distribution a dependency of your package, 21 | and then use it like this:: 22 | 23 | from backports.ssl_match_hostname import match_hostname, CertificateError 24 | ... 25 | sslsock = ssl.wrap_socket(sock, ssl_version=ssl.PROTOCOL_SSLv3, 26 | cert_reqs=ssl.CERT_REQUIRED, ca_certs=...) 27 | try: 28 | match_hostname(sslsock.getpeercert(), hostname) 29 | except CertificateError, ce: 30 | ... 31 | 32 | Note that the ``ssl`` module is only included in the Standard Library 33 | for Python 2.6 and later; 34 | users of Python 2.5 or earlier versions 35 | will also need to install the ``ssl`` distribution 36 | from the Python Package Index to use code like that shown above. 37 | 38 | Brandon Craig Rhodes is merely the packager of this distribution; 39 | the actual code inside comes verbatim from Python 3.4. 40 | 41 | History 42 | ------- 43 | * This function was introduced in python-3.2 44 | * It was updated for python-3.4a1 for a CVE 45 | (backports-ssl_match_hostname-3.4.0.1) 46 | * It was updated from RFC2818 to RFC 6125 compliance in order to fix another 47 | security flaw for python-3.3.3 and python-3.4a5 48 | (backports-ssl_match_hostname-3.4.0.2) 49 | 50 | 51 | .. _RFC2818: http://tools.ietf.org/html/rfc2818.html 52 | 53 | -------------------------------------------------------------------------------- /rethinkdb/backports/ssl_match_hostname/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 RethinkDB 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the 'License'); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an 'AS IS' BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # 15 | # This file incorporates work covered by the following copyright: 16 | # Copyright 2010-2016 RethinkDB, all rights reserved. 17 | 18 | 19 | """The match_hostname() function from Python 3.3.3, essential when using SSL.""" 20 | 21 | import re 22 | 23 | __version__ = "3.4.0.2" 24 | 25 | 26 | class CertificateError(ValueError): 27 | pass 28 | 29 | 30 | def _dnsname_match(domain_name, hostname, max_wildcards=1): 31 | """Matching according to RFC 6125, section 6.4.3 32 | 33 | http://tools.ietf.org/html/rfc6125#section-6.4.3 34 | """ 35 | pats = [] 36 | if not domain_name: 37 | return False 38 | 39 | # Ported from python3-syntax: 40 | # leftmost, *remainder = domain_name.split(r'.') 41 | parts = domain_name.split(r".") 42 | leftmost = parts[0] 43 | remainder = parts[1:] 44 | 45 | wildcards = leftmost.count("*") 46 | if wildcards > max_wildcards: 47 | # Issue #17980: avoid denials of service by refusing more 48 | # than one wildcard per fragment. A survey of established 49 | # policy among SSL implementations showed it to be a 50 | # reasonable choice. 51 | raise CertificateError( 52 | "too many wildcards in certificate DNS name: " + repr(domain_name) 53 | ) 54 | 55 | # speed up common case w/o wildcards 56 | if not wildcards: 57 | return domain_name.lower() == hostname.lower() 58 | 59 | # RFC 6125, section 6.4.3, subitem 1. 60 | # The client SHOULD NOT attempt to match a presented identifier in which 61 | # the wildcard character comprises a label other than the left-most label. 62 | if leftmost == "*": 63 | # When '*' is a fragment by itself, it matches a non-empty dotless 64 | # fragment. 65 | pats.append("[^.]+") 66 | elif leftmost.startswith("xn--") or hostname.startswith("xn--"): 67 | # RFC 6125, section 6.4.3, subitem 3. 
68 | # The client SHOULD NOT attempt to match a presented identifier 69 | # where the wildcard character is embedded within an A-label or 70 | # U-label of an internationalized domain name. 71 | pats.append(re.escape(leftmost)) 72 | else: 73 | # Otherwise, '*' matches any dotless string, e.g. www* 74 | pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) 75 | 76 | # add the remaining fragments, ignore any wildcards 77 | for frag in remainder: 78 | pats.append(re.escape(frag)) 79 | 80 | pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) 81 | return pat.match(hostname) 82 | 83 | 84 | def match_hostname(cert, hostname): 85 | """Verify that *cert* (in decoded format as returned by 86 | SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 87 | rules are followed, but IP addresses are not accepted for *hostname*. 88 | 89 | CertificateError is raised on failure. On success, the function 90 | returns nothing. 91 | """ 92 | 93 | if not cert: 94 | raise ValueError("empty or no certificate") 95 | 96 | dnsnames = [] 97 | san = cert.get("subjectAltName", ()) 98 | for key, value in san: 99 | if key == "DNS": 100 | if _dnsname_match(value, hostname): 101 | return 102 | dnsnames.append(value) 103 | 104 | if not dnsnames: 105 | # The subject is only checked when there is no dNSName entry 106 | # in subjectAltName 107 | for sub in cert.get("subject", ()): 108 | for key, value in sub: 109 | # XXX according to RFC 2818, the most specific Common Name 110 | # must be used. 
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate")

    checked_names = []

    # Prefer dNSName entries from subjectAltName.
    for key, value in cert.get("subjectAltName", ()):
        if key == "DNS":
            if _dnsname_match(value, hostname):
                return
            checked_names.append(value)

    # The subject commonName is only consulted when no dNSName entry
    # was present in subjectAltName.
    if not checked_names:
        for sub in cert.get("subject", ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == "commonName":
                    if _dnsname_match(value, hostname):
                        return
                    checked_names.append(value)

    if len(checked_names) > 1:
        raise CertificateError(
            "hostname %r doesn't match either of %s"
            % (hostname, ", ".join(map(repr, checked_names)))
        )
    if len(checked_names) == 1:
        raise CertificateError(
            "hostname %r doesn't match %r" % (hostname, checked_names[0])
        )
    raise CertificateError(
        "no appropriate commonName or subjectAltName fields were found"
    )
try:
    unicode  # only defined on Python 2; NameError on Python 3

    def convertForPrint(inputString):
        """Encode unicode text for stdout on Python 2; stringify otherwise."""
        if not isinstance(inputString, unicode):  # noqa: F821
            return str(inputString)
        encoding = getattr(sys.stdout, "encoding", None)
        return inputString.encode(encoding or "utf-8", "replace")


except NameError:

    def convertForPrint(inputString):
        """Python 3: text is already printable as-is."""
        return inputString


try:
    {}.iteritems  # Python 2 dicts only

    def dict_items(d):
        """Iterate (key, value) pairs without building a list (Python 2)."""
        return d.iteritems()


except AttributeError:

    def dict_items(d):
        """Iterate (key, value) pairs via the Python 3 view object."""
        return d.items()


class ReqlCursorEmpty(Exception):
    """Raised when the next item is requested from an exhausted cursor."""

    def __init__(self):
        message = "Cursor is empty."
        super(ReqlCursorEmpty, self).__init__(message)
        self.message = message


RqlCursorEmpty = ReqlCursorEmpty
class ReqlError(Exception):
    """Base class for ReQL errors; optionally carries the failing query."""

    def __init__(self, message, term=None, frames=None):
        super(ReqlError, self).__init__(message)
        self.message = message
        self.frames = frames
        # A printer is only built when we can point at the failing term.
        if term is not None and frames is not None:
            self.query_printer = QueryPrinter(term, self.frames)

    def __str__(self):
        if self.frames is None:
            return convertForPrint(self.message)
        rendered = "%s in:\n%s\n%s" % (
            self.message.rstrip("."),
            self.query_printer.print_query(),
            self.query_printer.print_carrots(),
        )
        return convertForPrint(rendered)

    def __repr__(self):
        return "<%s instance: %s >" % (self.__class__.__name__, str(self))


RqlError = ReqlError


class ReqlCompileError(ReqlError):
    """The query could not be compiled."""


RqlCompileError = ReqlCompileError


class ReqlDriverCompileError(ReqlCompileError):
    """The driver rejected the query before sending it."""


class ReqlServerCompileError(ReqlCompileError):
    """The server rejected the query at compile time."""


class ReqlRuntimeError(ReqlError):
    """The query failed while executing on the server."""


RqlRuntimeError = ReqlRuntimeError


class ReqlQueryLogicError(ReqlRuntimeError):
    """The query contained a logic error detected at runtime."""


class ReqlNonExistenceError(ReqlQueryLogicError):
    """A referenced value (row, field, ...) does not exist."""


class ReqlResourceLimitError(ReqlRuntimeError):
    """The server exceeded a resource limit (e.g. array size)."""


class ReqlUserError(ReqlRuntimeError):
    """An error raised by r.error() within a query."""


class ReqlInternalError(ReqlRuntimeError):
    """An internal server error."""


class ReqlAvailabilityError(ReqlRuntimeError):
    """A server or shard involved in the query is unavailable."""


class ReqlOpFailedError(ReqlAvailabilityError):
    """The operation failed due to cluster state."""


class ReqlOpIndeterminateError(ReqlAvailabilityError):
    """The operation's outcome could not be determined."""


class ReqlPermissionError(ReqlRuntimeError):
    """The user lacks permission for the operation."""


class ReqlDriverError(ReqlError):
    """A client-side driver error (connection, serialization, ...)."""


RqlClientError = ReqlDriverError
RqlDriverError = ReqlDriverError


class ReqlAuthError(ReqlDriverError):
    """Raised when the server rejects the connection credentials."""

    def __init__(self, msg, host=None, port=None):
        # Prefix the failing endpoint when it is known.
        if host is not None and port is not None:
            msg = "Could not connect to {}:{}, {}".format(host, port, msg)
        super(ReqlAuthError, self).__init__(msg)


class _ReqlTimeoutError(ReqlDriverError):
    """Shared implementation behind the public ReqlTimeoutError below."""

    def __init__(self, host=None, port=None):
        msg = "Operation timed out."
        if host is not None and port is not None:
            msg = "Could not connect to {}:{}, {}".format(host, port, msg)
        super(_ReqlTimeoutError, self).__init__(msg)


try:
    # When the builtin TimeoutError exists (Python 3.3+), inherit from it so
    # callers can also catch the stdlib type.
    class ReqlTimeoutError(_ReqlTimeoutError, TimeoutError):
        pass


except NameError:

    class ReqlTimeoutError(_ReqlTimeoutError):
        pass


RqlTimeoutError = ReqlTimeoutError
class QueryPrinter(object):
    """Renders a query term tree plus a '^' carrot line marking the error."""

    def __init__(self, root, frames=None):
        self.root = root
        self.frames = list(frames or ())

    def print_query(self):
        return "".join(self.compose_term(self.root))

    def print_carrots(self):
        return "".join(self.compose_carrots(self.root, self.frames))

    def compose_term(self, term):
        """Recursively compose a term into its printable token sequence."""
        composed_args = [self.compose_term(arg) for arg in term._args]
        composed_optargs = {
            key: self.compose_term(value)
            for key, value in dict_items(term.optargs)
        }
        return term.compose(composed_args, composed_optargs)

    def compose_carrots(self, term, frames):
        """Like compose_term, but underline the frame-selected branch with '^'."""
        # An empty frame list means this very term caused the error.
        if not frames:
            return ["^" for _ in self.compose_term(term)]

        current = frames[0]
        composed_args = []
        for index, arg in enumerate(term._args):
            if current == index:
                composed_args.append(self.compose_carrots(arg, frames[1:]))
            else:
                composed_args.append(self.compose_term(arg))

        composed_optargs = {}
        for key, value in dict_items(term.optargs):
            if current == key:
                composed_optargs[key] = self.compose_carrots(value, frames[1:])
            else:
                composed_optargs[key] = self.compose_term(value)

        # Blank out everything except the carrots propagated from below.
        return [
            char if char == "^" else " "
            for char in term.compose(composed_args, composed_optargs)
        ]


# This 'enhanced' tuple recursively iterates over its elements, letting us
# build nested hierarchies that splice subsequences into the tree. It is
# used to construct the query representation used by the pretty printer.
class T(object):
    # N.B. Python 2.x doesn't allow keyword default arguments after *seq;
    # in Python 3.x this could be `__init__(self, *seq, intsp='')`.
    def __init__(self, *seq, **opts):
        self.seq = seq
        self.intsp = opts.pop("intsp", "")

    def __iter__(self):
        iterator = iter(self.seq)

        # Yield the first element with no separator; an empty sequence
        # simply produces nothing.
        try:
            for piece in next(iterator):
                yield piece
        except StopIteration:
            return

        # Every subsequent element is preceded by the separator's items.
        for element in iterator:
            for piece in self.intsp:
                yield piece

            for piece in element:
                yield piece
pResponse = ql2_pb2.Response.ResponseType
pQuery = ql2_pb2.Query.QueryType


class GeventCursorEmpty(ReqlCursorEmpty, StopIteration):
    # Also a StopIteration so an exhausted GeventCursor cleanly terminates
    # plain `for` loops (see __next__ below).
    pass


# TODO: allow users to set sync/async?
class GeventCursor(net.Cursor):
    """Cursor that blocks on a gevent Event while waiting for the next
    batch of results to arrive from the connection."""

    def __init__(self, *args, **kwargs):
        super(GeventCursor, self).__init__(*args, **kwargs)
        # Pulsed (set then immediately cleared) each time a new response
        # arrives, waking any greenlet blocked in _get_next().
        self.new_response = Event()

    def __iter__(self):
        return self

    def __next__(self):
        # No timeout: block until an item or an error is available.
        return self._get_next(None)

    def _empty_error(self):
        # Error instance used by the base class when the cursor is exhausted.
        return GeventCursorEmpty()

    def _extend(self, res_buf):
        # Invoked with a new response buffer; after the base class ingests
        # it, pulse the event so waiters re-check self.items.
        super(GeventCursor, self)._extend(res_buf)
        self.new_response.set()
        self.new_response.clear()

    def _get_next(self, timeout):
        # Wait up to `timeout` seconds (None = forever) for an item;
        # raises RqlTimeoutError when the gevent.Timeout fires.
        with gevent.Timeout(timeout, RqlTimeoutError()):
            self._maybe_fetch_batch()
            while len(self.items) == 0:
                # Surface any connection/query error instead of blocking forever.
                if self.error is not None:
                    raise self.error
                self.new_response.wait()
            return self.items.popleft()
# TODO: would be nice to share this code with net.py
# TODO(grandquista): code seems to already be a duplicate of superclass
# revisit this after testing is in place.
class SocketWrapper(net.SocketWrapper):
    """Blocking socket wrapper built on gevent's cooperative sockets.

    Opens the TCP connection, optionally wraps it in TLS (verifying the
    server certificate against the configured CA bundle), then drives the
    RethinkDB handshake to completion.
    """

    def __init__(self, parent):
        self.host = parent._parent.host
        self.port = parent._parent.port
        self._read_buffer = None
        self._socket = None
        self.ssl = parent._parent.ssl

        try:
            self._socket = socket.create_connection((self.host, self.port))
            self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
            self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

            if len(self.ssl) > 0:
                try:
                    if hasattr(
                        ssl, "SSLContext"
                    ):  # Python2.7 and 3.2+, or backports.ssl
                        ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                        if hasattr(ssl_context, "options"):
                            ssl_context.options |= getattr(ssl, "OP_NO_SSLv2", 0)
                            ssl_context.options |= getattr(ssl, "OP_NO_SSLv3", 0)
                        # BUG FIX: these three attributes were previously set on
                        # `self.ssl_context`, which was never assigned, so this
                        # path raised AttributeError before any verification
                        # could be configured.  Use the local context instead.
                        ssl_context.verify_mode = ssl.CERT_REQUIRED
                        ssl_context.check_hostname = (
                            True  # redundant with match_hostname
                        )
                        ssl_context.load_verify_locations(self.ssl["ca_certs"])
                        self._socket = ssl_context.wrap_socket(
                            self._socket, server_hostname=self.host
                        )
                    else:  # this does not disable SSLv2 or SSLv3
                        self._socket = ssl.wrap_socket(
                            self._socket,
                            cert_reqs=ssl.CERT_REQUIRED,
                            ssl_version=ssl.PROTOCOL_SSLv23,
                            ca_certs=self.ssl["ca_certs"],
                        )
                except IOError as exc:
                    self._socket.close()
                    raise ReqlDriverError(
                        "SSL handshake failed (see server log for more information): %s"
                        % str(exc)
                    )
                try:
                    ssl.match_hostname(self._socket.getpeercert(), hostname=self.host)
                except ssl.CertificateError:
                    self._socket.close()
                    raise

            parent._parent.handshake.reset()
            response = None
            while True:
                request = parent._parent.handshake.next_message(response)
                if request is None:
                    break
                # This may happen in the `V1_0` protocol where we send two
                # requests as an optimization, then need to read each separately.
                # BUG FIX: was `request is not ""` — an identity comparison with
                # a literal (SyntaxWarning on modern CPython) that only worked
                # through string interning; compare by equality instead.
                if request != "":
                    self.sendall(request)

                # The response from the server is a null-terminated string
                response = b""
                while True:
                    char = self.recvall(1)
                    if char == b"\0":
                        break
                    response += char
        except (ReqlAuthError, ReqlTimeoutError):
            self.close()
            raise
        except ReqlDriverError as ex:
            self.close()
            # Rewrite low-level I/O phrasing into handshake-phase phrasing.
            error = (
                str(ex)
                .replace("receiving from", "during handshake with")
                .replace("sending to", "during handshake with")
            )
            raise ReqlDriverError(error)
        except Exception as ex:
            self.close()
            raise ReqlDriverError(
                "Could not connect to %s:%s. Error: %s" % (self.host, self.port, ex)
            )

    def close(self):
        """Shut down and forget the socket, logging (not raising) errors."""
        if self._socket is not None:
            try:
                self._socket.shutdown(socket.SHUT_RDWR)
                self._socket.close()
            except Exception as ex:
                # BUG FIX: `ex.message` does not exist on Python 3 exceptions,
                # so the old logging call itself raised AttributeError.
                default_logger.error(str(ex))
            finally:
                self._socket = None

    def recvall(self, length):
        """Read exactly `length` bytes; raises ReqlDriverError on EOF/errors."""
        res = b"" if self._read_buffer is None else self._read_buffer
        while len(res) < length:
            while True:
                try:
                    chunk = self._socket.recv(length - len(res))
                    break
                except ReqlTimeoutError:
                    raise
                except IOError as ex:
                    if ex.errno == errno.ECONNRESET:
                        self.close()
                        raise ReqlDriverError("Connection is closed.")
                    elif ex.errno != errno.EINTR:
                        self.close()
                        raise ReqlDriverError(
                            "Connection interrupted receiving from %s:%s - %s"
                            % (self.host, self.port, str(ex))
                        )
                except Exception as ex:
                    self.close()
                    raise ReqlDriverError(
                        "Error receiving from %s:%s - %s"
                        % (self.host, self.port, str(ex))
                    )
            if len(chunk) == 0:
                # Zero-length read means the peer closed the connection.
                self.close()
                raise ReqlDriverError("Connection is closed.")
            res += chunk
        return res

    def sendall(self, data):
        """Write all of `data`, retrying partial sends; raises on errors."""
        offset = 0
        while offset < len(data):
            try:
                offset += self._socket.send(data[offset:])
            except IOError as ex:
                if ex.errno == errno.ECONNRESET:
                    self.close()
                    raise ReqlDriverError("Connection is closed.")
                elif ex.errno != errno.EINTR:
                    self.close()
                    raise ReqlDriverError(
                        ("Connection interrupted " + "sending to %s:%s - %s")
                        % (self.host, self.port, str(ex))
                    )
            except Exception as ex:
                self.close()
                raise ReqlDriverError(
                    "Error sending to %s:%s - %s" % (self.host, self.port, str(ex))
                )
            try:
                offset += self._socket.send(data[offset:])
            except IOError as ex:
                if ex.errno == errno.ECONNRESET:
                    self.close()
                    raise ReqlDriverError("Connection is closed.")
                elif ex.errno != errno.EINTR:
                    # EINTR is simply retried by the enclosing while loop.
                    self.close()
                    raise ReqlDriverError(
                        ("Connection interrupted " + "sending to %s:%s - %s")
                        % (self.host, self.port, str(ex))
                    )
            except Exception as ex:
                self.close()
                raise ReqlDriverError(
                    "Error sending to %s:%s - %s" % (self.host, self.port, str(ex))
                )


class ConnectionInstance(object):
    # gevent-based transport for a RethinkDB connection: owns the socket,
    # tracks outstanding queries and cursors by token, and serializes writes.
    def __init__(self, parent, io_loop=None):
        self._parent = parent
        self._closing = False
        self._user_queries = {}   # token -> (Query, AsyncResult)
        self._cursor_cache = {}   # token -> cursor awaiting data

        self._write_mutex = Semaphore()  # one writer on the socket at a time
        self._socket = None

    def connect(self, timeout):
        # The Timeout context raises RqlTimeoutError if the blocking
        # SocketWrapper handshake does not finish in time.
        with gevent.Timeout(
            timeout, RqlTimeoutError(self._parent.host, self._parent.port)
        ) as timeout:
            self._socket = SocketWrapper(self)

        # Start a parallel coroutine to perform reads
        gevent.spawn(self._reader)
        return self._parent

    def is_open(self):
        return self._socket is not None and self._socket.is_open()

    def close(self, noreply_wait=False, token=None, exception=None):
        # Fail every outstanding cursor/query with a descriptive message,
        # optionally flush noreply queries first, then drop the socket.
        self._closing = True
        if exception is not None:
            err_message = "Connection is closed (%s)." % str(exception)
        else:
            err_message = "Connection is closed."

        # Cursors may remove themselves when errored, so copy a list of them
        for cursor in list(self._cursor_cache.values()):
            cursor._error(err_message)

        for query, async_res in iter(self._user_queries.values()):
            async_res.set_exception(RqlDriverError(err_message))

        self._user_queries = {}
        self._cursor_cache = {}

        if noreply_wait:
            noreply = net.Query(pQuery.NOREPLY_WAIT, token, None, None)
            self.run_query(noreply, False)

        try:
            self._socket.close()
        except OSError:
            pass

    # TODO: make connection recoverable if interrupted by a user's gevent.Timeout?
    def run_query(self, query, noreply):
        self._write_mutex.acquire()

        try:
            self._socket.sendall(query.serialize(self._parent._get_json_encoder(query)))
        finally:
            self._write_mutex.release()

        if noreply:
            return None

        # Block this greenlet until _reader delivers the matching response.
        # NOTE(review): the AsyncResult is registered after the write; this
        # presumably cannot race because _reader runs in the same gevent hub,
        # but confirm before relying on it.
        async_res = AsyncResult()
        self._user_queries[query.token] = (query, async_res)
        return async_res.get()

    # The _reader coroutine runs in its own coroutine in parallel, reading responses
    # off of the socket and forwarding them to the appropriate AsyncResult or Cursor.
    # This is shut down as a consequence of closing the stream, or an error in the
    # socket/protocol from the server. Unexpected errors in this coroutine will
    # close the ConnectionInstance and be passed to any open AsyncResult or Cursors.
297 | def _reader(self): 298 | try: 299 | while True: 300 | buf = self._socket.recvall(12) 301 | (token, length,) = struct.unpack(" 0: 188 | # `func_wrap` only the last argument 189 | return ast.Map(*(args[:-1] + (ast.func_wrap(args[-1]),))) 190 | else: 191 | return ast.Map() 192 | 193 | 194 | # aggregation 195 | 196 | 197 | def group(*args): 198 | return ast.Group(*[ast.func_wrap(arg) for arg in args]) 199 | 200 | 201 | def reduce(*args): 202 | return ast.Reduce(*[ast.func_wrap(arg) for arg in args]) 203 | 204 | 205 | def count(*args): 206 | return ast.Count(*[ast.func_wrap(arg) for arg in args]) 207 | 208 | 209 | def sum(*args): 210 | return ast.Sum(*[ast.func_wrap(arg) for arg in args]) 211 | 212 | 213 | def avg(*args): 214 | return ast.Avg(*[ast.func_wrap(arg) for arg in args]) 215 | 216 | 217 | def min(*args): 218 | return ast.Min(*[ast.func_wrap(arg) for arg in args]) 219 | 220 | 221 | def max(*args): 222 | return ast.Max(*[ast.func_wrap(arg) for arg in args]) 223 | 224 | 225 | def distinct(*args): 226 | return ast.Distinct(*[ast.func_wrap(arg) for arg in args]) 227 | 228 | 229 | def contains(*args): 230 | return ast.Contains(*[ast.func_wrap(arg) for arg in args]) 231 | 232 | 233 | # orderBy orders 234 | def asc(*args): 235 | return ast.Asc(*[ast.func_wrap(arg) for arg in args]) 236 | 237 | 238 | def desc(*args): 239 | return ast.Desc(*[ast.func_wrap(arg) for arg in args]) 240 | 241 | 242 | # math and logic 243 | def eq(*args): 244 | return ast.Eq(*args) 245 | 246 | 247 | def ne(*args): 248 | return ast.Ne(*args) 249 | 250 | 251 | def lt(*args): 252 | return ast.Lt(*args) 253 | 254 | 255 | def le(*args): 256 | return ast.Le(*args) 257 | 258 | 259 | def gt(*args): 260 | return ast.Gt(*args) 261 | 262 | 263 | def ge(*args): 264 | return ast.Ge(*args) 265 | 266 | 267 | def add(*args): 268 | return ast.Add(*args) 269 | 270 | 271 | def sub(*args): 272 | return ast.Sub(*args) 273 | 274 | 275 | def mul(*args): 276 | return ast.Mul(*args) 277 | 278 | 279 | def 
div(*args):
    return ast.Div(*args)


def mod(*args):
    return ast.Mod(*args)


# bitwise operators
def bit_and(*args):
    return ast.BitAnd(*args)


def bit_or(*args):
    return ast.BitOr(*args)


def bit_xor(*args):
    return ast.BitXor(*args)


def bit_not(*args):
    return ast.BitNot(*args)


def bit_sal(*args):
    return ast.BitSal(*args)


def bit_sar(*args):
    return ast.BitSar(*args)


# rounding
def floor(*args):
    return ast.Floor(*args)


def ceil(*args):
    return ast.Ceil(*args)


def round(*args):
    return ast.Round(*args)


# boolean logic (trailing underscore avoids shadowing Python keywords)
def not_(*args):
    return ast.Not(*args)


def and_(*args):
    return ast.And(*args)


def or_(*args):
    return ast.Or(*args)


def type_of(*args):
    return ast.TypeOf(*args)


def info(*args):
    return ast.Info(*args)


def binary(data):
    return ast.Binary(data)


def range(*args):
    return ast.Range(*args)


# time construction
def time(*args):
    return ast.Time(*args)


def iso8601(*args, **kwargs):
    return ast.ISO8601(*args, **kwargs)


def epoch_time(*args):
    return ast.EpochTime(*args)


def now(*args):
    return ast.Now(*args)


class RqlConstant(ast.RqlQuery):
    # A named ReQL term with no arguments (weekday/month names, minval/maxval).
    def __init__(self, statement, term_type):
        self.statement = statement
        self.term_type = term_type
        super(RqlConstant, self).__init__()

    def compose(self, args, optargs):
        # Printable form used when pretty-printing queries, e.g. "r.monday".
        return "r." + self.statement


# Time enum values
monday = RqlConstant("monday", ql2_pb2.Term.TermType.MONDAY)
tuesday = RqlConstant("tuesday", ql2_pb2.Term.TermType.TUESDAY)
wednesday = RqlConstant("wednesday", ql2_pb2.Term.TermType.WEDNESDAY)
thursday = RqlConstant("thursday", ql2_pb2.Term.TermType.THURSDAY)
friday = RqlConstant("friday", ql2_pb2.Term.TermType.FRIDAY)
saturday = RqlConstant("saturday", ql2_pb2.Term.TermType.SATURDAY)
sunday = RqlConstant("sunday", ql2_pb2.Term.TermType.SUNDAY)

january = RqlConstant("january", ql2_pb2.Term.TermType.JANUARY)
february = RqlConstant("february", ql2_pb2.Term.TermType.FEBRUARY)
march = RqlConstant("march", ql2_pb2.Term.TermType.MARCH)
april = RqlConstant("april", ql2_pb2.Term.TermType.APRIL)
may = RqlConstant("may", ql2_pb2.Term.TermType.MAY)
june = RqlConstant("june", ql2_pb2.Term.TermType.JUNE)
july = RqlConstant("july", ql2_pb2.Term.TermType.JULY)
august = RqlConstant("august", ql2_pb2.Term.TermType.AUGUST)
september = RqlConstant("september", ql2_pb2.Term.TermType.SEPTEMBER)
october = RqlConstant("october", ql2_pb2.Term.TermType.OCTOBER)
november = RqlConstant("november", ql2_pb2.Term.TermType.NOVEMBER)
december = RqlConstant("december", ql2_pb2.Term.TermType.DECEMBER)

minval = RqlConstant("minval", ql2_pb2.Term.TermType.MINVAL)
maxval = RqlConstant("maxval", ql2_pb2.Term.TermType.MAXVAL)


def make_timezone(*args):
    return ast.RqlTzinfo(*args)


# Merge values
def literal(*args):
    return ast.Literal(*args)


def object(*args):
    return ast.Object(*args)


def uuid(*args):
    return ast.UUID(*args)


# Global geospatial operations
def geojson(*args):
    return ast.GeoJson(*args)


def point(*args):
    return ast.Point(*args)


def line(*args):
    return ast.Line(*args)


def polygon(*args):
    return ast.Polygon(*args)


def distance(*args, **kwargs):
    return ast.Distance(*args, **kwargs)


def intersects(*args):
    return ast.Intersects(*args)


def circle(*args, **kwargs):
    return ast.Circle(*args, **kwargs)
--------------------------------------------------------------------------------
/rethinkdb/tornado_net/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rethinkdb/rethinkdb-python/339cd9a8b895ed3b1d74b2d8878eb7f0a5d4a9e9/rethinkdb/tornado_net/__init__.py
--------------------------------------------------------------------------------
/rethinkdb/tornado_net/net_tornado.py:
--------------------------------------------------------------------------------
# Copyright 2018 RethinkDB
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file incorporates work covered by the following copyright:
# Copyright 2010-2016 RethinkDB, all rights reserved.
import socket
import struct

from tornado import gen, iostream
from tornado.concurrent import Future
from tornado.ioloop import IOLoop
from tornado.tcpclient import TCPClient

from rethinkdb import ql2_pb2
from rethinkdb.errors import (
    ReqlAuthError,
    ReqlCursorEmpty,
    ReqlDriverError,
    ReqlTimeoutError,
)
from rethinkdb.net import Connection as ConnectionBase
from rethinkdb.net import Cursor, Query, Response, maybe_profile

__all__ = ["Connection"]

pResponse = ql2_pb2.Response.ResponseType
pQuery = ql2_pb2.Query.QueryType


@gen.coroutine
def with_absolute_timeout(deadline, generator, **kwargs):
    # Await `generator`, failing with ReqlTimeoutError once the absolute
    # io_loop `deadline` passes. A None deadline waits forever.
    if deadline is None:
        res = yield generator
    else:
        try:
            res = yield gen.with_timeout(deadline, generator, **kwargs)
        except gen.TimeoutError:
            raise ReqlTimeoutError()
    raise gen.Return(res)


# The Tornado implementation of the Cursor object:
# The `new_response` Future notifies any waiting coroutines that the can attempt
# to grab the next result. In addition, the waiting coroutine will schedule a
# timeout at the given deadline (if provided), at which point the future will be
# errored.
class TornadoCursor(Cursor):
    """Cursor variant whose consumers wait on a Tornado Future for new data."""

    def __init__(self, *args, **kwargs):
        Cursor.__init__(self, *args, **kwargs)
        self.new_response = Future()

    def _extend(self, res_buf):
        # Wake all waiters, then install a fresh Future for the next batch.
        Cursor._extend(self, res_buf)
        self.new_response.set_result(True)
        self.new_response = Future()

    # Convenience function so users know when they've hit the end of the cursor
    # without having to catch an exception
    @gen.coroutine
    def fetch_next(self, wait=True):
        timeout = Cursor._wait_to_timeout(wait)
        deadline = None if timeout is None else self.conn._io_loop.time() + timeout
        while len(self.items) == 0 and self.error is None:
            self._maybe_fetch_batch()
            yield with_absolute_timeout(deadline, self.new_response)
        # If there is a (non-empty) error to be received, we return True, so the
        # user will receive it on the next `next` call.
        raise gen.Return(
            len(self.items) != 0 or not isinstance(self.error, ReqlCursorEmpty)
        )

    def _empty_error(self):
        # We do not have ReqlCursorEmpty inherit from StopIteration as that interferes
        # with Tornado's gen.coroutine and is the equivalent of gen.Return(None).
        return ReqlCursorEmpty()

    @gen.coroutine
    def _get_next(self, timeout):
        deadline = None if timeout is None else self.conn._io_loop.time() + timeout
        while len(self.items) == 0:
            self._maybe_fetch_batch()
            if self.error is not None:
                raise self.error
            yield with_absolute_timeout(deadline, self.new_response)
        raise gen.Return(self.items.popleft())


class ConnectionInstance(object):
    """Tornado-based transport: owns the IOStream and routes responses."""

    def __init__(self, parent, io_loop=None):
        self._parent = parent
        self._closing = False
        self._user_queries = {}   # token -> (Query, Future)
        self._cursor_cache = {}   # token -> cursor awaiting data
        self._ready = Future()
        self._io_loop = io_loop
        self._stream = None
        if self._io_loop is None:
            self._io_loop = IOLoop.current()
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

    def client_port(self):
        # NOTE(review): returns None when closed -- is_open() touches
        # self._stream, which is None before connect(); confirm callers only
        # use this on connected instances.
        if self.is_open():
            return self._socket.getsockname()[1]

    def client_address(self):
        if self.is_open():
            return self._socket.getsockname()[0]

    @gen.coroutine
    def connect(self, timeout):
        deadline = None if timeout is None else self._io_loop.time() + timeout

        try:
            if len(self._parent.ssl) > 0:
                ssl_options = {}
                if self._parent.ssl["ca_certs"]:
                    ssl_options["ca_certs"] = self._parent.ssl["ca_certs"]
                    ssl_options["cert_reqs"] = 2  # ssl.CERT_REQUIRED
                stream_future = TCPClient().connect(
                    self._parent.host, self._parent.port, ssl_options=ssl_options
                )
            else:
                stream_future = TCPClient().connect(
                    self._parent.host, self._parent.port
                )

            self._stream = yield with_absolute_timeout(
                deadline, stream_future, quiet_exceptions=(iostream.StreamClosedError)
            )
        except Exception as err:
            raise ReqlDriverError(
                "Could not connect to %s:%s. Error: %s"
                % (self._parent.host, self._parent.port, str(err))
            )

        self._stream.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        self._stream.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

        try:
            self._parent.handshake.reset()
            response = None
            while True:
                request = self._parent.handshake.next_message(response)
                if request is None:
                    break
                # This may happen in the `V1_0` protocol where we send two requests as
                # an optimization, then need to read each separately
                # FIX: was `if request is not "":` -- identity comparison with a
                # str literal; use equality.
                if request != "":
                    self._stream.write(request)

                response = yield with_absolute_timeout(
                    deadline,
                    self._stream.read_until(b"\0"),
                    quiet_exceptions=(iostream.StreamClosedError),
                )
                response = response[:-1]
        except ReqlAuthError:
            try:
                self._stream.close()
            except iostream.StreamClosedError:
                pass
            raise
        except ReqlTimeoutError:
            try:
                self._stream.close()
            except iostream.StreamClosedError:
                pass
            raise ReqlTimeoutError(self._parent.host, self._parent.port)
        except Exception as err:
            try:
                self._stream.close()
            except iostream.StreamClosedError:
                pass
            raise ReqlDriverError(
                "Connection interrupted during handshake with %s:%s. Error: %s"
                % (self._parent.host, self._parent.port, str(err))
            )

        # Start a parallel function to perform reads
        self._io_loop.add_callback(self._reader)
        raise gen.Return(self._parent)

    def is_open(self):
        return not self._stream.closed()

    @gen.coroutine
    def close(self, noreply_wait=False, token=None, exception=None):
        self._closing = True
        if exception is not None:
            err_message = "Connection is closed (%s)." % str(exception)
        else:
            err_message = "Connection is closed."
        # Cursors may remove themselves when errored, so copy a list of them
        for cursor in list(self._cursor_cache.values()):
            cursor._error(err_message)

        for query, future in iter(self._user_queries.values()):
            future.set_exception(ReqlDriverError(err_message))

        self._user_queries = {}
        self._cursor_cache = {}

        if noreply_wait:
            # Ask the server to finish all outstanding noreply writes before
            # tearing the stream down.
            noreply = Query(pQuery.NOREPLY_WAIT, token, None, None)
            yield self.run_query(noreply, False)

        try:
            self._stream.close()
        except iostream.StreamClosedError:
            pass
        raise gen.Return(None)

    @gen.coroutine
    def run_query(self, query, noreply):
        # Serialize and write the query; for noreply queries there is nothing
        # to wait for, otherwise park a Future keyed by the query token for
        # _reader to resolve.
        yield self._stream.write(query.serialize(self._parent._get_json_encoder(query)))
        if noreply:
            raise gen.Return(None)

        response_future = Future()
        self._user_queries[query.token] = (query, response_future)
        res = yield response_future
        raise gen.Return(res)

    # The _reader coroutine runs in its own context at the top level of the
    # Tornado.IOLoop it was created with. It runs in parallel, reading responses
    # off of the socket and forwarding them to the appropriate Future or Cursor.
    # This is shut down as a consequence of closing the stream, or an error in the
    # socket/protocol from the server. Unexpected errors in this coroutine will
    # close the ConnectionInstance and be passed to any open Futures or Cursors.
244 | @gen.coroutine 245 | def _reader(self): 246 | try: 247 | while True: 248 | buf = yield self._stream.read_bytes(12) 249 | (token, length,) = struct.unpack("\w+) \{') 90 | valueRegex = re.compile(r'\s*(?P\w+)\s*=\s*(?P\w+)') 91 | endRegex = re.compile(r'\s*\}') 92 | 93 | indentLevel = languageDefs[language]["initialIndentLevel"] 94 | lastIndentLevel = languageDefs[language]["initialIndentLevel"] - 1 95 | 96 | # -- write headers 97 | 98 | outputFile.write(languageDefs[language]["header"]) 99 | 100 | # -- convert the body 101 | 102 | levelHasContent = False 103 | 104 | for line in inputFile: 105 | # - open 106 | match = messageRegex.match(line) 107 | if match is not None: 108 | if indentLevel == lastIndentLevel: 109 | outputFile.write(languageDefs[language]["separator"]) 110 | if levelHasContent: 111 | outputFile.write("\n" + "\t" * indentLevel) 112 | outputFile.write(languageDefs[language]["open"] % { 113 | 'tabs': "\t" * indentLevel, 114 | 'name': match.group('name') 115 | }) 116 | lastIndentLevel = indentLevel 117 | indentLevel += 1 118 | levelHasContent = False 119 | continue 120 | 121 | # - value 122 | match = valueRegex.match(line) 123 | if match is not None: 124 | if indentLevel == lastIndentLevel: 125 | outputFile.write(languageDefs[language]["separator"]) 126 | value = match.group('value') 127 | if value.startswith('0x'): 128 | value = int(value, 0) 129 | outputFile.write(languageDefs[language]["value"] % { 130 | 'tabs': "\t" * indentLevel, 131 | 'name': match.group('name'), 132 | 'value': value, 133 | }) 134 | lastIndentLevel = indentLevel 135 | levelHasContent = True 136 | continue 137 | 138 | # - close 139 | match = endRegex.match(line) 140 | if match is not None: 141 | if not levelHasContent and \ 142 | languageDefs[language]["empty"] is not None: 143 | outputFile.write( 144 | "\n" + "\t" * indentLevel + 145 | languageDefs[language]["empty"] 146 | ) 147 | lastIndentLevel = indentLevel 148 | if languageDefs[language]["close"] is not None: 149 | if 
indentLevel == lastIndentLevel or \ 150 | languageDefs[language]["closeAlwaysNewLine"] is True: 151 | outputFile.write("\n" + "\t" * (indentLevel - 1)) 152 | outputFile.write(languageDefs[language]["close"]) 153 | indentLevel -= 1 154 | lastIndentLevel = indentLevel 155 | levelHasContent = True 156 | 157 | # -- write footer 158 | outputFile.write(languageDefs[language]["footer"]) 159 | 160 | if __name__ == '__main__': 161 | import optparse 162 | 163 | inputFile = sys.stdin 164 | outputFile = sys.stdout 165 | 166 | # -- parse input 167 | 168 | parser = optparse.OptionParser() 169 | parser.add_option( 170 | "-l", "--language", 171 | dest="language", 172 | help="write output for language", 173 | metavar="LANG", 174 | choices=list(languageDefs.keys()), 175 | default=None, 176 | ) 177 | parser.add_option( 178 | "-i", "--input-file", 179 | dest="inputFile", 180 | help="read from FILE (default STDIN)", 181 | metavar="FILE", 182 | default=None, 183 | ) 184 | parser.add_option( 185 | "-o", "--output-file", 186 | dest="outputFile", 187 | help="write to FILE (default STDOUT)", 188 | metavar="FILE", 189 | default=None, 190 | ) 191 | 192 | (options, args) = parser.parse_args() 193 | 194 | if options.language is None: 195 | parser.error("A language option is required") 196 | 197 | if options.inputFile is not None: 198 | try: 199 | inputFile = open(options.inputFile, 'r') 200 | except Exception as e: 201 | parser.error("Unable to open the given input file <<%s>>" 202 | ", got error: %s" % (inputFile, str(e))) 203 | 204 | if options.outputFile is not None: 205 | try: 206 | outputFile = open(options.outputFile, 'w') 207 | except Exception as e: 208 | parser.error("Unable to open the given output file <<%s>>," 209 | " got error: %s" % (outputFile, str(e))) 210 | 211 | convertFile(inputFile, outputFile, options.language) 212 | -------------------------------------------------------------------------------- /scripts/install-db.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | set -u 5 | 6 | export DISTRIB_CODENAME=$(lsb_release -sc) 7 | 8 | sudo apt-key adv --keyserver keys.gnupg.net --recv-keys "539A 3A8C 6692 E6E3 F69B 3FE8 1D85 E93F 801B B43F" 9 | echo "deb https://download.rethinkdb.com/repository/ubuntu-xenial xenial main" | sudo tee /etc/apt/sources.list.d/rethinkdb.list 10 | 11 | sudo apt-get update --option Acquire::Retries=100 --option Acquire::http::Timeout="300" 12 | sudo apt-get install -y --option Acquire::Retries=100 --option Acquire::http::Timeout="300" rethinkdb 13 | -------------------------------------------------------------------------------- /scripts/prepare_remote_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018-present RethinkDB 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may not use 4 | # this file except in compliance with the License. You may obtain a copy of the 5 | # License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software distributed 10 | # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 11 | # CONDITIONS OF ANY KIND, either express or implied. See the License for the 12 | # specific language governing permissions and limitations under the License. 13 | # 14 | # This file incorporates work covered by the following copyright: 15 | # 16 | # Copyright 2010-present, The Linux Foundation, portions copyright Google and 17 | # others and used with permission or subject to their respective license 18 | # agreements. 19 | # 20 | # Licensed under the Apache License, Version 2.0 (the "License"); 21 | # you may not use this file except in compliance with the License. 
#     You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
#     Unless required by applicable law or agreed to in writing, software
#     distributed under the License is distributed on an "AS IS" BASIS,
#     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#     See the License for the specific language governing permissions and
#     limitations under the License.

import os
import sys
import uuid
from datetime import datetime
from subprocess import check_call
from time import sleep

import digitalocean
import paramiko

DROPLET_NAME = 'test-{uuid}'.format(uuid=str(uuid.uuid4()))
SSH_KEY_NAME = 'key-{name}'.format(name=DROPLET_NAME)
DROPLET_STATUS_COMPLETED = 'completed'
BINTRAY_USERNAME = os.getenv('BINTRAY_USERNAME')


class DropletSetup(object):
    # Provision a throwaway DigitalOcean droplet, run a test script against a
    # RethinkDB instance on it, and destroy it (via the context manager).
    def __init__(self, token, size, region):
        super(DropletSetup, self).__init__()
        self.token = token
        self.size = size
        self.region = region
        self.ssh_client = paramiko.SSHClient()
        self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.ssh_key = None
        self.digital_ocean_ssh_key = None

        self._generate_ssh_key()
        self.droplet = digitalocean.Droplet(
            token=self.token,
            name=DROPLET_NAME,
            region=self.region,
            image='ubuntu-16-04-x64',
            size_slug=self.size,
            ssh_keys=[self.digital_ocean_ssh_key.id]
        )

    @staticmethod
    def _print_info(message):
        # Timestamped progress logging to stdout.
        print('[{timestamp}]\t{message}'.format(timestamp=datetime.now().isoformat(), message=message))

    def _execute_command(self, command):
        # Run a command over SSH; any stderr output is treated as failure.
        self._print_info('executing {command}'.format(command=command))
        std_in, _, std_err = self.ssh_client.exec_command(command)
        std_in.close()

        has_err = False
        for line in std_err.readlines():
            has_err = True
            print(line.replace('\n', ''))

        if has_err:
            raise Exception('Script execution failed')

    def _generate_ssh_key(self):
        self._print_info('generating ssh key')
        # NOTE(review): RSAKey.generate's second positional argument is a
        # progress callback, not a passphrase/comment -- passing
        # str(uuid.uuid4()) here looks wrong; confirm against paramiko docs.
        self.ssh_key = paramiko.rsakey.RSAKey.generate(2048, str(uuid.uuid4()))

        self._print_info('create ssh key on DigitalOcean')
        self.digital_ocean_ssh_key = digitalocean.SSHKey(
            token=self.token,
            name=SSH_KEY_NAME,
            public_key='ssh-rsa {key}'.format(key=str(self.ssh_key.get_base64()))
        )

        self.digital_ocean_ssh_key.create()

    def create_droplet(self):
        self._print_info('creating droplet')
        self.droplet.create()

        self._print_info('waiting for droplet to be ready')
        self._wait_for_droplet()

    def _wait_for_droplet(self):
        # NOTE(review): busy recursion with no sleep between polls -- can
        # hammer the API and, on a slow provision, exceed Python's recursion
        # limit; consider a loop with a delay.
        actions = self.droplet.get_actions()
        for action in actions:
            if action.status == DROPLET_STATUS_COMPLETED:
                self.droplet.load()
                return

        self._wait_for_droplet()

    def __enter__(self):
        """
        Connect to DigitalOcean instance with forever retry.
        """
        self._print_info('connecting to droplet')
        try:
            self.ssh_client.connect(
                hostname=self.droplet.ip_address,
                username='root',
                allow_agent=True,
                pkey=self.ssh_key
            )
        except Exception as exc:
            # Retries forever (recursively) until SSH comes up.
            self._print_info(str(exc))
            self._print_info('reconnecting')
            sleep(3)
            return self.__enter__()
        return self

    def install_rethinkdb(self):
        self._print_info('getting rethinkdb')

        self._execute_command('source /etc/lsb-release && echo "deb https://download.rethinkdb.com/apt $DISTRIB_CODENAME main" | sudo tee /etc/apt/sources.list.d/rethinkdb.list')
        self._execute_command('wget -qO- https://download.rethinkdb.com/apt/pubkey.gpg | sudo apt-key add -')

        self._print_info('installing rethinkdb')
        self._execute_command('apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install --allow-unauthenticated -y rethinkdb')
        self._execute_command('echo "bind=all" > /etc/rethinkdb/instances.d/default.conf')

    def start_rethinkdb(self):
        self._print_info('restarting rethinkdb')
        self._execute_command('/etc/init.d/rethinkdb restart')

    def run_script(self, script, script_arguments):
        self._print_info('executing script')
        os.environ["RETHINKDB_HOST"] = self.droplet.ip_address
        # NOTE(review): all script arguments are joined into a single argv
        # entry; confirm the target scripts expect one space-joined argument.
        check_call([script, ' '.join(script_arguments)])

    def __exit__(self, *args):
        """
        Cleanup DigitalOcean instance connection.
        """
        self._print_info('destroying droplet')
        self.droplet.destroy()

        self._print_info('removing ssh key')
        self.digital_ocean_ssh_key.destroy()


def main():
    # Entry point: provision, install RethinkDB, run the given script, clean up.
    script = sys.argv[1]
    script_arguments = sys.argv[2:]

    setup = DropletSetup(
        token=os.getenv('DO_TOKEN'),
        size=os.getenv('DO_SIZE', '512MB'),
        region=os.getenv('DO_REGION', 'sfo2')
    )

    setup.create_droplet()

    with setup:
        setup.install_rethinkdb()
        setup.start_rethinkdb()
        setup.run_script(script, script_arguments)


if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/scripts/upload-coverage.sh:
--------------------------------------------------------------------------------
#!/bin/bash

set -e
set -u

if [ "${CODACY_PROJECT_TOKEN}" = "" ]; then
    echo "Skipping coverage upload for missing CODACY_PROJECT_TOKEN"
    exit;
fi

set -ex

python-codacy-coverage -r coverage.xml
--------------------------------------------------------------------------------
/scripts/upload-pypi.sh:
--------------------------------------------------------------------------------
#!/bin/bash

set -e
set -u

export UPLOAD_STAGING=

if [ "${TRAVIS_PULL_REQUEST}" = "true" ]; then
    echo 'Using staging pypi upload for PR'
    export UPLOAD_STAGING='yes'
fi

if [ "${TRAVIS_EVENT_TYPE}" = "cron" ]; then
    echo 'Using staging pypi upload for cron job'
    export UPLOAD_STAGING='yes'
fi

set -ex

python3 -m pip install --upgrade setuptools wheel

if [ "${UPLOAD_STAGING}" = "yes" ]; then
    export RETHINKDB_VERSION_DESCRIBE=$(git describe --tags --abbrev=0)
else
    export RETHINKDB_VERSION_DESCRIBE=$(git describe --tags --abbrev=8)
fi

python3 setup.py sdist bdist_wheel
python3 -m pip install --upgrade twine

if [ "${UPLOAD_STAGING}" = "yes" ]; then
    # NOTE(review): "STAGEING" is misspelled, but it must match the variable
    # names configured in CI -- do not rename here without updating CI too.
    export TWINE_PASSWORD="${TWINE_STAGEING_PASSWORD}"
    export TWINE_USERNAME="${TWINE_STAGEING_USERNAME}"

    twine upload --repository-url 'https://test.pypi.org/legacy/' dist/*
    python3 -m pip install --index-url 'https://test.pypi.org/simple/' rethinkdb
else
    twine upload dist/*
    python3 -m pip install rethinkdb
fi
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
# Copyright 2018 RethinkDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


[bdist_wheel]
universal = 1

[aliases]
test = pytest

[tool:pytest]
collect_ignore = ['setup.py']
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
# Copyright 2018 RethinkDB
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file incorporates work covered by the following copyright:
# Copyright 2010-2016 RethinkDB, all rights reserved.


import os
import re

import setuptools

try:
    import asyncio

    # Only ship the asyncio transport where asyncio exists (Python 3.4+).
    CONDITIONAL_PACKAGES = ['rethinkdb.asyncio_net']
except ImportError:
    CONDITIONAL_PACKAGES = []


# CI can override the version via `git describe` output in this env var.
RETHINKDB_VERSION_DESCRIBE = os.environ.get("RETHINKDB_VERSION_DESCRIBE")
VERSION_RE = r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?Ppost[1-9]\d*)"
# NOTE(review): the `(?P...)` groups above look like named groups whose
# `<name>` parts were lost in this copy of the file -- as written the pattern
# is invalid regex; confirm against the upstream file before building.

with open("rethinkdb/version.py", "r") as f:
    version_parts = re.search(VERSION_RE, f.read()).groups()
    VERSION = ".".join(filter(lambda x: x is not None, version_parts))


if RETHINKDB_VERSION_DESCRIBE:
    version_parts = re.match(VERSION_RE, RETHINKDB_VERSION_DESCRIBE)

    if not version_parts:
        raise RuntimeError("{!r} does not match version format {!r}".format(
            RETHINKDB_VERSION_DESCRIBE, VERSION_RE))

    VERSION = ".".join(filter(lambda x: x is not None, version_parts.groups()))


setuptools.setup(
    name='rethinkdb',
    zip_safe=True,
    version=VERSION,
    description='Python driver library for the RethinkDB database server.',
    long_description=open('README.md', 'r').read(),
    long_description_content_type='text/markdown',
    url='https://github.com/RethinkDB/rethinkdb-python',
    maintainer='RethinkDB.',
    maintainer_email='bugs@rethinkdb.com',
    classifiers=[
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: 3.11',
        'Programming Language :: Python :: 3.12',
    ],
    packages=[
        'rethinkdb',
        'rethinkdb.tornado_net',
        'rethinkdb.twisted_net',
        'rethinkdb.gevent_net',
        'rethinkdb.trio_net',
        'rethinkdb.backports',
        'rethinkdb.backports.ssl_match_hostname'
    ] + CONDITIONAL_PACKAGES,
    package_dir={'rethinkdb': 'rethinkdb'},
    package_data={'rethinkdb': ['backports/ssl_match_hostname/*.txt']},
    entry_points={
        'console_scripts': [
            'rethinkdb-import = rethinkdb._import:main',
            'rethinkdb-dump = rethinkdb._dump:main',
            'rethinkdb-export = rethinkdb._export:main',
            'rethinkdb-restore = rethinkdb._restore:main',
            'rethinkdb-index-rebuild = rethinkdb._index_rebuild:main',
            'rethinkdb-repl = rethinkdb.__main__:startInterpreter'
        ]
    },
    # NOTE(review): comma-separated specifiers are ANDed by pip, so
    # ">=2.7, ..., >=3.5" effectively requires >=3.5 and the 2.7 clause is
    # dead -- confirm the intended support matrix.
    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, >=3.5",
    install_requires=[
        'six',
        'looseversion'
    ],
    test_suite='tests'
)
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
# Copyright 2018 RethinkDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | collect_ignore = [] 4 | 5 | if sys.version_info < (3, 6): 6 | collect_ignore += [ 7 | "integration/test_asyncio.py", 8 | "integration/test_asyncio_coroutine.py", 9 | "integration/test_tornado.py", 10 | "integration/test_trio.py", 11 | ] 12 | -------------------------------------------------------------------------------- /tests/helpers.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from rethinkdb import r 4 | 5 | INTEGRATION_TEST_DB = "integration_test" 6 | 7 | 8 | class IntegrationTestCaseBase(object): 9 | def _create_database(self, conn): 10 | if INTEGRATION_TEST_DB not in self.r.db_list().run(conn): 11 | self.r.db_create(INTEGRATION_TEST_DB).run(conn) 12 | 13 | conn.use(INTEGRATION_TEST_DB) 14 | 15 | def setup_method(self): 16 | self.r = r 17 | self.rethinkdb_host = os.getenv("RETHINKDB_HOST", "127.0.0.1") 18 | 19 | self.conn = self.r.connect(host=self.rethinkdb_host) 20 | 21 | self._create_database(self.conn) 22 | 23 | def teardown_method(self): 24 | self.r.db_drop(INTEGRATION_TEST_DB).run(self.conn) 25 | self.conn.close() 26 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rethinkdb/rethinkdb-python/339cd9a8b895ed3b1d74b2d8878eb7f0a5d4a9e9/tests/integration/__init__.py -------------------------------------------------------------------------------- /tests/integration/test_asyncio.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from asyncio import coroutine 3 | 4 | import pytest 5 | 6 | from tests.helpers import INTEGRATION_TEST_DB, IntegrationTestCaseBase 7 | 8 | 9 | @pytest.mark.asyncio 10 | @pytest.mark.integration 11 | @pytest.mark.skipif( 12 | sys.version_info == (3, 4) or sys.version_info == (3, 5), 13 | reason="requires python3.4 or python3.5", 14 | ) 15 | class TestAsyncio(IntegrationTestCaseBase): 16 | def setup_method(self): 17 | super(TestAsyncio, self).setup_method() 18 | self.table_name = "test_asyncio" 19 | self.r.set_loop_type("asyncio") 20 | 21 | def teardown_method(self): 22 | super(TestAsyncio, self).teardown_method() 23 | self.r.set_loop_type(None) 24 | 25 | @coroutine 26 | def test_flow_coroutine_paradigm(self): 27 | connection = yield from self.conn 28 | 29 | yield from self.r.table_create(self.table_name).run(connection) 30 | 31 | table = self.r.table(self.table_name) 32 | yield from table.insert( 33 | {"id": 1, "name": "Iron Man", "first_appearance": "Tales of Suspense #39"} 34 | ).run(connection) 35 | 36 | cursor = yield from table.run(connection) 37 | 38 | while (yield from cursor.fetch_next()): 39 | hero = yield from cursor.__anext__() 40 | assert hero["name"] == "Iron Man" 41 | 42 | yield from connection.close() 43 | -------------------------------------------------------------------------------- /tests/integration/test_connect.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | from rethinkdb import r 6 | from tests.helpers import INTEGRATION_TEST_DB, IntegrationTestCaseBase 7 | 8 | 9 | @pytest.mark.integration 10 | class 
TestConnect(IntegrationTestCaseBase): 11 | def setup_method(self): 12 | super(TestConnect, self).setup_method() 13 | 14 | def test_connect(self): 15 | db_url = "rethinkdb://{host}".format(host=self.rethinkdb_host) 16 | 17 | assert self.r.connect(url=db_url) is not None 18 | 19 | def test_connect_with_username(self): 20 | db_url = "rethinkdb://admin@{host}".format(host=self.rethinkdb_host) 21 | 22 | assert self.r.connect(url=db_url) is not None 23 | 24 | def test_connect_to_db(self): 25 | db_url = "rethinkdb://{host}/{database}".format( 26 | host=self.rethinkdb_host, database=INTEGRATION_TEST_DB 27 | ) 28 | 29 | assert self.r.connect(url=db_url) is not None 30 | -------------------------------------------------------------------------------- /tests/integration/test_cursor.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from rethinkdb.errors import ReqlCursorEmpty, ReqlTimeoutError 4 | from tests.helpers import IntegrationTestCaseBase 5 | 6 | 7 | @pytest.mark.integration 8 | class TestCursor(IntegrationTestCaseBase): 9 | def setup_method(self): 10 | super(TestCursor, self).setup_method() 11 | self.table_name = "test_cursor" 12 | self.r.table_create(self.table_name).run(self.conn) 13 | self.documents = [ 14 | {"id": 1, "name": "Testing Cursor/Next 1"}, 15 | {"id": 2, "name": "Testing Cursor/Next 2"}, 16 | {"id": 3, "name": "Testing Cursor/Next 3"}, 17 | {"id": 4, "name": "Testing Cursor/Next 4"}, 18 | {"id": 5, "name": "Testing Cursor/Next 5"}, 19 | ] 20 | 21 | def test_get_next_document(self): 22 | self.r.table(self.table_name).insert(self.documents).run(self.conn) 23 | documents = list() 24 | 25 | cursor = self.r.table(self.table_name).run(self.conn) 26 | 27 | for document in reversed(self.documents): 28 | documents.append(cursor.next()) 29 | 30 | assert sorted(documents, key=lambda doc: doc.get("id")) == self.documents 31 | 32 | def test_cursor_empty_no_document(self): 33 | cursor = 
self.r.table(self.table_name).run(self.conn) 34 | 35 | with pytest.raises(ReqlCursorEmpty): 36 | cursor.next() 37 | 38 | def test_cursor_empty_iteration(self): 39 | self.r.table(self.table_name).insert(self.documents).run(self.conn) 40 | 41 | cursor = self.r.table(self.table_name).run(self.conn) 42 | 43 | for i in range(0, len(self.documents)): 44 | cursor.next() 45 | 46 | with pytest.raises(ReqlCursorEmpty): 47 | cursor.next() 48 | 49 | def test_stop_iteration(self): 50 | self.r.table(self.table_name).insert(self.documents).run(self.conn) 51 | 52 | cursor = self.r.table(self.table_name).run(self.conn) 53 | 54 | with pytest.raises(StopIteration): 55 | for i in range(0, len(self.documents) + 1): 56 | cursor.next() 57 | 58 | def test_iteration_after_timeout(self): 59 | """Getting a `ReqlTimeoutError` while using a cursor, should not 60 | close the underlying connection to the server. 61 | """ 62 | # Note that this cursor is different to the others - it uses `.changes()` 63 | cursor = self.r.table(self.table_name).changes().run(self.conn) 64 | 65 | # Attempting to set `wait=False` on this changes query will timeout, 66 | # as data is not available yet 67 | with pytest.raises(ReqlTimeoutError): 68 | cursor.next(wait=False) 69 | 70 | # We should be able to call the cursor again after a timeout, 71 | # such a timeout should not cause the underlying connection to close 72 | with pytest.raises(ReqlTimeoutError): 73 | cursor.next(wait=False) 74 | 75 | def test_for_loop(self): 76 | self.r.table(self.table_name).insert(self.documents).run(self.conn) 77 | 78 | documents = list() 79 | 80 | for document in self.r.table(self.table_name).run(self.conn): 81 | documents.append(document) 82 | 83 | assert sorted(documents, key=lambda doc: doc.get("id")) == self.documents 84 | 85 | def test_next(self): 86 | self.r.table(self.table_name).insert(self.documents).run(self.conn) 87 | 88 | cursor = self.r.table(self.table_name).run(self.conn) 89 | 90 | assert hasattr(cursor, "__next__") 91 | 
92 | def test_iter(self): 93 | self.r.table(self.table_name).insert(self.documents).run(self.conn) 94 | 95 | cursor = self.r.table(self.table_name).run(self.conn) 96 | 97 | assert hasattr(cursor, "__iter__") 98 | 99 | def test_close_cursor(self): 100 | cursor = self.r.table(self.table_name).run(self.conn) 101 | cursor.close() 102 | 103 | assert cursor.conn.is_open() 104 | assert isinstance(cursor.error, ReqlCursorEmpty) 105 | -------------------------------------------------------------------------------- /tests/integration/test_database.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from rethinkdb.errors import ReqlRuntimeError 4 | from tests.helpers import INTEGRATION_TEST_DB, IntegrationTestCaseBase 5 | 6 | 7 | @pytest.mark.integration 8 | class TestDatabase(IntegrationTestCaseBase): 9 | def setup_method(self): 10 | super(TestDatabase, self).setup_method() 11 | self.test_db_name = "test_database" 12 | 13 | def test_db_create(self): 14 | result = self.r.db_create(self.test_db_name).run(self.conn) 15 | self.r.db_drop(self.test_db_name).run(self.conn) 16 | 17 | assert result["dbs_created"] == 1 18 | assert result["config_changes"][0]["old_val"] is None 19 | assert result["config_changes"][0]["new_val"]["name"] == self.test_db_name 20 | 21 | def test_db_create_twice(self): 22 | self.r.db_create(self.test_db_name).run(self.conn) 23 | 24 | with pytest.raises(ReqlRuntimeError): 25 | self.r.db_create(self.test_db_name).run(self.conn) 26 | 27 | self.r.db_drop(self.test_db_name).run(self.conn) 28 | 29 | def test_db_create_not_alphanumeric(self): 30 | test_db_name = "!!!" 
31 | 32 | with pytest.raises(ReqlRuntimeError): 33 | self.r.db_create(test_db_name).run(self.conn) 34 | 35 | def test_db_drop(self): 36 | self.r.db_create(self.test_db_name).run(self.conn) 37 | result = self.r.db_drop(self.test_db_name).run(self.conn) 38 | 39 | assert result["dbs_dropped"] == 1 40 | assert result["tables_dropped"] == 0 41 | assert result["config_changes"][0]["new_val"] is None 42 | assert result["config_changes"][0]["old_val"]["name"] == self.test_db_name 43 | 44 | def test_db_drop_twice(self): 45 | self.r.db_create(self.test_db_name).run(self.conn) 46 | self.r.db_drop(self.test_db_name).run(self.conn) 47 | 48 | with pytest.raises(ReqlRuntimeError): 49 | self.r.db_drop(self.test_db_name).run(self.conn) 50 | 51 | def test_db_list(self): 52 | expected_result = [INTEGRATION_TEST_DB, "rethinkdb", "test"] 53 | 54 | result = self.r.db_list().run(self.conn) 55 | 56 | assert sorted(result) == sorted(expected_result) 57 | -------------------------------------------------------------------------------- /tests/integration/test_date_and_time.py: -------------------------------------------------------------------------------- 1 | from copy import deepcopy 2 | 3 | import pytest 4 | 5 | from tests.helpers import IntegrationTestCaseBase 6 | 7 | 8 | @pytest.mark.integration 9 | class TestDateAndTime(IntegrationTestCaseBase): 10 | def setup_method(self): 11 | super(TestDateAndTime, self).setup_method() 12 | self.table_name = "test_now" 13 | self.r.table_create(self.table_name).run(self.conn) 14 | 15 | self.expected_insert_response = { 16 | "deleted": 0, 17 | "errors": 0, 18 | "inserted": 1, 19 | "replaced": 0, 20 | "skipped": 0, 21 | "unchanged": 0, 22 | } 23 | 24 | @staticmethod 25 | def compare_seconds(a, b): 26 | """ 27 | During the tests, the milliseconds are a little different, so we need to look at the results in seconds. 
28 | """ 29 | 30 | def second_precision(dt): 31 | return str(dt).split(".")[0] 32 | 33 | assert second_precision(a) == second_precision(b) 34 | 35 | def test_insert_with_now(self): 36 | now = self.r.now() 37 | insert_data = { 38 | "id": 1, 39 | "name": "Captain America", 40 | "real_name": "Steven Rogers", 41 | "universe": "Earth-616", 42 | "created_at": now, 43 | } 44 | 45 | response = self.r.table(self.table_name).insert(insert_data).run(self.conn) 46 | document = self.r.table(self.table_name).get(1).run(self.conn) 47 | 48 | assert response == self.expected_insert_response 49 | self.compare_seconds(document["created_at"], self.r.now().run(self.conn)) 50 | -------------------------------------------------------------------------------- /tests/integration/test_index.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from rethinkdb.errors import ReqlOpFailedError, ReqlRuntimeError 4 | from tests.helpers import INTEGRATION_TEST_DB, IntegrationTestCaseBase 5 | 6 | 7 | @pytest.mark.integration 8 | class TestTable(IntegrationTestCaseBase): 9 | def setup_method(self): 10 | super(TestTable, self).setup_method() 11 | self.table_name = "test_index" 12 | self.r.table_create(self.table_name).run(self.conn) 13 | 14 | def test_create_index(self): 15 | index_field = "name" 16 | 17 | result = self.r.table(self.table_name).index_create(index_field).run(self.conn) 18 | 19 | assert result["created"] == 1 20 | 21 | def test_create_nested_field_index(self): 22 | index_field = "author_name" 23 | 24 | result = ( 25 | self.r.table(self.table_name) 26 | .index_create(index_field, [self.r.row["author"]["name"]]) 27 | .run(self.conn) 28 | ) 29 | 30 | assert result["created"] == 1 31 | 32 | def test_create_index_geo(self): 33 | index_field = "location" 34 | 35 | result = ( 36 | self.r.table(self.table_name) 37 | .index_create(index_field, geo=True) 38 | .run(self.conn) 39 | ) 40 | 41 | assert result["created"] == 1 42 | 43 | def 
test_create_compound_index(self): 44 | index_field = "name_and_age" 45 | 46 | result = ( 47 | self.r.table(self.table_name) 48 | .index_create(index_field, [self.r.row["name"], self.r.row["age"]]) 49 | .run(self.conn) 50 | ) 51 | 52 | assert result["created"] == 1 53 | 54 | def test_create_multi_index(self): 55 | index_field = "name" 56 | 57 | result = ( 58 | self.r.table(self.table_name) 59 | .index_create(index_field, multi=True) 60 | .run(self.conn) 61 | ) 62 | 63 | assert result["created"] == 1 64 | 65 | def test_create_index_twice(self): 66 | index_field = "name" 67 | 68 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 69 | 70 | with pytest.raises(ReqlRuntimeError): 71 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 72 | 73 | def test_drop_index(self): 74 | index_field = "name" 75 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 76 | 77 | result = self.r.table(self.table_name).index_drop(index_field).run(self.conn) 78 | 79 | assert result["dropped"] == 1 80 | 81 | def test_drop_index_twice(self): 82 | index_field = "name" 83 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 84 | self.r.table(self.table_name).index_drop(index_field).run(self.conn) 85 | 86 | with pytest.raises(ReqlRuntimeError): 87 | self.r.table(self.table_name).index_drop(index_field).run(self.conn) 88 | 89 | def test_list_index(self): 90 | index_field = "name" 91 | expected_index_list = [index_field] 92 | 93 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 94 | result = self.r.table(self.table_name).index_list().run(self.conn) 95 | 96 | assert len(result) == 1 97 | assert result == expected_index_list 98 | 99 | def test_rename_index(self): 100 | index_field = "name" 101 | renamed_field = "username" 102 | 103 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 104 | result = ( 105 | self.r.table(self.table_name) 106 | .index_rename(index_field, 
renamed_field) 107 | .run(self.conn) 108 | ) 109 | 110 | assert len(result) == 1 111 | assert result["renamed"] == 1 112 | 113 | def test_rename_index_same_key(self): 114 | index_field = "name" 115 | 116 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 117 | result = ( 118 | self.r.table(self.table_name) 119 | .index_rename(index_field, index_field) 120 | .run(self.conn) 121 | ) 122 | 123 | assert len(result) == 1 124 | assert result["renamed"] == 0 125 | 126 | def test_rename_index_overwrite(self): 127 | index_field = "name" 128 | renamed_field = "username" 129 | 130 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 131 | self.r.table(self.table_name).index_create(renamed_field).run(self.conn) 132 | result = ( 133 | self.r.table(self.table_name) 134 | .index_rename(index_field, renamed_field, overwrite=True) 135 | .run(self.conn) 136 | ) 137 | 138 | assert len(result) == 1 139 | assert result["renamed"] == 1 140 | 141 | def test_rename_index_without_overwrite(self): 142 | index_field = "name" 143 | renamed_field = "username" 144 | 145 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 146 | self.r.table(self.table_name).index_create(renamed_field).run(self.conn) 147 | 148 | with pytest.raises(ReqlOpFailedError): 149 | result = ( 150 | self.r.table(self.table_name) 151 | .index_rename(index_field, renamed_field) 152 | .run(self.conn) 153 | ) 154 | 155 | def test_table_index_status(self): 156 | index_field = "name" 157 | 158 | self.r.table(self.table_name).index_create(index_field).run(self.conn) 159 | result = self.r.table(self.table_name).index_status().run(self.conn) 160 | 161 | assert len(result) == 1 162 | assert result[0]["index"] == index_field 163 | assert result[0]["multi"] == False 164 | assert result[0]["outdated"] == False 165 | 166 | def test_index_status_empty(self): 167 | result = self.r.table(self.table_name).index_status().run(self.conn) 168 | 169 | assert len(result) == 0 170 | 
171 | def test_index_status_non_existing(self): 172 | index_field = "name" 173 | 174 | with pytest.raises(ReqlOpFailedError): 175 | self.r.table(self.table_name).index_status(index_field).run(self.conn) 176 | -------------------------------------------------------------------------------- /tests/integration/test_ping.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | from tests.helpers import IntegrationTestCaseBase 6 | 7 | BAD_PASSWORD = "0xDEADBEEF" 8 | 9 | 10 | @pytest.mark.integration 11 | class TestPing(IntegrationTestCaseBase): 12 | def teardown_method(self): 13 | with self.r.connect(host=self.rethinkdb_host) as conn: 14 | self.r.db("rethinkdb").table("users").filter( 15 | self.r.row["id"].ne("admin") 16 | ).delete().run(conn) 17 | super(TestPing, self).teardown_method() 18 | 19 | def test_bad_password(self): 20 | with pytest.raises(self.r.ReqlAuthError): 21 | self.r.connect(password=BAD_PASSWORD, host=self.rethinkdb_host) 22 | 23 | def test_password_connect(self): 24 | new_user = "user" 25 | with self.r.connect( 26 | user="admin", password="", host=self.rethinkdb_host 27 | ) as conn: 28 | curr = ( 29 | self.r.db("rethinkdb") 30 | .table("users") 31 | .insert({"id": new_user, "password": BAD_PASSWORD}) 32 | .run(conn) 33 | ) 34 | assert curr == { 35 | "deleted": 0, 36 | "errors": 0, 37 | "inserted": 1, 38 | "replaced": 0, 39 | "skipped": 0, 40 | "unchanged": 0, 41 | } 42 | curr = self.r.grant(new_user, {"read": True}).run(conn) 43 | assert curr == { 44 | "granted": 1, 45 | "permissions_changes": [{"new_val": {"read": True}, "old_val": None}], 46 | } 47 | with self.r.connect( 48 | user=new_user, password=BAD_PASSWORD, host=self.rethinkdb_host 49 | ) as conn: 50 | with pytest.raises(self.r.ReqlPermissionError): 51 | # Only administrators may access system tables 52 | curr = self.r.db("rethinkdb").table("users").get("admin").run(conn) 53 | 54 | with 
pytest.raises(self.r.ReqlPermissionError): 55 | # No permission for write. Only for read. 56 | self.r.db("rethinkdb").table("users").insert( 57 | {"id": "bob", "password": ""} 58 | ).run(conn) 59 | 60 | def test_context_manager(self): 61 | with self.r.connect(host=self.rethinkdb_host) as conn: 62 | assert conn.is_open() is True 63 | assert conn.is_open() is False 64 | -------------------------------------------------------------------------------- /tests/integration/test_repl.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from tests.helpers import INTEGRATION_TEST_DB, IntegrationTestCaseBase 4 | 5 | 6 | @pytest.mark.integration 7 | class TestREPL(IntegrationTestCaseBase): 8 | def setup_method(self): 9 | super(TestREPL, self).setup_method() 10 | self.conn = self.conn.repl() 11 | 12 | def test_repl_does_not_require_conn(self): 13 | databases = self.r.db_list().run() 14 | assert INTEGRATION_TEST_DB in databases 15 | -------------------------------------------------------------------------------- /tests/integration/test_table.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from rethinkdb.errors import ReqlOpFailedError, ReqlRuntimeError 4 | from tests.helpers import INTEGRATION_TEST_DB, IntegrationTestCaseBase 5 | 6 | 7 | @pytest.mark.integration 8 | class TestTable(IntegrationTestCaseBase): 9 | def setup_method(self): 10 | super(TestTable, self).setup_method() 11 | self.test_table_name = "test_table" 12 | 13 | def test_table_create(self): 14 | result = self.r.table_create(self.test_table_name).run(self.conn) 15 | 16 | assert result["tables_created"] == 1 17 | assert len(result["config_changes"]) == 1 18 | assert result["config_changes"][0]["old_val"] is None 19 | assert result["config_changes"][0]["new_val"]["name"] == self.test_table_name 20 | assert result["config_changes"][0]["new_val"]["db"] == INTEGRATION_TEST_DB 21 | assert 
result["config_changes"][0]["new_val"]["durability"] == "hard" 22 | assert result["config_changes"][0]["new_val"]["primary_key"] == "id" 23 | assert result["config_changes"][0]["new_val"]["write_acks"] == "majority" 24 | assert len(result["config_changes"][0]["new_val"]["shards"]) == 1 25 | 26 | def test_table_different_primary_key(self): 27 | expected_primary_key = "bazinga" 28 | 29 | result = self.r.table_create( 30 | self.test_table_name, primary_key=expected_primary_key 31 | ).run(self.conn) 32 | 33 | assert result["tables_created"] == 1 34 | assert len(result["config_changes"]) == 1 35 | assert ( 36 | result["config_changes"][0]["new_val"]["primary_key"] 37 | == expected_primary_key 38 | ) 39 | 40 | def test_table_multiple_shards(self): 41 | expected_shards = 2 42 | 43 | result = self.r.table_create(self.test_table_name, shards=expected_shards).run( 44 | self.conn 45 | ) 46 | 47 | assert result["tables_created"] == 1 48 | assert len(result["config_changes"]) == 1 49 | assert len(result["config_changes"][0]["new_val"]["shards"]) == expected_shards 50 | 51 | def test_table_create_with_replicas(self): 52 | expected_replicas = 1 53 | 54 | result = self.r.table_create( 55 | self.test_table_name, replicas=expected_replicas 56 | ).run(self.conn) 57 | 58 | assert result["tables_created"] == 1 59 | assert len(result["config_changes"]) == 1 60 | assert ( 61 | len(result["config_changes"][0]["new_val"]["shards"][0]["replicas"]) 62 | == expected_replicas 63 | ) 64 | 65 | def test_table_multiple_replicas(self): 66 | expected_replicas = 2 67 | 68 | # Can't put 2 replicas, it's impossible to have more replicas than the number of servers 69 | with pytest.raises(ReqlOpFailedError): 70 | self.r.table_create(self.test_table_name, replicas=expected_replicas).run( 71 | self.conn 72 | ) 73 | 74 | def test_table_create_twice(self): 75 | self.r.table_create(self.test_table_name).run(self.conn) 76 | 77 | with pytest.raises(ReqlRuntimeError): 78 | 
self.r.table_create(self.test_table_name).run(self.conn) 79 | 80 | def test_table_drop(self): 81 | self.r.table_create(self.test_table_name).run(self.conn) 82 | 83 | result = self.r.table_drop(self.test_table_name).run(self.conn) 84 | 85 | assert result["tables_dropped"] == 1 86 | assert len(result["config_changes"]) == 1 87 | assert result["config_changes"][0]["new_val"] is None 88 | assert result["config_changes"][0]["old_val"]["name"] == self.test_table_name 89 | assert result["config_changes"][0]["old_val"]["db"] == INTEGRATION_TEST_DB 90 | assert result["config_changes"][0]["old_val"]["durability"] == "hard" 91 | assert result["config_changes"][0]["old_val"]["primary_key"] == "id" 92 | assert result["config_changes"][0]["old_val"]["write_acks"] == "majority" 93 | assert len(result["config_changes"][0]["old_val"]["shards"]) == 1 94 | 95 | def test_table_drop_twice(self): 96 | self.r.table_create(self.test_table_name).run(self.conn) 97 | self.r.table_drop(self.test_table_name).run(self.conn) 98 | 99 | with pytest.raises(ReqlOpFailedError): 100 | self.r.table_drop(self.test_table_name).run(self.conn) 101 | 102 | def test_table_list(self): 103 | self.r.table_create(self.test_table_name).run(self.conn) 104 | 105 | expected_result = [self.test_table_name] 106 | 107 | result = self.r.table_list().run(self.conn) 108 | 109 | assert result == expected_result 110 | -------------------------------------------------------------------------------- /tests/integration/test_tornado.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | import pytest 4 | 5 | from tests.helpers import IntegrationTestCaseBase 6 | 7 | 8 | @pytest.mark.tornado 9 | @pytest.mark.integration 10 | @pytest.mark.skipif(sys.version_info < (3, 6), reason="requires python3.6 or higher") 11 | class TestTornado(IntegrationTestCaseBase): 12 | def setup_method(self): 13 | super(TestTornado, self).setup_method() 14 | self.table_name = "test_tornado" 15 | 
self.r.set_loop_type("tornado") 16 | self.r.table_create(self.table_name).run(self.conn) 17 | 18 | def teardown_method(self): 19 | super(TestTornado, self).teardown_method() 20 | self.r.set_loop_type(None) 21 | 22 | async def test_tornado_list_tables(self): 23 | """ 24 | Test the flow for 3.6 and up, async generators are 25 | not supported in 3.5. 26 | """ 27 | 28 | tables = self.r.table_list().run(self.conn) 29 | assert isinstance(tables, list) 30 | -------------------------------------------------------------------------------- /tests/integration/test_trio.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from tests.helpers import INTEGRATION_TEST_DB, IntegrationTestCaseBase 4 | 5 | 6 | @pytest.mark.trio 7 | @pytest.mark.integration 8 | class TestTrio(IntegrationTestCaseBase): 9 | def setup_method(self): 10 | super(TestTrio, self).setup_method() 11 | self.table_name = "test_trio" 12 | self.r.set_loop_type("trio") 13 | self.r.table_create(self.table_name).run(self.conn) 14 | 15 | def teardown_method(self): 16 | super(TestTrio, self).teardown_method() 17 | self.r.set_loop_type(None) 18 | 19 | async def test_trio(self, nursery): 20 | """ 21 | Test the flow for 3.6 and up, async generators are 22 | not supported in 3.5. 
23 | """ 24 | 25 | async with self.r.open(db=INTEGRATION_TEST_DB, nursery=nursery) as conn: 26 | await self.r.table(self.table_name).insert( 27 | { 28 | "id": 1, 29 | "name": "Iron Man", 30 | "first_appearance": "Tales of Suspense #39", 31 | } 32 | ).run(conn) 33 | 34 | cursor = await self.r.table(self.table_name).run(conn) 35 | async for hero in cursor: 36 | hero["name"] == "Iron Man" 37 | -------------------------------------------------------------------------------- /tests/integration/test_write_hooks.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from tests.helpers import IntegrationTestCaseBase 4 | 5 | 6 | @pytest.mark.integration 7 | class TestWriteHooks(IntegrationTestCaseBase): 8 | def setup_method(self): 9 | super(TestWriteHooks, self).setup_method() 10 | 11 | self.table_name = "test_write_hooks" 12 | self.documents = [ 13 | {"id": 1, "name": "Testing write hooks 1"}, 14 | ] 15 | 16 | self.r.table_create(self.table_name).run(self.conn) 17 | self.r.table(self.table_name).insert(self.documents).run(self.conn) 18 | 19 | def test_set_write_hook(self): 20 | response = ( 21 | self.r.table(self.table_name) 22 | .set_write_hook( 23 | lambda context, old_val, new_val: new_val.merge( 24 | {"modified_at": context["timestamp"]} 25 | ) 26 | ) 27 | .run(self.conn) 28 | ) 29 | 30 | assert response == {"created": 1} 31 | 32 | def test_write_hook_add_extra_data(self): 33 | self.r.table(self.table_name).set_write_hook( 34 | lambda context, old_val, new_val: new_val.merge( 35 | {"modified_at": context["timestamp"]} 36 | ) 37 | ).run(self.conn) 38 | 39 | self.r.table(self.table_name).insert( 40 | {"id": 2, "name": "Testing write hooks 1"} 41 | ).run(self.conn) 42 | 43 | document = self.r.table(self.table_name).get(2).run(self.conn) 44 | 45 | assert document.get("modified_at") != None 46 | 47 | def test_get_write_hook(self): 48 | self.r.table(self.table_name).set_write_hook( 49 | lambda context, old_val, new_val: 
new_val.merge( 50 | {"modified_at": context["timestamp"]} 51 | ) 52 | ).run(self.conn) 53 | 54 | hook = self.r.table(self.table_name).get_write_hook().run(self.conn) 55 | 56 | assert list(sorted(hook.keys())) == ["function", "query"] 57 | -------------------------------------------------------------------------------- /tests/test_date_and_time.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from mock import ANY, Mock, call, patch 3 | 4 | from rethinkdb import ast, r 5 | 6 | 7 | @pytest.mark.unit 8 | class TestNow(object): 9 | def setup_method(self): 10 | pass 11 | 12 | def test_get_now(self): 13 | now = r.now() 14 | assert type(now) == ast.Now 15 | -------------------------------------------------------------------------------- /tests/test_handshake.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import json 3 | import struct 4 | 5 | import pytest 6 | import six 7 | from mock import ANY, Mock, call, patch 8 | 9 | from rethinkdb.errors import ReqlAuthError, ReqlDriverError 10 | from rethinkdb.handshake import HandshakeV1_0, LocalThreadCache 11 | from rethinkdb.helpers import chain_to_bytes 12 | from rethinkdb.ql2_pb2 import VersionDummy 13 | 14 | 15 | @pytest.mark.unit 16 | class TestLocalThreadCache(object): 17 | def setup_method(self): 18 | self.cache = LocalThreadCache() 19 | self.cache_key = "test" 20 | self.cache_value = "cache" 21 | 22 | def test_initialization(self): 23 | assert self.cache._cache == dict() 24 | 25 | def test_add_to_cache(self): 26 | self.cache.set(self.cache_key, self.cache_value) 27 | 28 | assert self.cache._cache == {self.cache_key: self.cache_value} 29 | 30 | def test_get_from_cache(self): 31 | self.cache._cache = {self.cache_key: self.cache_value} 32 | 33 | cached_value = self.cache.get(self.cache_key) 34 | 35 | assert cached_value == self.cache_value 36 | 37 | 38 | @pytest.mark.unit 39 | class TestHandshake(object): 40 | 
def setup_method(self): 41 | self.encoder = json.JSONEncoder() 42 | self.decoder = json.JSONDecoder() 43 | 44 | self.handshake = self._get_handshake() 45 | 46 | def _get_handshake(self): 47 | return HandshakeV1_0( 48 | json_encoder=self.encoder, 49 | json_decoder=self.decoder, 50 | host="localhost", 51 | port=28015, 52 | username="admin", 53 | password="", 54 | ) 55 | 56 | @patch("rethinkdb.handshake.HandshakeV1_0._get_pbkdf2_hmac") 57 | @patch("rethinkdb.handshake.HandshakeV1_0._get_compare_digest") 58 | def test_initialization(self, mock_get_compare_digest, mock_get_pbkdf2_hmac): 59 | handshake = self._get_handshake() 60 | 61 | assert handshake.VERSION == VersionDummy.Version.V1_0 62 | assert handshake.PROTOCOL == VersionDummy.Protocol.JSON 63 | assert mock_get_compare_digest.called is True 64 | assert mock_get_pbkdf2_hmac.called is True 65 | 66 | @patch("rethinkdb.handshake.hmac") 67 | def test_get_builtin_compare_digest(self, mock_hmac): 68 | mock_hmac.compare_digest = Mock 69 | handshake = self._get_handshake() 70 | 71 | assert handshake._compare_digest == mock_hmac.compare_digest 72 | 73 | @patch("rethinkdb.handshake.compare_digest") 74 | @patch("rethinkdb.handshake.hmac") 75 | def test_get_own_compare_digest(self, mock_hmac, mock_compare_digest): 76 | delattr(mock_hmac, "compare_digest") 77 | handshake = self._get_handshake() 78 | 79 | assert handshake._compare_digest == mock_compare_digest 80 | 81 | @patch("rethinkdb.handshake.hashlib") 82 | def test_get_builtin_get_pbkdf2_hmac(self, mock_hashlib): 83 | mock_hashlib.pbkdf2_hmac = Mock 84 | handshake = self._get_handshake() 85 | 86 | assert handshake._pbkdf2_hmac == mock_hashlib.pbkdf2_hmac 87 | 88 | @patch("rethinkdb.handshake.pbkdf2_hmac") 89 | @patch("rethinkdb.handshake.hashlib") 90 | def test_get_own_get_pbkdf2_hmac(self, mock_hashlib, mock_pbkdf2_hmac): 91 | delattr(mock_hashlib, "pbkdf2_hmac") 92 | handshake = self._get_handshake() 93 | 94 | assert handshake._pbkdf2_hmac == mock_pbkdf2_hmac 95 | 96 | 
def test_decode_json_response(self): 97 | expected_response = {"success": True} 98 | 99 | decoded_response = self.handshake._decode_json_response( 100 | json.dumps(expected_response) 101 | ) 102 | 103 | assert decoded_response == expected_response 104 | 105 | def test_decode_json_response_utf8_encoded(self): 106 | expected_response = {"success": True} 107 | 108 | decoded_response = self.handshake._decode_json_response( 109 | json.dumps(expected_response), True 110 | ) 111 | 112 | assert decoded_response == expected_response 113 | 114 | def test_decode_json_response_auth_error(self): 115 | expected_response = { 116 | "success": False, 117 | "error_code": 15, 118 | "error": "test error message", 119 | } 120 | 121 | with pytest.raises(ReqlAuthError): 122 | decoded_response = self.handshake._decode_json_response( 123 | json.dumps(expected_response) 124 | ) 125 | 126 | def test_decode_json_response_driver_error(self): 127 | expected_response = { 128 | "success": False, 129 | "error_code": 30, 130 | "error": "test error message", 131 | } 132 | 133 | with pytest.raises(ReqlDriverError): 134 | decoded_response = self.handshake._decode_json_response( 135 | json.dumps(expected_response) 136 | ) 137 | 138 | def test_next_state(self): 139 | previous_state = self.handshake._state 140 | self.handshake._next_state() 141 | new_state = self.handshake._state 142 | 143 | assert previous_state == 0 144 | assert new_state == 1 145 | 146 | def test_reset(self): 147 | self.handshake._random_nonce = Mock() 148 | self.handshake._first_client_message = Mock() 149 | self.handshake._server_signature = Mock() 150 | self.handshake._state = Mock() 151 | 152 | self.handshake.reset() 153 | 154 | assert self.handshake._random_nonce is None 155 | assert self.handshake._first_client_message is None 156 | assert self.handshake._server_signature is None 157 | assert self.handshake._state == 0 158 | 159 | @patch("rethinkdb.handshake.base64") 160 | def test_init_connection(self, mock_base64): 161 | 
self.handshake._next_state = Mock() 162 | encoded_string = "test" 163 | mock_base64.standard_b64encode.return_value = encoded_string 164 | first_client_message = chain_to_bytes( 165 | "n=", self.handshake._username, ",r=", encoded_string 166 | ) 167 | 168 | expected_result = chain_to_bytes( 169 | struct.pack("