├── .coveragerc ├── .github └── workflows │ ├── cd.yml │ └── ci.yml ├── .gitignore ├── LICENSE ├── NOTICE ├── README.md ├── dokklib_db ├── __init__.py ├── errors │ ├── __init__.py │ ├── client.py │ ├── exceptions.py │ └── transaction.py ├── index.py ├── keys.py ├── op_args.py ├── py.typed ├── serializer.py └── table.py ├── environment.yml ├── mypy.ini ├── requirements ├── dev-requirements.in ├── dev-requirements.txt ├── test-requirements.in └── test-requirements.txt ├── scripts ├── create_db.py ├── generate_exceptions.py └── pip_compile.py ├── setup.py ├── tests ├── __init__.py ├── integration │ ├── __init__.py │ ├── cloudformation.yml │ └── dynamodb_tests.py └── unit │ ├── __init__.py │ ├── exceptions │ ├── __init__.py │ └── transaction_test.py │ ├── keys_test.py │ ├── op_args_test.py │ ├── serializer_test.py │ ├── table_test.py │ └── test_base.py └── tox.ini /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | exclude_lines = 3 | pragma: no cover 4 | 5 | # Abstract methods need not be tested. 
6 | raise NotImplementedError 7 | -------------------------------------------------------------------------------- /.github/workflows/cd.yml: -------------------------------------------------------------------------------- 1 | name: Release Python Package 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | jobs: 8 | deploy: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - name: Set up Python 13 | uses: actions/setup-python@v1 14 | with: 15 | python-version: '3.6' 16 | - name: Install dependencies 17 | run: | 18 | python -m pip install --upgrade pip 19 | pip install setuptools wheel twine 20 | - name: Build and publish 21 | env: 22 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 23 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 24 | run: | 25 | python setup.py sdist bdist_wheel 26 | twine upload dist/* 27 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | 8 | runs-on: ubuntu-latest 9 | strategy: 10 | # Integration tests write to the same table, hence 1 max in parallel 11 | max-parallel: 1 12 | matrix: 13 | python-version: [3.6, 3.7, 3.8] 14 | 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Set up Python ${{ matrix.python-version }} 18 | uses: actions/setup-python@v1 19 | with: 20 | python-version: ${{ matrix.python-version }} 21 | - name: Install dependencies 22 | run: | 23 | python -m pip install --upgrade pip 24 | pip install -r requirements/dev-requirements.txt 25 | - name: Configure AWS credentials 26 | uses: aws-actions/configure-aws-credentials@v1 27 | with: 28 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 29 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 30 | aws-region: us-west-2 31 | - name: Tox 32 | run: tox 33 | -------------------------------------------------------------------------------- 
/.gitignore: -------------------------------------------------------------------------------- 1 | # Dev 2 | tmp.py 3 | 4 | # IDE 5 | .idea 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | pip-wheel-metadata/ 30 | share/python-wheels/ 31 | *.egg-info/ 32 | .installed.cfg 33 | *.egg 34 | MANIFEST 35 | 36 | # PyInstaller 37 | # Usually these files are written by a python script from a template 38 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 39 | *.manifest 40 | *.spec 41 | 42 | # Installer logs 43 | pip-log.txt 44 | pip-delete-this-directory.txt 45 | 46 | # Unit test / coverage reports 47 | htmlcov/ 48 | .tox/ 49 | .nox/ 50 | .coverage 51 | .coverage.* 52 | .cache 53 | nosetests.xml 54 | coverage.xml 55 | *.cover 56 | *.py,cover 57 | .hypothesis/ 58 | .pytest_cache/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | env/ 114 | venv/ 115 | ENV/ 116 | env.bak/ 117 | venv.bak/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # Rope project settings 124 | .ropeproject 125 | 126 | # mkdocs documentation 127 | /site 128 | 129 | # mypy 130 | .mypy_cache/ 131 | .dmypy.json 132 | dmypy.json 133 | 134 | # Pyre type checker 135 | .pyre/ 136 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | dokklib-db 2 | Copyright 2020 Agost Biro. All Rights Reserved. 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Dokklib-DB (UNMAINTAINED) 2 | 3 | ![CI](https://github.com/dokklib/dokklib-db/workflows/CI/badge.svg) [![Join the chat at https://gitter.im/dokklib/dokklib-db](https://badges.gitter.im/dokklib/dokklib-db.svg)](https://gitter.im/dokklib/dokklib-db?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 4 | 5 | Dokklib-DB is a Python library for the DynamoDB single table pattern. 6 | 7 | ## Features 8 | 9 | - Simple, Pythonic query interface on top of Boto3. No more nested dict literals! 10 | - Type safety for primary keys and indices (for documentation and data integrity). 
11 | - Easy error handling. 12 | - Full type hint & unit test coverage + integration testing. 13 | 14 | ## [Documentation](https://github.com/dokklib/dokklib/tree/master/docs) 15 | 16 | ## Install 17 | 18 | Install with: 19 | 20 | `pip install "boto3>=1.10.34,<2" dokklib-db` 21 | 22 | Requires Python 3.6 or later. 23 | 24 | Note that Boto3 is not specified as an installation requirement for Dokklib-DB, so you have to install it separately (like in the example command above). 25 | The reason for this is to make Dokklib-DB easier to use in AWS Lambda where Boto3 is part of the default environment. 26 | The earliest supported Boto3 version is `1.10.34` which is the same version as the Boto3 package in the Python 3 AWS Lambda environments. 27 | 28 | ## Example usage 29 | 30 | ```python 31 | import dokklib_db as db 32 | 33 | 34 | class User(db.EntityName): 35 | """User entity name. 36 | 37 | Key value: unique user name, eg. 'alice'. 38 | Example key: 'USER#alice'. 39 | 40 | """ 41 | 42 | 43 | class Group(db.EntityName): 44 | """Group entity name. 45 | 46 | Key value: unique group name, eg. 'my-group'. 47 | Example key: 'GROUP#my-group'. 48 | 49 | """ 50 | 51 | 52 | table = db.Table('SingleTable') 53 | 54 | # Construct entity keys. 55 | pk_alice = db.PartitionKey(User, 'alice') 56 | pk_bob = db.PartitionKey(User, 'bob') 57 | sk_group1 = db.SortKey(Group, 'group1') 58 | 59 | # Add users to group one. 60 | # Insert is a `PutItem` operation that fails if the item already exists. 61 | table.insert(pk_alice, sk_group1) 62 | table.insert(pk_bob, sk_group1) 63 | 64 | # Get all users in group one. 65 | pk_group = db.PartitionKey(Group, 'group1') 66 | user_prefix = db.PrefixSortKey(User) 67 | group_members = table.query_prefix(pk_group, user_prefix, 68 | global_index=db.InversePrimaryIndex()) 69 | 70 | print(group_members) 71 | # [{'PK': 'alice'}, {'PK': 'bob'}] 72 | 73 | # Move users from group one to group two atomically. 
74 | sk_group2 = db.SortKey(Group, 'group2') 75 | table.transact_write_items([ 76 | db.DeleteArg(pk_alice, sk_group1), 77 | db.DeleteArg(pk_bob, sk_group1), 78 | db.InsertArg(pk_alice, sk_group2), 79 | db.InsertArg(pk_bob, sk_group2) 80 | ]) 81 | ``` 82 | 83 | ## Status 84 | 85 | The project is no longer maintained. 86 | 87 | ~The library is in beta and under heavy development as I'm working on it while building a [serverless project](https://github.com/dokknet/dokknet-api) that relies on it. 88 | I have only implemented parts of the DynamoDB API that I needed so far, but I'm planning on achieving full coverage. 89 | Feature and pull requests are welcome. (Please open an issue, before starting work on a pull request to avoid wasted effort.)~ 90 | -------------------------------------------------------------------------------- /dokklib_db/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | # mypy: implicit-reexport 3 | 4 | # Flake 8 would complain about unused imports if it was enabled on this file. 5 | 6 | import dokklib_db.errors 7 | 8 | from dokklib_db.table import ( 9 | Table, 10 | ItemResult 11 | ) 12 | from dokklib_db.index import ( 13 | GlobalIndex, 14 | GlobalSecondaryIndex, 15 | InversePrimaryIndex, 16 | PrimaryGlobalIndex 17 | ) 18 | from dokklib_db.keys import ( 19 | AnySortKey, 20 | EntityName, 21 | PartitionKey, 22 | PrefixSortKey, 23 | PrimaryKey, 24 | SortKey, 25 | ) 26 | from dokklib_db.op_args import ( 27 | Attributes, 28 | DeleteArg, 29 | GetArg, 30 | InsertArg, 31 | OpArg, 32 | PutArg, 33 | QueryArg, 34 | UpdateArg 35 | ) 36 | -------------------------------------------------------------------------------- /dokklib_db/errors/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | # mypy: implicit-reexport 3 | 4 | # Flake 8 would complain about unused imports if it was enabled on this file. 
# --- dokklib_db/errors/__init__.py (continued) ---

from dokklib_db.errors.exceptions import *
from dokklib_db.errors.client import ClientError
from dokklib_db.errors.transaction import TransactionCanceledException


# --- dokklib_db/errors/client.py ---

from typing import Any, Dict


class ClientError(Exception):
    """Base class of Dokklib-DB client errors.

    Wraps the error response of a failed Boto DynamoDB API call so callers
    can catch one library-specific exception hierarchy instead of inspecting
    raw Boto error dicts.

    """

    def __init__(self,
                 message: str,
                 error_response: Dict[str, Any],
                 operation_name: str):
        """Initialize a ClientError instance.

        Args:
            message: The error message.
            error_response: The error response dict from Boto.
            operation_name: The DynamoDB API operation name.

        """
        super().__init__(message)
        # Raw Boto error response dict. Subclasses read from it, eg.
        # TransactionCanceledException reads response['Error']['Message'].
        self.response = error_response
        # Name of the DynamoDB API operation that failed.
        self.operation_name = operation_name


# --- dokklib_db/errors/exceptions.py ---

"""Autogenerated DynamoDB exceptions.

This file was autogenerated by scripts/generate_exceptions.py.
Do not edit it manually!
"""
# (The quotes above close the autogenerated-module docstring opened earlier.)
from dokklib_db.errors.client import ClientError


class BackupInUseException(ClientError):
    """Please check DynamoDB docs for documentation."""


class BackupNotFoundException(ClientError):
    """Please check DynamoDB docs for documentation."""


class ConditionalCheckFailedException(ClientError):
    """Please check DynamoDB docs for documentation."""


class ContinuousBackupsUnavailableException(ClientError):
    """Please check DynamoDB docs for documentation."""


class GlobalTableAlreadyExistsException(ClientError):
    """Please check DynamoDB docs for documentation."""


class GlobalTableNotFoundException(ClientError):
    """Please check DynamoDB docs for documentation."""


class IdempotentParameterMismatchException(ClientError):
    """Please check DynamoDB docs for documentation."""


class IndexNotFoundException(ClientError):
    """Please check DynamoDB docs for documentation."""


class InternalServerError(ClientError):
    """Please check DynamoDB docs for documentation."""


class InvalidRestoreTimeException(ClientError):
    """Please check DynamoDB docs for documentation."""


class ItemCollectionSizeLimitExceededException(ClientError):
    """Please check DynamoDB docs for documentation."""


class LimitExceededException(ClientError):
    """Please check DynamoDB docs for documentation."""


class PointInTimeRecoveryUnavailableException(ClientError):
    """Please check DynamoDB docs for documentation."""


class ProvisionedThroughputExceededException(ClientError):
    """Please check DynamoDB docs for documentation."""


class ReplicaAlreadyExistsException(ClientError):
    """Please check DynamoDB docs for documentation."""


class ReplicaNotFoundException(ClientError):
    """Please check DynamoDB docs for documentation."""


class RequestLimitExceeded(ClientError):
    """Please check DynamoDB docs for documentation."""


class ResourceInUseException(ClientError):
    """Please check DynamoDB docs for documentation."""


class ResourceNotFoundException(ClientError):
    """Please check DynamoDB docs for documentation."""


class TableAlreadyExistsException(ClientError):
    """Please check DynamoDB docs for documentation."""


class TableInUseException(ClientError):
    """Please check DynamoDB docs for documentation."""


class TableNotFoundException(ClientError):
    """Please check DynamoDB docs for documentation."""


class TransactionConflictException(ClientError):
    """Please check DynamoDB docs for documentation."""


class TransactionInProgressException(ClientError):
    """Please check DynamoDB docs for documentation."""


class ValidationError(ClientError):
    """Please check DynamoDB docs for documentation."""


class ThrottlingError(ClientError):
    """Please check DynamoDB docs for documentation."""


# --- dokklib_db/errors/transaction.py ---

import re
from typing import Any, Dict, List, Optional, Type

from dokklib_db.errors import exceptions as ex
from dokklib_db.errors.client import ClientError
from dokklib_db.op_args import OpArg


# One entry per transaction item: the exception type that item failed with,
# or None if the item was not responsible for the cancellation.
CancellationReasons = List[Optional[Type[ClientError]]]


class TransactionCanceledException(ClientError):
    """The entire transaction request was canceled.

    Please see DynamoDB docs for details.
    https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_TransactWriteItems.html

    """

    # DynamoDB reports per-item cancellation reasons only inside the free-form
    # error message, so they must be parsed out with a regex.
    # Example match: "reasons [ConditionalCheckFailed, None]"
    _reasons_re = re.compile(r'reasons\W+\[([A-Za-z0-9, ]+)]', re.MULTILINE)

    # Maps reason codes from the error message to exception types.
    # Codes not in this map fall back to the generic ClientError type.
    _codes_to_exceptions: Dict[str, Type[ClientError]] = {
        'ConditionalCheckFailed': ex.ConditionalCheckFailedException,
        'ItemCollectionSizeLimitExceeded': ex.ItemCollectionSizeLimitExceededException,  # noqa: E501
        'TransactionConflict': ex.TransactionConflictException,
        'ProvisionedThroughputExceeded': ex.ProvisionedThroughputExceededException,  # noqa: E501
        'ThrottlingError': ex.ThrottlingError,
        'ValidationError': ex.ValidationError
    }

    def __init__(self, op_args: List[OpArg], *args: Any, **kwargs: Any):
        """Initialize a TransactionCanceledException instance.

        Args:
            op_args: The list of operations that were the inputs to this
                transaction.

        """
        super().__init__(*args, **kwargs)
        # Copy to guard against the caller mutating its list afterwards.
        self._op_args = list(op_args)
        # Parsed lazily on first access of `reasons`.
        self._reasons: Optional[CancellationReasons] = None

    def _extract_reasons(self, message: str) -> List[str]:
        # Pull the comma-separated reason codes out of the error message.
        # Returns [] when the message carries no reasons list.
        match = re.search(self._reasons_re, message)
        if not match:
            return []
        else:
            reasons = match.group(1)
            split = reasons.split(', ')
            if split[0] == reasons:
                # No ', ' separator matched; fall back to splitting on a
                # bare comma so 'A,B' style messages are handled too.
                return reasons.split(',')
            else:
                return split

    def _get_reasons(self) -> CancellationReasons:
        """Parse cancellation reasons from the stored Boto error response.

        Raises:
            ValueError: if the number of parsed reasons does not match the
                number of transaction arguments (the two lists must
                correspond element-wise).

        """
        db_error = self.response.get('Error', {})
        message = db_error.get('Message', '')
        reasons = self._extract_reasons(message)
        res: CancellationReasons = []
        for r in reasons:
            if r == 'None':
                # 'None' means this item didn't cause the cancellation.
                res.append(None)
            else:
                exception = self._codes_to_exceptions.get(r, ClientError)
                res.append(exception)
        if len(res) != len(self.op_args):
            msg = f'Transaction cancellation reasons don\'t match ' \
                  f'transaction arguments in error:\n{message}'
            raise ValueError(msg)
        return res

    @property
    def op_args(self) -> List[OpArg]:
        """Get the list of inputs to the transaction."""
        return self._op_args

    @property
    def reasons(self) -> CancellationReasons:
        """List of cancellation reasons for each item in the transaction.

        Corresponds to order of `op_args`.

        """
        if self._reasons is None:
            self._reasons = self._get_reasons()
        return self._reasons

    def has_error(self, exception: Type[ClientError]) -> bool:
        """Whether the transaction failed due to a particular exception.

        Args:
            exception: The exception type to check for, eg. `ValidationError`.

        Returns:
            True if any of the failure reasons match the exception type.

        """
        # NOTE(review): the `else` below belongs to the `for` loop (runs when
        # the loop finishes without `break`); since the loop never breaks,
        # this is equivalent to a plain `return False` after the loop.
        for r in self.reasons:
            if r is exception:
                return True
        else:
            return False


# --- dokklib_db/index.py ---

# Type checks are enough to test this module.
# pragma: no cover
from abc import ABC, abstractmethod

from typing_extensions import Literal


class GlobalIndex(ABC):
    """Base class for global indices.

    Concrete indices name the attributes that serve as the partition and
    sort keys of a DynamoDB global index.

    """

    @property
    @abstractmethod
    def partition_key(self) -> str:
        """Get the name of the partition key."""
        raise NotImplementedError

    @property
    @abstractmethod
    def sort_key(self) -> str:
        """Get the name of the sort key."""
        raise NotImplementedError


class GlobalSecondaryIndex(GlobalIndex):
    """Base class for global secondary indices.

    Adds a `name` to `GlobalIndex`; secondary indices must be addressed by
    name in DynamoDB requests, whereas the primary index has none.

    """

    @property
    @abstractmethod
    def name(self) -> str:
        """Get the name of the secondary index."""
        raise NotImplementedError


class PrimaryGlobalIndex(GlobalIndex):
    """Primary global DynamoDB index (partition key 'PK', sort key 'SK')."""

    @property
    def partition_key(self) -> Literal['PK']:
        """Get the name of the partition key."""
        return 'PK'

    @property
    def sort_key(self) -> Literal['SK']:
        """Get the name of the sort key."""
        return 'SK'


class InversePrimaryIndex(GlobalSecondaryIndex):
    """Inverted global secondary index.

    Swaps the primary index's keys ('SK' becomes the partition key and
    'PK' the sort key), which allows querying relations in the reverse
    direction.

    """

    @property
    def name(self) -> Literal['GSI_1']:
        """Get the global secondary index name."""
        return 'GSI_1'

    @property
    def partition_key(self) -> Literal['SK']:
        """Get the name of the partition key."""
        return 'SK'

    @property
    def sort_key(self) -> Literal['PK']:
        """Get the name of the sort key."""
        return 'PK'


# --- dokklib_db/keys.py ---

"""Types for entity names and keys in the single table.

Each item in the table is uniquely identified by its primary key.
The primary key is a composite of the partition key and the sort key.
AnySortKey = Union['SortKey', 'PrefixSortKey']


class EntityName(ABC):
    """Abstract base class of entity names.

    Applications must define their entities by inheriting from this class.
    Eg. in "app/entities.py":

    ```python
    import dokklib_db as db

    class User(db.EntityName):
        pass

    class Product(db.EntityName):
        pass

    ...

    ```

    """

    def __new__(cls) -> 'EntityName':  # pragma: no cover
        """Prevent instantiation; entity names are used as types only."""
        raise TypeError(f'{cls.__name__} can not be instantiated.')

    @classmethod
    def to_prefix(cls) -> str:
        """Convert class name to key prefix.

        Returns:
            The key prefix. Eg. if class name is 'User', then the prefix is
            'USER#'.

        Raises:
            TypeError: if called on `EntityName` itself instead of a subclass.

        """
        if cls is EntityName:
            raise TypeError(f'Entity names must inherit from {cls.__name__}.')  # pragma: no cover # noqa 501
        return cls.__name__.upper() + '#'


class EntityKey(ABC):
    """Abstract base class of table keys.

    The string form of a key is the entity prefix concatenated with the key
    value, eg. 'USER#foo@example.com'.
    """

    def __init__(self, entity_name: Type[EntityName], value: str):
        """Initialize an EntityKey instance.

        Args:
            entity_name: The entity type name.
            value: The key value.

        """
        self._prefix = entity_name.to_prefix()
        self._value = value

    # New must match init + subclasses' init as well.
    # Fix: `*args`/`**kwargs` annotations describe the type of EACH argument,
    # so they must be `Any`, not `List[Any]`/`Dict[str, Any]`.
    def __new__(cls, *args: Any, **kwargs: Any) -> 'EntityKey':
        """Prevent creating abstract base class."""
        if cls is EntityKey:
            raise TypeError(f'{EntityKey.__name__} can not be instantiated.')  # pragma: no cover # noqa 501
        return cast('EntityKey', object.__new__(cls))

    def __str__(self) -> str:
        """Get the string representation, eg. 'ENTITY#value'."""
        return f'{self._prefix}{self._value}'

    def __hash__(self) -> int:
        """Get the hash value (hash of the string representation)."""
        return hash(str(self))

    def __eq__(self, other: Any) -> bool:
        """Compare semantic equality.

        A key compares equal to anything with the same string form,
        including plain strings.
        """
        return str(self) == str(other)

    @property
    def prefix(self) -> str:
        """Get the entity prefix of the key, eg. 'USER#'."""
        return self._prefix

    @property
    def value(self) -> str:
        """Get the value of the key.

        Always a string; it may be empty for prefix-only sort keys.
        (Fixed annotation: was `Optional[str]`, but `_value` is never None.)
        """
        return self._value


class PartitionKey(EntityKey):
    """Partition key."""


class SortKey(EntityKey):
    """Sort key with a value."""


# Shouldn't inherit from `SortKey` as `PrefixSortKey` shouldn't pass where a
# `SortKey` is required.
class PrefixSortKey(EntityKey):
    """Prefix only sort key to query relations."""

    def __init__(self, entity_name: Type[EntityName], value: str = ''):
        """Initialize a PrefixSortKey instance.

        Args:
            entity_name: The entity type name.
            value: Optional prefix value.

        """
        super().__init__(entity_name, value)


class PrimaryKey:
    """Primary (composite) key of a DynamoDB item."""

    def __init__(self, partition_key: PartitionKey, sort_key: SortKey):
        """Initialize a PrimaryKey instance.

        Args:
            partition_key: The partition key.
            sort_key: The sort key.

        """
        super().__init__()

        self._pk = partition_key
        self._sk = sort_key
        self._serializer = Serializer()

    def __hash__(self) -> int:
        """Hash of the (str(PK), str(SK)) tuple, so a plain tuple with the
        same strings hashes identically (relied upon by `Table.batch_get`)."""
        return hash(self._tuple)

    def __eq__(self, other: object) -> bool:
        """Compare equality; also equal to a matching (str, str) tuple."""
        if isinstance(other, self.__class__):
            return self._tuple == other._tuple
        else:
            return self._tuple == other

    @property
    def _tuple(self) -> Tuple[str, str]:
        return str(self.partition_key), str(self.sort_key)

    @property
    def partition_key(self) -> PartitionKey:  # pragma: no cover
        """Get the partition key."""
        return self._pk

    @property
    def sort_key(self) -> SortKey:  # pragma: no cover
        """Get the sort key."""
        return self._sk

    def serialize(self, global_index: GlobalIndex) -> Dict[str, Any]:
        """Serialize the primary key to a DynamoDB item.

        Args:
            global_index: The global index where this key will be used
                (provides the partition/sort key attribute names).

        Returns:
            The serialized key.

        """
        pk_name = global_index.partition_key
        sk_name = global_index.sort_key
        item = {
            pk_name: str(self.partition_key),
            sk_name: str(self.sort_key)
        }
        return self._serializer.serialize_dict(item)
            primary_index: The primary global index of the table.

        Returns:
            The key-word arguments.

        """
        raise NotImplementedError

    def _serialize_primary_key(self, primary_index: GlobalIndex,
                               pk: PartitionKey, sk: SortKey) \
            -> Mapping[str, Mapping[str, _DynamoValue]]:
        """Serialize the composite (partition + sort) key of an item."""
        primary_key = PrimaryKey(pk, sk)
        return primary_key.serialize(primary_index)


class DeleteArg(OpArg):
    """Argument to a DynamoDB DeleteItem operation."""

    def __init__(self, pk: PartitionKey, sk: SortKey,
                 idempotent: bool = True):
        """Initialize a DeleteArg instance.

        Args:
            pk: The partition key of the item.
            sk: The sort key of the item.
            idempotent: If false, the op raises an error if the item to
                delete doesn't exist. Defaults to true.

        """
        super().__init__()
        self._pk = pk
        self._sk = sk
        self._idempotent = idempotent

    @property
    def op_name(self) -> Literal['Delete']:  # pragma: no cover
        """Get the operation name for which this object is an argument."""
        return 'Delete'

    def get_kwargs(self, table_name: str, primary_index: GlobalIndex) \
            -> Kwargs:
        """Get key-word arguments that can be passed to a DeleteItem operation.

        Args:
            table_name: The DynamoDB table name for the DeleteItem operation.
            primary_index: The primary global index of the table.

        Returns:
            The key-word arguments.

        """
        key = self._serialize_primary_key(primary_index, self._pk, self._sk)
        kwargs = {
            'TableName': table_name,
            'Key': key
        }
        if not self._idempotent:
            # This check is performed after the item is retrieved by the
            # composite key, so no need to specify SK.
            kwargs['ConditionExpression'] = 'attribute_exists(PK)'

        return kwargs


class GetArg(OpArg):
    """Argument to a DynamoDB GetItem operation."""

    def __init__(self, pk: PartitionKey, sk: SortKey,
                 attributes: Optional[List[str]] = None,
                 consistent: bool = False):
        """Initialize a GetArg instance.

        Args:
            pk: The partition key of the item.
            sk: The sort key of the item.
            attributes: The attributes to get. Returns all attributes if
                omitted.
            consistent: Whether the read is strongly consistent or not.

        """
        super().__init__()
        self._pk = pk
        self._sk = sk
        self._attributes = attributes
        self._consistent = consistent

    @property
    def op_name(self) -> Literal['Get']:  # pragma: no cover
        """Get the operation name for which this object is an argument."""
        return 'Get'

    def get_kwargs(self, table_name: str, primary_index: GlobalIndex) \
            -> Kwargs:
        """Get key-word arguments that can be passed to a GetItem operation.

        Args:
            table_name: The DynamoDB table name for the GetItem operation.
            primary_index: The primary global index of the table.

        Returns:
            The key-word arguments.

        """
        key = self._serialize_primary_key(primary_index, self._pk, self._sk)
        kwargs = {
            'TableName': table_name,
            'Key': key,
            'ConsistentRead': self._consistent
        }
        if self._attributes:
            # TODO (abiro) convert inputs to expression attribute names
            kwargs['ProjectionExpression'] = ','.join(self._attributes)

        return kwargs


class PutArg(OpArg):
    """Argument to a DynamoDB PutItem operation.

    This op will replace the entire item. Use `UpdateArg` if you just want to
    update a specific attribute.

    The `CreatedAt` attribute of the item is automatically set to the current
    ISO timestamp without microseconds (eg. '2020-02-15T19:09:38').

    """

    def __init__(self, pk: PartitionKey, sk: SortKey,
                 attributes: Optional[Attributes] = None,
                 allow_overwrite: bool = True):
        """Initialize a PutArg instance.

        Args:
            pk: The partition key of the item.
            sk: The sort key of the item.
            attributes: Optional additional attributes of the item.
            allow_overwrite: Whether to allow overwriting an existing item.

        """
        super().__init__()
        self._pk = pk
        self._sk = sk
        self._attributes = attributes
        self._allow_overwrite = allow_overwrite

    @property
    def op_name(self) -> Literal['Put']:  # pragma: no cover
        """Get the operation name for which this object is an argument."""
        return 'Put'

    def _get_dynamo_item(self, primary_index: GlobalIndex) \
            -> Mapping[str, Mapping[str, _DynamoValue]]:
        # Build the full DynamoDB item: caller attributes + `CreatedAt` +
        # the serialized primary key (key attributes win on collision).
        keys_item = self._serialize_primary_key(primary_index,
                                                self._pk,
                                                self._sk)

        item: Attributes = {
            'CreatedAt': self._iso_now()
        }
        if self._attributes:
            # `item` keys overwrite `_attributes` keys
            item = {**self._attributes, **item}

        dynamo_item = self._serializer.serialize_dict(item)
        return {**dynamo_item, **keys_item}

    def get_kwargs(self, table_name: str, primary_index: GlobalIndex) \
            -> Kwargs:
        """Get key-word arguments that can be passed to a PutItem operation.

        Args:
            table_name: The DynamoDB table name for the PutItem operation.
            primary_index: The primary global index of the table.

        Returns:
            The key-word arguments.

        """
        kwargs = {
            'TableName': table_name,
            'Item': self._get_dynamo_item(primary_index)
        }
        if not self._allow_overwrite:
            # The condition only checks if the item with the same composite key
            # exists. Ie. if there is an item (PK=foo, SK=0) in the table,
            # and we insert a new item (PK=foo, SK=1), the insert will succeed.
            kwargs['ConditionExpression'] = 'attribute_not_exists(PK)'

        return kwargs


class InsertArg(PutArg):
    """DynamoDB PutItem argument that prevents overwriting existing items."""

    def __init__(self, pk: PartitionKey, sk: SortKey,
                 attributes: Optional[Attributes] = None):
        """Initialize an InsertArg instance.

        The `CreatedAt` attribute of the item is automatically set.

        Args:
            pk: The partition key of the item.
            sk: The sort key of the item.
            attributes: Optional additional attributes of the item.

        """
        super().__init__(pk, sk,
                         attributes=attributes,
                         allow_overwrite=False)


class QueryArg(OpArg):
    """DynamoDB query operation argument.

    Note that query can not be used in a transaction, the purpose of this class
    is to provide a simplified interface to the boto3 DynamoDB table class.

    """

    # Maximum number of items a single query may request.
    _max_limit = 1000

    @staticmethod
    def _serialize_key_condition(key_cond: cond.ConditionBase) \
            -> cond.BuiltConditionExpression:
        """Build the low-level expression from a boto3 key condition."""
        builder = cond.ConditionExpressionBuilder()
        return builder.build_expression(key_cond, is_key_condition=True)

    def __init__(self, key_condition: cond.ConditionBase,
                 global_index: Optional[GlobalSecondaryIndex] = None,
                 attributes: Optional[List[str]] = None,
                 consistent: bool = False,
                 limit: Optional[int] = None):
        """Initialize a QueryArg instance.

        Args:
            key_condition: The key condition. Eg.:
                `Key('PK').eq(str(pk)) & Key('SK').begins_with(str(sk))`
            global_index: The global secondary index to query. Defaults to the
                primary index.
            attributes: The attributes to get. Defaults to `SK`.
            consistent: Whether the read is strongly consistent or not.
            limit: The maximum number of items to fetch. Defaults to 1000 which
                is also the maximum allowed value.

        Raises:
            ValueError: if `limit` exceeds the maximum allowed value.

        """
        super().__init__()
        self._key_cond = key_condition
        self._attributes = attributes
        self._consistent = consistent
        self._global_index = global_index

        if limit is not None:
            if limit > self._max_limit:
                raise ValueError(f'Limit {limit} is greater than max '
                                 f'{self._max_limit}')
            self._limit = limit
        else:
            self._limit = self._max_limit

    @property
    def op_name(self) -> Literal['Query']:  # pragma: no cover
        """Get the operation name for which this object is an argument.

        Note that query can not be used in a transaction.

        """
        return 'Query'

    def _serialize_primary_key(self, primary_index: GlobalIndex,
                               pk: PartitionKey,
                               sk: SortKey) \
            -> Mapping[str, Mapping[str, _DynamoValue]]:
        """Serialize composite key."""
        # Using this inherited method in QueryArg would be a mistake, because
        # it wouldn't take into account the global secondary index.
        raise NotImplementedError

    def get_kwargs(self, table_name: str, primary_index: GlobalIndex) \
            -> Kwargs:
        """Get key-word arguments that can be passed to a boto3 DynamoDB table.

        Args:
            table_name: The DynamoDB table name for the operation.
            primary_index: The primary global index of the table. Unused here
                (key names come from the key condition), but the parameter is
                part of the `OpArg` interface.

        Returns:
            The key-word arguments.

        """
        kc = self._serialize_key_condition(self._key_cond)
        kc_value_placeholders = self._serializer.serialize_dict(
            kc.attribute_value_placeholders)
        kwargs = {
            'TableName': table_name,
            'Select': 'SPECIFIC_ATTRIBUTES',
            'KeyConditionExpression': kc.condition_expression,
            'ExpressionAttributeNames': kc.attribute_name_placeholders,
            'ExpressionAttributeValues': kc_value_placeholders,
            'ConsistentRead': self._consistent,
            'Limit': self._limit
        }
        if self._attributes:
            # TODO (abiro) convert inputs to expression attribute names
            kwargs['ProjectionExpression'] = ','.join(self._attributes)
        else:
            kwargs['ProjectionExpression'] = 'SK'
        if self._global_index:
            kwargs['IndexName'] = self._global_index.name
        return kwargs


class UpdateArg(OpArg):
    """Argument to a DynamoDB UpdateItem operation.

    This op updates the specified attributes or creates a new item if it
    doesn't exist yet.

    The `UpdatedAt` attribute of the item is automatically set to the current
    ISO timestamp without microseconds (eg. '2020-02-15T19:09:38').

    """

    def __init__(self, pk: PartitionKey, sk: SortKey,
                 attr_updates: Optional[Attributes] = None):
        """Initialize an UpdateArg instance.

        Args:
            pk: The partition key of the item.
            sk: The sort key of the item.
            attr_updates: Optional attributes to update for the item. These
                attributes will be overwritten if they exist, or created if
                they don't exist.

        """
        super().__init__()
        self._pk = pk
        self._sk = sk
        self._attr_updates = attr_updates

    @property
    def op_name(self) -> Literal['Update']:  # pragma: no cover
        """Get the operation name for which this object is an argument."""
        return 'Update'

    def _get_attr_updates(self) -> Mapping[str, Mapping[str, Any]]:
        # NOTE(review): `AttributeUpdates` is a legacy DynamoDB parameter
        # (superseded by `UpdateExpression`) — confirm before changing.
        item = {
            'UpdatedAt': self._iso_now()
        }
        if self._attr_updates:
            # `item` keys overwrite `_attr_updates` keys
            item = {**self._attr_updates, **item}
        res = {}
        for k, v in item.items():
            # Wrap each attribute in the `AttributeUpdates` PUT form.
            res[k] = {
                'Action': 'PUT',
                'Value': self._serializer.serialize_val(v)
            }
        return res

    def get_kwargs(self, table_name: str, primary_index: GlobalIndex) \
            -> Kwargs:
        """Get key-word arguments that can be passed to an UpdateItem operation.

        Args:
            table_name: The DynamoDB table name for the UpdateItem operation.
            primary_index: The primary global index of the table.

        Returns:
            The key-word arguments.
420 | 421 | """ 422 | keys = self._serialize_primary_key(primary_index, self._pk, self._sk) 423 | attr_updates = self._get_attr_updates() 424 | kwargs = { 425 | 'TableName': table_name, 426 | 'Key': keys, 427 | 'AttributeUpdates': attr_updates 428 | } 429 | return kwargs 430 | -------------------------------------------------------------------------------- /dokklib_db/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dokklib/dokklib-db/3665775c6d369ca08431b5295c1c181a9dca97cd/dokklib_db/py.typed -------------------------------------------------------------------------------- /dokklib_db/serializer.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Mapping, Optional, cast 2 | 3 | from boto3.dynamodb.types import TypeDeserializer, TypeSerializer 4 | 5 | 6 | _StrKeyDict = Dict[str, Any] 7 | 8 | 9 | class Serializer: 10 | """Convert between Python and DynamoDB values.""" 11 | 12 | def __init__(self) -> None: 13 | """Initialize a Serializer instance.""" 14 | # Lazy-initialize serializers 15 | self._ser_handle: Optional[TypeSerializer] = None 16 | self._deser_handle: Optional[TypeDeserializer] = None 17 | 18 | @property 19 | def _deser(self) -> TypeDeserializer: 20 | if self._deser_handle is None: 21 | self._deser_handle = TypeDeserializer() 22 | return self._deser_handle 23 | 24 | @property 25 | def _ser(self) -> TypeSerializer: 26 | if self._ser_handle is None: 27 | self._ser_handle = TypeSerializer() 28 | return self._ser_handle 29 | 30 | def deserialize_dict(self, dynamo_item: Mapping[str, Any]) -> _StrKeyDict: 31 | """Deserialize a dictionary while preserving its top level keys. 32 | 33 | Args: 34 | dynamo_item: The dictionary from DynamoDB to deserialize. 35 | 36 | Returns: 37 | The deserialized dictionary. 38 | 39 | Raises: 40 | TypeError: if an unsupported type is encountered. 

        """
        return {k: self.deserialize_val(v) for k, v in dynamo_item.items()}

    def deserialize_val(self, dynamo_val: Mapping[str, Any]) -> Any:
        """Convert a DynamoDB value to a Python value.

        Args:
            dynamo_val: The value from DynamoDB to deserialize.

        Returns:
            The deserialized value.

        Raises:
            TypeError: if an unsupported type is encountered.

        """
        # The boto3 deserializer is untyped; the cast is for mypy only.
        return cast(_StrKeyDict, self._deser.deserialize(dynamo_val))

    def serialize_val(self, val: Any) -> _StrKeyDict:
        """Convert a Python value to a DynamoDB value.

        Args:
            val: The value to serialize.

        Returns:
            The serialized DynamoDB value.

        Raises:
            TypeError: if an unsupported type is encountered.

        """
        # The boto3 serializer is untyped; the cast is for mypy only.
        return cast(_StrKeyDict, self._ser.serialize(val))

    def serialize_dict(self, item: Mapping[str, Any]) -> _StrKeyDict:
        """Serialize a dictionary while preserving its top level keys.

        Args:
            item: The dictionary to serialize.

        Returns:
            The serialized DynamoDB dictionary.

        Raises:
            TypeError: if an unsupported type is encountered.
86 | 87 | """ 88 | return {k: self.serialize_val(v) for k, v in item.items()} 89 | -------------------------------------------------------------------------------- /dokklib_db/table.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import re 3 | from contextlib import contextmanager 4 | from typing import Any, Dict, Iterable, Iterator, List, Mapping, NamedTuple, \ 5 | Optional, Tuple, Type, Union, cast 6 | 7 | import boto3 8 | import boto3.dynamodb.conditions as cond 9 | 10 | import botocore.client 11 | import botocore.exceptions as botoex 12 | 13 | import dokklib_db.errors as err 14 | from dokklib_db.index import GlobalIndex, GlobalSecondaryIndex, \ 15 | PrimaryGlobalIndex 16 | from dokklib_db.keys import PartitionKey, PrefixSortKey, PrimaryKey, SortKey 17 | from dokklib_db.op_args import Attributes, DeleteArg, GetArg, InsertArg, \ 18 | OpArg, PutArg, QueryArg, UpdateArg 19 | from dokklib_db.serializer import Serializer 20 | 21 | 22 | ItemResult = Mapping[str, Any] 23 | 24 | 25 | class BatchGetResult(NamedTuple): 26 | """Result from a `Table.batch_get` operation.""" 27 | 28 | items: List[ItemResult] 29 | unprocessed_keys: List[PrimaryKey] 30 | 31 | 32 | class Table: 33 | """DynamoDB table for the single table pattern. 34 | 35 | Table instances are not safe to share across threads. 36 | """ 37 | 38 | @staticmethod 39 | def _get_error_code(error: botoex.ClientError) -> str: 40 | db_error = error.response.get('Error', {}) 41 | return cast(str, db_error.get('Code', 'None')) 42 | 43 | @staticmethod 44 | def _remove_entity_prefix(string: str) -> str: 45 | # Entity names are upper-cased Python class names. 
        # Eg. 'USER#foo@example.com' -> 'foo@example.com'. Strings without an
        # entity prefix are returned unchanged.
        pattern = r'^[A-Z0-9_]+#(.+)$'
        match = re.match(pattern, string)
        if match:
            return match.group(1)
        else:
            return string

    @classmethod
    @contextmanager
    def _dispatch_transaction_error(cls, op_args: List[OpArg]) \
            -> Iterator[None]:
        """Raise appropriate exception based on ClientError code."""
        try:
            yield None
        except botoex.ClientError as e:
            code = cls._get_error_code(e)
            if code == 'TransactionCanceledException':
                # Transaction cancellations carry per-item reasons, so they
                # get a dedicated exception that keeps the op args.
                raise err.TransactionCanceledException(op_args,
                                                       str(e),
                                                       e.response,
                                                       e.operation_name)
            else:
                raise cls._get_exception(e)

    @classmethod
    @contextmanager
    def _dispatch_error(cls) -> Iterator[None]:
        """Raise appropriate exception based on ClientError code."""
        try:
            yield None
        except botoex.ClientError as e:
            raise cls._get_exception(e)

    @classmethod
    def _get_exception(cls, error: botoex.ClientError) -> err.ClientError:
        """Map a boto ClientError to the matching `dokklib_db.errors` class."""
        code = cls._get_error_code(error)
        try:
            # Error classes in `dokklib_db.errors` are named after DynamoDB
            # error codes, so the code doubles as an attribute lookup.
            ex_class = cast(Type[err.ClientError], getattr(err, code))
        except AttributeError:  # pragma: no cover
            # Type checks are enough to test this.
            ex_class = err.ClientError
        return ex_class(str(error), error.response, error.operation_name)

    @classmethod
    def _strip_prefixes(cls, item: Dict[str, Any]) -> ItemResult:
        """Strip entity prefixes from a DB item."""
        # Deep copy so the caller's item is never mutated.
        item_copy = copy.deepcopy(item)
        for k, v in item_copy.items():
            if isinstance(v, str):
                item_copy[k] = cls._remove_entity_prefix(v)
        return item_copy

    def __init__(self, table_name: str,
                 primary_index: Optional[GlobalIndex] = None):
        """Initialize a Table instance.

        Args:
            table_name: The DynamoDB table name.
            primary_index: The primary global index of the table.
                Defaults to `db.PrimaryGlobalIndex` that has 'PK' as the
                partition key name and 'SK' as the sort key name.

        """
        self._table_name = table_name
        if primary_index:
            self._primary_index = primary_index
        else:
            self._primary_index = PrimaryGlobalIndex()
        self._serializer = Serializer()

        # NOTE(review): the client object is created eagerly here (not
        # lazily as a previous comment claimed); boto3 itself defers network
        # connections until the first request.
        self._client_handle = boto3.client('dynamodb')

    @property
    def _client(self) -> 'botocore.client.DynamoDB':
        # Helps mock the client at test time.
        return self._client_handle

    @property
    def primary_index(self) -> GlobalIndex:
        """Get the primary global index of the table."""
        return self._primary_index

    @property
    def table_name(self) -> str:
        """Get the DynamoDB table name."""
        return self._table_name

    def _normalize_item(self, item: Dict[str, Any]) -> ItemResult:
        """Deserialize a DynamoDB item and strip entity prefixes."""
        des_item = self._serializer.deserialize_dict(item)
        return self._strip_prefixes(des_item)

    def _normalize_items(self, items: List[Dict[str, Any]]) \
            -> List[ItemResult]:
        """Normalize a list of DynamoDB items."""
        return [self._normalize_item(item) for item in items]

    def _put_item(self, put_arg: PutArg) -> None:
        """Execute a PutItem operation described by `put_arg`."""
        kwargs = put_arg.get_kwargs(self.table_name, self.primary_index)
        with self._dispatch_error():
            self._client.put_item(**kwargs)

    def _query(self, query_arg: QueryArg) -> List[ItemResult]:
        """Execute a Query operation and normalize the returned items."""
        args = query_arg.get_kwargs(self.table_name, self.primary_index)
        with self._dispatch_error():
            query_res = self._client.query(**args)
        items = query_res.get('Items', [])
        return self._normalize_items(items)

    def _update_item(self, update_arg: UpdateArg) -> None:
        """Update an item or insert a new item if it doesn't exist.

        Args:
            update_arg: The update item op argument.

        Raises:
            dokklib_db.DatabaseError if there was a problem connecting to
            DynamoDB.

        """
        kwargs = update_arg.get_kwargs(self.table_name, self.primary_index)
        with self._dispatch_error():
            self._client.update_item(**kwargs)

    def batch_get(self, keys: Iterable[PrimaryKey],
                  attributes: Optional[List[str]] = None,
                  consistent: bool = False) -> BatchGetResult:
        """Fetch multiple items by their primary keys from the table.

        Note that the Dynamodb BatchGetItem API operation doesn't return items
        in order, that's why the primary key (PK and SK) of the item is always
        included in the Table.batch_get results.

        Further, note that while it's possible to make individual reads
        strongly consistent, the returned snapshot has no isolation
        guarantees. If you need a consistent snapshot of multiple items in the
        database, you should use a transaction.

        Doesn't retry `UnprocessedKeys` in the response; they are returned to
        the caller instead.

        Args:
            keys: The primary keys of the items to get.
            attributes: The attributes to get. Returns all attributes if
                omitted. The partition and sort keys are always included even
                if not specified here.
            consistent: Whether the read is strongly consistent or not.

        Returns:
            The fetched items along with any keys the service left
            unprocessed.

        """
        attr_s = set(attributes or [])
        attr_s.add(self.primary_index.partition_key)
        attr_s.add(self.primary_index.sort_key)
        # TODO (abiro) convert inputs to expression attribute names
        proj_expr = ','.join(attr_s)

        # `PrimaryKey.__hash__`/`__eq__` are tuple-compatible, so a
        # (str(PK), str(SK)) tuple can look up the original `PrimaryKey`
        # instance in this map later.
        key_map: Dict[Union[PrimaryKey, Tuple[str, str]], PrimaryKey] = {}
        key_items = []
        for key in keys:
            key_map[key] = key
            ser_key = key.serialize(self.primary_index)
            key_items.append(ser_key)

        request_items = {
            self.table_name: {
                'Keys': key_items,
                'ProjectionExpression': proj_expr,
                'ConsistentRead': consistent
            }
        }
        with self._dispatch_error():
            res = self._client.batch_get_item(RequestItems=request_items)

        responses = res.get('Responses', {})
        items = responses.get(self.table_name, [])
        norm_items = self._normalize_items(items)

        # Map unprocessed keys back to original `PrimaryKey` arguments.
        unproc = res.get('UnprocessedKeys', {})
        unproc_items = unproc.get(self.table_name, {})
        unproc_keys = []
        for item in unproc_items.get('Keys', []):
            pk_dynamo = item[self.primary_index.partition_key]
            sk_dynamo = item[self.primary_index.sort_key]
            pk_val = self._serializer.deserialize_val(pk_dynamo)
            sk_val = self._serializer.deserialize_val(sk_dynamo)
            key_tuple = (cast(str, pk_val), cast(str, sk_val))
            key = key_map[key_tuple]
            unproc_keys.append(key)

        return BatchGetResult(items=norm_items, unprocessed_keys=unproc_keys)

    def delete(self, pk: PartitionKey, sk: SortKey,
               idempotent: bool = True) -> None:
        """Delete an item from the table.

        Args:
            pk: The partition key.
            sk: The sort key.
            idempotent: Whether the operation is idempotent. Defaults to True.

        """
        delete_arg = DeleteArg(pk, sk, idempotent=idempotent)
        kwargs = delete_arg.get_kwargs(self.table_name, self.primary_index)
        with self._dispatch_error():
            self._client.delete_item(**kwargs)

    def get(self, pk: PartitionKey, sk: SortKey,
            attributes: Optional[List[str]] = None,
            consistent: bool = False) -> Optional[ItemResult]:
        """Fetch an item by its primary key from the table.

        Args:
            pk: The partition key.
            sk: The sort key.
            attributes: The attributes to get. Returns all attributes if
                omitted.
            consistent: Whether the read is strongly consistent or not.

        Returns:
            The item if it exists.

        """
        get_arg = GetArg(pk, sk, attributes=attributes, consistent=consistent)
        kwargs = get_arg.get_kwargs(self.table_name, self.primary_index)
        with self._dispatch_error():
            res = self._client.get_item(**kwargs)
        item = res.get('Item')
        if item:
            return self._normalize_item(item)
        else:
            return None

    # Type checks are sufficient to test this function, so it's excluded from
    # unit test coverage.
    def insert(self, pk: PartitionKey, sk: SortKey,
               attributes: Optional[Attributes] = None) -> None:  # pragma: no cover # noqa 501
        """Insert a new item into the table.

        The CreatedAt attribute of the item is automatically set.
        The insert fails if an item with the same composite key (PK, SK)
        exists.

        Args:
            pk: The partition key.
            sk: The sort key.
            attributes: Dictionary with additional attributes of the item.

        Raises:
            dokklib_db.ItemExistsError if the item with the same composite
            key already exists.
            dokklib_db.DatabaseError if there was a problem connecting to
            DynamoDB.

        """
        put_arg = InsertArg(pk, sk, attributes=attributes)
        self._put_item(put_arg)

    # Type checks are sufficient to test this function, so it's excluded from
    # unit test coverage.
    def put(self, pk: PartitionKey, sk: SortKey,
            attributes: Optional[Attributes] = None,
            allow_overwrite: bool = True) -> None:  # pragma: no cover # noqa 501
        """Insert a new item or replace an existing item.

        Args:
            pk: The partition key of the item.
            sk: The sort key of the item.
            attributes: Optional additional attributes of the item.
            allow_overwrite: Whether to allow overwriting an existing item.

        Raises:
            dokklib_db.DatabaseError if there was a problem connecting to
            DynamoDB.

        """
        put_arg = PutArg(pk, sk,
                         attributes=attributes,
                         allow_overwrite=allow_overwrite)
        self._put_item(put_arg)

    # Type checks are sufficient to test this function, so it's excluded from
    # unit test coverage.
    def query(self, key_condition: cond.ConditionBase,
              global_index: Optional[GlobalSecondaryIndex] = None,
              attributes: Optional[List[str]] = None,
              consistent: bool = False,
              limit: Optional[int] = None) -> List[ItemResult]:  # pragma: no cover # noqa 501
        """Fetch items from the table based on a key condition.

        Doesn't support pagination.

        Args:
            key_condition: The key condition. Eg.:
                `Key('PK').eq(str(pk)) & Key('SK').begins_with(str(sk))`
            global_index: The global secondary index to query. Defaults to the
                primary index.
            attributes: The attributes to get. Defaults to `SK`.
            consistent: Whether the read is strongly consistent or not.
            limit: The maximum number of items to fetch. Defaults to 1000.

        Returns:
            The requested items with the entity name prefixes stripped,
            eg. if the value of an attribute is 'USER#foo@example.com',
            only 'foo@example.com' is returned.

        Raises:
            dokklib_db.DatabaseError if there was an error querying the
            table.

        """
        query_arg = QueryArg(key_condition,
                             global_index=global_index,
                             attributes=attributes,
                             consistent=consistent,
                             limit=limit)
        return self._query(query_arg)

    def query_prefix(self, pk: PartitionKey, sk: PrefixSortKey,
                     global_index: Optional[GlobalSecondaryIndex] = None,
                     attributes: Optional[List[str]] = None,
                     consistent: bool = False,
                     limit: Optional[int] = None) -> List[ItemResult]:
        """Fetch items from the table based on a sort key prefix.

        Doesn't support pagination.

        Args:
            pk: The partition key.
            sk: The sort key prefix.
            global_index: The global secondary index to query. Defaults to the
                primary index.
            attributes: The attributes to get. Defaults to
                `[self.primary_index.sort_key]` if no `global_index` is
                provided and `[global_index.sort_key]` if it is provided.
            consistent: Whether the read is strongly consistent or not.
            limit: The maximum number of items to fetch. Defaults to 1000.

        Returns:
            The requested items with the `PK` and `SK` prefixes stripped.

        Raises:
            dokklib_db.DatabaseError if there was an error querying DynamoDB.
390 | 391 | """ 392 | if global_index: 393 | pk_name = global_index.partition_key 394 | sk_name = global_index.sort_key 395 | else: 396 | pk_name = self.primary_index.partition_key 397 | sk_name = self.primary_index.sort_key 398 | 399 | if not attributes: 400 | attributes = [sk_name] 401 | 402 | key_condition = cond.Key(pk_name).eq(str(pk)) & \ 403 | cond.Key(sk_name).begins_with(str(sk)) 404 | query_arg = QueryArg(key_condition, 405 | global_index=global_index, 406 | attributes=attributes, 407 | consistent=consistent, 408 | limit=limit) 409 | return self._query(query_arg) 410 | 411 | def transact_write_items(self, op_args: List[OpArg]) -> None: 412 | """Write multiple items in a transaction. 413 | 414 | Args: 415 | op_args: Write operation arguments. 416 | 417 | Raises: 418 | dokklib_db.TransactionError if the transaction fails. 419 | dokklib_db.DatabaseError if there was a problem connecting 420 | DynamoDB. 421 | 422 | """ 423 | transact_items = [] 424 | for a in op_args: 425 | kwargs = a.get_kwargs(self.table_name, self.primary_index) 426 | transact_items.append({a.op_name: kwargs}) 427 | with self._dispatch_transaction_error(op_args): 428 | self._client.transact_write_items(TransactItems=transact_items) 429 | 430 | # Type checks are sufficient to test this function, so it's excluded from 431 | # unit test coverage. 432 | def update_attributes(self, pk: PartitionKey, sk: SortKey, 433 | attributes: Attributes) -> None: # pragma: no cover 434 | """Update an item or insert a new item if it doesn't exist. 435 | 436 | The `UpdatedAt` attribute of the item is automatically set. 437 | 438 | Args: 439 | pk: The partition key. 440 | sk: The sort key. 441 | attributes: Dictionary with attributes to updates. These attributes 442 | will overwritten if they exist or created if they don't exist. 443 | 444 | Raises: 445 | dokklib_db.DatabaseError if there was a problem connecting to 446 | DynamoDB. 
447 | 448 | """ 449 | update_arg = UpdateArg(pk, sk, attr_updates=attributes) 450 | self._update_item(update_arg) 451 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | # Conda environment file 2 | # Create a new Conda environment for development with: 3 | # `conda env create -f environment.yml` 4 | # Activate environment: 5 | # `conda activate dokklib_db` 6 | name: dokklib_db 7 | dependencies: 8 | - python=3.6 9 | - ipython 10 | - pip 11 | - pip: 12 | - -rrequirements/dev-requirements.txt 13 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | # Equivalent to using the --strict flag 3 | follow_imports = 'error' 4 | warn_unused_configs = True 5 | disallow_subclassing_any = True 6 | disallow_any_generics = True 7 | disallow_untyped_calls = True 8 | disallow_untyped_defs = True 9 | disallow_incomplete_defs = True 10 | check_untyped_defs = True 11 | disallow_untyped_decorators = True 12 | no_implicit_optional = True 13 | warn_redundant_casts = True 14 | warn_unused_ignores = True 15 | warn_return_any = True 16 | no_implicit_reexport = True 17 | 18 | # No type defs for these libs 19 | [mypy-botocore.*] 20 | ignore_missing_imports = True 21 | [mypy-boto3.*] 22 | ignore_missing_imports = True 23 | 24 | -------------------------------------------------------------------------------- /requirements/dev-requirements.in: -------------------------------------------------------------------------------- 1 | awscli~=1.18 2 | autopep8~=1.5 3 | bandit~=1.6 4 | cfn-lint~=0.28 5 | coverage~=5.0 6 | flake8~=3.7 7 | flake8-colors~=0.1 8 | flake8-docstrings~=1.5 9 | flake8-import-order~=0.18 10 | flake8-print~=3.1.4 11 | mypy~=0.761 12 | pep8-naming~=0.9 13 | pip-tools~=4.5 14 | toml~=0.10 15 | tox~=3.14 
-------------------------------------------------------------------------------- /requirements/dev-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/dev-requirements.txt requirements/dev-requirements.in 6 | # 7 | appdirs==1.4.3 \ 8 | --hash=sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92 \ 9 | --hash=sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e \ 10 | # via virtualenv 11 | attrs==19.3.0 \ 12 | --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \ 13 | --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72 \ 14 | # via jsonschema 15 | autopep8==1.5 \ 16 | --hash=sha256:0f592a0447acea0c2b0a9602be1e4e3d86db52badd2e3c84f0193bfd89fd3a43 \ 17 | # via -r requirements/dev-requirements.in (line 2) 18 | aws-sam-translator==1.21.0 \ 19 | --hash=sha256:320a341d659580ad04e79c46fc93f50996363745a0c2728f9cd025ad78c43e36 \ 20 | # via cfn-lint 21 | awscli==1.18.5 \ 22 | --hash=sha256:3c2cab2a780535e274a3c8ca66950324a8e19219540974c1552f14d31cb73a0f \ 23 | --hash=sha256:62dffd77132afb6b74e22668199f9656781a9178991156edd1997a811463d91e \ 24 | # via -r requirements/dev-requirements.in (line 1) 25 | bandit==1.6.2 \ 26 | --hash=sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952 \ 27 | --hash=sha256:41e75315853507aa145d62a78a2a6c5e3240fe14ee7c601459d0df9418196065 \ 28 | # via -r requirements/dev-requirements.in (line 3) 29 | boto3==1.12.5 \ 30 | --hash=sha256:599608a8ed50dc184e83852e3050e6997d7825b404a6e3612c27425e4f12ec10 \ 31 | --hash=sha256:627a0c2366e420383065d9b191fa90a315a4d394fd007c2a85cfa5afa93ba974 \ 32 | # via aws-sam-translator 33 | botocore==1.15.5 \ 34 | --hash=sha256:8c9fa943e1890b44a7f31be2654cd4f4f88e634adadb931b0f298f1cf03a52a4 \ 35 | 
--hash=sha256:9de7885e9e9d6dbc30b9846d7edd8f98251db7f00395a6f4cfd765efc7098bdb \ 36 | # via awscli, boto3, s3transfer 37 | cfn-lint==0.28.2 \ 38 | --hash=sha256:82d2d2872ed77bb6858e698c924267676c00878d8fbd612b903cb9fc71063bcb \ 39 | --hash=sha256:91915e60c0c9971740895b20010bca3b6d016e7c6ed78b32cd9eb51529a5711f \ 40 | # via -r requirements/dev-requirements.in (line 4) 41 | click==7.0 \ 42 | --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ 43 | --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ 44 | # via pip-tools 45 | colorama==0.4.3 \ 46 | --hash=sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff \ 47 | --hash=sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1 \ 48 | # via awscli 49 | coverage==5.0.3 \ 50 | --hash=sha256:15cf13a6896048d6d947bf7d222f36e4809ab926894beb748fc9caa14605d9c3 \ 51 | --hash=sha256:1daa3eceed220f9fdb80d5ff950dd95112cd27f70d004c7918ca6dfc6c47054c \ 52 | --hash=sha256:1e44a022500d944d42f94df76727ba3fc0a5c0b672c358b61067abb88caee7a0 \ 53 | --hash=sha256:25dbf1110d70bab68a74b4b9d74f30e99b177cde3388e07cc7272f2168bd1477 \ 54 | --hash=sha256:3230d1003eec018ad4a472d254991e34241e0bbd513e97a29727c7c2f637bd2a \ 55 | --hash=sha256:3dbb72eaeea5763676a1a1efd9b427a048c97c39ed92e13336e726117d0b72bf \ 56 | --hash=sha256:5012d3b8d5a500834783689a5d2292fe06ec75dc86ee1ccdad04b6f5bf231691 \ 57 | --hash=sha256:51bc7710b13a2ae0c726f69756cf7ffd4362f4ac36546e243136187cfcc8aa73 \ 58 | --hash=sha256:527b4f316e6bf7755082a783726da20671a0cc388b786a64417780b90565b987 \ 59 | --hash=sha256:722e4557c8039aad9592c6a4213db75da08c2cd9945320220634f637251c3894 \ 60 | --hash=sha256:76e2057e8ffba5472fd28a3a010431fd9e928885ff480cb278877c6e9943cc2e \ 61 | --hash=sha256:77afca04240c40450c331fa796b3eab6f1e15c5ecf8bf2b8bee9706cd5452fef \ 62 | --hash=sha256:7afad9835e7a651d3551eab18cbc0fdb888f0a6136169fbef0662d9cdc9987cf \ 63 | 
--hash=sha256:9bea19ac2f08672636350f203db89382121c9c2ade85d945953ef3c8cf9d2a68 \ 64 | --hash=sha256:a8b8ac7876bc3598e43e2603f772d2353d9931709345ad6c1149009fd1bc81b8 \ 65 | --hash=sha256:b0840b45187699affd4c6588286d429cd79a99d509fe3de0f209594669bb0954 \ 66 | --hash=sha256:b26aaf69713e5674efbde4d728fb7124e429c9466aeaf5f4a7e9e699b12c9fe2 \ 67 | --hash=sha256:b63dd43f455ba878e5e9f80ba4f748c0a2156dde6e0e6e690310e24d6e8caf40 \ 68 | --hash=sha256:be18f4ae5a9e46edae3f329de2191747966a34a3d93046dbdf897319923923bc \ 69 | --hash=sha256:c312e57847db2526bc92b9bfa78266bfbaabac3fdcd751df4d062cd4c23e46dc \ 70 | --hash=sha256:c60097190fe9dc2b329a0eb03393e2e0829156a589bd732e70794c0dd804258e \ 71 | --hash=sha256:c62a2143e1313944bf4a5ab34fd3b4be15367a02e9478b0ce800cb510e3bbb9d \ 72 | --hash=sha256:cc1109f54a14d940b8512ee9f1c3975c181bbb200306c6d8b87d93376538782f \ 73 | --hash=sha256:cd60f507c125ac0ad83f05803063bed27e50fa903b9c2cfee3f8a6867ca600fc \ 74 | --hash=sha256:d513cc3db248e566e07a0da99c230aca3556d9b09ed02f420664e2da97eac301 \ 75 | --hash=sha256:d649dc0bcace6fcdb446ae02b98798a856593b19b637c1b9af8edadf2b150bea \ 76 | --hash=sha256:d7008a6796095a79544f4da1ee49418901961c97ca9e9d44904205ff7d6aa8cb \ 77 | --hash=sha256:da93027835164b8223e8e5af2cf902a4c80ed93cb0909417234f4a9df3bcd9af \ 78 | --hash=sha256:e69215621707119c6baf99bda014a45b999d37602cb7043d943c76a59b05bf52 \ 79 | --hash=sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37 \ 80 | --hash=sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0 \ 81 | # via -r requirements/dev-requirements.in (line 5) 82 | distlib==0.3.0 \ 83 | --hash=sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21 \ 84 | # via virtualenv 85 | docutils==0.15.2 \ 86 | --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \ 87 | --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \ 88 | 
--hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99 \ 89 | # via awscli, botocore 90 | entrypoints==0.3 \ 91 | --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ 92 | --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \ 93 | # via flake8 94 | filelock==3.0.12 \ 95 | --hash=sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59 \ 96 | --hash=sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836 \ 97 | # via tox, virtualenv 98 | flake8-colors==0.1.6 \ 99 | --hash=sha256:508fcf6efc15826f2146b42172ab41999555e07af43fcfb3e6a28ad596189560 \ 100 | # via -r requirements/dev-requirements.in (line 7) 101 | flake8-docstrings==1.5.0 \ 102 | --hash=sha256:3d5a31c7ec6b7367ea6506a87ec293b94a0a46c0bce2bb4975b7f1d09b6f3717 \ 103 | --hash=sha256:a256ba91bc52307bef1de59e2a009c3cf61c3d0952dbe035d6ff7208940c2edc \ 104 | # via -r requirements/dev-requirements.in (line 8) 105 | flake8-import-order==0.18.1 \ 106 | --hash=sha256:90a80e46886259b9c396b578d75c749801a41ee969a235e163cfe1be7afd2543 \ 107 | --hash=sha256:a28dc39545ea4606c1ac3c24e9d05c849c6e5444a50fb7e9cdd430fc94de6e92 \ 108 | # via -r requirements/dev-requirements.in (line 9) 109 | flake8-polyfill==1.0.2 \ 110 | --hash=sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9 \ 111 | --hash=sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda \ 112 | # via pep8-naming 113 | flake8-print==3.1.4 \ 114 | --hash=sha256:324f9e59a522518daa2461bacd7f82da3c34eb26a4314c2a54bd493f8b394a68 \ 115 | # via -r requirements/dev-requirements.in (line 10) 116 | flake8==3.7.9 \ 117 | --hash=sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb \ 118 | --hash=sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca \ 119 | # via -r requirements/dev-requirements.in (line 6), flake8-colors, flake8-docstrings, flake8-polyfill, flake8-print 120 | gitdb2==3.0.2 \ 121 | 
--hash=sha256:0375d983fd887d03c8942e81b1b0abc6c320cfb500cd3fe0d9c0eac87fbf2b52 \ 122 | --hash=sha256:b2b3a67090c17dc61f8407ca485e79ae811225ab5ebcd98ac5ee01448e8987b5 \ 123 | # via gitpython 124 | gitpython==3.0.8 \ 125 | --hash=sha256:620b3c729bbc143b498cfea77e302999deedc55faec5b1067086c9ef90e101bc \ 126 | --hash=sha256:a43a5d88a5bbc3cf32bb5223e4b4e68fd716db5e9996cad6e561bbfee6e5f4af \ 127 | # via bandit 128 | importlib-metadata==1.5.0 \ 129 | --hash=sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302 \ 130 | --hash=sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b \ 131 | # via jsonschema, pluggy, tox, virtualenv 132 | importlib-resources==1.0.2 \ 133 | --hash=sha256:6e2783b2538bd5a14678284a3962b0660c715e5a0f10243fd5e00a4b5974f50b \ 134 | --hash=sha256:d3279fd0f6f847cced9f7acc19bd3e5df54d34f93a2e7bb5f238f81545787078 \ 135 | # via cfn-lint, virtualenv 136 | jmespath==0.9.4 \ 137 | --hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \ 138 | --hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \ 139 | # via boto3, botocore 140 | jsonpatch==1.25 \ 141 | --hash=sha256:cc3a7241010a1fd3f50145a3b33be2c03c1e679faa19934b628bb07d0f64819e \ 142 | --hash=sha256:ddc0f7628b8bfdd62e3cbfbc24ca6671b0b6265b50d186c2cf3659dc0f78fd6a \ 143 | # via cfn-lint 144 | jsonpointer==2.0 \ 145 | --hash=sha256:c192ba86648e05fdae4f08a17ec25180a9aef5008d973407b581798a83975362 \ 146 | --hash=sha256:ff379fa021d1b81ab539f5ec467c7745beb1a5671463f9dcc2b2d458bd361c1e \ 147 | # via jsonpatch 148 | jsonschema==3.2.0 \ 149 | --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ 150 | --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a \ 151 | # via aws-sam-translator, cfn-lint 152 | mccabe==0.6.1 \ 153 | --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ 154 | 
--hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ 155 | # via flake8 156 | mypy-extensions==0.4.3 \ 157 | --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ 158 | --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 \ 159 | # via mypy 160 | mypy==0.761 \ 161 | --hash=sha256:0a9a45157e532da06fe56adcfef8a74629566b607fa2c1ac0122d1ff995c748a \ 162 | --hash=sha256:2c35cae79ceb20d47facfad51f952df16c2ae9f45db6cb38405a3da1cf8fc0a7 \ 163 | --hash=sha256:4b9365ade157794cef9685791032521233729cb00ce76b0ddc78749abea463d2 \ 164 | --hash=sha256:53ea810ae3f83f9c9b452582261ea859828a9ed666f2e1ca840300b69322c474 \ 165 | --hash=sha256:634aef60b4ff0f650d3e59d4374626ca6153fcaff96ec075b215b568e6ee3cb0 \ 166 | --hash=sha256:7e396ce53cacd5596ff6d191b47ab0ea18f8e0ec04e15d69728d530e86d4c217 \ 167 | --hash=sha256:7eadc91af8270455e0d73565b8964da1642fe226665dd5c9560067cd64d56749 \ 168 | --hash=sha256:7f672d02fffcbace4db2b05369142e0506cdcde20cea0e07c7c2171c4fd11dd6 \ 169 | --hash=sha256:85baab8d74ec601e86134afe2bcccd87820f79d2f8d5798c889507d1088287bf \ 170 | --hash=sha256:87c556fb85d709dacd4b4cb6167eecc5bbb4f0a9864b69136a0d4640fdc76a36 \ 171 | --hash=sha256:a6bd44efee4dc8c3324c13785a9dc3519b3ee3a92cada42d2b57762b7053b49b \ 172 | --hash=sha256:c6d27bd20c3ba60d5b02f20bd28e20091d6286a699174dfad515636cb09b5a72 \ 173 | --hash=sha256:e2bb577d10d09a2d8822a042a23b8d62bc3b269667c9eb8e60a6edfa000211b1 \ 174 | --hash=sha256:f97a605d7c8bc2c6d1172c2f0d5a65b24142e11a58de689046e62c2d632ca8c1 \ 175 | # via -r requirements/dev-requirements.in (line 11) 176 | packaging==20.1 \ 177 | --hash=sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73 \ 178 | --hash=sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334 \ 179 | # via tox 180 | pbr==5.4.4 \ 181 | --hash=sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b \ 182 | 
--hash=sha256:61aa52a0f18b71c5cc58232d2cf8f8d09cd67fcad60b742a60124cb8d6951488 \ 183 | # via stevedore 184 | pep8-naming==0.9.1 \ 185 | --hash=sha256:45f330db8fcfb0fba57458c77385e288e7a3be1d01e8ea4268263ef677ceea5f \ 186 | --hash=sha256:a33d38177056321a167decd6ba70b890856ba5025f0a8eca6a3eda607da93caf \ 187 | # via -r requirements/dev-requirements.in (line 12) 188 | pip-tools==4.5.0 \ 189 | --hash=sha256:144fbd764e88f761246f832370721dccabfefbbc4ce3aa8468f6802ac6519217 \ 190 | --hash=sha256:61455cfdaa183930eefd8259f393812d94005fb9f8249edb640ed1b66f678116 \ 191 | # via -r requirements/dev-requirements.in (line 13) 192 | pluggy==0.13.1 \ 193 | --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ 194 | --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d \ 195 | # via tox 196 | py==1.8.1 \ 197 | --hash=sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa \ 198 | --hash=sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0 \ 199 | # via tox 200 | pyasn1==0.4.8 \ 201 | --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ 202 | --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba \ 203 | # via rsa 204 | pycodestyle==2.5.0 \ 205 | --hash=sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56 \ 206 | --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c \ 207 | # via autopep8, flake8, flake8-import-order, flake8-print 208 | pydocstyle==5.0.2 \ 209 | --hash=sha256:da7831660b7355307b32778c4a0dbfb137d89254ef31a2b2978f50fc0b4d7586 \ 210 | --hash=sha256:f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5 \ 211 | # via flake8-docstrings 212 | pyflakes==2.1.1 \ 213 | --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \ 214 | --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2 \ 215 | # via flake8 216 | pyparsing==2.4.6 \ 217 | 
--hash=sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f \ 218 | --hash=sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec \ 219 | # via packaging 220 | pyrsistent==0.15.7 \ 221 | --hash=sha256:cdc7b5e3ed77bed61270a47d35434a30617b9becdf2478af76ad2c6ade307280 \ 222 | # via jsonschema 223 | python-dateutil==2.8.1 \ 224 | --hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \ 225 | --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a \ 226 | # via botocore 227 | pyyaml==5.2 \ 228 | --hash=sha256:0e7f69397d53155e55d10ff68fdfb2cf630a35e6daf65cf0bdeaf04f127c09dc \ 229 | --hash=sha256:2e9f0b7c5914367b0916c3c104a024bb68f269a486b9d04a2e8ac6f6597b7803 \ 230 | --hash=sha256:35ace9b4147848cafac3db142795ee42deebe9d0dad885ce643928e88daebdcc \ 231 | --hash=sha256:38a4f0d114101c58c0f3a88aeaa44d63efd588845c5a2df5290b73db8f246d15 \ 232 | --hash=sha256:483eb6a33b671408c8529106df3707270bfacb2447bf8ad856a4b4f57f6e3075 \ 233 | --hash=sha256:4b6be5edb9f6bb73680f5bf4ee08ff25416d1400fbd4535fe0069b2994da07cd \ 234 | --hash=sha256:7f38e35c00e160db592091751d385cd7b3046d6d51f578b29943225178257b31 \ 235 | --hash=sha256:8100c896ecb361794d8bfdb9c11fce618c7cf83d624d73d5ab38aef3bc82d43f \ 236 | --hash=sha256:c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c \ 237 | --hash=sha256:e4c015484ff0ff197564917b4b4246ca03f411b9bd7f16e02a2f586eb48b6d04 \ 238 | --hash=sha256:ebc4ed52dcc93eeebeae5cf5deb2ae4347b3a81c3fa12b0b8c976544829396a4 \ 239 | # via awscli, bandit, cfn-lint 240 | rsa==3.4.2 \ 241 | --hash=sha256:25df4e10c263fb88b5ace923dd84bf9aa7f5019687b5e55382ffcdb8bede9db5 \ 242 | --hash=sha256:43f682fea81c452c98d09fc316aae12de6d30c4b5c84226642cf8f8fd1c93abd \ 243 | # via awscli 244 | s3transfer==0.3.3 \ 245 | --hash=sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13 \ 246 | --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db \ 247 | # via awscli, 
boto3 248 | six==1.14.0 \ 249 | --hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \ 250 | --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c \ 251 | # via aws-sam-translator, bandit, cfn-lint, flake8-print, jsonschema, packaging, pip-tools, pyrsistent, python-dateutil, stevedore, tox, virtualenv 252 | smmap2==2.0.5 \ 253 | --hash=sha256:0555a7bf4df71d1ef4218e4807bbf9b201f910174e6e08af2e138d4e517b4dde \ 254 | --hash=sha256:29a9ffa0497e7f2be94ca0ed1ca1aa3cd4cf25a1f6b4f5f87f74b46ed91d609a \ 255 | # via gitdb2 256 | snowballstemmer==2.0.0 \ 257 | --hash=sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0 \ 258 | --hash=sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52 \ 259 | # via pydocstyle 260 | stevedore==1.32.0 \ 261 | --hash=sha256:18afaf1d623af5950cc0f7e75e70f917784c73b652a34a12d90b309451b5500b \ 262 | --hash=sha256:a4e7dc759fb0f2e3e2f7d8ffe2358c19d45b9b8297f393ef1256858d82f69c9b \ 263 | # via bandit 264 | toml==0.10.0 \ 265 | --hash=sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c \ 266 | --hash=sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e \ 267 | # via -r requirements/dev-requirements.in (line 14), tox 268 | tox==3.14.5 \ 269 | --hash=sha256:0cbe98369081fa16bd6f1163d3d0b2a62afa29d402ccfad2bd09fb2668be0956 \ 270 | --hash=sha256:676f1e3e7de245ad870f956436b84ea226210587d1f72c8dfb8cd5ac7b6f0e70 \ 271 | # via -r requirements/dev-requirements.in (line 15) 272 | typed-ast==1.4.1 \ 273 | --hash=sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355 \ 274 | --hash=sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919 \ 275 | --hash=sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa \ 276 | --hash=sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652 \ 277 | --hash=sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75 \ 278 | 
--hash=sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01 \ 279 | --hash=sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d \ 280 | --hash=sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1 \ 281 | --hash=sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907 \ 282 | --hash=sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c \ 283 | --hash=sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3 \ 284 | --hash=sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b \ 285 | --hash=sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614 \ 286 | --hash=sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb \ 287 | --hash=sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b \ 288 | --hash=sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41 \ 289 | --hash=sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6 \ 290 | --hash=sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34 \ 291 | --hash=sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe \ 292 | --hash=sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4 \ 293 | --hash=sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7 \ 294 | # via mypy 295 | typing-extensions==3.7.4.1 \ 296 | --hash=sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2 \ 297 | --hash=sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d \ 298 | --hash=sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575 \ 299 | # via mypy 300 | urllib3==1.25.8 \ 301 | --hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \ 302 | --hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc \ 303 | # via botocore 304 | virtualenv==20.0.5 \ 305 | 
--hash=sha256:531b142e300d405bb9faedad4adbeb82b4098b918e35209af2adef3129274aae \ 306 | --hash=sha256:5dd42a9f56307542bddc446cfd10ef6576f11910366a07609fe8d0d88fa8fb7e \ 307 | # via tox 308 | zipp==3.0.0 \ 309 | --hash=sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2 \ 310 | --hash=sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a \ 311 | # via importlib-metadata 312 | 313 | # The following packages are considered to be unsafe in a requirements file: 314 | setuptools==45.2.0 \ 315 | --hash=sha256:316484eebff54cc18f322dea09ed031b7e3eb00811b19dcedb09bc09bba7d93d \ 316 | --hash=sha256:89c6e6011ec2f6d57d43a3f9296c4ef022c2cbf49bab26b407fe67992ae3397f \ 317 | # via flake8-import-order, jsonschema 318 | -------------------------------------------------------------------------------- /requirements/test-requirements.in: -------------------------------------------------------------------------------- 1 | # Versions match Boto packages in default Python 3.8 AWS Lambda environment. 2 | boto3==1.10.34 3 | botocore==1.13.34 4 | # Earliest version of typing-extension that we support. 
5 | typing-extensions==3.7.2 6 | -------------------------------------------------------------------------------- /requirements/test-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/test-requirements.txt requirements/test-requirements.in 6 | # 7 | boto3==1.10.34 \ 8 | --hash=sha256:95253e9d48585095dca6d1668f75ad977f4514b253c11e8a4e59f9795702970a \ 9 | --hash=sha256:a8610e660326c84cc32a804acfb895f3e5291bf1b762c3d1d12597d11917ce14 \ 10 | # via -r requirements/test-requirements.in (line 2) 11 | botocore==1.13.34 \ 12 | --hash=sha256:55c82a4eda277ae145691783c064b0d6600e60e80bd379ca07eaef1cfe34a148 \ 13 | --hash=sha256:7350d9bcaec0ec3127f808bfb75fa0114b5b6b09a485b0dba66d739c3654521b \ 14 | # via -r requirements/test-requirements.in (line 3), boto3, s3transfer 15 | docutils==0.15.2 \ 16 | --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \ 17 | --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \ 18 | --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99 \ 19 | # via botocore 20 | jmespath==0.9.4 \ 21 | --hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \ 22 | --hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \ 23 | # via boto3, botocore 24 | python-dateutil==2.8.0 \ 25 | --hash=sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb \ 26 | --hash=sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e \ 27 | # via botocore 28 | s3transfer==0.2.1 \ 29 | --hash=sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d \ 30 | --hash=sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba \ 31 | # via boto3 32 | six==1.14.0 \ 33 | 
--hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \ 34 | --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c \ 35 | # via python-dateutil 36 | typing-extensions==3.7.2 \ 37 | --hash=sha256:07b2c978670896022a43c4b915df8958bec4a6b84add7f2c87b2b728bda3ba64 \ 38 | --hash=sha256:f3f0e67e1d42de47b5c67c32c9b26641642e9170fe7e292991793705cd5fef7c \ 39 | --hash=sha256:fb2cd053238d33a8ec939190f30cfd736c00653a85a2919415cecf7dc3d9da71 \ 40 | # via -r requirements/test-requirements.in (line 5) 41 | urllib3==1.25.8 \ 42 | --hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \ 43 | --hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc \ 44 | # via botocore 45 | -------------------------------------------------------------------------------- /scripts/create_db.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Create DynamoDB table for integration tests. 3 | 4 | Run from repo root. 
# --- scripts/create_db.py ---------------------------------------------------
#!/usr/bin/env python3
"""Create DynamoDB table for integration tests.

Run from repo root.

"""
import argparse
import logging
from pathlib import Path

import boto3


def create_stack(stack_name: str, template_path: Path) -> None:
    """Create a CloudFormation stack and block until creation completes.

    Args:
        stack_name: Name of the CloudFormation stack to create.
        template_path: Path to the CloudFormation template file.

    """
    logging.info(f'Using template {template_path}')
    with open(template_path) as f:
        template_body = f.read()
    logging.info(f'Creating stack "{stack_name}"...')
    client = boto3.client('cloudformation')
    client.create_stack(
        StackName=stack_name,
        TemplateBody=template_body,
        Capabilities=['CAPABILITY_IAM']
    )
    waiter = client.get_waiter('stack_create_complete')
    waiter.wait(
        StackName=stack_name,
        WaiterConfig={
            'Delay': 10  # seconds
        }
    )
    logging.info(f'Successfully created stack {stack_name}')


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-n', '--stack_name', type=str,
                        default='DokklibDB-IntegrationTest',
                        help='Stack name')
    parser.add_argument('-p', '--template_path', type=Path,
                        default='./tests/integration/cloudformation.yml',
                        help='Path to the template file name')
    args = parser.parse_args()

    create_stack(args.stack_name, args.template_path)


# --- scripts/generate_exceptions.py -----------------------------------------
#!/usr/bin/env python3
"""Generate DynamoDB exceptions.

Boto3 creates exceptions dynamically from a service model which is not
desirable, because it breaks static analysis. This script generates a Python
file from the dynamically loaded Boto3 exceptions and puts it in
dokklib_db/errors/exceptions.py

"""
import boto3


IGNORE_EXCEPTIONS = {
    # We provide our own ClientError implementation for type checking coverage
    'ClientError',
    # We provide our own TransactionCanceledException that contains
    # cancellation reasons.
    'TransactionCanceledException'
}

MISSING_EXCEPTIONS = {
    # These exceptions are not raised by Boto3, but they are returned as
    # transaction cancellation reasons.
    'ThrottlingError',
    'ValidationError'
}


def _get_exception(name):
    """Return the source lines of one generated exception class."""
    return [
        f'class {name}(ClientError):',
        '    """Please check DynamoDB docs for documentation."""',
        '',
        ''
    ]


# Must match earliest supported version
assert boto3.__version__ == '1.10.34', boto3.__version__


lines = [
    '"""Autogenerated DynamoDB exceptions.',
    '',
    'This file was autogenerated by scripts/generate_exceptions.py.',
    'Do not edit it manually!',
    '',
    '"""',
    'from dokklib_db.errors.client import ClientError',
    '',
    ''
]

client = boto3.client('dynamodb')
for name in dir(client.exceptions):
    if name[0].isupper() and name not in IGNORE_EXCEPTIONS:
        assert name not in MISSING_EXCEPTIONS, name
        lines.extend(_get_exception(name))

for name in MISSING_EXCEPTIONS:
    lines.extend(_get_exception(name))

# Remove duplicate empty line
lines = lines[:-1]
with open('dokklib_db/errors/exceptions.py', 'w') as f:
    f.write('\n'.join(lines))


# --- scripts/pip_compile.py -------------------------------------------------
#!/usr/bin/env python3
"""Compile pinned dependencies with pip-compile."""
import subprocess
from pathlib import Path


_DEPS_ORDER = [
    'test-requirements.in', 'dev-requirements.in'
]


this_file = Path(__file__)
repo_root = this_file.parents[1]
deps_dir = repo_root / 'requirements'

# The --allow-unsafe flag lets us pin setuptools.
# The flag will be soon deprecated and made the default behaviour.
# More info: https://github.com/jazzband/pip-tools/issues/989
# TODO (abiro) remove --allow-unsafe once it's deprecated
for dep in _DEPS_ORDER:
    in_file = deps_dir / dep
    out_file = in_file.with_suffix('.txt')
    # Build the argument vector directly instead of formatting a string and
    # calling str.split, so paths containing spaces can't break the command.
    cmd = [
        'pip-compile', '-q', '--allow-unsafe', '--generate-hashes',
        '--output-file', str(out_file), str(in_file),
    ]
    subprocess.run(cmd, check=True)
'License :: OSI Approved :: Apache Software License', 33 | 'Operating System :: OS Independent', 34 | 'Topic :: Database' 35 | ] 36 | ) 37 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dokklib/dokklib-db/3665775c6d369ca08431b5295c1c181a9dca97cd/tests/__init__.py -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dokklib/dokklib-db/3665775c6d369ca08431b5295c1c181a9dca97cd/tests/integration/__init__.py -------------------------------------------------------------------------------- /tests/integration/cloudformation.yml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Description: DynamoDB Single Table Resource 3 | 4 | Resources: 5 | # DynamoDB single table pattern 6 | SingleTable: 7 | Type: AWS::DynamoDB::Table 8 | # Make sure to change "Delete" to "Retain" on the lines below, if you use 9 | # this for anything other than testing. 10 | DeletionPolicy: Delete 11 | UpdateReplacePolicy: Delete 12 | Properties: 13 | TableName: !Sub "${AWS::StackName}-SingleTable" 14 | BillingMode: PAY_PER_REQUEST 15 | KeySchema: 16 | - 17 | AttributeName: "PK" 18 | KeyType: "HASH" 19 | - 20 | AttributeName: "SK" 21 | KeyType: "RANGE" 22 | AttributeDefinitions: 23 | - 24 | AttributeName: "PK" 25 | AttributeType: "S" 26 | - 27 | AttributeName: "SK" 28 | AttributeType: "S" 29 | # If the item has no TTL, omit this property and it won't be deleted. 30 | TimeToLiveSpecification: 31 | AttributeName: "ExpiresAt" 32 | Enabled: true 33 | # Inverse primary index for querying relational data. 
34 | GlobalSecondaryIndexes: 35 | - 36 | IndexName: "GSI_1" 37 | KeySchema: 38 | - 39 | AttributeName: "SK" 40 | KeyType: "HASH" 41 | - 42 | AttributeName: "PK" 43 | KeyType: "RANGE" 44 | Projection: 45 | ProjectionType: "KEYS_ONLY" 46 | ReadWritePolicy: 47 | Type: AWS::IAM::ManagedPolicy 48 | Properties: 49 | PolicyDocument: 50 | Version: 2012-10-17 51 | Statement: 52 | - 53 | Effect: Allow 54 | Sid: ListAndDescribe 55 | Action: 56 | - dynamodb:List* 57 | - dynamodb:DescribeReservedCapacity* 58 | - dynamodb:DescribeLimits 59 | - dynamodb:DescribeTimeToLive 60 | Resource: "*" 61 | - 62 | Effect: Allow 63 | Sid: SpecificTable 64 | Action: 65 | - dynamodb:BatchGet* 66 | - dynamodb:DescribeStream 67 | - dynamodb:DescribeTable 68 | - dynamodb:Get* 69 | - dynamodb:Query 70 | - dynamodb:Scan 71 | 72 | - dynamodb:BatchWrite* 73 | - dynamodb:Delete* 74 | - dynamodb:Update* 75 | - dynamodb:PutItem 76 | Resource: 77 | - !Sub 78 | - "arn:aws:dynamodb:${AWS::Region}:${AWS::AccountId}:table/${TableName}" 79 | - TableName: !Ref SingleTable 80 | - !Sub 81 | - "arn:aws:dynamodb:${AWS::Region}:${AWS::AccountId}:table/${TableName}/index/GSI_1" 82 | - TableName: !Ref SingleTable 83 | 84 | Outputs: 85 | TableId: 86 | Description: DynamoDB single table name 87 | Value: !Ref SingleTable 88 | Export: 89 | Name: !Sub "${AWS::StackName}-SingleTable" 90 | ReadWritePolicy: 91 | Description: DynamoDB single table read&write policy ARN 92 | Value: !Ref ReadWritePolicy 93 | Export: 94 | Name: !Sub "${AWS::StackName}-SingleTable-ReadWritePolicy" 95 | -------------------------------------------------------------------------------- /tests/integration/dynamodb_tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """DynamoDB integration tests. 3 | 4 | !!! This script will clear the table first, so make sure to only use it on a 5 | testing table. !!! 
6 | """ 7 | import logging 8 | 9 | import boto3 10 | 11 | import dokklib_db as db 12 | 13 | 14 | TABLE_NAME = 'DokklibDB-IntegrationTest-SingleTable' 15 | 16 | logging.basicConfig(level=logging.INFO) 17 | 18 | 19 | class Order(db.EntityName): 20 | """Order entity name. 21 | 22 | Key value: unique order id with date prefixed, eg: '2020-02-21|order-id' 23 | Example key: 'ORDER#2020-02-21|order-id'. 24 | 25 | """ 26 | 27 | 28 | class User(db.EntityName): 29 | """User entity name. 30 | 31 | Key value: user email, eg: 'alice@example.com'. 32 | Example key: 'USER#alice@example.com'. 33 | 34 | """ 35 | 36 | 37 | class Product(db.EntityName): 38 | """Order entity name. 39 | 40 | Key v 41 | alue: unique product name, eg: 'my-book'. 42 | Example key: 'PRODUCT#my-book'. 43 | 44 | """ 45 | 46 | 47 | # From: https://stackoverflow.com/a/56616499 48 | def _clear_db(table_name: str): 49 | logging.info('Clearing table') 50 | table = boto3.resource('dynamodb').Table(table_name) 51 | scan = None 52 | 53 | with table.batch_writer() as batch: 54 | while scan is None or 'LastEvaluatedKey' in scan: 55 | if scan is not None and 'LastEvaluatedKey' in scan: 56 | scan = table.scan( 57 | ProjectionExpression='PK,SK', 58 | ExclusiveStartKey=scan['LastEvaluatedKey'], 59 | ) 60 | else: 61 | scan = table.scan(ProjectionExpression='PK,SK') 62 | 63 | for item in scan['Items']: 64 | batch.delete_item(Key={'PK': item['PK'], 'SK': item['SK']}) 65 | 66 | 67 | logging.info('Starting integration tests') 68 | 69 | # We clear the DB instead of recreating it to save time. 
70 | _clear_db(TABLE_NAME) 71 | table = db.Table(TABLE_NAME) 72 | 73 | # Users 74 | pk_alice = db.PartitionKey(User, 'alice@example.com') 75 | sk_alice = db.SortKey(User, 'alice@example.com') 76 | 77 | # Products 78 | pk_book = db.PartitionKey(Product, 'book') 79 | 80 | # Orders 81 | pk_order1 = db.PartitionKey(Order, '2020-02-21|order-1') 82 | sk_order1 = db.SortKey(Order, '2020-02-21|order-1') 83 | sk_order2 = db.SortKey(Order, '2020-02-21|order-2') 84 | 85 | logging.info('Testing insert') 86 | table.insert(pk_alice, sk_alice) 87 | 88 | logging.info('Testing update_attributes') 89 | table.update_attributes(pk_alice, sk_alice, {'MyJson': {'A': 1}}) 90 | 91 | logging.info('Testing get_item') 92 | res = table.get(pk_alice, sk_alice, 93 | attributes=['MyJson'], 94 | consistent=True) 95 | assert res['MyJson']['A'] == 1, res 96 | 97 | logging.info('Testing transact_write_items') 98 | table.transact_write_items([ 99 | db.InsertArg(pk_alice, sk_order1), 100 | db.InsertArg(pk_book, sk_order1), 101 | db.InsertArg(pk_alice, sk_order2) 102 | ]) 103 | 104 | logging.info('Testing transact_write_items error handling') 105 | try: 106 | table.transact_write_items([ 107 | db.InsertArg(pk_alice, sk_order1), 108 | db.InsertArg(pk_book, sk_order2) 109 | ]) 110 | except db.errors.TransactionCanceledException as e: 111 | assert e.reasons[0] is db.errors.ConditionalCheckFailedException, e.reasons 112 | assert e.reasons[1] is None, e.reasons 113 | 114 | logging.info('Testing batch_get') 115 | res = table.batch_get([ 116 | db.PrimaryKey(pk_alice, sk_order1), 117 | db.PrimaryKey(pk_book, sk_order1), 118 | db.PrimaryKey(pk_alice, sk_order2), 119 | ], consistent=True) 120 | assert len(res.items) == 3, res 121 | 122 | logging.info('Testing query_prefix') 123 | res = table.query_prefix(pk_alice, db.PrefixSortKey(Order)) 124 | assert len(res) == 2, res 125 | 126 | logging.info('Testing query_prefix on inverse index') 127 | res = table.query_prefix(pk_order1, db.PrefixSortKey(User), 128 | 
global_index=db.InversePrimaryIndex()) 129 | assert len(res) == 1, res 130 | 131 | logging.info('Testing delete') 132 | table.delete(pk_alice, sk_alice, idempotent=False) 133 | 134 | logging.info('+++ Success +++') 135 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | # mypy: implicit-reexport 2 | 3 | from tests.unit.test_base import TestBase 4 | 5 | 6 | __all__ = ['TestBase'] 7 | -------------------------------------------------------------------------------- /tests/unit/exceptions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dokklib/dokklib-db/3665775c6d369ca08431b5295c1c181a9dca97cd/tests/unit/exceptions/__init__.py -------------------------------------------------------------------------------- /tests/unit/exceptions/transaction_test.py: -------------------------------------------------------------------------------- 1 | import dokklib_db as db 2 | from dokklib_db.errors.transaction import TransactionCanceledException 3 | 4 | from tests.unit import TestBase 5 | 6 | 7 | class TestTransactionCanceledException(TestBase): 8 | _op_name = 'TransactWriteItems' 9 | 10 | def _get_error(self, msg): 11 | return { 12 | 'Error': { 13 | 'Message': msg, 14 | 'Code': 'TransactionCanceledException' 15 | } 16 | } 17 | 18 | def test_empty_message(self): 19 | error = self._get_error('') 20 | e = TransactionCanceledException([], '', error, self._op_name) 21 | self.assertListEqual(e.reasons, []) 22 | 23 | def test_mismatch(self): 24 | error = self._get_error('') 25 | e = TransactionCanceledException(['1'], '', error, self._op_name) 26 | with self.assertRaises(ValueError): 27 | e.reasons 28 | 29 | def test_one_reason(self): 30 | msg = 'Transaction cancelled, please refer cancellation reasons for ' \ 31 | 'specific reasons [ConditionalCheckFailed]' 32 | error = 
self._get_error(msg) 33 | e = TransactionCanceledException(['1'], '', error, self._op_name) 34 | exp = [db.errors.ConditionalCheckFailedException] 35 | self.assertListEqual(e.reasons, exp) 36 | 37 | def test_two_reasons(self): 38 | msg = 'Transaction cancelled, please refer cancellation reasons for ' \ 39 | 'specific reasons [ConditionalCheckFailed, None]' 40 | error = self._get_error(msg) 41 | e = TransactionCanceledException(['oparg1', 'oparg2'], 42 | '', 43 | error, 44 | self._op_name) 45 | exp = [db.errors.ConditionalCheckFailedException, None] 46 | self.assertListEqual(e.reasons, exp) 47 | 48 | def test_no_space_reasons(self): 49 | msg = 'Transaction cancelled, please refer cancellation reasons for ' \ 50 | 'specific reasons [ConditionalCheckFailed,None]' 51 | error = self._get_error(msg) 52 | e = TransactionCanceledException(['oparg1', 'oparg2'], 53 | '', 54 | error, 55 | self._op_name) 56 | exp = [db.errors.ConditionalCheckFailedException, None] 57 | self.assertListEqual(e.reasons, exp) 58 | 59 | def test_has_error(self): 60 | msg = 'Transaction cancelled, please refer cancellation reasons for ' \ 61 | 'specific reasons [ConditionalCheckFailed, None]' 62 | error = self._get_error(msg) 63 | e = TransactionCanceledException(['oparg1', 'oparg2'], 64 | '', 65 | error, 66 | self._op_name) 67 | self.assertTrue(e.has_error(db.errors.ConditionalCheckFailedException)) 68 | 69 | def test_has_no_error(self): 70 | msg = 'Transaction cancelled, please refer cancellation reasons for ' \ 71 | 'specific reasons [ConditionalCheckFailed, None]' 72 | error = self._get_error(msg) 73 | e = TransactionCanceledException(['oparg1', 'oparg2'], 74 | '', 75 | error, 76 | self._op_name) 77 | self.assertFalse(e.has_error(db.errors.ValidationError)) 78 | -------------------------------------------------------------------------------- /tests/unit/keys_test.py: -------------------------------------------------------------------------------- 1 | import dokklib_db as db 2 | import 
dokklib_db.keys as m 3 | 4 | from tests.unit import TestBase 5 | 6 | 7 | class Project(db.EntityName): 8 | pass 9 | 10 | 11 | class Session(db.EntityName): 12 | pass 13 | 14 | 15 | class Subscription(db.EntityName): 16 | pass 17 | 18 | 19 | class User(db.EntityName): 20 | pass 21 | 22 | 23 | class TestToPrefix(TestBase): 24 | def test_correct_prefix(self): 25 | self.assertEqual(User.to_prefix(), 'USER#') 26 | 27 | def test_pk_prefix(self): 28 | pk = m.PartitionKey(User, 'val') 29 | self.assertEqual(User.to_prefix(), pk.prefix) 30 | 31 | 32 | class TestEntityKeyEq(TestBase): 33 | def test_self(self): 34 | pk = m.PartitionKey(User, 'value') 35 | self.assertEqual(pk, pk) 36 | 37 | def test_eq(self): 38 | pk_1 = m.PartitionKey(User, 'value') 39 | pk_2 = m.PartitionKey(User, 'value') 40 | self.assertEqual(pk_1, pk_2) 41 | 42 | def test_pk_eq_sq(self): 43 | pk = m.PartitionKey(User, 'value') 44 | sk = m.SortKey(User, 'value') 45 | self.assertEqual(pk, sk) 46 | 47 | 48 | class TestEntityKeyHash(TestBase): 49 | def test_hash_eq(self): 50 | pk_1 = m.PartitionKey(User, 'value') 51 | pk_2 = m.PartitionKey(User, 'value') 52 | self.assertEqual(hash(pk_1), hash(pk_2)) 53 | 54 | 55 | class TestStrValueMixin: 56 | _constructor = None 57 | 58 | def test_value(self): 59 | value = 'value' 60 | pk = self._constructor(Project, value) 61 | self.assertEqual(pk, f'PROJECT#{value}') 62 | 63 | def test_different_types(self): 64 | value = 'value' 65 | pk_domain = self._constructor(Project, value) 66 | pk_user = self._constructor(User, value) 67 | self.assertNotEqual(pk_domain, pk_user) 68 | 69 | 70 | class TestPartitionKeyStr(TestBase, TestStrValueMixin): 71 | _constructor = m.PartitionKey 72 | 73 | 74 | class TestSortKeyStr(TestBase, TestStrValueMixin): 75 | _constructor = m.SortKey 76 | 77 | 78 | class TestPrefixSortKeyStr(TestBase): 79 | def test_no_value(self): 80 | sk_domain = m.PrefixSortKey(Subscription) 81 | self.assertEqual(sk_domain, 'SUBSCRIPTION#') 82 | 83 | 84 | class 
TestEntityKeyRepr(TestBase): 85 | def test_pk_repr_no_leak(self): 86 | """Representation of PK shouldn't leak DB data.""" 87 | value = 'pk-value-1234' 88 | pk = m.PartitionKey(Session, value) 89 | self.assertNotIn(value, repr(pk)) 90 | 91 | def test_sk_repr_no_leak(self): 92 | """Representation of SK shouldn't leak DB data.""" 93 | value = 'sk-value-5678' 94 | sk = m.SortKey(Session, value) 95 | self.assertNotIn(value, repr(sk)) 96 | 97 | 98 | class TestPrimaryKey(TestBase): 99 | def setUp(self): 100 | self._pk = m.PartitionKey(User, 'alice') 101 | self._sk = m.SortKey(Project, 'foo') 102 | self._primary = m.PrimaryKey(self._pk, self._sk) 103 | 104 | def test_repr_no_leak(self): 105 | """Representation of primary key shouldn't leak DB data.""" 106 | self.assertNotIn(self._pk.value, repr(self._primary)) 107 | self.assertNotIn(self._sk.value, repr(self._primary)) 108 | 109 | def test_self_eq(self): 110 | p = m.PrimaryKey(self._pk, self._sk) 111 | return self.assertEqual(self._primary, p) 112 | 113 | def test_str_eq(self): 114 | t = (str(self._pk), str(self._sk)) 115 | return self.assertEqual(self._primary, t) 116 | 117 | def test_self_hash(self): 118 | p = m.PrimaryKey(self._pk, self._sk) 119 | return self.assertEqual(hash(self._primary), hash(p)) 120 | 121 | def test_str_hash(self): 122 | t = (str(self._pk), str(self._sk)) 123 | return self.assertEqual(hash(self._primary), hash(t)) 124 | 125 | def test_serialize(self): 126 | index = db.PrimaryGlobalIndex() 127 | res = self._primary.serialize(index) 128 | des_res = self._primary._serializer.deserialize_dict(res) 129 | exp = { 130 | index.partition_key: str(self._pk), 131 | index.sort_key: str(self._sk) 132 | } 133 | self.assertDictEqual(des_res, exp) 134 | -------------------------------------------------------------------------------- /tests/unit/op_args_test.py: -------------------------------------------------------------------------------- 1 | import re 2 | from unittest.mock import patch 3 | 4 | import 
boto3.dynamodb.conditions as cond 5 | 6 | import dokklib_db as db 7 | import dokklib_db.op_args as m 8 | 9 | from tests.unit import TestBase 10 | 11 | 12 | class User(db.EntityName): 13 | pass 14 | 15 | 16 | class Subscription(db.EntityName): 17 | pass 18 | 19 | 20 | class TestOpArg(TestBase): 21 | def test_iso_now(self): 22 | res = m.OpArg._iso_now() 23 | iso_format = r'\d{4}-\d{2}-\d{2}T\d{2}\:\d{2}\:\d{2}' 24 | self.assertTrue(re.match(iso_format, res)) 25 | 26 | 27 | class OpTestMixin: 28 | def _get_kwargs(self): 29 | return self._op_arg.get_kwargs(self._table_name, self._primary_index) 30 | 31 | def setUp(self): 32 | self._pk = db.PartitionKey(User, 'eva.lu-ator@example.com') 33 | self._sk = db.SortKey(Subscription, 'mitpress.mit.edu') 34 | self._table_name = 'my-table' 35 | self._primary_index = db.PrimaryGlobalIndex() 36 | 37 | def test_table_name(self): 38 | kwargs = self._get_kwargs() 39 | self.assertEqual(kwargs['TableName'], self._table_name) 40 | 41 | 42 | class ConsistencyTestMixin: 43 | def test_key(self): 44 | kwargs = self._get_kwargs() 45 | self.assertFalse(kwargs['ConsistentRead']) 46 | 47 | 48 | class KeyTestMixin: 49 | def test_key(self): 50 | kwargs = self._get_kwargs() 51 | key = kwargs['Key'] 52 | self.assertEqual(key['PK']['S'], str(self._pk)) 53 | self.assertEqual(key['SK']['S'], str(self._sk)) 54 | 55 | 56 | class TestDeleteArg(KeyTestMixin, OpTestMixin, TestBase): 57 | def setUp(self): 58 | super().setUp() 59 | self._op_arg = m.DeleteArg(self._pk, self._sk) 60 | 61 | def test_not_idempotent(self): 62 | op_arg = m.DeleteArg(self._pk, self._sk, idempotent=False) 63 | kwargs = op_arg.get_kwargs(self._table_name, self._primary_index) 64 | self.assertEqual(kwargs['ConditionExpression'], 65 | 'attribute_exists(PK)') 66 | 67 | def test_idempotent(self): 68 | kwargs = self._get_kwargs() 69 | self.assertNotIn('ConditionExpression', kwargs) 70 | 71 | 72 | class TestGetArg(ConsistencyTestMixin, KeyTestMixin, OpTestMixin, TestBase): 73 | def 
setUp(self): 74 | super().setUp() 75 | self._op_arg = m.GetArg(self._pk, self._sk) 76 | 77 | def test_projection(self): 78 | op_arg = m.GetArg(self._pk, self._sk, attributes=['PK', 'SK', 'foo']) 79 | kwargs = op_arg.get_kwargs(self._table_name, self._primary_index) 80 | proj = kwargs['ProjectionExpression'] 81 | self.assertLessEqual(proj, 'PK,SK,foo') 82 | 83 | 84 | class TestPutArg(OpTestMixin, TestBase): 85 | def setUp(self): 86 | super().setUp() 87 | self._op_arg = m.PutArg(self._pk, self._sk) 88 | 89 | @patch('dokklib_db.op_args.PutArg._iso_now') 90 | def test_adds_created_at(self, iso_now): 91 | exp_created_at = 'test-time-stamp' 92 | iso_now.return_value = exp_created_at 93 | item = self._op_arg._get_dynamo_item(self._primary_index) 94 | self.assertEqual(item['CreatedAt']['S'], exp_created_at) 95 | 96 | def test_keys_added(self): 97 | item = self._op_arg._get_dynamo_item(self._primary_index) 98 | self.assertEqual(item['PK']['S'], self._pk) 99 | self.assertEqual(item['SK']['S'], self._sk) 100 | 101 | def test_adds_attributes(self): 102 | put_arg = m.PutArg(self._pk, self._sk, 103 | attributes={'foo': '1', 'bar': 2}) 104 | item = put_arg._get_dynamo_item(self._primary_index) 105 | self.assertEqual(item['foo']['S'], '1') 106 | self.assertEqual(item['bar']['N'], '2') 107 | 108 | def test_attributes_dont_overwrite_keys(self): 109 | attributes = { 110 | 'foo': '1', 111 | 'bar': 2, 112 | 'PK': 'my-pk', 113 | 'SK': 'my-sk' 114 | } 115 | put_arg = m.PutArg(self._pk, self._sk, attributes=attributes) 116 | item = put_arg._get_dynamo_item(self._primary_index) 117 | self.assertEqual(item['PK']['S'], self._pk) 118 | self.assertEqual(item['SK']['S'], self._sk) 119 | 120 | def test_disallow_overwrite(self): 121 | put_arg = m.PutArg(self._pk, self._sk, allow_overwrite=False) 122 | kwargs = put_arg.get_kwargs(self._table_name, self._primary_index) 123 | cond_expression = 'attribute_not_exists(PK)' 124 | self.assertEqual(kwargs['ConditionExpression'], cond_expression) 125 | 126 
| 127 | class TestInsertArg(OpTestMixin, TestBase): 128 | def setUp(self): 129 | super().setUp() 130 | self._op_arg = m.InsertArg(self._pk, self._sk) 131 | 132 | def test_no_overwrite(self): 133 | kwargs = self._get_kwargs() 134 | cond_expression = 'attribute_not_exists(PK)' 135 | self.assertEqual(kwargs['ConditionExpression'], cond_expression) 136 | 137 | 138 | class TestQueryArg(ConsistencyTestMixin, 139 | OpTestMixin, 140 | TestBase): 141 | def setUp(self): 142 | super().setUp() 143 | self._cond = cond.Key('PK').eq(str(self._pk)) 144 | self._op_arg = m.QueryArg(self._cond) 145 | 146 | def test_key_cond(self): 147 | kwargs = self._get_kwargs() 148 | key_cond = kwargs['KeyConditionExpression'] 149 | tokens = key_cond.split(' ') 150 | self.assertEqual(tokens[1], '=') 151 | 152 | def test_expr_attribute_names(self): 153 | kwargs = self._get_kwargs() 154 | key_cond = kwargs['KeyConditionExpression'] 155 | tokens = key_cond.split(' ') 156 | attr_names = kwargs['ExpressionAttributeNames'] 157 | key, val = list(attr_names.items())[0] 158 | self.assertEqual(tokens[0], key) 159 | self.assertEqual(val, 'PK') 160 | 161 | def test_expr_attribute_values(self): 162 | kwargs = self._get_kwargs() 163 | key_cond = kwargs['KeyConditionExpression'] 164 | tokens = key_cond.split(' ') 165 | attr_vals = kwargs['ExpressionAttributeValues'] 166 | key, val = list(attr_vals.items())[0] 167 | self.assertEqual(tokens[2], key) 168 | self.assertEqual(val['S'], str(self._pk)) 169 | 170 | def test_limit(self): 171 | limit = 10 172 | op_arg = m.QueryArg(self._cond, limit=limit) 173 | kwargs = op_arg.get_kwargs(self._table_name, self._primary_index) 174 | self.assertLessEqual(kwargs['Limit'], limit) 175 | 176 | def test_default_limit(self): 177 | kwargs = self._get_kwargs() 178 | limit = kwargs['Limit'] 179 | self.assertLessEqual(limit, 1000) 180 | 181 | def test_over_limit(self): 182 | with self.assertRaises(ValueError): 183 | m.QueryArg(self._cond, limit=10000) 184 | 185 | def 
test_default_projection(self): 186 | kwargs = self._get_kwargs() 187 | proj = kwargs['ProjectionExpression'] 188 | self.assertLessEqual(proj, 'SK') 189 | 190 | def test_projection(self): 191 | op_arg = m.QueryArg(self._cond, attributes=['PK', 'SK', 'foo']) 192 | kwargs = op_arg.get_kwargs(self._table_name, self._primary_index) 193 | proj = kwargs['ProjectionExpression'] 194 | self.assertLessEqual(proj, 'PK,SK,foo') 195 | 196 | 197 | class TestUpdateArg(OpTestMixin, TestBase): 198 | def setUp(self): 199 | super().setUp() 200 | self._op_arg = m.UpdateArg(self._pk, self._sk) 201 | 202 | def test_key(self): 203 | kwargs = self._get_kwargs() 204 | key = kwargs['Key'] 205 | self.assertEqual(key['PK']['S'], str(self._pk)) 206 | self.assertEqual(key['SK']['S'], str(self._sk)) 207 | 208 | def test_put_args(self): 209 | put_attrs = {'foo': 1} 210 | op_arg = m.UpdateArg(self._pk, self._sk, attr_updates=put_attrs) 211 | kwargs = op_arg.get_kwargs(self._table_name, self._primary_index) 212 | foo_update = kwargs['AttributeUpdates']['foo'] 213 | self.assertEqual(foo_update['Action'], 'PUT') 214 | self.assertEqual(foo_update['Value']['N'], str(put_attrs['foo'])) 215 | -------------------------------------------------------------------------------- /tests/unit/serializer_test.py: -------------------------------------------------------------------------------- 1 | import dokklib_db.serializer as m 2 | 3 | from tests.unit import TestBase 4 | 5 | 6 | class TestSerialize(TestBase): 7 | def setUp(self): 8 | self._ser = m.Serializer() 9 | 10 | def test_deserialize_dict(self): 11 | d = { 12 | 'foo': {'S': 'bar'}, 13 | 'baz': {'N': '1'} 14 | } 15 | exp = { 16 | 'foo': 'bar', 17 | 'baz': 1 18 | } 19 | res = self._ser.deserialize_dict(d) 20 | self.assertDictEqual(exp, res) 21 | 22 | def test_deserialize_val(self): 23 | val = {'L': [{'N': '1'}, {'N': '2'}]} 24 | res = self._ser.deserialize_val(val) 25 | self.assertListEqual(res, [1, 2]) 26 | 27 | def test_serialize_dict(self): 28 | d = { 29 | 
'foo': False, 30 | 'bar': 1 31 | } 32 | res = self._ser.serialize_dict(d) 33 | exp = { 34 | 'foo': {'BOOL': False}, 35 | 'bar': {'N': '1'} 36 | } 37 | self.assertDictEqual(res, exp) 38 | 39 | def test_serialize_val(self): 40 | s = {'1', '2'} 41 | res = self._ser.serialize_val(s) 42 | self.assertSetEqual(set(res['SS']), s) 43 | -------------------------------------------------------------------------------- /tests/unit/table_test.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from unittest.mock import MagicMock 3 | 4 | from boto3.dynamodb.conditions import Key 5 | 6 | from botocore.exceptions import ClientError 7 | 8 | import dokklib_db as db 9 | from dokklib_db.table import Table 10 | 11 | from tests.unit import TestBase 12 | 13 | 14 | class User(db.EntityName): 15 | pass 16 | 17 | 18 | class Subscription(db.EntityName): 19 | pass 20 | 21 | 22 | class TestRemoveEntityPrefix(TestBase): 23 | def _test_val(self, prefix, val): 24 | res = Table._remove_entity_prefix(f'{prefix}{val}') 25 | self.assertEqual(val, res) 26 | 27 | def test_noop_on_no_match(self): 28 | val = 'foo' 29 | res = Table._remove_entity_prefix(val) 30 | self.assertEqual(val, res) 31 | 32 | def test_removes_class_uppercase(self): 33 | prefix = 'A1B2_CD3#' 34 | val = 'foo' 35 | self._test_val(prefix, val) 36 | 37 | def test_handles_multiple_hashes(self): 38 | prefix = 'PREFIX#' 39 | val = '#foo#bar#' 40 | self._test_val(prefix, val) 41 | 42 | def test_handles_pipe(self): 43 | prefix = 'PREFIX#' 44 | val = 'foo|bar' 45 | self._test_val(prefix, val) 46 | 47 | 48 | class TestStripPrefixes(TestBase): 49 | def setUp(self): 50 | self._pk = db.PartitionKey(User, 'foo@example.com') 51 | self._sk = db.SortKey(Subscription, 'docs.example.com') 52 | 53 | def test_noop_on_no_prefix(self): 54 | item = { 55 | 'foo': 'bar' 56 | } 57 | res = Table._strip_prefixes(item) 58 | self.assertDictEqual(item, res) 59 | 60 | def 
test_strips_prefixes(self): 61 | item = { 62 | 'PK': str(self._pk), 63 | 'SK': str(self._sk), 64 | 'Foo': str(self._sk) 65 | } 66 | res = Table._strip_prefixes(item) 67 | self.assertEqual(res['PK'], self._pk.value) 68 | self.assertEqual(res['SK'], self._sk.value) 69 | self.assertEqual(res['Foo'], self._sk.value) 70 | 71 | def test_makes_copy(self): 72 | item = { 73 | 'PK': str(self._pk), 74 | 'SK': str(self._sk) 75 | } 76 | res = Table._strip_prefixes(item) 77 | self.assertNotEqual(item['PK'], res['PK']) 78 | self.assertNotEqual(item['SK'], res['SK']) 79 | 80 | 81 | class TestInit(TestBase): 82 | _to_patch = [ 83 | 'dokklib_db.table.boto3' 84 | ] 85 | 86 | def test_client(self): 87 | boto3 = self._mocks['boto3'] 88 | 89 | table = Table('my-table') 90 | self.assertEqual(table._client, boto3.client.return_value) 91 | 92 | def test_primary_index(self): 93 | pk_name = 'my-pk-name' 94 | sk_name = 'my-sk-name' 95 | 96 | class TestIndex(db.GlobalIndex): 97 | @property 98 | def partition_key(self) -> str: 99 | return pk_name 100 | 101 | @property 102 | def sort_key(self) -> str: 103 | return sk_name 104 | 105 | table = Table('my-table', TestIndex()) 106 | self.assertEqual(table.primary_index.partition_key, pk_name) 107 | self.assertEqual(table.primary_index.sort_key, sk_name) 108 | 109 | 110 | class TableTestCaseMixin(ABC): 111 | _to_patch = [ 112 | 'dokklib_db.table.boto3', 113 | 'dokklib_db.table.Table._client#PROPERTY' 114 | ] 115 | 116 | @abstractmethod 117 | def _call_test_fn(self): 118 | raise NotImplementedError 119 | 120 | @property 121 | @abstractmethod 122 | def _dynamo_method(self): 123 | raise NotImplementedError 124 | 125 | def setUp(self): 126 | super().setUp() 127 | 128 | self._client = MagicMock() 129 | self._mocks['_client'].return_value = self._client 130 | self._pk = db.PartitionKey(User, 'foo@example.com') 131 | self._sk = db.SortKey(Subscription, 'docs.example.com') 132 | self._sk_prefix = db.PrefixSortKey(Subscription) 133 | 134 | def 
test_handlers_throughput_error(self): 135 | error_response = { 136 | 'Error': { 137 | 'Code': 'ProvisionedThroughputExceededException', 138 | } 139 | } 140 | self._dynamo_method.side_effect = ClientError(error_response, 141 | 'OpName') 142 | with self.assertRaises(db.errors.ProvisionedThroughputExceededException): # noqa 501 143 | self._call_test_fn() 144 | 145 | 146 | class TestBatchGet(TableTestCaseMixin, TestBase): 147 | def setUp(self): 148 | super().setUp() 149 | self._pk_2 = db.PartitionKey(User, 'bar@example.com') 150 | self._sk_2 = db.SortKey(Subscription, 'docs.bar.com') 151 | self._keys = [ 152 | db.PrimaryKey(self._pk, self._sk), 153 | db.PrimaryKey(self._pk_2, self._sk_2) 154 | ] 155 | self._table_name = 'my-table' 156 | self._table = Table(self._table_name) 157 | 158 | def _call_test_fn(self, attributes=None, consistent=False): 159 | attributes = attributes or [] 160 | return self._table.batch_get(self._keys, 161 | attributes=attributes, 162 | consistent=consistent) 163 | 164 | def _get_call_arg(self, name, consistent=False, attributes=None): 165 | self._call_test_fn(attributes=attributes, consistent=consistent) 166 | _, kwargs = self._dynamo_method.call_args 167 | return kwargs['RequestItems'][self._table_name][name] 168 | 169 | def _get_attributes_call_arg(self, attributes=None): 170 | pe = self._get_call_arg('ProjectionExpression', attributes) 171 | attributes = pe.split(',') 172 | return attributes 173 | 174 | @property 175 | def _dynamo_method(self): 176 | return self._client.batch_get_item 177 | 178 | def test_keys(self): 179 | keys = self._get_call_arg('Keys', consistent=True) 180 | self.assertEqual(len(keys), 2) 181 | for i, key in enumerate(keys): 182 | exp = self._keys[i].serialize(self._table.primary_index) 183 | self.assertDictEqual(keys[i], exp) 184 | 185 | def test_consistent(self): 186 | consistent = self._get_call_arg('ConsistentRead', consistent=True) 187 | self.assertTrue(consistent) 188 | 189 | def test_retrieves_keys(self): 190 | 
attributes = self._get_attributes_call_arg(['foo']) 191 | self.assertTrue(set(attributes).issuperset({'PK', 'SK'})) 192 | 193 | def test_retrieves_keys_default(self): 194 | attributes = self._get_attributes_call_arg() 195 | self.assertSetEqual(set(attributes), {'PK', 'SK'}) 196 | 197 | def test_results(self): 198 | key_1_ser = self._keys[0].serialize(self._table.primary_index) 199 | key_2_ser = self._keys[1].serialize(self._table.primary_index) 200 | self._dynamo_method.return_value = { 201 | 'Responses': { 202 | self._table_name: [ 203 | key_1_ser 204 | ] 205 | }, 206 | 'UnprocessedKeys': { 207 | self._table_name: { 208 | 'Keys': [ 209 | key_2_ser 210 | ] 211 | } 212 | } 213 | } 214 | res = self._call_test_fn() 215 | self.assertEqual(len(res.items), 1) 216 | self.assertEqual(res.items[0]['PK'], self._pk.value) 217 | self.assertEqual(res.items[0]['SK'], self._sk.value) 218 | 219 | self.assertEqual(len(res.unprocessed_keys), 1) 220 | self.assertEqual(res.unprocessed_keys[0], self._keys[1]) 221 | 222 | 223 | class TestDeleteItem(TableTestCaseMixin, TestBase): 224 | def _call_test_fn(self, table_name='my-table'): 225 | table = Table(table_name) 226 | return table.delete(self._pk, self._sk) 227 | 228 | @property 229 | def _dynamo_method(self): 230 | return self._client.delete_item 231 | 232 | 233 | class QueryTestMixin(TableTestCaseMixin): 234 | def test_handles_no_result(self): 235 | self._dynamo_method.return_value = {} 236 | self.assertFalse(self._call_test_fn()) 237 | 238 | def test_handles_empty_result(self): 239 | self._dynamo_method.return_value = {'Items': []} 240 | self.assertFalse(self._call_test_fn()) 241 | 242 | 243 | class TestQuery(QueryTestMixin, TestBase): 244 | def _call_test_fn(self, table_name='my-table'): 245 | table = Table(table_name) 246 | key_cond = Key('PK').eq(str(self._pk)) 247 | query_arg = db.QueryArg(key_cond) 248 | return table._query(query_arg) 249 | 250 | @property 251 | def _dynamo_method(self): 252 | return self._client.query 253 | 
254 | def test_strips_prefixes(self): 255 | self._dynamo_method.return_value = { 256 | 'Items': [{'PK': {'S': str(self._pk)}}] 257 | } 258 | res = self._call_test_fn() 259 | self.assertEqual(res[0]['PK'], self._pk.value) 260 | 261 | 262 | class TestGetItem(QueryTestMixin, TestBase): 263 | def _call_test_fn(self, attributes=None): 264 | table = Table('my-table') 265 | return table.get(self._pk, self._sk, 266 | attributes=attributes) 267 | 268 | @property 269 | def _dynamo_method(self): 270 | return self._client.get_item 271 | 272 | def test_strips_prefixes(self): 273 | self._dynamo_method.return_value = { 274 | 'Item': {'PK': {'S': str(self._pk)}} 275 | } 276 | res = self._call_test_fn() 277 | self.assertEqual(res['PK'], self._pk.value) 278 | 279 | 280 | class TestQueryPrefix(QueryTestMixin, TestBase): 281 | def _call_test_fn(self, global_index=None, attributes=None): 282 | table = Table('my-table') 283 | return table.query_prefix(self._pk, self._sk_prefix, 284 | global_index=global_index, 285 | attributes=attributes) 286 | 287 | @property 288 | def _dynamo_method(self): 289 | return self._client.query 290 | 291 | def test_correct_key(self): 292 | self._call_test_fn() 293 | _, kwargs = self._dynamo_method.call_args 294 | kc = kwargs['KeyConditionExpression'] 295 | self.assertEqual('(#n0 = :v0 AND begins_with(#n1, :v1))', kc) 296 | 297 | def test_global_index(self): 298 | index = db.InversePrimaryIndex() 299 | self._call_test_fn(global_index=index) 300 | _, kwargs = self._dynamo_method.call_args 301 | attr_names = kwargs['ExpressionAttributeNames'] 302 | self.assertEqual(attr_names['#n0'], index.partition_key) 303 | self.assertEqual(attr_names['#n1'], index.sort_key) 304 | 305 | def test_defaults_to_global_index_sk_if_provided(self): 306 | index = db.InversePrimaryIndex() 307 | self._call_test_fn(global_index=index) 308 | _, kwargs = self._dynamo_method.call_args 309 | self.assertEqual(kwargs['ProjectionExpression'], index.sort_key) 310 | 311 | 312 | class 
PutItemTestMixin(TableTestCaseMixin): 313 | def test_handles_conditional_check_failed(self): 314 | error_response = {'Error': {'Code': 'ConditionalCheckFailedException'}} 315 | self._dynamo_method.side_effect = ClientError(error_response, 316 | 'PutItem') 317 | with self.assertRaises(db.errors.ConditionalCheckFailedException): 318 | self._call_test_fn() 319 | 320 | 321 | class TestPutItem(PutItemTestMixin, TestBase): 322 | def _call_test_fn(self, table_name='my-table'): 323 | table = Table(table_name) 324 | put_arg = db.PutArg(self._pk, self._sk) 325 | return table._put_item(put_arg) 326 | 327 | @property 328 | def _dynamo_method(self): 329 | return self._client.put_item 330 | 331 | 332 | class TestTransactWriteItems(PutItemTestMixin, TestBase): 333 | 334 | def _call_test_fn(self, items=None, table_name='my-table'): 335 | table = Table(table_name) 336 | if not items: 337 | items = [] 338 | return table.transact_write_items(items) 339 | 340 | @property 341 | def _dynamo_method(self): 342 | return self._client.transact_write_items 343 | 344 | def _setup_error(self, message=''): 345 | error_response = { 346 | 'Error': { 347 | 'Code': 'TransactionCanceledException', 348 | 'Message': message 349 | } 350 | } 351 | self._dynamo_method.side_effect = ClientError(error_response, 352 | 'TransactWriteItems') 353 | 354 | def test_converts_to_op_name_dicts(self): 355 | op_name = 'my-op-name' 356 | table_name = 'foo-table-name' 357 | 358 | arg_mock = MagicMock(spec=db.PutArg) 359 | arg_mock.get_kwargs.return_value = 1 360 | arg_mock.op_name = op_name 361 | expected_item = {op_name: 1} 362 | 363 | self._call_test_fn(items=[arg_mock], table_name=table_name) 364 | arg_mock.get_kwargs.assert_called_once() 365 | args, _ = arg_mock.get_kwargs.call_args 366 | self.assertEqual(args[0], table_name) 367 | _, kwargs = self._dynamo_method.call_args 368 | self.assertDictEqual(kwargs, {'TransactItems': [expected_item]}) 369 | 370 | def test_handles_transaction_failed(self): 371 | 
self._setup_error() 372 | with self.assertRaises(db.errors.TransactionCanceledException): 373 | self._call_test_fn() 374 | 375 | 376 | class TestUpdateItem(TableTestCaseMixin, TestBase): 377 | def _call_test_fn(self, table_name='my-table'): 378 | table = Table(table_name) 379 | put_attributes = { 380 | 'foo': 'bar' 381 | } 382 | update_arg = db.UpdateArg(self._pk, self._sk, 383 | attr_updates=put_attributes) 384 | return table._update_item(update_arg) 385 | 386 | @property 387 | def _dynamo_method(self): 388 | return self._client.update_item 389 | -------------------------------------------------------------------------------- /tests/unit/test_base.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, List 2 | from unittest import TestCase 3 | from unittest.mock import MagicMock, PropertyMock, patch 4 | 5 | 6 | class TestBase(TestCase): 7 | """Base class for unit tests.""" 8 | 9 | # Paths in this list will be automatically patched for all test cases. 10 | # Overwrite in subclasses to populate the list. 11 | _to_patch: List[str] = [] 12 | 13 | def __init__(self, *args: str, **kwargs: str): 14 | """Initialize a TestBase instance. 
15 | 16 | Args 17 | args: positional arguments for unittest.TestCase 18 | kwargs: keyword arguments for unittest.TestCase 19 | 20 | """ 21 | super().__init__(*args, **kwargs) 22 | 23 | self._mocks: Dict[str, MagicMock] 24 | 25 | def setUp(self): 26 | self._mocks = {} 27 | for path in self._to_patch: 28 | if path.endswith('#PROPERTY'): 29 | path, _ = path.split('#PROPERTY') 30 | name = path.split('.')[-1] 31 | patcher = patch(path, new_callable=PropertyMock) 32 | prop_mock = patcher.start() 33 | self._mocks[name] = prop_mock 34 | else: 35 | patcher = patch(path) 36 | name = path.split('.')[-1] 37 | self._mocks[name] = patcher.start() 38 | self.addCleanup(patcher.stop) 39 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | minversion = 3.0.0 3 | # We support py36, py37 and py38. All three versions are tested in CI. 4 | envlist = mypy,py3,linters,integration 5 | 6 | [testenv:mypy] 7 | deps = -rrequirements/dev-requirements.txt 8 | commands = 9 | mypy dokklib_db 10 | 11 | # Unit tests 12 | [testenv:py3] 13 | deps = -rrequirements/test-requirements.txt 14 | commands = 15 | python -m unittest discover -v -s tests/unit -p "*_test.py" 16 | 17 | # Integration tests 18 | [testenv:integration] 19 | deps = -rrequirements/test-requirements.txt 20 | passenv = AWS* 21 | commands = 22 | python tests/integration/dynamodb_tests.py 23 | 24 | # Linters 25 | [testenv:autopep8] 26 | deps = 27 | autopep8~=1.4 28 | commands = 29 | autopep8 --in-place -aaa --recursive dokklib_db tests scripts 30 | 31 | [testenv:flake8] 32 | deps = -rrequirements/dev-requirements.txt 33 | commands = 34 | flake8 --ignore=D100,D104,D105 dokklib_db 35 | 36 | # Ignore some additional errors for test files and scripts 37 | [testenv:flake8_unittest] 38 | deps = {[testenv:flake8]deps} 39 | commands = 40 | flake8 --ignore=D100,D101,D102,D103,D104,D105 tests scripts 41 | 42 | # 
Flake 8 config 43 | [flake8] 44 | application_import_names = dokklib_db, tests 45 | 46 | [testenv:bandit] 47 | deps = bandit~=1.6 48 | commands = 49 | bandit -r dokklib_db 50 | 51 | [testenv:coverage] 52 | deps = -rrequirements/dev-requirements.txt 53 | commands = 54 | coverage erase 55 | coverage run -m unittest discover -q -s dokklib_db tests -p "*_test.py" 56 | coverage report --omit=.tox/* --fail-under=100 --show-missing --skip-covered 57 | 58 | # CloudFormation lint 59 | [testenv:cfn_lint] 60 | deps = cfn-lint~=0.25 61 | commands = 62 | cfn-lint ./tests/integration/cloudformation.yml 63 | 64 | [testenv:linters] 65 | deps = -rrequirements/dev-requirements.txt 66 | commands = 67 | {[testenv:flake8]commands} 68 | {[testenv:flake8_unittest]commands} 69 | {[testenv:cfn_lint]commands} 70 | {[testenv:bandit]commands} 71 | {[testenv:coverage]commands} 72 | --------------------------------------------------------------------------------