├── .gitignore
├── .travis.yml
├── CHANGELOG.rst
├── Jenkinsfile
├── LICENSE
├── README.rst
├── docs
├── _static
│ └── dynamallow-logo.png
├── api.rst
├── conf.py
├── developing.rst
├── index.rst
├── motivation.rst
├── pull_request_template.md
└── usage.rst
├── dynamorm
├── __init__.py
├── exceptions.py
├── indexes.py
├── local.py
├── model.py
├── relationships.py
├── signals.py
├── table.py
└── types
│ ├── __init__.py
│ ├── _marshmallow.py
│ ├── _schematics.py
│ └── base.py
├── setup.cfg
├── setup.py
├── tests
├── __init__.py
├── conftest.py
├── test_local.py
├── test_model.py
├── test_relationships.py
├── test_signals.py
└── test_table.py
└── tox.ini
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | .*.sw[op]
3 |
4 | /build/
5 | /configs/
6 | /docs/_build/
7 | dist/
8 | .cache/
9 | .eggs/
10 |
11 | *.egg-info/
12 | *.dist-info/
13 |
14 | .coverage
15 | .pytest_cache/
16 | .idea/
17 | .tox/
18 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | sudo: false
3 |
4 | branches:
5 | only:
6 | - master
7 |
8 | matrix:
9 | fast_finish: true
10 | include:
11 | - python: 3.7
12 | env: TOXENV=black
13 |
14 | - python: 3.7
15 | env: TOXENV=docs
16 |
17 | - python: 2.7
18 | env: TOXENV=py27-schematics-codecov
19 | - python: 2.7
20 | env: TOXENV=py27-marshmallow-codecov
21 |
22 | - python: 3.5
23 | env: TOXENV=py35-schematics-codecov
24 | - python: 3.5
25 | env: TOXENV=py35-marshmallow-codecov
26 |
27 | - python: 3.6
28 | env: TOXENV=py36-schematics-codecov
29 | - python: 3.6
30 | env: TOXENV=py36-marshmallow-codecov
31 |
32 | - python: 3.7
33 | env: TOXENV=py37-schematics-codecov
34 | - python: 3.7
35 | env: TOXENV=py37-marshmallow-codecov
36 |
37 | - python: 3.8
38 | env: TOXENV=py38-schematics-codecov
39 | - python: 3.8
40 | env: TOXENV=py38-marshmallow-codecov
41 |
42 | - python: pypy
43 | env: TOXENV=pypy2-schematics-codecov
44 | - python: pypy
45 | env: TOXENV=pypy2-marshmallow-codecov
46 |
47 | - python: pypy3
48 | env: TOXENV=pypy3-schematics-codecov
49 | - python: pypy3
50 | env: TOXENV=pypy3-marshmallow-codecov
51 |
52 |
53 | install: pip install tox
54 | script: TOX_SKIP_MISSING_INTERPRETERS="False" tox
55 |
56 | env:
57 | global:
58 | # GITHUB_TOKEN
59 | - secure: "lQxi+1FV7Lzs3y1/99EmJ4jQpdfc9dupQSFohobsvBCNngTlyE8QU0UK0UwOxeyvrTG9P31KO1jHJdhy7qaMuTUBDBNEaQoenThBAAltD4QOsQuR5ob0H9givBSHF1d0/3jFcnmKnOPTHJXmvewPtz67LDvQB7z0CcLbdiFgj1UB3h6Mk+/aykCHlsGrHyK8UKJQtf6dEzMMqQZPx3UOg81xh6/Kebdlnr5HUqAPtsnO/C7rneeJFcBq3kh0XGpEPxjlbmCGyEZzRPtvIjdCvucilv31nwJphoipnALujBw3RP+FLOIhATBHyUBHN5ELjZgNbhIBIjGhUb+jOJosNwVjdJVcjUPwfeJI/iyes5EA6CTbxmPqVzV5qHDl+BaWDNKPyP/wpY/rBOj5EFihIIxXqgyxQNRl4aJSfs+KC34txTb00NL/LNZRJFnbV0eaC+KHwBg5Qk2dyc4UPUdMlrsxhebRDYynQuF0/oX74T2kQoQiK+oS86P0q8skVLpEmK8HbLtLBXdYh0tFktLqsxBDuF8IxPULZWemNEiJ5Xk9kYYvsjc0aFtQZoDwulvX40RoCP06Zb+qVmzA21kRjxMC2xVIaHGkF2lea2ePh21hemWPs0PbHu4+VLQO1qvYGB/ytajmIN7tJJ7taG780daxl0c0O37kMhNx3taRi/g="
60 |
61 | deploy:
62 | - provider: pypi
63 | user: __token__
64 | password:
65 | secure: "C0hRA/TnDOxMQyCUngaoUVp4PGVfPhIeh8Yr6sFa2XN+y0/afJVXBzbz2zrzXflg93zsZIppNTS+sUuYauae/v8ZRqvU1pjS5xVdlZytMBRt53omej2pe+m4DPe8RADIy5fkfuEyt9F1j7ZKXnxwXI+kz1x6cXShM09pCtCuvc26vTNQN3HycUeA11Fp3Bir4CGMx1qKE6hWwzM1+TP2Ci9Vp4mgAh6oqwpfKYIKtbJRd9Uy5r8RPmfc2suaau5S4kCKjqSGvrcbAOjpTTl6TZoTTpJstPnXb46kDwRaAWC34CRrgK3f0dgfyTm2OKgRMCrhR4JiZp016YvcADKySTtQOlyYn+x9jbV4j0USP8yebJLtPrWX124JAy1Y4tnCXsW7E3Uh7yK1pm3DO1WquHgT3qwUcsL6NCaoGCt9NwzunV0jTTRAVr43zpxkBaD73Vejv3gHaLpMrwccqywFZEjjUh0qJLk3qBIAWefV3aKhPoefRqFRN56BZFbcr0KZTCjZfFGUjno6ncblHqNbYd365SC95dt+FOh9GP7gkHX3l+R9F/eGxfyubPrZLrZSn01y0gSQFLEFHIyYzVofq8p0OZN6ZB/lebOE3r3SHrfiUJF5hoGCyCrqERCCiO2fjn8WmR+n1MLTmdafVlMCnm+FMq1d7pcodK8QDjYk6vo="
66 | on:
67 | branch: master
68 | python: 3.7
69 | condition: $TOXENV = py37-marshmallow-codecov
70 | distributions: "sdist bdist_wheel"
71 |
72 | - provider: pages
73 | local_dir: docs/_build/html/
74 | skip_cleanup: true
75 | github_token: $GITHUB_TOKEN
76 | on:
77 | branch: master
78 | python: 3.7
79 | condition: $TOXENV = docs
80 |
--------------------------------------------------------------------------------
/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | 0.11.0 - 2020.08.24
2 | ###################
3 |
4 | * **BREAKING** Inner ``Schema`` classes that use inheritance must use bases that explicitly extend from the base class of the serialization package.
5 |
6 | Previously this was allowed::
7 |
8 | from schematics.types import StringType
9 |
10 |
11 | class Mixin:
12 | foo = StringType(required=True)
13 |
14 | class MyModel(DynaModel):
15 | class Table:
16 | name = "table"
17 | hash_key = "foo"
18 | read = 1
19 | write = 1
20 |
21 | class Schema(Mixin):
22 | bar = StringType()
23 |
24 | DynamORM would implicitly add the base Schematics model to the MRO of the Schema. However, this caused problems when you want to use an explicitly declared model as the base mixin since our serialization packages already apply metaclass logic to the fields.
25 |
26 | Now, you must always explicitly inherit from the base class::
27 |
28 | from schematics.models import Model
29 | from schematics.types import StringType
30 |
31 |
32 | class Mixin(Model):
 33 |         foo = StringType(required=True)
34 |
35 | class MyModel(DynaModel):
36 | class Table:
37 | name = "table"
38 | hash_key = "foo"
39 | read = 1
40 | write = 1
41 |
42 | class Schema(Mixin):
 43 |             bar = StringType()
44 |
45 | * The internal ``DynamORMSchema.base_field_type()`` function was unused and has been removed
46 |
47 | 0.10.0 - 2020.02.05
48 | ###################
49 |
 50 | * Enable support for updating nested paths in ``Table.update``. As with function modifiers (such as ``minus`` or ``if_not_exists``), nested paths are also separated using the double-underscore syntax. For example, given an attribute ``foo`` of an item ``i``::
51 |
52 | "foo": {
53 | "bar": {
54 | "a": 1
55 | },
56 | "baz": 10
57 | }
58 |
59 | i.update(foo__bar__a=42)
60 | "foo": {
61 | "bar": {
62 | "a": 42
63 | },
64 | "baz": 10
65 | }
66 |
67 | i.update(foo__baz__plus=32)
68 | "foo": {
69 | "bar": {
70 | "a": 42
71 | },
72 | "baz": 42
73 | }
74 |
75 | This works because DynamoDB allows updating of nested attributes, using something like JSON path. From the `DynamoDB Developer Guide`_::
76 |
77 | aws dynamodb update-item \
78 | --table-name ProductCatalog \
79 | --key '{"Id":{"N":"789"}}' \
80 | --update-expression "SET #pr.#5star[1] = :r5, #pr.#3star = :r3" \
81 | --expression-attribute-names '{
82 | "#pr": "ProductReviews",
83 | "#5star": "FiveStar",
84 | "#3star": "ThreeStar"
85 | }' \
86 | --expression-attribute-values '{
87 | ":r5": { "S": "Very happy with my purchase" },
88 | ":r3": {
89 | "L": [
90 | { "S": "Just OK - not that great" }
91 | ]
92 | }
93 | }' \
94 | --return-values ALL_NEW
95 |
96 | Note that the attribute names along the nested path are broken up - this helps distinguish a nested update from a flat key like ``my.flat.key`` that contains a period.
97 |
98 | .. _`DynamoDB Developer Guide`: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html#Expressions.UpdateExpressions.SET.AddingNestedMapAttributes
99 |
100 | 0.9.15 - 2020.02.06
101 | ###################
102 |
103 | * Address ``DeprecationWarning`` for ``collections.abc`` in Python 3.3+
104 |
105 | 0.9.14 - 2019.12.13
106 | ###################
107 |
108 | * Ensure that ``dynamorm_validate`` actually calls ``schematics`` validation.
109 |
110 | 0.9.13 - 2019.12.12
111 | ###################
112 |
113 | * Check that recursive mode is enabled before warning about trying to use both limit and recursive.
114 |
115 | 0.9.12 - 2019.09.30
116 | ###################
117 |
118 | * Ensure GitHub pages serves our static documentation content
119 | * No functional library changes
120 |
121 | 0.9.11 - 2019.09.30
122 | ###################
123 |
124 | * Bug fix: Don't mutate dictionaries passed to table methods.
125 |
126 | This caused problems with ``ReadIterator`` objects that called ``.again()`` because the underlying Table object would end up mutating state on the iterator object.
127 |
128 | 0.9.10 - 2019.09.30
129 | ###################
130 |
131 | * Bug fix: Ensure keys are normalized when calling ``.delete()`` on a model.
132 |
133 | 0.9.9 - 2019.09.30
134 | ##################
135 |
136 | * Performance: Avoid validating twice when calling ``.save()`` on a model.
137 |
138 | 0.9.8 - 2019.09.29
139 | ##################
140 |
141 | * Fix documentation deployment (broken since 0.9.6)
142 |
143 | 0.9.7 - 2019.09.29
144 | ##################
145 |
146 | * Use Black (https://github.com/psf/black) for formatting code
147 | * No functional library changes
148 |
149 | 0.9.6 - 2019.09.26
150 | ##################
151 |
152 | * Switch to ``tox`` for running tests
153 | * Documentation improvements
154 | * No functional library changes
155 |
156 | 0.9.5 - 2019.09.26
157 | ##################
158 |
159 | * Add support for Marshmallow version 3
160 |
161 | 0.9.4 - 2019.09.28
162 | ##################
163 |
164 | * Bump minimum schematics version to 2.10
165 | * Ignore schematics warnings during test
166 |
167 | 0.9.3 - 2019.04.30
168 | ##################
169 |
170 | * Add extras_require to setup.py to specify minimum versions of schematics & marshmallow
171 |
172 | 0.9.2
173 | #####
174 |
175 | * Documentation update
176 |
177 | 0.9.1 - 2018.09.07
178 | ##################
179 |
180 | https://github.com/NerdWalletOSS/dynamorm/pull/61
181 |
182 | * **BACKWARDS INCOMPATIBLE CHANGE!**
183 |
184 | ``Model.query`` and ``Model.scan`` no longer return ALL available items.
185 | Instead they stop at each 1Mb page. You can keep the existing behavior by
186 | adding a ``.recursive()`` call to the return value.
187 |
188 | Before::
189 |
190 | books = Books.scan()
191 |
192 | After::
193 |
194 | books = Books.scan().recursive()
195 |
196 | * This version introduces the ``ReadIterator`` object which is returned from
197 | query and scan operations. This object exposes functions that allow for
198 | better control over how a query/scan is executed. See the usage docs for full
199 | details.
200 |
--------------------------------------------------------------------------------
/Jenkinsfile:
--------------------------------------------------------------------------------
1 | #!groovy
2 |
3 | indy {
4 | base = 'ubuntu-2017.05.23'
5 | }
6 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2016, Evan Borgstrom
2 | Copyright 2016, NerdWallet
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | DynamORM
2 | ========
3 |
4 | .. image:: https://img.shields.io/travis/NerdWalletOSS/dynamorm.svg
5 | :target: https://travis-ci.org/NerdWalletOSS/dynamorm
6 |
7 | .. image:: https://img.shields.io/codecov/c/github/NerdWalletOSS/dynamorm.svg
8 | :target: https://codecov.io/github/NerdWalletOSS/dynamorm
9 |
10 | .. image:: https://img.shields.io/pypi/v/dynamorm.svg
11 | :target: https://pypi.python.org/pypi/dynamorm
12 | :alt: Latest PyPI version
13 |
14 | .. image:: https://img.shields.io/pypi/pyversions/dynamorm.svg
15 | :target: https://pypi.python.org/pypi/dynamorm
16 | :alt: Supported Python Versions
17 |
18 | ----
19 |
20 | *This package is a work in progress -- Feedback / Suggestions / Etc welcomed!*
21 |
22 | Python + DynamoDB ♡
23 |
24 | DynamORM (pronounced *Dynamo-R-M*) is a Python object & relation mapping library for Amazon's `DynamoDB`_ service.
25 |
26 | The project has two goals:
27 |
 28 | 1. **Abstract away the interaction with the underlying DynamoDB libraries**. Python access to the DynamoDB service has evolved quickly, from `Dynamo v1 in boto to Dynamo v2 in boto`_ and then the `new resource model in boto3`_. By providing a consistent interface that will feel familiar to users of other Python ORMs (SQLAlchemy, Django, Peewee, etc) means that we can always provide best-practices for queries and take advantage of new features without needing to refactor any application logic.
29 |
30 | 2. **Delegate schema validation and serialization to more focused libraries**. Building "ORM" semantics is "easy", doing data validation and serialization is not. We support both `Marshmallow`_ and `Schematics`_ for building your object schemas. You can take advantage of the full power of these libraries as they are transparently exposed in your code.
31 |
32 | .. _DynamoDB: http://aws.amazon.com/dynamodb/
33 | .. _Dynamo v1 in boto to Dynamo v2 in boto: http://boto.cloudhackers.com/en/latest/migrations/dynamodb_v1_to_v2.html
34 | .. _new resource model in boto3: http://boto3.readthedocs.io/en/latest/guide/dynamodb.html
35 | .. _Marshmallow: https://marshmallow.readthedocs.io/en/latest/
36 | .. _Schematics: https://schematics.readthedocs.io/en/latest/
37 |
38 |
39 | Supported Schema Validation Libraries
40 | -------------------------------------
41 |
42 | * `Schematics`_ >= 2.1.0
43 | * `Marshmallow`_ >= 2.15.1
44 |
45 |
46 | Example
47 | -------
48 |
49 | .. code-block:: python
50 |
51 | import datetime
52 |
53 | from dynamorm import DynaModel, GlobalIndex, ProjectAll
54 |
55 | # In this example we'll use Marshmallow, but you can also use Schematics too!
56 | # You can see that you have to import the schema library yourself, it is not abstracted at all
57 | from marshmallow import fields
58 |
59 | # Our objects are defined as DynaModel classes
60 | class Book(DynaModel):
61 | # Define our DynamoDB properties
62 | class Table:
63 | name = 'prod-books'
64 | hash_key = 'isbn'
65 | read = 25
66 | write = 5
67 |
68 | class ByAuthor(GlobalIndex):
69 | name = 'by-author'
70 | hash_key = 'author'
71 | read = 25
72 | write = 5
73 | projection = ProjectAll()
74 |
75 | # Define our data schema, each property here will become a property on instances of the Book class
76 | class Schema:
77 | isbn = fields.String(validate=validate_isbn)
78 | title = fields.String()
79 | author = fields.String()
80 | publisher = fields.String()
81 |
82 | # NOTE: Marshmallow uses the `missing` keyword during deserialization, which occurs when we save
83 | # an object to Dynamo and the attr has no value, versus the `default` keyword, which is used when
84 | # we load a document from Dynamo and the value doesn't exist or is null.
85 | year = fields.Number(missing=lambda: datetime.datetime.utcnow().year)
86 |
87 |
88 | # Store new documents directly from dictionaries
89 | Book.put({
90 | "isbn": "12345678910",
91 | "title": "Foo",
92 | "author": "Mr. Bar",
93 | "publisher": "Publishorama"
94 | })
95 |
96 | # Work with the classes as objects. You can pass attributes from the schema to the constructor
97 | foo = Book(isbn="12345678910", title="Foo", author="Mr. Bar",
98 | publisher="Publishorama")
99 | foo.save()
100 |
101 | # Or assign attributes
102 | foo = Book()
103 | foo.isbn = "12345678910"
104 | foo.title = "Foo"
105 | foo.author = "Mr. Bar"
106 | foo.publisher = "Publishorama"
107 |
108 | # In all cases they go through Schema validation, calls to .put or .save can result in ValidationError
109 | foo.save()
110 |
111 | # You can then fetch, query and scan your tables.
112 | # Get on the hash key, and/or range key
113 | book = Book.get(isbn="12345678910")
114 |
115 | # Update items, with conditions
116 | # Here our condition ensures we don't have a race condition where someone else updates the title first
117 |     book.update(title='Corrected Foo', conditions=dict(title=book.title))
118 |
119 | # Query based on the keys
120 | Book.query(isbn__begins_with="12345")
121 |
122 | # Scan based on attributes
123 | Book.scan(author="Mr. Bar")
124 | Book.scan(author__ne="Mr. Bar")
125 |
126 | # Query based on indexes
127 | Book.ByAuthor.query(author="Mr. Bar")
128 |
129 |
130 | Documentation
131 | =============
132 |
133 | Full documentation is built from the sources each build and can be found online at:
134 |
135 | https://nerdwalletoss.github.io/dynamorm/
136 |
137 |
138 | The ``tests/`` also contain the most complete documentation on how to actually use the library, so you are encouraged to read through them to really familiarize yourself with some of the more advanced concepts and use cases.
139 |
--------------------------------------------------------------------------------
/docs/_static/dynamallow-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NerdWalletOSS/dynamorm/1270a85ae25a1cc4d1993bd64c752733c3e12072/docs/_static/dynamallow-logo.png
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | DynamORM API
2 | ============
3 |
4 |
5 | ``dynamorm``
6 | --------------
7 | .. automodule:: dynamorm
8 |
9 |
10 | ``dynamorm.model``
11 | --------------------
12 | .. automodule:: dynamorm.model
13 | :members:
14 |
15 |
16 | ``dynamorm.table``
17 | --------------------
18 | .. automodule:: dynamorm.table
19 | :members:
20 |
21 |
22 | ``dynamorm.relationships``
23 | --------------------------
24 | .. automodule:: dynamorm.relationships
25 | :members:
26 |
27 |
28 | ``dynamorm.signals``
29 | --------------------
30 | .. automodule:: dynamorm.signals
31 | :members:
32 |
33 | .. autodata:: model_prepared
34 | :annotation:
35 | .. autodata:: pre_init
36 | :annotation:
37 | .. autodata:: post_init
38 | :annotation:
39 | .. autodata:: pre_save
40 | :annotation:
41 | .. autodata:: post_save
42 | :annotation:
43 | .. autodata:: pre_update
44 | :annotation:
45 | .. autodata:: post_update
46 | :annotation:
47 | .. autodata:: pre_delete
48 | :annotation:
49 | .. autodata:: post_delete
50 | :annotation:
51 |
52 |
53 | ``dynamorm.exceptions``
54 | -------------------------
55 | .. automodule:: dynamorm.exceptions
56 | :members:
57 |
58 |
59 | ``dynamorm.local``
60 | --------------------
61 | .. automodule:: dynamorm.local
62 | :members:
63 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | #
4 | # DynamORM documentation build configuration file, created by
5 | # sphinx-quickstart on Mon Jun 6 20:52:42 2016.
6 | #
7 | # This file is execfile()d with the current directory set to its
8 | # containing dir.
9 | #
10 | # Note that not all possible configuration values are present in this
11 | # autogenerated file.
12 | #
13 | # All configuration values have a default; values that are commented out
14 | # serve to show the default.
15 |
16 | # If extensions (or modules to document with autodoc) are in another directory,
17 | # add these directories to sys.path here. If the directory is relative to the
18 | # documentation root, use os.path.abspath to make it absolute, like shown here.
19 | #
20 | # import os
21 | # import sys
22 | # sys.path.insert(0, os.path.abspath('.'))
23 |
24 | import datetime
25 | import pkg_resources
26 |
27 | # -- General configuration ------------------------------------------------
28 |
29 | # If your documentation needs a minimal Sphinx version, state it here.
30 | #
31 | needs_sphinx = "1.4"
32 |
33 | # Add any Sphinx extension module names here, as strings. They can be
34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
35 | # ones.
36 | extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx"]
37 |
38 | intersphinx_mapping = {
39 | "boto3": ("https://boto3.readthedocs.io/en/latest", None),
40 | "marshmallow": ("https://marshmallow.readthedocs.io/en/latest", None),
41 | }
42 |
43 | # Add any paths that contain templates here, relative to this directory.
44 | templates_path = ["_templates"]
45 |
46 | # The suffix(es) of source filenames.
47 | # You can specify multiple suffix as a list of string:
48 | #
49 | # source_suffix = ['.rst', '.md']
50 | source_suffix = ".rst"
51 |
52 | # The encoding of source files.
53 | #
54 | # source_encoding = 'utf-8-sig'
55 |
56 | # The master toctree document.
57 | master_doc = "index"
58 |
59 | # General information about the project.
60 | project = "DynamORM"
61 | author = "NerdWallet"
62 | copyright = ' {0:%Y} {1}'.format(
63 | datetime.datetime.utcnow(), author
64 | )
65 |
66 | # The version info for the project you're documenting, acts as replacement for
67 | # |version| and |release|, also used in various other places throughout the
68 | # built documents.
69 | #
70 |
71 | # The full version, including alpha/beta/rc tags.
72 | release = pkg_resources.get_distribution("dynamorm").version
73 |
74 | # The short X.Y version.
75 | version = release.rsplit(".", 1)[0]
76 |
77 | # The language for content autogenerated by Sphinx. Refer to documentation
78 | # for a list of supported languages.
79 | #
80 | # This is also used if you do content translation via gettext catalogs.
81 | # Usually you set "language" from the command line for these cases.
82 | language = None
83 |
84 | # There are two options for replacing |today|: either, you set today to some
85 | # non-false value, then it is used:
86 | #
87 | # today = ''
88 | #
89 | # Else, today_fmt is used as the format for a strftime call.
90 | #
91 | # today_fmt = '%B %d, %Y'
92 |
93 | # List of patterns, relative to source directory, that match files and
94 | # directories to ignore when looking for source files.
95 | # This patterns also effect to html_static_path and html_extra_path
96 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
97 |
98 | # The reST default role (used for this markup: `text`) to use for all
99 | # documents.
100 | #
101 | # default_role = None
102 |
103 | # If true, '()' will be appended to :func: etc. cross-reference text.
104 | #
105 | # add_function_parentheses = True
106 |
107 | # If true, the current module name will be prepended to all description
108 | # unit titles (such as .. function::).
109 | #
110 | # add_module_names = True
111 |
112 | # If true, sectionauthor and moduleauthor directives will be shown in the
113 | # output. They are ignored by default.
114 | #
115 | # show_authors = False
116 |
117 | # The name of the Pygments (syntax highlighting) style to use.
118 | pygments_style = "sphinx"
119 |
120 | # A list of ignored prefixes for module index sorting.
121 | # modindex_common_prefix = []
122 |
123 | # If true, keep warnings as "system message" paragraphs in the built documents.
124 | # keep_warnings = False
125 |
126 | # If true, `todo` and `todoList` produce output, else they produce nothing.
127 | todo_include_todos = False
128 |
129 |
130 | # -- Options for HTML output ----------------------------------------------
131 |
132 | # The theme to use for HTML and HTML Help pages. See the documentation for
133 | # a list of builtin themes.
134 | #
135 | html_theme = "alabaster"
136 |
137 | # Theme options are theme-specific and customize the look and feel of a theme
138 | # further. For a list of options available for each theme, see the
139 | # documentation.
140 | #
141 | html_theme_options = {
142 |     "description": u"Python object & relation mapping library for Amazon's DynamoDB service.",
143 | "github_user": "NerdWalletOSS",
144 | "github_repo": "dynamorm",
145 | "github_banner": True,
146 | "github_type": "star",
147 | "travis_button": True,
148 | "codecov_button": True,
149 | }
150 |
151 | # Add any paths that contain custom themes here, relative to this directory.
152 | # html_theme_path = []
153 |
154 | # The name for this set of Sphinx documents.
155 | # " v documentation" by default.
156 | #
157 | # html_title = 'DynamORM v0.0.1'
158 |
159 | # A shorter title for the navigation bar. Default is the same as html_title.
160 | #
161 | # html_short_title = None
162 |
163 | # The name of an image file (relative to this directory) to place at the top
164 | # of the sidebar.
165 | #
166 | # html_logo = None
167 |
168 | # The name of an image file (relative to this directory) to use as a favicon of
169 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
170 | # pixels large.
171 | #
172 | # html_favicon = None
173 |
174 | # Add any paths that contain custom static files (such as style sheets) here,
175 | # relative to this directory. They are copied after the builtin static files,
176 | # so a file named "default.css" will overwrite the builtin "default.css".
177 | html_static_path = ["_static"]
178 |
179 | # Add any extra paths that contain custom files (such as robots.txt or
180 | # .htaccess) here, relative to this directory. These files are copied
181 | # directly to the root of the documentation.
182 | #
183 | # html_extra_path = []
184 |
185 | # If not None, a 'Last updated on:' timestamp is inserted at every page
186 | # bottom, using the given strftime format.
187 | # The empty string is equivalent to '%b %d, %Y'.
188 | #
189 | # html_last_updated_fmt = None
190 |
191 | # If true, SmartyPants will be used to convert quotes and dashes to
192 | # typographically correct entities.
193 | #
194 | # html_use_smartypants = True
195 |
196 | # Custom sidebar templates, maps document names to template names.
197 | #
198 | # html_sidebars = {}
199 | html_sidebars = {
200 | "**": [
201 | "about.html",
202 | "navigation.html",
203 | "relations.html",
204 | "searchbox.html",
205 | "donate.html",
206 | ]
207 | }
208 |
209 | # Additional templates that should be rendered to pages, maps page names to
210 | # template names.
211 | #
212 | # html_additional_pages = {}
213 |
214 | # If false, no module index is generated.
215 | #
216 | # html_domain_indices = True
217 |
218 | # If false, no index is generated.
219 | #
220 | # html_use_index = True
221 |
222 | # If true, the index is split into individual pages for each letter.
223 | #
224 | # html_split_index = False
225 |
226 | # If true, links to the reST sources are added to the pages.
227 | #
228 | html_show_sourcelink = False
229 |
230 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
231 | #
232 | # html_show_sphinx = True
233 |
234 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
235 | #
236 | # html_show_copyright = True
237 |
238 | # If true, an OpenSearch description file will be output, and all pages will
239 | # contain a tag referring to it. The value of this option must be the
240 | # base URL from which the finished HTML is served.
241 | #
242 | # html_use_opensearch = ''
243 |
244 | # This is the file name suffix for HTML files (e.g. ".xhtml").
245 | # html_file_suffix = None
246 |
247 | # Language to be used for generating the HTML full-text search index.
248 | # Sphinx supports the following languages:
249 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
250 | # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
251 | #
252 | # html_search_language = 'en'
253 |
254 | # A dictionary with options for the search language support, empty by default.
255 | # 'ja' uses this config value.
256 | # 'zh' user can custom change `jieba` dictionary path.
257 | #
258 | # html_search_options = {'type': 'default'}
259 |
260 | # The name of a javascript file (relative to the configuration directory) that
261 | # implements a search results scorer. If empty, the default will be used.
262 | #
263 | # html_search_scorer = 'scorer.js'
264 |
265 | # Output file base name for HTML help builder.
266 | htmlhelp_basename = "DynamORM_doc"
267 |
268 | # -- Options for LaTeX output ---------------------------------------------
269 |
270 | latex_elements = {
271 | # The paper size ('letterpaper' or 'a4paper').
272 | #
273 | # 'papersize': 'letterpaper',
274 | # The font size ('10pt', '11pt' or '12pt').
275 | #
276 | # 'pointsize': '10pt',
277 | # Additional stuff for the LaTeX preamble.
278 | #
279 | # 'preamble': '',
280 | # Latex figure (float) alignment
281 | #
282 | # 'figure_align': 'htbp',
283 | }
284 |
285 | # Grouping the document tree into LaTeX files. List of tuples
286 | # (source start file, target name, title,
287 | # author, documentclass [howto, manual, or own class]).
288 | latex_documents = [
289 | (master_doc, "DynamORM.tex", "DynamORM Documentation", "NerdWallet", "manual")
290 | ]
291 |
292 | # The name of an image file (relative to this directory) to place at the top of
293 | # the title page.
294 | #
295 | # latex_logo = None
296 |
297 | # For "manual" documents, if this is true, then toplevel headings are parts,
298 | # not chapters.
299 | #
300 | # latex_use_parts = False
301 |
302 | # If true, show page references after internal links.
303 | #
304 | # latex_show_pagerefs = False
305 |
306 | # If true, show URL addresses after external links.
307 | #
308 | # latex_show_urls = False
309 |
310 | # Documents to append as an appendix to all manuals.
311 | #
312 | # latex_appendices = []
313 |
314 | # If false, no module index is generated.
315 | #
316 | # latex_domain_indices = True
317 |
318 |
319 | # -- Options for manual page output ---------------------------------------
320 |
321 | # One entry per manual page. List of tuples
322 | # (source start file, name, description, authors, manual section).
323 | man_pages = [(master_doc, "dynamorm", "DynamORM Documentation", [author], 1)]
324 |
325 | # If true, show URL addresses after external links.
326 | #
327 | # man_show_urls = False
328 |
329 |
330 | # -- Options for Texinfo output -------------------------------------------
331 |
332 | # Grouping the document tree into Texinfo files. List of tuples
333 | # (source start file, target name, title, author,
334 | # dir menu entry, description, category)
335 | texinfo_documents = [
336 | (
337 | master_doc,
338 | "DynamORM",
339 | "DynamORM Documentation",
340 | author,
341 | "DynamORM",
342 | "One line description of project.",
343 | "Miscellaneous",
344 | )
345 | ]
346 |
347 | # Documents to append as an appendix to all manuals.
348 | #
349 | # texinfo_appendices = []
350 |
351 | # If false, no module index is generated.
352 | #
353 | # texinfo_domain_indices = True
354 |
355 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
356 | #
357 | # texinfo_show_urls = 'footnote'
358 |
359 | # If true, do not generate a @detailmenu in the "Top" node's menu.
360 | #
361 | # texinfo_no_detailmenu = False
362 |
--------------------------------------------------------------------------------
/docs/developing.rst:
--------------------------------------------------------------------------------
1 | Developing
2 | ==========
3 |
  4 | DynamORM is designed to work with Python 2.7, Python 3.5, Python 3.6, Python 3.7, and Python 3.8, as well as with both Marshmallow and Schematics for serialization. When you open a Pull Request on GitHub, tests will be run against a full matrix of these options to guarantee compatibility.
5 |
6 | Locally we use tox_ to provide a similar test matrix. By default when you run ``tox`` the tests will run against ``python2`` and ``python3`` for both Marshmallow and Schematics.
7 |
8 | The tests will pull down the latest copy of DynamoDB Local from S3 and place it in ``build/dynamo-local``. A copy will be started on a random high port, running with ``-inMemory``, at the start of the test run and shutdown after the run.
9 |
10 |
11 | Testing with tox_
12 | -----------------
13 |
14 | tox_ can be installed with ``pip``, or with your local package manager (i.e. on OSX ``brew install tox``).
15 |
16 | Once installed simply run ``tox``::
17 |
18 | tox
19 |
20 | This will create virtualenvs for the full matrix of ``python2`` and ``python3`` with both schematics and marshmallow.
21 |
22 | .. _tox: https://tox.readthedocs.io/en/latest/
23 |
24 |
25 | Formatting with black_
26 | ----------------------
27 |
28 | DynamORM uses black_ for formatting so that we don't spend time worrying about formatting.
29 |
30 | When you run ``tox`` it will check all files before running tests, and will fail if any of them are not formatted to black's standards.
31 |
32 | To re-format those files you can run::
33 |
34 | tox -e black -- .
35 |
36 | .. _black: https://github.com/psf/black
37 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |
3 | Contents
4 | --------
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 |
9 | usage
10 | developing
11 | api
12 | motivation
13 |
14 |
15 | Indices and tables
16 | ------------------
17 |
18 | * :ref:`genindex`
19 | * :ref:`modindex`
20 | * :ref:`search`
21 |
--------------------------------------------------------------------------------
/docs/motivation.rst:
--------------------------------------------------------------------------------
1 | Motivation for creating DynamORM
2 | ================================
3 |
4 | *This was written Q2 2016*
5 |
6 | * Both `PynamoDB`_ and `dynamodb-mapper`_ provide their own implementation of a "schema", neither of which have strong validation. Also, getting the schema implementation right is the much harder problem to solve than the actual interaction with the DynamoDB service. After using `Marshmallow`_ with other NoSQL systems it was clear that the library providing the abstraction of the data service should simply defer to a far more mature schema implementation that is specifically built to be agnostic.
7 |
8 | * `dynamodb-mapper`_ uses the v1 of DynamoDB API via the ``boto`` library. `PynamoDB`_ uses v2 via ``botocore`` directly. The preference was to maintain parity with the officially supported library and implement functionality based on ``boto3``, and v3 of the API. The implementation is done in such a way that when v4 rolls around the abstraction can be added without the end user implementation needing to be changed.
9 |
10 | * There was a desire for an explicit declaration where the table properties and the schema properties were defined on their own, while still making sense semantically. For example, instead of annotating a schema field that it is the hash key, the name of the hash key should be defined on the table and the properties of the field should be defined on the schema. When the table needs information about the hash key it can simply reference it from the schema by name.
11 |
12 | .. _PynamoDB: https://github.com/jlafon/PynamoDB
13 | .. _dynamodb-mapper: https://bitbucket.org/Ludia/dynamodb-mapper/overview
14 | .. _Marshmallow: https://github.com/marshmallow-code/marshmallow
15 |
--------------------------------------------------------------------------------
/docs/pull_request_template.md:
--------------------------------------------------------------------------------
1 | Thank you for your interest in contributing to DynamORM!
2 |
3 | Please replace this text with a description of WHAT your changes do/fix/improve/etc, and WHY the change is necessary.
4 |
5 | ### Checklist
6 |
7 | - [ ] Tests have been written to cover any new or updated functionality
8 | - [ ] All tests pass when running `tox` locally
9 | - [ ] The documentation in `docs/` has been updated to cover any changes
10 | - [ ] The version in `setup.py` has been bumped to the next version -- follow semver!
11 | - [ ] An entry has been added to `CHANGELOG.rst`
12 |
13 | @NerdWalletOSS/dynamorm
14 |
--------------------------------------------------------------------------------
/docs/usage.rst:
--------------------------------------------------------------------------------
1 | Usage
2 | =====
3 |
4 | Using DynamORM is straightforward. Simply define your models with some specific meta data to represent the DynamoDB Table structure as well as the document schema. You can then use class level methods to query for and get items, represented as instances of the class, as well as class level methods to interact with specific documents in the table.
5 |
6 | .. note::
7 |
8 | Not all functionality is covered in this documentation yet. See `the tests`_ for all "supported" functionality (like: batch puts, unique puts, etc).
9 |
10 | .. _the tests: https://github.com/NerdWalletOSS/DynamORM/tree/master/tests
11 |
12 |
13 | Setting up Boto3
14 | -----------------
15 |
16 | Make sure you have `configured boto3`_ and can access DynamoDB from the Python console.
17 |
18 | .. code-block:: python
19 |
20 | import boto3
21 | dynamodb = boto3.resource('dynamodb')
22 | list(dynamodb.tables.all()) # --> ['table1', 'table2', 'etc...']
23 |
24 | .. _configured boto3: https://boto3.readthedocs.io/en/latest/guide/quickstart.html#configuration
25 |
26 |
27 | Configuring the Boto3 resource
28 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
29 |
30 | The above example is relying on the files ``~/.aws/credentials`` & ``~/.aws/config`` to provide access information and region selection. You can provide explicit configuration for `boto3 sessions`_ and `boto3 resources`_ as part of your ``Table`` definition.
31 |
32 | For example, if you develop against a local dynamo service your models may look something like:
33 |
34 | .. code-block:: python
35 |
36 |
37 | class MyModel(DynaModel):
38 | class Table:
39 | session_kwargs = {
40 | 'region_name': 'us-east-2'
41 | }
42 | resource_kwargs = {
43 | 'endpoint_url': 'http://localhost:33333'
44 | }
45 |
46 |
47 | You would obviously want the session and resource configuration to come from some sort of configuration provider that could provide the correct options depending on where your application is being run.
48 |
49 | .. _boto3 sessions: http://boto3.readthedocs.io/en/latest/reference/core/session.html
50 | .. _boto3 resources: http://boto3.readthedocs.io/en/latest/reference/services/dynamodb.html#service-resource
51 | .. _Flask: http://flask.pocoo.org/
52 |
53 |
54 | Using Dynamo Local
55 | ~~~~~~~~~~~~~~~~~~
56 |
57 | If you're using `Dynamo Local`_ for development you can use the following config for the table resource:
58 |
59 | .. code-block:: python
60 |
61 | MyModel.Table.get_resource(
62 | aws_access_key_id="-",
63 | aws_secret_access_key="-",
64 | region_name="us-west-2",
65 | endpoint_url="http://localhost:8000"
66 | )
67 |
68 | .. _Dynamo Local: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.html
69 |
70 |
71 | Defining your Models -- Tables & Schemas
72 | ----------------------------------------
73 |
74 | .. automodule:: dynamorm.model
75 | :noindex:
76 |
77 | .. autoclass:: dynamorm.model.DynaModel
78 | :noindex:
79 |
80 |
81 | Table Data Model
82 | ----------------
83 |
84 | .. automodule:: dynamorm.table
85 | :noindex:
86 |
87 |
88 | .. _creating-new-documents:
89 |
90 | Creating new documents
91 | ----------------------
92 |
93 | Using objects:
94 |
95 | .. code-block:: python
96 |
97 | thing = Thing(id="thing1", name="Thing One", color="purple")
98 | thing.save()
99 |
100 | .. code-block:: python
101 |
102 | thing = Thing()
103 | thing.id = "thing1"
104 | thing.name = "Thing One"
105 | thing.color = "purple"
106 | thing.save()
107 |
108 |
109 | Using raw documents:
110 |
111 | .. code-block:: python
112 |
113 | Thing.put({
114 | "id": "thing1",
115 | "name": "Thing One",
116 | "color": "purple"
117 | })
118 |
119 | In all cases, the attributes go through validation against the Schema.
120 |
121 | .. code-block:: python
122 |
123 | thing = Thing(id="thing1", name="Thing One", color="orange")
124 |
125 | # the call to save will result in a ValidationError because orange is an invalid choice.
126 | thing.save()
127 |
128 | .. note::
129 |
130 | Remember, if you have a ``String`` field it will use ``unicode`` (py2) or ``str`` (py3) on any value assigned to it, which means that if you assign a ``list``, ``dict``, ``int``, etc then the validation will succeed and what will be stored is the representative string value.
131 |
132 |
133 | Fetching existing documents
134 | ---------------------------
135 |
136 | Get based on primary key
137 | ~~~~~~~~~~~~~~~~~~~~~~~~
138 |
139 | To fetch an existing document based on its primary key you use the ``.get`` class method on your models:
140 |
141 | .. code-block:: python
142 |
143 | thing1 = Thing.get(id="thing1")
144 | assert thing1.color == 'purple'
145 |
146 | To do a `Consistent Read`_ just pass ``consistent=True``:
147 |
148 | .. code-block:: python
149 |
150 | thing1 = Thing.get(id="thing1", consistent=True)
151 | assert thing1.color == 'purple'
152 |
153 | .. _Consistent Read: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.ReadConsistency.html
154 |
155 | Querying
156 | ~~~~~~~~
157 |
158 | .. epigraph::
159 |
160 | A Query operation uses the primary key of a table or a secondary index to directly access items from that table or index.
161 |
162 | -- `Table query docs`_
163 |
164 | .. _Table query docs: https://boto3.readthedocs.io/en/latest/reference/services/dynamodb.html#DynamoDB.Table.query
165 |
166 |
167 | Like a ``get`` operation this takes arguments that map to the key names, but you can also specify a comparison operator for that key using the "double-under" syntax (``__``). For example to query a ``Book`` model for all entries with the ``isbn`` field that start with a specific value you would use the ``begins_with`` comparison operator:
168 |
169 | .. code-block:: python
170 |
171 | books = Book.query(isbn__begins_with="12345")
172 |
173 | You can find the full list of supported comparison operators in the `DynamoDB Condition docs`_.
174 |
175 | .. _DynamoDB Condition docs: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/customizations/dynamodb.html#dynamodb-conditions
176 |
177 | Scanning
178 | ~~~~~~~~
179 |
180 | .. epigraph::
181 |
182 | The Scan operation returns one or more items and item attributes **by accessing every item** in a table or a secondary index.
183 |
184 | -- `Table scan docs`_
185 |
186 | .. _Table scan docs: https://boto3.readthedocs.io/en/latest/reference/services/dynamodb.html#DynamoDB.Table.scan
187 |
188 | Scanning works exactly the same as querying.
189 |
190 | .. code-block:: python
191 |
192 | # Scan based on attributes
193 | Book.scan(author="Mr. Bar")
194 | Book.scan(author__ne="Mr. Bar")
195 |
196 |
197 | .. _read-iterators:
198 |
199 | Read Iterator object
200 | ~~~~~~~~~~~~~~~~~~~~
201 |
202 | Calling ``.query`` or ``.scan`` will return a ``ReadIterator`` object that will not actually send the API call to DynamoDB until you try to access an item in the object by iterating (``for book in books:``, ``list(books)``, etc...).
203 |
204 | The iterator objects have a number of methods on them that can be used to influence their behavior. All of the methods described here (except ``.count()``) are "chained methods", meaning that they return the iterator object such that you can chain them together.
205 |
206 | .. code-block:: python
207 |
208 | next_10_books = Book.query(hash_key=the_hash_key).start(previous_last).limit(10)
209 |
210 |
211 | Returning the Count (``.count()``)
212 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
213 |
214 | Unlike the rest of the methods in this section, ``.count()`` is the only one that does not return the iterator object. Instead it changes the SELECT_ parameter to ``COUNT`` and immediately sends the request, returning the count.
215 |
216 | .. code-block:: python
217 |
218 | books_matching_hash_key = Books.query(hash_key=the_hash_key).count()
219 |
220 |
221 | .. _SELECT: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Query.html#DDB-Query-request-Select
222 |
223 |
224 | Requesting consistent results (``.consistent()``)
225 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
226 |
227 | Queries & scans return eventually consistent results by default. You can use ``.consistent()`` to return results that ensure all in-flight writes finished and no new writes were launched.
228 |
229 | .. code-block:: python
230 |
231 | Books.query(hash_key=the_hash_key).consistent()
232 |
233 |
234 | Changing the returned attributes (``.specific_attributes()``)
235 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
236 |
237 | By default, query & scan operations will return ALL attributes from the table or index. If you'd like to change the attributes to only return subset of the attributes you can pass a list to ``.specific_attributes([...])``. Each attribute passed in should match the syntax from `Specifying Item Attributes`_ in the docs.
238 |
239 | .. code-block:: python
240 |
241 | Books.query(hash_key=the_hash_key).specific_attributes(['isbn', 'title', 'publisher.name'])
242 |
243 | .. _Specifying Item Attributes: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.Attributes.html
244 |
245 |
246 | Paging (``.last``, ``.start()`` & ``.again()``)
247 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
248 |
249 | .. epigraph::
250 |
251 | A single Query operation will read up to the maximum number of items set (if using the Limit parameter) or a maximum of 1 MB of data and then apply any filtering to the results
252 |
253 | -- `Table query docs`_
254 |
255 | When you query a table with many items, or with a limit, the iterator object will set its ``.last`` attribute to the key of the last item it received. You can pass that item into a subsequent query via the ``start()`` method, or if you have the existing iterator object simply call ``.again()``.
256 |
257 | .. code-block:: python
258 |
259 | books = Book.scan()
260 | print(list(books))
261 |
262 | if books.last:
263 | print("The last book seen was: {}".format(books.last))
264 | print(list(books.again()))
265 |
266 |
267 | .. code-block:: python
268 |
269 | last = get_last_from_request()
270 | books = Book.scan().start(last)
271 |
272 |
273 | Limiting (``.limit()``)
274 | ^^^^^^^^^^^^^^^^^^^^^^^
275 |
276 | .. epigraph::
277 |
278 | The maximum number of items to evaluate (not necessarily the number of matching items). If DynamoDB processes the number of items up to the limit while processing the results, it stops the operation and returns the matching values up to that point.
279 |
280 | -- `Table query docs`_
281 |
282 | You can also use the ``.limit()`` method on the iterator object to apply a Limit to your query.
283 |
284 | .. code-block:: python
285 |
286 | books = Book.scan().limit(1)
287 | assert len(books) == 1
288 |
289 |
290 | Reversing (``.reverse()`` - Queries Only)
291 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
292 |
293 | To have the indexed scanned in reverse for your query, use ``.reverse()``
294 |
295 | .. note::
296 |
297 | Scanning does not support reversing.
298 |
299 | .. code-block:: python
300 |
301 | books = Book.query(hash_key=the_hash_key).reverse()
302 |
303 |
304 | Recursion (``.recursive()``)
305 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
306 |
307 | If you wish to get ALL items from a query or scan without having to deal with paging yourself, then you can use the ``recursive()`` method to have the iterator handle the paging for you.
308 |
309 | .. code-block:: python
310 |
311 | books = Book.scan().recursive()
312 |
313 |
314 | .. _q-objects:
315 |
316 | ``Q`` objects
317 | ~~~~~~~~~~~~~
318 |
319 | .. autofunction:: dynamorm.table.Q
320 | :noindex:
321 |
322 | See the :py:func:`dynamorm.model.DynaModel.scan` docs for more examples.
323 |
324 |
325 | Indexes
326 | ~~~~~~~
327 |
328 | By default the hash & range keys of your table make up the "Primary Index". `Secondary Indexes`_ provide different ways to query & scan your data. They are defined on your Model alongside the main Table definition as inner classes inheriting from either the ``GlobalIndex`` or ``LocalIndex`` classes.
329 |
330 | .. _Secondary Indexes: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/SecondaryIndexes.html
331 |
332 | Here's an excerpt from the model used in the readme:
333 |
334 | .. code-block:: python
335 |
336 | class Book(DynaModel):
337 | # Define our DynamoDB properties
338 | class Table:
339 | name = 'prod-books'
340 | hash_key = 'isbn'
341 | read = 25
342 | write = 5
343 |
344 | class ByAuthor(GlobalIndex):
345 | name = 'by-author'
346 | hash_key = 'author'
347 | read = 25
348 | write = 5
349 | projection = ProjectAll()
350 |
351 | With the index defined we can now call ``Book.ByAuthor.query`` or ``Book.ByAuthor.scan`` to query or scan the index. The query & scan semantics on the Index are the same as on the main table.
352 |
353 | .. code-block:: python
354 |
355 | Book.ByAuthor.query(author='Some Author')
356 | Book.ByAuthor.query(author__ne='Some Author')
357 |
358 | Indexes uses "projection" to determine which attributes of your documents are available in the index. The ``ProjectAll`` projection puts ALL attributes from your Table into the Index. The ``ProjectKeys`` projection puts just the keys from the table (and also the keys from the index themselves) into the index. The ``ProjectInclude('attr1', 'attr2')`` projection allows you to specify which attributes you wish to project.
359 |
360 | Using the ``ProjectKeys`` or ``ProjectInclude`` projection will result in partially validated documents, since we won't have all of the required attributes.
361 |
362 | A common pattern is to define a "sparse index" with just the keys (``ProjectKeys``), load the keys of the documents you want from the index and then do a batch get to fetch them all from the main table.
363 |
364 |
365 | Updating documents
366 | ------------------
367 |
368 | There are a number of ways to send updates back to the Table from your Model classes and indexes. The :ref:`creating-new-documents` section already showed you the :py:func:`dynamorm.model.DynaModel.save` methods for creating new documents. ``save`` can also be used to update existing documents:
369 |
370 | .. code-block:: python
371 |
372 | # Our book is no longer in print
373 | book = Book.get(isbn='1234567890')
374 | book.in_print = False
375 | book.save()
376 |
377 | When you call ``.save()`` on an instance the WHOLE document is put back into the table as save simply invokes the :py:func:`dynamorm.model.DynaModel.put` function. This means that if you have large models it may cost you more in Write Capacity Units to put the whole document back.
378 |
379 | You can also do a "partial save" by passing ``partial=True`` when calling save, in which case the :py:func:`dynamorm.model.DynaModel.update` function will be used to only send the attributes that have been modified since the document was loaded. The following two code blocks will result in the same operations:
380 |
381 | .. code-block:: python
382 |
383 | # Our book is no longer in print
384 | book = Book.get(isbn='1234567890')
385 | book.in_print = False
386 | book.save(partial=True)
387 |
388 | .. code-block:: python
389 |
390 | # Our book is no longer in print
391 | book = Book.get(isbn='1234567890')
392 | book.update(in_print=False)
393 |
394 | Doing partial saves (``.save(partial=True)``) is a very convenient way to work with existing instances, but using the :py:func:`dynamorm.model.DynaModel.update` directly allows for you to also send `Update Expressions`_ and `Condition Expressions`_ with the update. Combined with consistent reads, this allows you to do things like acquire locks that ensure race conditions cannot happen:
395 |
396 | .. code-block:: python
397 |
398 | class Lock(DynaModel):
399 | class Table:
400 | name = 'locks'
401 | hash_key = 'name'
402 | read = 1
403 | write = 1
404 |
405 | class Schema:
406 | name = String(required=True)
407 | updated = Integer(required=True, default=0)
408 | key = String()
409 | is_locked = Boolean(default=False)
410 |
411 | @classmethod
412 | def lock(cls, name, key):
413 | inst = cls.get(name=name, consistent=True)
414 |
415 | if inst is None:
416 | inst = Lock(name=name)
417 | inst.save()
418 |
419 | if not inst.is_locked:
420 | inst.update(
421 | is_locked=True,
422 | key=key,
423 | updated=time.time(),
424 | conditions=dict(
425 | updated=inst.updated,
426 | )
427 | )
428 | return inst
429 |
430 | @classmethod
431 | def unlock(cls, name, key):
432 | inst = cls.get(name=name, consistent=True)
433 |
434 | if key == inst.key:
435 | inst.update(
436 | is_locked=False,
437 | key=None,
438 | updated=time.time(),
439 | conditions=dict(
440 | updated=inst.updated,
441 | )
442 | )
443 |
444 | return inst
445 |
446 | lock = Lock.lock('my-lock', 'my-key')
447 | if lock.key != 'my-key':
448 | print("Failed to lock!")
449 | else:
450 | print("Lock acquired!")
451 |
452 |
453 | Just like Scanning or Querying a table, you can use :ref:`q-objects` for your update expressions.
454 |
455 | .. _Update Expressions: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.UpdateExpressions.html
456 | .. _Condition Expressions: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.ConditionExpressions.html
457 |
458 |
459 | Relationships
460 | -------------
461 |
462 | .. automodule:: dynamorm.relationships
463 | :noindex:
464 |
--------------------------------------------------------------------------------
/dynamorm/__init__.py:
--------------------------------------------------------------------------------
1 | """The base module namespace simply imports the most frequently used objects to simplify imports in clients:
2 |
3 | .. code-block:: python
4 |
5 | from dynamorm import DynaModel
6 |
7 | """
8 | from .model import DynaModel # noqa
9 | from .indexes import (
10 | GlobalIndex,
11 | LocalIndex,
12 | ProjectAll,
13 | ProjectKeys,
14 | ProjectInclude,
15 | ) # noqa
16 | from .relationships import ManyToOne, OneToMany, OneToOne # noqa
17 | from .table import Q # noqa
18 |
--------------------------------------------------------------------------------
/dynamorm/exceptions.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import six
4 |
5 | log = logging.getLogger(__name__)
6 |
7 |
8 | # --- Base exception ---
class DynamoException(Exception):
    """Base exception for all DynamORM raised exceptions

    Catching this type catches every error raised by DynamORM itself.
    """
11 |
12 |
13 | # --- Schema exceptions ---
class DynaModelException(DynamoException):
    """Base exception for DynaModel problems

    Raised directly for model definition errors (e.g. a missing inner class).
    """
16 |
17 |
@six.python_2_unicode_compatible
class ValidationError(DynaModelException):
    """Schema validation failed

    Carries the offending input (``raw``), the name of the schema class that
    rejected it (``schema_name``) and the backend-reported ``errors``.
    """

    def __init__(self, raw, schema_name, errors, *args, **kwargs):
        super(ValidationError, self).__init__(*args, **kwargs)
        # stash everything the caller gave us so it can be inspected later
        self.raw = raw
        self.schema_name = schema_name
        self.errors = errors

    def __str__(self):
        # the raw payload is only emitted at debug level, not in the message
        log.debug("Validation failure for data: {0}".format(self.raw))
        template = "Validation failed for schema {0}. Errors: {1}"
        return template.format(self.schema_name, self.errors)
33 |
34 |
35 | # --- Table exceptions ---
36 |
37 |
class DynamoTableException(DynamoException):
    """Base exception class for all DynamoTable errors

    Catch this to handle any table-level failure in one place.
    """
40 |
41 |
class MissingTableAttribute(DynamoTableException):
    """A required attribute is missing from the Table definition"""
44 |
45 |
class InvalidSchemaField(DynamoTableException):
    """A field provided does not exist in the schema"""
    # NOTE(review): presumably raised when query/scan kwargs name unknown
    # fields -- confirm against table.py
48 |
49 |
class InvalidKey(DynamoTableException):
    """A parameter is not a valid key"""
    # NOTE(review): raised by key-validation paths in table.py -- confirm
52 |
53 |
class HashKeyExists(DynamoTableException):
    """An operation requesting a unique hash key failed because the key exists"""
56 |
57 |
class ConditionFailed(DynamoTableException):
    """A condition check failed"""
    # NOTE(review): likely maps to DynamoDB's ConditionalCheckFailedException
    # -- confirm mapping in table.py
60 |
61 |
class TableNotActive(DynamoTableException):
    """The table is not ACTIVE, and you do not want to wait"""
64 |
--------------------------------------------------------------------------------
/dynamorm/indexes.py:
--------------------------------------------------------------------------------
class Index(object):
    """Query/scan proxy for a single secondary index on a model.

    Concrete subclasses (declared on the model) are expected to provide a
    ``projection`` attribute; its ``partial`` flag is forwarded to the
    resulting iterator.
    """

    def __init__(self, model, index):
        # the DynaModel class this index belongs to
        self.model = model
        # the table-level index object; provides ``.name``
        self.index = index

    def query(self, query_kwargs=None, **kwargs):
        """Execute a query on this index

        See DynaModel.query for documentation on how to pass query arguments.
        """
        # EAFP: a TypeError means no dict was supplied, so start a fresh one
        try:
            query_kwargs["IndexName"] = self.index.name
        except TypeError:
            query_kwargs = {"IndexName": self.index.name}

        iterator = self.model.query(query_kwargs=query_kwargs, **kwargs)
        return iterator.partial(self.projection.partial)

    def scan(self, scan_kwargs=None, **kwargs):
        """Execute a scan on this index

        See DynaModel.scan for documentation on how to pass scan arguments.
        """
        # EAFP: a TypeError means no dict was supplied, so start a fresh one
        try:
            scan_kwargs["IndexName"] = self.index.name
        except TypeError:
            scan_kwargs = {"IndexName": self.index.name}

        iterator = self.model.scan(scan_kwargs=scan_kwargs, **kwargs)
        return iterator.partial(self.projection.partial)
33 |
34 |
class LocalIndex(Index):
    """Represents a Local Secondary Index on your table"""

    # no extra behavior -- subclassing Index is what lets the model metaclass
    # detect and instantiate this as an index
    pass
39 |
40 |
class GlobalIndex(Index):
    """Represents a Global Secondary Index on your table"""

    pass
45 |
46 |
class Projection(object):
    # Marker base class for index projections.  Concrete projections define a
    # ``partial`` attribute indicating whether documents loaded through the
    # index are only partially validated by the schema.
    pass
49 |
50 |
class ProjectAll(Projection):
    """Project all attributes from the Table into the Index

    Documents loaded using this projection will be fully validated by the schema.
    """

    # full documents come back from the index, so validation is not partial
    partial = False
58 |
59 |
class ProjectKeys(Projection):
    """Project the keys from the Table into the Index.

    Documents loaded using this projection will be partially validated by the schema.
    """

    # only key attributes come back, so validation must be partial
    partial = True
67 |
68 |
class ProjectInclude(Projection):
    """Project the specified attributes into the Index.

    Documents loaded using this projection will be partially validated by the schema.

    .. code-block:: python

        class ByAuthor(GlobalIndex):
            ...
            projection = ProjectInclude('some_attr', 'other_attr')
    """

    partial = True

    def __init__(self, *include):
        # attribute names to project into the index, kept as given
        self.include = include
85 |
--------------------------------------------------------------------------------
/dynamorm/local.py:
--------------------------------------------------------------------------------
1 | import atexit
2 | import logging
3 | import os
4 | import random
5 | import socket
6 | import subprocess
7 | import tarfile
8 | import tempfile
9 |
10 | try:
11 | from urllib import urlretrieve
12 | except ImportError:
13 | from urllib.request import urlretrieve
14 |
15 | log = logging.getLogger(__name__)
16 |
17 |
class DynamoLocal(object):
    """
    Spins up a local dynamo instance. This should ONLY be used for testing!! This instance
    will register the cleanup method ``shutdown`` with the ``atexit`` module.
    """

    def __init__(self, dynamo_dir, port=None):
        """Download (if needed) and launch DynamoDB Local.

        :param dynamo_dir: directory that holds, or will hold, the DynamoDB
            Local install
        :param port: TCP port to listen on; a random free port is chosen when
            falsy (note: ``port=0`` therefore also triggers a random port)
        """
        self.port = port or get_random_port()
        if not os.path.isdir(dynamo_dir):
            log.info("Creating dynamo_local_dir: {0}".format(dynamo_dir))
            assert not os.path.exists(dynamo_dir)
            os.makedirs(dynamo_dir, 0o755)

        # only download + extract the distribution if it isn't already present
        if not os.path.exists(os.path.join(dynamo_dir, "DynamoDBLocal.jar")):
            temp_fd, temp_file = tempfile.mkstemp()
            # we only need the path; urlretrieve opens the file itself
            os.close(temp_fd)
            log.info("Downloading dynamo local to: {0}".format(temp_file))
            urlretrieve(
                "https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.tar.gz",
                temp_file,
            )

            log.info("Extracting dynamo local...")
            archive = tarfile.open(temp_file, "r:gz")
            archive.extractall(dynamo_dir)
            archive.close()

            # the downloaded tarball is no longer needed once extracted
            os.unlink(temp_file)

        log.info(
            "Running dynamo from {dir} on port {port}".format(
                dir=dynamo_dir, port=self.port
            )
        )
        # launch the java process with -inMemory so no data is persisted;
        # cwd must be the install dir for the relative library path to resolve
        self.dynamo_proc = subprocess.Popen(
            (
                "java",
                "-Djava.library.path=./DynamoDBLocal_lib",
                "-jar",
                "DynamoDBLocal.jar",
                "-sharedDb",
                "-inMemory",
                "-port",
                str(self.port),
            ),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=dynamo_dir,
        )
        # ensure the java process is cleaned up when the interpreter exits
        atexit.register(self.shutdown)

    def shutdown(self):
        """Terminate the local dynamo process if it is running (idempotent)."""
        if self.dynamo_proc:
            self.dynamo_proc.terminate()
            self.dynamo_proc.wait()
            self.dynamo_proc = None
74 |
75 |
def get_random_port():
    """Find a random port that appears to be available

    Picks a random high port and probes it with ``connect_ex``; a non-zero
    result means nothing answered, so the port is presumed free.  Retries
    until such a port is found.
    """
    while True:
        candidate = random.randint(25000, 55000)
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            status = probe.connect_ex(("127.0.0.1", candidate))
        finally:
            probe.close()
        # connect_ex returning 0 means something is already listening there
        if status != 0:
            return candidate
87 |
--------------------------------------------------------------------------------
/dynamorm/model.py:
--------------------------------------------------------------------------------
1 | """Models represent tables in DynamoDB and define the characteristics of the Dynamo service as well as the Marshmallow
2 | or Schematics schema that is used for validating and marshalling your data.
3 | """
4 |
5 | import inspect
6 | import logging
7 | import sys
8 |
9 | import six
10 |
11 | from .exceptions import DynaModelException
12 | from .indexes import Index
13 | from .relationships import Relationship
14 | from .signals import (
15 | model_prepared,
16 | pre_init,
17 | post_init,
18 | pre_save,
19 | post_save,
20 | pre_update,
21 | post_update,
22 | pre_delete,
23 | post_delete,
24 | )
25 | from .table import DynamoTable3, QueryIterator, ScanIterator
26 |
27 | log = logging.getLogger(__name__)
28 |
29 |
class DynaModelMeta(type):
    """DynaModelMeta is a metaclass for the DynaModel class that transforms our Table and Schema classes

    Since we can inspect the data we need to build the full data structures needed for working with tables and indexes
    users can define for more concise and readable table definitions that we transform into the final. To allow for a
    more concise definition of DynaModels we do not require that users define their inner Schema class as extending
    from the :class:`~marshmallow.Schema`. Instead, when the class is being defined we take the inner Schema and
    transform it into a new class named Schema, extending from :class:`~marshmallow.Schema`. For example, on a
    model named ``Foo`` the resulting ``Foo.Schema`` object would be an instance of a class named ``FooSchema``, rather
    than a class named ``Schema``
    """

    def __new__(cls, name, parents, attrs):
        # the framework base classes themselves are left untransformed
        if name in ("DynaModel", "DynaModelMeta"):
            return super(DynaModelMeta, cls).__new__(cls, name, parents, attrs)

        def should_transform(inner_class):
            """Closure to determine if we should transform an inner class (Schema or Table)"""
            # if the inner class exists in our own attributes we use that
            if inner_class in attrs:
                return True

            # if any of our parent classes have the class then we use that
            for parent in parents:
                try:
                    getattr(parent, inner_class)
                    return False
                except AttributeError:
                    pass

            # neither this class nor any parent defines it -- the model is invalid
            raise DynaModelException(
                "You must define an inner '{inner}' class on your '{name}' class".format(
                    inner=inner_class, name=name
                )
            )

        # collect our indexes & relationships
        indexes = dict(
            (name, val)
            for name, val in six.iteritems(attrs)
            if inspect.isclass(val) and issubclass(val, Index)
        )

        attrs["relationships"] = dict(
            (name, val)
            for name, val in six.iteritems(attrs)
            if isinstance(val, Relationship)
        )

        # Transform the Schema.
        if should_transform("Schema"):
            # choose the serialization backend by whichever supported library has
            # already been imported; marshmallow wins when both are present
            if "marshmallow" in sys.modules:
                from .types._marshmallow import Schema
            elif "schematics" in sys.modules:
                from .types._schematics import Schema
            else:
                raise DynaModelException(
                    "Unknown Schema definitions, we couldn't find any supported fields/types"
                )

            if issubclass(attrs["Schema"], Schema.base_schema_type()):
                # the user's Schema already extends the backend's base schema:
                # just mix our Schema wrapper in front of it
                SchemaClass = type(
                    "{name}Schema".format(name=name),
                    (Schema, attrs["Schema"]),
                    {},
                )
            else:
                # plain inner class: rebuild it with our Schema wrapper as a
                # base, preserving its declared bases and attributes
                SchemaClass = type(
                    "{name}Schema".format(name=name),
                    (Schema,) + attrs["Schema"].__bases__,
                    dict(attrs["Schema"].__dict__),
                )
            attrs["Schema"] = SchemaClass

        # transform the Table
        if should_transform("Table"):
            TableClass = type(
                "{name}Table".format(name=name),
                (DynamoTable3,) + attrs["Table"].__bases__,
                dict(attrs["Table"].__dict__),
            )
            # note: the Table attribute becomes an *instance*, not a class
            attrs["Table"] = TableClass(schema=attrs["Schema"], indexes=indexes)

        # call our parent to get the new instance
        model = super(DynaModelMeta, cls).__new__(cls, name, parents, attrs)

        # give the Schema and Table objects a reference back to the model
        model.Schema._model = model
        model.Table._model = model

        # Put the instantiated indexes back into our attrs. We instantiate the Index class that's in the attrs and
        # provide the actual Index object from our table as the parameter.
        # (note: this loop reuses -- shadows -- the ``name`` argument, which is
        # not needed again below)
        for name, klass in six.iteritems(indexes):
            index = klass(model, model.Table.indexes[klass.name])
            setattr(model, name, index)

        for relationship in six.itervalues(model.relationships):
            relationship.set_this_model(model)

        # let any listeners know the model class is fully constructed
        model_prepared.send(model)

        return model
132 |
133 |
@six.add_metaclass(DynaModelMeta)
class DynaModel(object):
    """``DynaModel`` is the base class all of your models will extend from. This model definition encapsulates the
    parameters used to create and manage the table as well as the schema for validating and marshalling data into object
    attributes. It will also hold any custom business logic you need for your objects.

    Your class must define two inner classes that specify the Dynamo Table options and the Schema, respectively.

    The Dynamo Table options are defined in a class named ``Table``. See the :mod:`dynamorm.table` module for
    more information.

    Any Local or Global Secondary Indexes you wish to create are defined as inner tables that extend from either the
    :class:`~LocalIndex` or :class:`~GlobalIndex` classes. See the :mod:`dynamorm.table` module for more information.

    The document schema is defined in a class named ``Schema``, which should be filled out exactly as you would fill
    out any other Marshmallow :class:`~marshmallow.Schema` or Schematics :class:`~schematics.Model`.

    For example:

    .. code-block:: python

        # Marshmallow example
        import os

        from dynamorm import DynaModel, GlobalIndex, ProjectAll

        from marshmallow import fields, validate, validates, ValidationError

        class Thing(DynaModel):
            class Table:
                name = 'things'
                hash_key = 'id'
                read = 5
                write = 1

            class ByColor(GlobalIndex):
                name = 'by-color'
                hash_key = 'color'
                read = 5
                write = 1
                projection = ProjectAll()

            class Schema:
                id = fields.String(required=True)
                name = fields.String()
                color = fields.String(validate=validate.OneOf(('purple', 'red', 'yellow')))
                compound = fields.Dict(required=True)

                @validates('name')
                def validate_name(self, value):
                    # this is a very silly example just to illustrate that you can fill out the
                    # inner Schema class just like any other Marshmallow class
                    if value.lower() == 'evan':
                        raise ValidationError("No Evan's allowed")

            def say_hello(self):
                print("Hello. {name} here. My ID is {id} and I'm colored {color}".format(
                    id=self.id,
                    name=self.name,
                    color=self.color
                ))
    """

    def __init__(self, partial=False, **raw):
        r"""Create a new instance of a DynaModel

        :param bool partial: Set to True when ``raw`` does not contain the full document and some
                             attributes may be missing
        :param \*\*raw: The raw data as pulled out of dynamo. This will be validated and the sanitized
                        input will be put onto ``self`` as attributes.
        """
        pre_init.send(self.__class__, instance=self, partial=partial, raw=raw)

        # When creating models you can pass in values to the relationships defined on the model, we remove the value
        # from raw (since it would be ignored when validating anyway), and instead leverage the relationship to
        # determine if we should add any new values to raw to represent the relationship
        relationships = {}
        for name, relationship in six.iteritems(self.relationships):
            new_value = raw.pop(name, None)
            if new_value is not None:
                relationships[name] = new_value

                to_assign = relationship.assign(new_value)
                if to_assign:
                    raw.update(to_assign)

        self._raw = raw
        self._validated_data = self.Schema.dynamorm_validate(
            raw, partial=partial, native=True
        )
        for k, v in six.iteritems(self._validated_data):
            setattr(self, k, v)

        # relationship values are set after the schema values so they win any name collision
        for k, v in six.iteritems(relationships):
            setattr(self, k, v)

        post_init.send(self.__class__, instance=self, partial=partial, raw=raw)

    @classmethod
    def _normalize_keys_in_kwargs(cls, kwargs):
        """Helper method to process kwargs that will be used as Key arguments in Table operations so that they are
        validated against the Schema. This is done so that if a field does transformation during validation or
        marshalling we can accept the untransformed value and pass the transformed value through to the Dynamo
        operation.
        """

        def normalize(key):
            # ``key`` may be absent from kwargs (or be None when the table has no range key);
            # both cases raise KeyError here and leave kwargs untouched
            try:
                validated = cls.Schema.dynamorm_validate(
                    {key: kwargs[key]}, partial=True
                )
                kwargs[key] = validated[key]
            except KeyError:
                pass

        normalize(cls.Table.hash_key)
        normalize(cls.Table.range_key)
        return kwargs

    @classmethod
    def put(cls, item, **kwargs):
        r"""Put a single item into the table for this model

        The attributes on the item go through validation, so this may raise :class:`ValidationError`.

        :param dict item: The item to put into the table
        :param \*\*kwargs: All other kwargs are passed through to the put method on the table
        """
        return cls.Table.put(cls.Schema.dynamorm_validate(item), **kwargs)

    @classmethod
    def put_unique(cls, item, **kwargs):
        r"""Put a single item into the table for this model, with a unique attribute constraint on the hash key

        :param dict item: The item to put into the table
        :param \*\*kwargs: All other kwargs are passed through to the put_unique method on the table
        """
        return cls.Table.put_unique(cls.Schema.dynamorm_validate(item), **kwargs)

    @classmethod
    def put_batch(cls, *items, **batch_kwargs):
        r"""Put one or more items into the table

        :param \*items: The items to put into the table
        :param \*\*kwargs: All other kwargs are passed through to the put_batch method on the table

        Example::

            Thing.put_batch(
                {"hash_key": "one"},
                {"hash_key": "two"},
                {"hash_key": "three"},
            )
        """
        return cls.Table.put_batch(
            *[cls.Schema.dynamorm_validate(item) for item in items], **batch_kwargs
        )

    @classmethod
    def update_item(cls, conditions=None, update_item_kwargs=None, **kwargs):
        r"""Update an item in the table

        :param conditions: A dict of key/val pairs that should be applied as a condition to the update
        :param update_item_kwargs: A dict of other kwargs that are passed through to update_item
        :param \*\*kwargs: Includes your hash/range key/val to match on as well as any keys to update
        """
        # partially re-validate the supplied values so any transformed values are what gets written
        kwargs.update(
            dict(
                (k, v)
                for k, v in six.iteritems(
                    cls.Schema.dynamorm_validate(kwargs, partial=True)
                )
                if k in kwargs
            )
        )
        kwargs = cls._normalize_keys_in_kwargs(kwargs)
        return cls.Table.update(
            conditions=conditions, update_item_kwargs=update_item_kwargs, **kwargs
        )

    @classmethod
    def new_from_raw(cls, raw, partial=False):
        """Return a new instance of this model from a raw (dict) of data that is loaded by our Schema

        :param dict raw: The attributes to use when creating the instance
        :param bool partial: Passed through to the constructor; True when ``raw`` may be incomplete
        """
        if raw is None:
            return None
        return cls(partial=partial, **raw)

    @classmethod
    def get(cls, consistent=False, **kwargs):
        r"""Get an item from the table

        Example::

            Thing.get(hash_key="three")

        :param bool consistent: If set to True the get will be a consistent read
        :param \*\*kwargs: You must supply your hash key, and range key if used
        """
        kwargs = cls._normalize_keys_in_kwargs(kwargs)
        item = cls.Table.get(consistent=consistent, **kwargs)
        return cls.new_from_raw(item)

    @classmethod
    def get_batch(cls, keys, consistent=False, attrs=None):
        """Generator to get more than one item from the table.

        :param keys: One or more dicts containing the hash key, and range key if used
        :param bool consistent: If set to True then get_batch will be a consistent read
        :param str attrs: The projection expression of which attrs to fetch, if None all attrs will be fetched
        """
        keys = (cls._normalize_keys_in_kwargs(key) for key in keys)
        items = cls.Table.get_batch(keys, consistent=consistent, attrs=attrs)
        for item in items:
            # when a projection expression was given the result is not the full document
            yield cls.new_from_raw(item, partial=attrs is not None)

    @classmethod
    def query(cls, *args, **kwargs):
        r"""Execute a query on our table based on our keys

        You supply the key(s) to query based on as keyword arguments::

            Thing.query(foo="Mr. Foo")

        By default the ``eq`` condition is used. If you wish to use any of the other `valid conditions for keys`_ use
        a double underscore syntax following the key name. For example::

            Thing.query(foo__begins_with="Mr.")

        .. _valid conditions for keys: http://boto3.readthedocs.io/en/latest/reference/customizations/dynamodb.html#boto3.dynamodb.conditions.Key

        :param dict query_kwargs: Extra parameters that should be passed through to the Table query function
        :param \*\*kwargs: The key(s) and value(s) to query based on
        """  # noqa
        kwargs = cls._normalize_keys_in_kwargs(kwargs)
        return QueryIterator(cls, *args, **kwargs)

    @classmethod
    def scan(cls, *args, **kwargs):
        r"""Execute a scan on our table

        You supply the attr(s) to query based on as keyword arguments::

            Thing.scan(age=10)

        By default the ``eq`` condition is used. If you wish to use any of the other `valid conditions for attrs`_ use
        a double underscore syntax following the key name. For example:

        * ``<>``: ``Thing.scan(foo__ne='bar')``
        * ``<``: ``Thing.scan(count__lt=10)``
        * ``<=``: ``Thing.scan(count__lte=10)``
        * ``>``: ``Thing.scan(count__gt=10)``
        * ``>=``: ``Thing.scan(count__gte=10)``
        * ``BETWEEN``: ``Thing.scan(count__between=[10, 20])``
        * ``IN``: ``Thing.scan(count__in=[11, 12, 13])``
        * ``attribute_exists``: ``Thing.scan(foo__exists=True)``
        * ``attribute_not_exists``: ``Thing.scan(foo__not_exists=True)``
        * ``attribute_type``: ``Thing.scan(foo__type='S')``
        * ``begins_with``: ``Thing.scan(foo__begins_with='f')``
        * ``contains``: ``Thing.scan(foo__contains='oo')``

        .. _valid conditions for attrs: http://boto3.readthedocs.io/en/latest/reference/customizations/dynamodb.html#boto3.dynamodb.conditions.Attr

        Accessing nested attributes also uses the double underscore syntax::

            Thing.scan(address__state="CA")
            Thing.scan(address__state__begins_with="C")

        Multiple attrs are combined with the AND (&) operator::

            Thing.scan(address__state="CA", address__zip__begins_with="9")

        If you want to combine them with the OR (|) operator, or negate them (~), then you can use the Q function and
        pass them as arguments into scan where each argument is combined with AND::

            from dynamorm import Q

            Thing.scan(Q(address__state="CA") | Q(address__state="NY"), ~Q(address__zip__contains="5"))

        The above would scan for all things with an address.state of (CA OR NY) AND address.zip does not contain 5.

        This returns a generator, which will continue to yield items until all matching the scan are produced,
        abstracting away pagination. More information on scan pagination: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Scan.html#Scan.Pagination

        :param dict scan_kwargs: Extra parameters that should be passed through to the Table scan function
        :param \*args: An optional list of Q objects that can be combined with or superseded the \*\*kwargs values
        :param \*\*kwargs: The key(s) and value(s) to filter based on
        """  # noqa
        kwargs = cls._normalize_keys_in_kwargs(kwargs)
        return ScanIterator(cls, *args, **kwargs)

    def to_dict(self, native=False):
        """Return this instance as a dict of validated data

        :param bool native: Passed through to ``dynamorm_validate``; when True the values are
                            returned in their native (deserialized) representation
        """
        obj = {}
        for k in self.Schema.dynamorm_fields():
            # attributes may be missing on partial instances
            try:
                obj[k] = getattr(self, k)
            except AttributeError:
                pass
        return self.Schema.dynamorm_validate(obj, native=native)

    def validate(self):
        """Validate this instance

        We do this as a "native"/load/deserialization since Marshmallow ONLY raises validation errors for
        required/allow_none/validate(s) during deserialization. See the note at:
        https://marshmallow.readthedocs.io/en/latest/quickstart.html#validation
        """
        return self.to_dict(native=True)

    def save(self, partial=False, unique=False, return_all=False, **kwargs):
        r"""Save this instance to the table

        :param bool partial: When False the whole document will be ``.put`` or ``.put_unique`` to the table.
                             When True only values that have changed since the document was loaded will be sent
                             to the table via an ``.update``.
        :param bool unique: Only relevant if partial=False, ignored otherwise. When False, the document will
                            be ``.put`` to the table. When True, the document will be ``.put_unique``.
        :param bool return_all: Only used for partial saves. Passed through to ``.update``.
        :param \*\*kwargs: When partial is False these are passed through to the put method on the table. When partial
                           is True these become the kwargs for update_item. See ``.put`` & ``.update`` for more
                           details.

        The attributes on the item go through validation, so this may raise :class:`ValidationError`.

        TODO - Support unique, partial saves.
        """
        if not partial:
            pre_save.send(self.__class__, instance=self, put_kwargs=kwargs)
            as_dict = self.to_dict()
            if unique:
                resp = self.Table.put_unique(as_dict, **kwargs)
            else:
                resp = self.Table.put(as_dict, **kwargs)
            # the whole document was written, so it becomes the new baseline for change tracking
            self._validated_data = as_dict
            post_save.send(self.__class__, instance=self, put_kwargs=kwargs)
            return resp

        # Collect the fields to updated based on what's changed
        # XXX: Deeply nested data will still put the whole top-most object that has changed
        # TODO: Support the __ syntax to do deeply nested updates
        updates = dict(
            (k, getattr(self, k))
            for k, v in six.iteritems(self._validated_data)
            if getattr(self, k) != v
        )

        if not updates:
            log.warning("Partial save on %s produced nothing to update", self)

        return self.update(update_item_kwargs=kwargs, return_all=return_all, **updates)

    def _add_hash_key_values(self, hash_dict):
        """Mutate a dictionary to add key: value pair for a hash and (if specified) sort key."""
        hash_dict[self.Table.hash_key] = getattr(self, self.Table.hash_key)
        # TypeError: the table has no range key (range_key is None);
        # AttributeError: the range key attribute is not set on this instance
        try:
            hash_dict[self.Table.range_key] = getattr(self, self.Table.range_key)
        except (AttributeError, TypeError):
            pass

    def update(
        self, conditions=None, update_item_kwargs=None, return_all=False, **kwargs
    ):
        """Update this instance in the table

        New values are set via kwargs to this function:

        .. code-block:: python

            thing.update(foo='bar')

        This would set the ``foo`` attribute of the thing object to ``'bar'``. You cannot change the Hash or Range key
        via an update operation -- this is a property of DynamoDB.

        You can supply a dictionary of conditions that influence the update. In their simplest form Conditions are
        supplied as a direct match (eq)::

            thing.update(foo='bar', conditions=dict(foo='foo'))

        This update would only succeed if foo was set to 'foo' at the time of the update. If you wish to use any of the
        other `valid conditions for attrs`_ use a double underscore syntax following the key name. You can also access
        nested attributes using the double underscore syntax. See the scan method for examples of both.

        You can also pass Q objects to conditions as either a complete expression, or a list of expressions that will be
        AND'd together::

            thing.update(foo='bar', conditions=Q(foo='foo'))

            thing.update(foo='bar', conditions=Q(foo='foo') | Q(bar='bar'))

            # the following two statements are equivalent
            thing.update(foo='bar', conditions=Q(foo='foo') & ~Q(bar='bar'))
            thing.update(foo='bar', conditions=[Q(foo='foo'), ~Q(bar='bar')])

        If your update conditions do not match then a dynamorm.exceptions.ConditionFailed exception will be raised.

        As long as the update succeeds the attrs on this instance will be updated to match their new values. If you set
        ``return_all`` to true then we will update all of the attributes on the object with the current values in
        Dynamo, rather than just those you updated.

        .. expressions supported by Dynamo: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.OperatorsAndFunctions.html
        """
        is_noop = not kwargs
        resp = None

        self._add_hash_key_values(kwargs)

        pre_update.send(
            self.__class__,
            instance=self,
            conditions=conditions,
            update_item_kwargs=update_item_kwargs,
            updates=kwargs,
        )

        if not is_noop:
            if return_all is True:
                return_values = "ALL_NEW"
            else:
                return_values = "UPDATED_NEW"
            # update_item_kwargs may be None, in which case assignment raises TypeError
            try:
                update_item_kwargs["ReturnValues"] = return_values
            except TypeError:
                update_item_kwargs = {"ReturnValues": return_values}

            resp = self.update_item(
                conditions=conditions, update_item_kwargs=update_item_kwargs, **kwargs
            )

            # update our local attrs to match what we updated
            partial_model = self.new_from_raw(resp["Attributes"], partial=True)
            for key, _ in six.iteritems(resp["Attributes"]):
                # elsewhere in Dynamorm, models can be created without all fields (non-"strict" mode in Schematics),
                # so we drop unknown keys here to be consistent
                if hasattr(partial_model, key):
                    val = getattr(partial_model, key)
                    setattr(self, key, val)
                    self._validated_data[key] = val

        post_update.send(
            self.__class__,
            instance=self,
            conditions=conditions,
            update_item_kwargs=update_item_kwargs,
            updates=kwargs,
        )
        return resp

    def delete(self):
        """Delete this record in the table."""
        delete_item_kwargs = {}
        self._add_hash_key_values(delete_item_kwargs)
        self._normalize_keys_in_kwargs(delete_item_kwargs)

        pre_delete.send(self.__class__, instance=self)
        resp = self.Table.delete_item(**delete_item_kwargs)
        post_delete.send(self.__class__, instance=self)
        return resp
591 |
--------------------------------------------------------------------------------
/dynamorm/relationships.py:
--------------------------------------------------------------------------------
1 | """Relationships leverage the native tables & indexes in DynamoDB to allow more concise definition and access of related
2 | objects in your Python code.
3 |
4 | You define relationships along side your Schema and Indexes on your model, and must provide the query used to map the
5 | related models together. You can also supply a "back reference" query to have the other side of the relationship also
6 | have a relationship back to the defining model.
7 |
8 | DynamORM provides the following relationship types:
9 |
10 | * :py:class:`dynamorm.relationships.OneToOne` - Useful when you have a large number of attributes to store and you want
11 | to break them up over multiple tables for performance in querying.
12 |
13 | * :py:class:`dynamorm.relationships.OneToMany` / :py:class:`dynamorm.relationships.ManyToOne` - Useful when you have an
14 | instance of one model that has a collection of related instances of another model. You use ``OneToMany`` or
  ``ManyToOne`` depending on which side of the relationship you are defining the attribute on. You'll need to use both
16 | interchangeably based on how your models are laid out since you need to pass a reference to the other model into the
17 | relationship.
18 |
19 |
20 | Here's an example of how you could model the `Forum Application`_ from the DynamoDB Examples:
21 |
22 | .. _Forum Application: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/SampleData.CreateTables.html#SampleData.CreateTables2
23 |
24 | .. code-block:: python
25 |
26 | class Reply(DynaModel):
27 | class Table:
28 | name = 'replies'
29 | hash_key = 'forum_thread'
30 | range_key = 'created'
31 | read = 1
32 | write = 1
33 |
34 | class ByUser(GlobalIndex):
35 | name = 'replies-by-user'
36 | hash_key = 'user_name'
37 | range_key = 'message'
38 | projection = ProjectKeys()
39 | read = 1
40 | write = 1
41 |
42 | class Schema:
43 | forum_thread = String(required=True)
44 | created = String(required=True)
45 | user_name = String(required=True)
46 | message = String()
47 |
48 | class User(DynaModel):
49 | class Table:
50 | name = 'users'
51 | hash_key = 'name'
52 | read = 1
53 | write = 1
54 |
55 | class Schema:
56 | name = String(required=True)
57 |
58 | replies = OneToMany(
59 | Reply,
60 | index='ByUser',
61 | query=lambda user: dict(user_name=user.name),
62 | back_query=lambda reply: dict(name=reply.user_name)
63 | )
64 |
65 | class Thread(DynaModel):
66 | class Table:
67 | name = 'threads'
68 | hash_key = 'forum_name'
69 | range_key = 'subject'
70 | read = 1
71 | write = 1
72 |
73 | class ByUser(GlobalIndex):
74 | name = 'threads-by-user'
75 | hash_key = 'user_name'
76 | range_key = 'subject'
77 | projection = ProjectKeys()
78 | read = 1
79 | write = 1
80 |
81 | class Schema:
82 | forum_name = String(required=True)
83 | user_name = String(required=True)
84 | subject = String(required=True)
85 |
86 | user = ManyToOne(
87 | User,
88 | query=lambda thread: dict(name=thread.user_name),
89 | back_index='ByUser',
90 | back_query=lambda user: dict(user_name=user.name)
91 | )
92 | replies = OneToMany(
93 | Reply,
94 | query=lambda thread: dict(forum_thread='{0}\\n{1}'.format(thread.forum_name, thread.subject)),
95 | back_query=lambda reply: dict(
96 | forum_name=reply.forum_thread.split('\\n')[0],
97 | subject=reply.forum_thread.split('\\n')[1]
98 | )
99 | )
100 |
101 | class Forum(DynaModel):
102 | class Table:
103 | name = 'forums'
104 | hash_key = 'name'
105 | read = 1
106 | write = 1
107 |
108 | class Schema:
109 | name = String(required=True)
110 |
111 | threads = OneToMany(
112 | Thread,
113 | query=lambda forum: dict(forum_name=forum.name),
114 | back_query=lambda thread: dict(name=thread.forum_name)
115 | )
116 | """
117 |
118 | import six
119 |
120 | from .signals import pre_save, post_save, pre_update, post_update
121 |
122 |
@six.python_2_unicode_compatible
class DefaultBackReference(object):
    """Default naming strategy for back references.

    The string representation of this object is a "python" (snake_case) rendering of the name
    of the model the relationship is defined on. For example, a relationship defined on a model
    named ``OrderItem`` renders as ``order_item``.
    """

    def __init__(self, relationship):
        self.relationship = relationship

    def __str__(self):
        pieces = []
        for char in self.relationship.this.__name__:
            if char.islower():
                pieces.append(char)
            else:
                # non-lowercase characters become an underscore followed by their lowercase form
                pieces.append("_")
                pieces.append(char.lower())
        return "".join(pieces).strip("_")
141 |
142 |
class Relationship(object):
    """Base class for all relationship types.

    Subclasses define ``BackReferenceClass`` (the relationship type to place on the other model,
    or the string ``"self"`` to reuse the subclass itself) and ``BackReferenceTemplate`` (the
    format string used to name the generated back-reference attribute on the other model).
    """

    BackReferenceClass = None
    BackReferenceTemplate = None

    def __init__(
        self,
        other,
        query,
        index=None,
        back_query=None,
        back_index=None,
        back_reference=DefaultBackReference,
    ):
        """
        :param other: The other model class this relationship points at
        :param query: Callable taking an instance of this model and returning the query kwargs
                      used to find the related instance(s) on ``other``
        :param index: Optional name of an index attribute on ``other`` to query through instead
                      of the other model itself
        :param back_query: Optional callable taking an ``other`` instance and returning the query
                           kwargs for the reverse lookup; required to create a back reference
        :param back_reference: Name used for the generated attribute on ``other``; may be a
                               callable taking this relationship (the default renders the owning
                               model's name in snake_case), or None to disable the back reference
        :param back_index: Optional index name used by the generated back reference
        """
        self.this = None  # filled in later by the metaclass via set_this_model()
        self.other = other
        self.query = query
        self.index = index
        self.back_query = back_query
        self.back_index = back_index
        self.back_reference = back_reference
        self.back_reference_relationship = None

        # queries go through the index when one is named, otherwise through the other model directly
        self.accessor = self.other if index is None else getattr(self.other, index)

    def __repr__(self):
        return "{0}({1}, {2})".format(self.__class__.__name__, self.this, self.other)

    def set_this_model(self, model):
        """Called from the metaclass once the model the relationship is being placed on has been initialized"""
        self.this = model

        if self.back_query is not None:
            # back_reference may be a factory (e.g. DefaultBackReference); resolve it now
            if callable(self.back_reference):
                self.back_reference = self.back_reference(self)

            if self.back_reference is not None:
                self.set_back_reference()

    def set_back_reference(self):
        """Sets up a back reference to this model on the other model"""
        assert (
            self.this is not None
        ), "This model must be set prior to setting up a back reference!"

        if self.BackReferenceClass == "self":
            back_ref_cls = self.__class__
        else:
            back_ref_cls = self.BackReferenceClass

        # the reverse relationship swaps query/back_query; back_reference=None prevents recursion
        self.back_reference_relationship = back_ref_cls(
            self.this,
            query=self.back_query,
            index=self.back_index,
            back_query=self.query,
            back_reference=None,
        )
        self.back_reference_relationship.set_this_model(self.other)

        ref_name = self.BackReferenceTemplate.format(back_reference=self.back_reference)
        setattr(self.other, ref_name, self.back_reference_relationship)
        self.other.relationships[ref_name] = self.back_reference_relationship

    def assign(self, value):
        """Hook called when a value for this relationship is passed to the model constructor.

        Subclasses may return a dict of attributes to be merged into the raw document data;
        this base implementation returns None (nothing to merge).
        """
        pass
208 |
209 |
class OneToOne(Relationship):
    """A One-to-One relationship is where two models (tables) have items that have a relation to exactly one instance in
    the other model.

    It is a useful pattern when you wish to split up large tables with many attributes where your "main" table is
    queried frequently and having all of the attributes included in the query results would increase your required
    throughput. By splitting the data into two tables you can have lower throughput on the "secondary" table as the
    items will be lazily fetched only as they are accessed.
    """

    BackReferenceClass = "self"
    BackReferenceTemplate = "{back_reference}"

    def __init__(
        self,
        other,
        query,
        index=None,
        back_query=None,
        back_index=None,
        back_reference=DefaultBackReference,
        auto_create=True,
    ):
        """
        :param bool auto_create: When True, accessing the relationship when no related item exists
                                 creates a new (partial) instance of ``other`` from the query values
        """
        super(OneToOne, self).__init__(
            other=other,
            query=query,
            index=index,
            back_query=back_query,
            back_index=back_index,
            back_reference=back_reference,
        )
        # NOTE(review): other_inst lives on the relationship object, which is a class-level
        # descriptor -- it appears to be shared across all instances of the owning model;
        # confirm this is the intended caching behavior.
        self.other_inst = None
        self.auto_create = auto_create

    def __get__(self, obj, owner):
        # lazily fetch (and optionally create) the related instance on first access
        if self.other_inst is None:
            self.get_other_inst(obj, create_missing=self.auto_create)
        return self.other_inst

    def __set__(self, obj, new_instance):
        if not isinstance(new_instance, self.other):
            # BUG FIX: the format args were previously passed as extra positional args to
            # TypeError (logging style), which rendered the message as an unformatted tuple
            raise TypeError(
                "%s is not an instance of %s" % (new_instance, self.other)
            )

        # copy the linking key/values from our query onto the new instance so it stays related
        query = self.query(obj)
        for key, val in six.iteritems(query):
            setattr(new_instance, key, val)

        self.other_inst = new_instance

    def __delete__(self, obj):
        if self.other_inst is None:
            self.get_other_inst(obj, create_missing=False)

        if not self.other_inst:
            raise AttributeError("No other instance to delete")

        self.other_inst.delete()
        self.other_inst = None

    def set_this_model(self, model):
        super(OneToOne, self).set_this_model(model)

        # when a back reference exists, keep the related instance in sync with the model's
        # save/update life cycle via signals
        if self.back_reference:
            pre_save.connect(self.pre_save, sender=model)
            post_save.connect(self.post_save, sender=model)
            pre_update.connect(self.pre_update, sender=model)
            post_update.connect(self.post_update, sender=model)

    def get_other_inst(self, obj, create_missing=False):
        """Query for the related instance, optionally creating a partial one when none exists.

        :param obj: The instance of the owning model used to build the query
        :param bool create_missing: When True and the query yields nothing, build a new partial
                                    ``other`` instance from the query values
        """
        query = self.query(obj)
        results = self.accessor.query(**query)

        try:
            self.other_inst = next(results)
        except StopIteration:
            if create_missing:
                query["partial"] = True
                self.other_inst = self.other(**query)

    def assign(self, value):
        # the back query produces the attributes that link the owning document to ``value``
        return self.back_query(value)

    def pre_save(self, sender, instance, put_kwargs):
        # validate the related instance before the owning model is saved
        if self.other_inst:
            self.other_inst.validate()

    def post_save(self, sender, instance, put_kwargs):
        # persist the related instance after the owning model is saved
        if self.other_inst:
            self.other_inst.save(partial=False)

    def pre_update(self, sender, instance, conditions, update_item_kwargs, updates):
        if self.other_inst:
            self.other_inst.validate()

    def post_update(self, sender, instance, conditions, update_item_kwargs, updates):
        if self.other_inst:
            self.other_inst.save(partial=True)
307 |
308 |
class OneToMany(Relationship):
    """A One to Many relationship is defined on the "parent" model, where each instance has many related "child"
    instances of another model.
    """

    BackReferenceClass = OneToOne
    BackReferenceTemplate = "{back_reference}"

    def __get__(self, obj, owner):
        # build the lazy collection of children; query through the index when one is configured
        if self.index:
            index_accessor = self.accessor
        else:
            index_accessor = None
        return QuerySet(self.other, self.query(obj), index_accessor)
321 |
322 |
class ManyToOne(OneToOne):
    """A Many To One relationship is defined on the "child" model, where many child models have one parent model."""

    # the parent side gets a OneToMany back reference (a collection of children)
    BackReferenceClass = OneToMany
    # pluralize the generated back-reference attribute name (e.g. ``order_item`` -> ``order_items``)
    BackReferenceTemplate = "{back_reference}s"
328 |
329 |
330 | # XXX TODO: ManyToMany
331 |
332 |
class QuerySet(object):
    """A lazy, composable wrapper around a query against a model's table or one of its indexes.

    Iterating a QuerySet executes the query.  ``count()`` (and ``len()``) issue a COUNT-only
    query rather than fetching items.  ``filter()`` returns a new QuerySet with additional
    query kwargs merged in, leaving this one untouched.
    """

    # XXX TODO: QuerySet should be moved to its own namespace and should also be leveraged by the model classes so that
    # when you query on a Table or Index you get back one of these. This will allow you to call .count() and .filter()
    # on existing queries to further refine them.
    def __init__(self, model, query, index=None):
        """
        :param model: The model whose table (or index) will be queried.
        :param dict query: The kwargs that will be passed to the query method.
        :param index: Optional index accessor; when set, queries go through the index.
        """
        self.model = model
        self.query = query
        self.index = index

    def __iter__(self):
        """Execute the query and return an iterator over the results."""
        if self.index:
            return self.index.query(**self.query)
        return self.model.query(**self.query)

    def __len__(self):
        return self.count()

    def count(self):
        """Return the number of matching items via a COUNT-only query.

        Caller-supplied ``query_kwargs`` in the query are preserved; previously they were
        silently discarded when the ``Select=COUNT`` option was applied.
        """
        query = self.query.copy()
        # merge -- rather than overwrite -- any existing query_kwargs so options such as
        # ConsistentRead survive the COUNT query
        query_kwargs = dict(query.get("query_kwargs") or {})
        query_kwargs["Select"] = "COUNT"
        if self.index:
            query_kwargs["IndexName"] = self.index.name
        query["query_kwargs"] = query_kwargs
        resp = self.model.Table.query(**query)
        return resp["Count"]

    def filter(self, **kwargs):
        """Return a new QuerySet with ``kwargs`` merged into a copy of the existing query."""
        new_query = self.query.copy()
        new_query.update(kwargs)
        return QuerySet(model=self.model, query=new_query, index=self.index)
362 |
--------------------------------------------------------------------------------
/dynamorm/signals.py:
--------------------------------------------------------------------------------
1 | """Signals provide a way for applications to loosely couple themselves and respond to different life cycle events.
2 |
3 | The `blinker`_ library provides the low-level signal implementation.
4 |
5 | To use the signals you ``connect`` a receiver function to the signals you're interested in:
6 |
7 | .. code-block:: python
8 |
9 | from dynamorm.signals import post_save
10 |
11 | def post_save_receiver(sender, instance, partial, put_kwargs):
12 | log.info("Received post_save signal from model %s for instance %s", sender, instance)
13 |
14 | post_save.connect(post_save_receiver)
15 |
16 | See the `blinker`_ documentation for more details.
17 |
18 | .. _blinker: https://pythonhosted.org/blinker/
19 | """
20 |
21 | from blinker import signal
22 |
model_prepared = signal(
    "dynamorm.model_prepared",
    doc="""Sent whenever a model class has been prepared by the metaclass.

:param: sender: The model class that is now prepared for use.
""",
)

pre_init = signal(
    "dynamorm.pre_init",
    doc="""Sent during model instantiation, before processing the raw data.

:param: sender: The model class.
:param: instance: The model instance.
:param: bool partial: True if this is a partial instantiation, not all data may be present.
:param: dict raw: The raw data to be processed by the model schema.
""",
)

post_init = signal(
    "dynamorm.post_init",
    doc="""Sent once model instantiation is complete and all raw data has been processed.

:param: sender: The model class.
:param: instance: The model instance.
:param: bool partial: True if this is a partial instantiation, not all data may be present.
:param: dict raw: The raw data to be processed by the model schema.
""",
)

pre_save = signal(
    "dynamorm.pre_save",
    doc="""Sent before saving (via put) model instances.

:param: sender: The model class.
:param: instance: The model instance.
:param: dict put_kwargs: A dict of the kwargs being sent to the table put method.
""",
)

post_save = signal(
    "dynamorm.post_save",
    doc="""Sent after saving (via put) model instances.

:param: sender: The model class.
:param: instance: The model instance.
:param: dict put_kwargs: A dict of the kwargs being sent to the table put method.
""",
)

pre_update = signal(
    "dynamorm.pre_update",
    doc="""Sent before saving (via update) model instances.

:param: sender: The model class.
:param: instance: The model instance.
:param: dict conditions: The conditions for the update to succeed.
:param: dict update_item_kwargs: A dict of the kwargs being sent to the table update method.
:param: dict updates: The fields to update.
""",
)

post_update = signal(
    "dynamorm.post_update",
    doc="""Sent after saving (via update) model instances.

:param: sender: The model class.
:param: instance: The model instance.
:param: dict conditions: The conditions for the update to succeed.
:param: dict update_item_kwargs: A dict of the kwargs being sent to the table update method.
:param: dict updates: The fields to update.
""",
)

pre_delete = signal(
    "dynamorm.pre_delete",
    doc="""Sent before deleting model instances.

:param: sender: The model class.
:param: instance: The model instance.
""",
)

post_delete = signal(
    "dynamorm.post_delete",
    doc="""Sent after deleting model instances.

:param: sender: The model class.
:param: instance: The deleted model instance.
""",
)
114 |
--------------------------------------------------------------------------------
/dynamorm/table.py:
--------------------------------------------------------------------------------
1 | """The inner ``Table`` class on ``DynaModel`` definitions becomes an instance of our
2 | :class:`dynamorm.table.DynamoTable3` class.
3 |
4 | The attributes you define on your inner ``Table`` class map to underlying boto data structures. This mapping is
5 | expressed through the following data model:
6 |
7 | ========= ======== ==== ===========
8 | Attribute Required Type Description
9 | ========= ======== ==== ===========
10 | name True str The name of the table, as stored in Dynamo.
11 |
12 | hash_key True str The name of the field to use as the hash key.
13 | It must exist in the schema.
14 |
15 | range_key False str The name of the field to use as the range_key, if one is used.
16 | It must exist in the schema.
17 |
18 | read True int The provisioned read throughput.
19 |
20 | write True int The provisioned write throughput.
21 |
22 | stream False str The stream view type, either None or one of:
23 | 'NEW_IMAGE'|'OLD_IMAGE'|'NEW_AND_OLD_IMAGES'|'KEYS_ONLY'
24 |
25 | ========= ======== ==== ===========
26 |
27 |
28 | Indexes
29 | -------
30 |
31 | Like the ``Table`` definition, Indexes are also inner classes on ``DynaModel`` definitions, and they require the same
32 | data model with one extra field.
33 |
34 | ========== ======== ====== ===========
35 | Attribute Required Type Description
36 | ========== ======== ====== ===========
 37 | projection True object An instance of :class:`dynamorm.model.ProjectAll`,
38 | :class:`dynamorm.model.ProjectKeys`, or :class:`dynamorm.model.ProjectInclude`
39 |
40 | ========== ======== ====== ===========
41 |
42 | """
43 |
44 | import logging
45 | import time
46 | import warnings
47 | from collections import defaultdict, OrderedDict
48 |
49 | try:
50 | from collections.abc import Iterable, Mapping
51 | except ImportError:
52 | from collections import Iterable, Mapping
53 |
54 | import boto3
55 | import botocore
56 | import six
57 |
58 | from boto3.dynamodb.conditions import Key, Attr
59 | from dynamorm.exceptions import (
60 | MissingTableAttribute,
61 | TableNotActive,
62 | InvalidSchemaField,
63 | HashKeyExists,
64 | ConditionFailed,
65 | )
66 |
67 | log = logging.getLogger(__name__)
68 |
69 |
class DynamoCommon3(object):
    """Shared behaviour for the boto3-backed DynamORM objects -- i.e. Tables & Indexes."""

    # attributes that must be non-None before the object can be constructed
    REQUIRED_ATTRS = ("name", "hash_key")

    name = None
    hash_key = None
    range_key = None
    read = None
    write = None

    def __init__(self):
        """Validate that required attributes are set and that key fields exist in the schema.

        :raises MissingTableAttribute: When a required attribute is unset.
        :raises InvalidSchemaField: When the hash or range key is not a schema field.
        """
        for required in self.REQUIRED_ATTRS:
            if getattr(self, required) is None:
                raise MissingTableAttribute(
                    "Missing required Table attribute: {0}".format(required)
                )

        if self.hash_key not in self.schema.dynamorm_fields():
            raise InvalidSchemaField(
                "The hash key '{0}' does not exist in the schema".format(self.hash_key)
            )

        if self.range_key and self.range_key not in self.schema.dynamorm_fields():
            raise InvalidSchemaField(
                "The range key '{0}' does not exist in the schema".format(
                    self.range_key
                )
            )

    @property
    def key_schema(self):
        """The boto3 KeySchema built from our hash key and (optional) range key."""
        keys = [{"AttributeName": self.hash_key, "KeyType": "HASH"}]
        if self.range_key:
            keys.append({"AttributeName": self.range_key, "KeyType": "RANGE"})
        return keys

    @property
    def provisioned_throughput(self):
        """The boto3 ProvisionedThroughput built from our read/write attributes."""
        return {
            "ReadCapacityUnits": self.read,
            "WriteCapacityUnits": self.write,
        }
116 |
117 |
class DynamoIndex3(DynamoCommon3):
    """Base class for the boto3 secondary index definitions attached to a :class:`DynamoTable3`."""

    REQUIRED_ATTRS = DynamoCommon3.REQUIRED_ATTRS + ("projection",)
    # create_table keyword this index definition is grouped under (set by subclasses)
    ARG_KEY = None
    # name of the user-facing index base class this implementation backs (set by subclasses)
    INDEX_TYPE = None

    projection = None

    @classmethod
    def lookup_by_type(cls, index_type):
        """Return the first subclass whose INDEX_TYPE matches ``index_type``.

        :raises RuntimeError: When no subclass matches.
        """
        try:
            return next(
                subclass
                for subclass in cls.__subclasses__()
                if subclass.INDEX_TYPE == index_type
            )
        except StopIteration:
            raise RuntimeError("Unknown index type: %s" % index_type)

    def __init__(self, table, schema):
        self.table = table
        self.schema = schema

        super(DynamoIndex3, self).__init__()

    @property
    def resource(self):
        """Delegate to the owning table's boto3 resource."""
        return self.table.resource

    @property
    def index_args(self):
        """The boto3 kwargs describing this index: name, key schema and projection."""
        projection_type = self.projection.__class__.__name__
        if projection_type == "ProjectAll":
            projection = {"ProjectionType": "ALL"}
        elif projection_type == "ProjectKeys":
            projection = {"ProjectionType": "KEYS_ONLY"}
        elif projection_type == "ProjectInclude":
            projection = {
                "ProjectionType": "INCLUDE",
                "NonKeyAttributes": self.projection.include,
            }
        else:
            raise RuntimeError("Unknown projection mode!")

        return {
            "IndexName": self.name,
            "KeySchema": self.key_schema,
            "Projection": projection,
        }
161 |
162 |
class DynamoLocalIndex3(DynamoIndex3):
    """A local secondary index -- shares the table's provisioned throughput."""

    # matches the user-facing LocalIndex class name (see DynamoIndex3.lookup_by_type)
    INDEX_TYPE = "LocalIndex"
    # create_table keyword this index definition is grouped under
    ARG_KEY = "LocalSecondaryIndexes"
166 |
167 |
class DynamoGlobalIndex3(DynamoIndex3):
    """A global secondary index -- carries its own provisioned throughput settings."""

    INDEX_TYPE = "GlobalIndex"
    ARG_KEY = "GlobalSecondaryIndexes"

    @property
    def index_args(self):
        """Extend the base index args with this index's own throughput settings."""
        index_kwargs = super(DynamoGlobalIndex3, self).index_args
        index_kwargs["ProvisionedThroughput"] = self.provisioned_throughput
        return index_kwargs
177 |
178 |
class DynamoTable3(DynamoCommon3):
    """Represents a Table object in the Boto3 DynamoDB API

    This is built in such a way that in the future, when Amazon releases future boto versions, a new DynamoTable class
    can be authored that implements the same methods but maps through to the new semantics.
    """

    # kwargs for boto3.Session / Session.resource -- see get_resource
    session_kwargs = None
    resource_kwargs = None

    # stream view type: None, or one of the values validated in __init__
    stream = None

    def __init__(self, schema, indexes=None):
        """Bind this table to ``schema`` and build concrete index instances from ``indexes``.

        :param schema: The model schema object; must provide ``dynamorm_fields``.
        :param dict indexes: Optional mapping of attribute name -> uninstantiated index class.
        :raises ConditionFailed: If the ``stream`` attribute is not a valid stream view type.
        """
        self.schema = schema

        super(DynamoTable3, self).__init__()

        self.indexes = {}
        if indexes:
            for name, klass in six.iteritems(indexes):
                # Our indexes are just uninstantiated classes, but what we are interested in is what their parent class
                # name is. We can reach into the MRO to find that out, and then determine our own index type.
                index_type = klass.__mro__[1].__name__
                index_class = DynamoIndex3.lookup_by_type(index_type)

                # Now that we know which of our classes we want to use, we create a new class on the fly that uses our
                # class with the attributes of the original class
                new_class = type(
                    name,
                    (index_class,),
                    dict(
                        (k, v) for k, v in six.iteritems(klass.__dict__) if k[0] != "_"
                    ),
                )

                self.indexes[klass.name] = new_class(self, schema)

        if self.stream and self.stream not in [
            "NEW_IMAGE",
            "OLD_IMAGE",
            "NEW_AND_OLD_IMAGES",
            "KEYS_ONLY",
        ]:
            raise ConditionFailed(
                "Stream parameter '{0}' is invalid".format(self.stream)
            )

    @property
    def resource(self):
        """The boto3 dynamodb resource (see :meth:`get_resource`)"""
        return self.get_resource()

    @classmethod
    def get_resource(cls, **kwargs):
        """Return the boto3 resource

        If you provide kwargs here and the class doesn't have any resource_kwargs defined then the ones passed will
        permanently override the resource_kwargs on the class.

        This is useful for bootstrapping test resources against a Dynamo local instance as a call to
        ``DynamoTable3.get_resource`` will end up replacing the resource_kwargs on all classes that do not define their
        own.
        """
        if kwargs and not cls.resource_kwargs:
            cls.resource_kwargs = kwargs

        boto3_session = boto3.Session(**(cls.session_kwargs or {}))

        for key, val in six.iteritems(cls.resource_kwargs or {}):
            kwargs.setdefault(key, val)

        # allow for dict based resource config that we convert into a botocore Config object
        # https://botocore.readthedocs.io/en/stable/reference/config.html
        try:
            resource_config = kwargs["config"]
        except KeyError:
            # no 'config' provided in the kwargs
            pass
        else:
            if isinstance(resource_config, dict):
                kwargs["config"] = botocore.config.Config(**resource_config)

        return boto3_session.resource("dynamodb", **kwargs)

    @classmethod
    def get_table(cls, name):
        """Return the boto3 Table object for this model, create it if it doesn't exist

        The Table is stored on the class for each model, so it is shared between all instances of a given model.
        """
        try:
            return cls._table
        except AttributeError:
            pass

        cls._table = cls.get_resource().Table(name)
        return cls._table

    @property
    def table(self):
        """Return the boto3 table"""
        return self.get_table(self.name)

    @property
    def exists(self):
        """Return True or False based on the existence of this tables name in our resource"""
        return any(table.name == self.name for table in self.resource.tables.all())

    @property
    def table_attribute_fields(self):
        """Returns a list with the names of the table attribute fields (hash or range key)"""
        fields = set([self.hash_key])
        if self.range_key:
            fields.add(self.range_key)

        return fields

    @property
    def all_attribute_fields(self):
        """Returns a list with the names of all the attribute fields (hash or range key on the table or indexes)"""
        return self.table_attribute_fields.union(self.index_attribute_fields())

    def index_attribute_fields(self, index_name=None):
        """Return the attribute fields for a given index, or all indexes if omitted"""
        fields = set()

        for index in six.itervalues(self.indexes):
            if index_name and index.name != index_name:
                continue

            fields.add(index.hash_key)
            if index.range_key:
                fields.add(index.range_key)

        return fields

    @property
    def attribute_definitions(self):
        """Return an appropriate AttributeDefinitions, based on our key attributes and the schema object"""
        defs = []

        for name in self.all_attribute_fields:
            dynamorm_field = self.schema.dynamorm_fields()[name]
            field_type = self.schema.field_to_dynamo_type(dynamorm_field)

            defs.append({"AttributeName": name, "AttributeType": field_type})

        return defs

    @property
    def stream_specification(self):
        """Return an appropriate StreamSpecification, based on the stream attribute"""
        spec = {}

        if self.stream:
            spec = {"StreamEnabled": True, "StreamViewType": self.stream}
        else:
            spec = {"StreamEnabled": False}

        return spec

    def create(self, wait=True):
        """DEPRECATED -- shim"""
        warnings.warn(
            "DynamoTable3.create has been deprecated, please use DynamoTable3.create_table",
            DeprecationWarning,
        )
        return self.create_table(wait=wait)

    def create_table(self, wait=True):
        """Create a new table based on our attributes

        :param bool wait: If set to True, the default, this call will block until the table is created
        """
        if not self.read or not self.write:
            raise MissingTableAttribute(
                "The read/write attributes are required to create a table"
            )

        index_args = defaultdict(list)
        for index in six.itervalues(self.indexes):
            index_args[index.ARG_KEY].append(index.index_args)

        log.info("Creating table %s", self.name)
        table = self.resource.create_table(
            TableName=self.name,
            KeySchema=self.key_schema,
            AttributeDefinitions=self.attribute_definitions,
            ProvisionedThroughput=self.provisioned_throughput,
            StreamSpecification=self.stream_specification,
            **index_args
        )
        if wait:
            log.info("Waiting for table creation...")
            table.meta.client.get_waiter("table_exists").wait(TableName=self.name)
        return table

    # recursion counter for update_table -- None when no update is in flight
    _update_table_ops = None

    def update_table(self):
        """Updates an existing table

        Per the AWS documentation:

        You can only perform one of the following operations at once:

        * Modify the provisioned throughput settings of the table.
        * Enable or disable Streams on the table.
        * Remove a global secondary index from the table.
        * Create a new global secondary index on the table.

        Thus, this will recursively call itself to perform each of these operations in turn, waiting for the table to
        return to 'ACTIVE' status before performing the next.

        This returns the number of update operations performed.
        """
        try:
            self._update_table_ops += 1
        except TypeError:
            # first (outermost) call -- start counting at zero
            self._update_table_ops = 0

        table = self.resource.Table(self.name)

        def wait_for_active():
            def _wait(thing_type, thing_name, thing_status_callback):
                wait_duration = 0.5
                if thing_status_callback(table) != "ACTIVE":
                    log.info(
                        "Waiting for %s %s to become active before performing update...",
                        thing_type,
                        thing_name,
                    )

                while thing_status_callback(table) != "ACTIVE":
                    if thing_type == "index":
                        if thing_status_callback(table) is None:
                            # once the index status is None then the index is gone
                            break

                        ok_statuses = ("CREATING", "UPDATING", "DELETING")
                    else:
                        ok_statuses = ("CREATING", "UPDATING")

                    thing_status = thing_status_callback(table)
                    if thing_status in ok_statuses:
                        time.sleep(wait_duration)
                        if wait_duration < 20:
                            wait_duration = min(20, wait_duration * 2)
                        table.load()
                        continue

                    raise TableNotActive(
                        "{0} {1} is {2}".format(
                            thing_type, thing_name, thing_status
                        )
                    )

            def _index_status(table, index_name):
                for index in table.global_secondary_indexes or []:
                    if index["IndexName"] == index_name:
                        return index["IndexStatus"]

            _wait("table", table.table_name, lambda table: table.table_status)
            for index in table.global_secondary_indexes or []:
                _wait(
                    "index",
                    index["IndexName"],
                    lambda table: _index_status(table, index["IndexName"]),
                )

        def do_update(**kwargs):
            kwargs.update(dict(AttributeDefinitions=self.attribute_definitions))
            return table.update(**kwargs)

        wait_for_active()

        # check if we're going to change our capacity
        if (self.read and self.write) and (
            self.read != table.provisioned_throughput["ReadCapacityUnits"]
            or self.write != table.provisioned_throughput["WriteCapacityUnits"]
        ):

            log.info(
                "Updating capacity on table %s (%s -> %s)",
                self.name,
                dict(
                    (k, v)
                    for k, v in six.iteritems(table.provisioned_throughput)
                    if k.endswith("Units")
                ),
                self.provisioned_throughput,
            )
            do_update(ProvisionedThroughput=self.provisioned_throughput)
            return self.update_table()

        # check if we're going to modify the stream
        if self.stream_specification != (
            table.stream_specification or {"StreamEnabled": False}
        ):
            log.info(
                "Updating stream on table %s (%s -> %s)",
                self.name,
                table.stream_specification["StreamViewType"]
                if table.stream_specification
                and "StreamEnabled" in table.stream_specification
                else "NONE",
                self.stream,
            )
            do_update(StreamSpecification=self.stream_specification)
            return self.update_table()

        # Now for the global indexes, turn the data structure into a real dictionary so we can look things up by name
        # Along the way we'll delete any indexes that are no longer defined
        existing_indexes = {}
        for index in table.global_secondary_indexes or []:
            if index["IndexName"] not in self.indexes:
                log.info(
                    "Deleting global secondary index %s on table %s",
                    index["IndexName"],
                    self.name,
                )
                do_update(
                    GlobalSecondaryIndexUpdates=[
                        {"Delete": {"IndexName": index["IndexName"]}}
                    ]
                )
                return self.update_table()

            existing_indexes[index["IndexName"]] = index

        for index in six.itervalues(self.indexes):
            if index.name in existing_indexes:
                current_capacity = existing_indexes[index.name]["ProvisionedThroughput"]
                if (index.read and index.write) and (
                    index.read != current_capacity["ReadCapacityUnits"]
                    or index.write != current_capacity["WriteCapacityUnits"]
                ):

                    log.info(
                        "Updating capacity on global secondary index %s on table %s (%s)",
                        index.name,
                        self.name,
                        index.provisioned_throughput,
                    )

                    do_update(
                        GlobalSecondaryIndexUpdates=[
                            {
                                "Update": {
                                    # BUGFIX: ``index`` here is a DynamoIndex3 instance, not a
                                    # dict -- ``index["IndexName"]`` raised a TypeError
                                    "IndexName": index.name,
                                    "ProvisionedThroughput": index.provisioned_throughput,
                                }
                            }
                        ]
                    )
                    return self.update_table()
            else:
                # create the index
                log.info(
                    "Creating global secondary index %s on table %s",
                    index.name,
                    self.name,
                )
                do_update(
                    AttributeDefinitions=self.attribute_definitions,
                    GlobalSecondaryIndexUpdates=[{"Create": index.index_args}],
                )
                return self.update_table()

        update_ops = self._update_table_ops
        self._update_table_ops = None
        return update_ops

    def delete(self, wait=True):
        """Delete this existing table

        :param bool wait: If set to True, the default, this call will block until the table is deleted
        """
        self.table.delete()
        if wait:
            self.table.meta.client.get_waiter("table_not_exists").wait(
                TableName=self.name
            )
        return True

    def put(self, item, **kwargs):
        r"""Put a singular item into the table

        :param dict item: The data to put into the table
        :param \*\*kwargs: All other keyword arguments are passed through to the `DynamoDB Table put_item`_ function.

        .. _DynamoDB Table put_item: http://boto3.readthedocs.io/en/latest/reference/services/dynamodb.html#DynamoDB.Table.put_item
        """  # noqa
        return self.table.put_item(Item=remove_nones(item), **kwargs)

    def put_unique(self, item, **kwargs):
        """Put ``item`` only if no existing item shares its hash key.

        :param dict item: The data to put into the table
        :raises HashKeyExists: If an item with the same hash key already exists.
        """
        try:
            kwargs["ConditionExpression"] = "attribute_not_exists({0})".format(
                self.hash_key
            )
            return self.put(item, **kwargs)
        except botocore.exceptions.ClientError as exc:
            if exc.response["Error"]["Code"] == "ConditionalCheckFailedException":
                raise HashKeyExists
            raise

    def put_batch(self, *items, **batch_kwargs):
        """Put multiple items into the table via the boto3 batch writer.

        :param items: The item dicts to put.
        :param batch_kwargs: Passed through to the table's ``batch_writer``.
        """
        with self.table.batch_writer(**batch_kwargs) as writer:
            for item in items:
                writer.put_item(Item=remove_nones(item))

    def get_update_expr_for_key(self, id_, parts):
        """Given a key and a unique id, return all the information required
        for the update expression. This includes the actual field operations,
        a dictionary of generated field names, and the generated field value.

        To account for nested keys, the generated field expression placeholders
        are of the form::

            #uk_0_0 = :uv_0
            #uk_0_0.#uk_0_1 = :uv_0
            #uk_0_0.#uk_0_1.#uk_0_2 = :uv_0
            ...

        Note that if the value of a part - e.g #uk_0_1 - itself has a period ``.``,
        that is interpreted literally and not as nest in the document path. That is::

            #uk_0_0.#uk_0_1 = :uv_0
            {
                "#uk_0_0": "foo",
                "#uk_0_1": "bar.baz"
            }
            {
                ":uv_0": 42
            }

        ...will result in the value::

            "foo": {
                "bar.baz": 42
            }

        :param id_: Unique id for this key
        :param parts: List of parts that make up this key
        :rtype: tuple[str, dict, str]
        """
        UPDATE_FUNCTION_TEMPLATES = {
            "append": "{key} = list_append({key}, {value})",
            "plus": "{key} = {key} + {value}",
            "minus": "{key} = {key} - {value}",
            "if_not_exists": "{key} = if_not_exists({key}, {value})",
            None: "{key} = {value}",
        }

        if len(parts) == 1 or parts[-1] not in UPDATE_FUNCTION_TEMPLATES:
            function = None
        else:
            parts, function = parts[:-1], parts[-1]

        field_value = ":uv_{0}".format(id_)
        field_expr_names = OrderedDict(
            [
                ("#uk_{0}_{1}".format(id_, part_id), part_name)
                for part_id, part_name in enumerate(parts)
            ]
        )
        field_name = ".".join(six.iterkeys(field_expr_names))

        return (
            UPDATE_FUNCTION_TEMPLATES[function].format(
                key=field_name, value=field_value
            ),
            field_expr_names,
            field_value,
        )

    def update(self, update_item_kwargs=None, conditions=None, **kwargs):
        """Update an item identified by its key(s) with the remaining keyword arguments.

        The hash/range key values are pulled out of ``kwargs``; every other keyword becomes
        part of the ``SET`` update expression.  Keys may use ``__`` to address nested fields
        and/or to select an update function (``append``, ``plus``, ``minus``, ``if_not_exists``)
        -- see :meth:`get_update_expr_for_key`.

        :param dict update_item_kwargs: Extra kwargs passed through to boto3 ``update_item``.
        :param conditions: A mapping (turned into an expression via :func:`Q`), an iterable of
            condition expressions (AND'd together), or a single condition expression.
        :raises InvalidSchemaField: If a top-level field name is not part of the schema.
        :raises ConditionFailed: If the condition expression rejects the update.
        """
        # copy update_item_kwargs, so that we don't mutate the original later on
        update_item_kwargs = dict(
            (k, v) for k, v in six.iteritems(update_item_kwargs or {})
        )
        conditions = conditions or {}
        update_fields = []
        expr_names = {}
        expr_vals = {}

        # First, pick out the keys for the update.
        update_key = {
            key: kwargs.pop(key)
            for key in (self.hash_key, self.range_key)
            if key in kwargs
        }

        # Then, generate the keys and values for the update-expression.
        for i, key in enumerate(kwargs):
            key_parts = key.split("__")
            top_level_key = key_parts[0]

            # Make sure the top-level field (key) exists
            # XXX TODO: Should we validate nested keys as well?
            if top_level_key not in self.schema.dynamorm_fields():
                raise InvalidSchemaField(
                    "{0} does not exist in the schema fields".format(key)
                )

            # Add the actual field expression, keys, and value.
            (
                field_expr,
                field_expr_names,
                field_expr_value,
            ) = self.get_update_expr_for_key(i, key_parts)
            update_fields.append(field_expr)
            expr_names.update(field_expr_names)
            expr_vals[field_expr_value] = kwargs[key]

        update_item_kwargs["Key"] = update_key
        update_item_kwargs["UpdateExpression"] = "SET {0}".format(
            ", ".join(update_fields)
        )
        update_item_kwargs["ExpressionAttributeNames"] = expr_names
        update_item_kwargs["ExpressionAttributeValues"] = expr_vals

        if isinstance(conditions, Mapping):
            condition_expression = Q(**conditions)
        elif isinstance(conditions, Iterable):
            condition_expression = None
            for condition in conditions:
                try:
                    condition_expression = condition_expression & condition
                except TypeError:
                    condition_expression = condition
        else:
            condition_expression = conditions

        if condition_expression:
            update_item_kwargs["ConditionExpression"] = condition_expression

        try:
            return self.table.update_item(**update_item_kwargs)
        except botocore.exceptions.ClientError as exc:
            if exc.response["Error"]["Code"] == "ConditionalCheckFailedException":
                raise ConditionFailed(exc)
            raise

    def get_batch(self, keys, consistent=False, attrs=None, batch_get_kwargs=None):
        """Yield the items matching ``keys`` via ``batch_get_item``, retrying UnprocessedKeys.

        :param keys: An iterable of key dicts (hash and, if applicable, range key values).
        :param bool consistent: Use a consistent read.
        :param attrs: Optional ProjectionExpression string limiting the returned attributes.
        :param dict batch_get_kwargs: Extra kwargs for the batch_get_item request.
        :raises InvalidSchemaField: If a key name is not part of the schema.
        """
        # copy batch_get_kwargs, so that we don't mutate the original later on
        batch_get_kwargs = dict(
            (k, v) for k, v in six.iteritems(batch_get_kwargs or {})
        )

        batch_get_kwargs["Keys"] = []
        for kwargs in keys:
            for k, v in six.iteritems(kwargs):
                if k not in self.schema.dynamorm_fields():
                    raise InvalidSchemaField(
                        "{0} does not exist in the schema fields".format(k)
                    )

            batch_get_kwargs["Keys"].append(kwargs)

        if consistent:
            batch_get_kwargs["ConsistentRead"] = True

        if attrs:
            batch_get_kwargs["ProjectionExpression"] = attrs

        while True:
            response = self.resource.batch_get_item(
                RequestItems={self.name: batch_get_kwargs}
            )

            for item in response["Responses"][self.name]:
                yield item

            try:
                batch_get_kwargs = response["UnprocessedKeys"][self.name]
            except KeyError:
                # once our table is no longer listed in UnprocessedKeys we're done our while True loop
                break

    def get(self, consistent=False, get_item_kwargs=None, **kwargs):
        """Fetch a single item by its key(s), returning the raw item dict or None when absent.

        :param bool consistent: Use a consistent read.
        :param dict get_item_kwargs: Extra kwargs for the boto3 ``get_item`` call.
        :param kwargs: The key field(s) and value(s) identifying the item.
        :raises InvalidSchemaField: If a key name is not part of the schema.
        """
        # copy get_item_kwargs, so that we don't mutate the original later on
        get_item_kwargs = dict((k, v) for k, v in six.iteritems(get_item_kwargs or {}))

        for k, v in six.iteritems(kwargs):
            if k not in self.schema.dynamorm_fields():
                raise InvalidSchemaField(
                    "{0} does not exist in the schema fields".format(k)
                )

        get_item_kwargs["Key"] = kwargs
        if consistent:
            get_item_kwargs["ConsistentRead"] = True

        response = self.table.get_item(**get_item_kwargs)

        if "Item" in response:
            return response["Item"]

    def query(self, *args, **kwargs):
        """Query the table (or an index, via ``query_kwargs['IndexName']``).

        Keyword arguments whose names match key attributes (with an optional ``__op`` suffix)
        become the KeyConditionExpression; every other keyword and positional condition
        becomes part of the FilterExpression.

        :raises InvalidSchemaField: If no key condition could be built from ``kwargs``.
        """
        # copy query_kwargs, so that we don't mutate the original later on
        query_kwargs = dict(
            (k, v) for k, v in six.iteritems(kwargs.pop("query_kwargs", {}))
        )
        filter_kwargs = {}

        if "IndexName" in query_kwargs:
            attr_fields = self.index_attribute_fields(
                index_name=query_kwargs["IndexName"]
            )
        else:
            attr_fields = self.table_attribute_fields

        while len(kwargs):
            full_key, value = kwargs.popitem()

            try:
                key, op = full_key.split("__")
            except ValueError:
                key = full_key
                op = "eq"

            if key not in attr_fields:
                filter_kwargs[full_key] = value
                continue

            key = Key(key)
            key_expression = get_expression(key, op, value)

            try:
                query_kwargs["KeyConditionExpression"] = (
                    query_kwargs["KeyConditionExpression"] & key_expression
                )
            except (KeyError, TypeError):
                query_kwargs["KeyConditionExpression"] = key_expression

        if "KeyConditionExpression" not in query_kwargs:
            raise InvalidSchemaField("Primary key must be specified for queries")

        filter_expression = Q(**filter_kwargs)
        for arg in args:
            try:
                filter_expression = filter_expression & arg
            except TypeError:
                filter_expression = arg

        if filter_expression:
            query_kwargs["FilterExpression"] = filter_expression

        log.debug("Query: %s", query_kwargs)
        return self.table.query(**query_kwargs)

    def scan(self, *args, **kwargs):
        """Scan the table; ``args`` and ``kwargs`` are AND'd into a FilterExpression via :func:`Q`.

        ``scan_kwargs`` may be supplied to pass extra kwargs through to the boto3 scan call.
        """
        # copy scan_kwargs, so that we don't mutate the original later on
        scan_kwargs = dict(
            (k, v) for k, v in six.iteritems(kwargs.pop("scan_kwargs", {}))
        )

        filter_expression = Q(**kwargs)
        for arg in args:
            try:
                filter_expression = filter_expression & arg
            except TypeError:
                filter_expression = arg

        if filter_expression:
            scan_kwargs["FilterExpression"] = filter_expression

        return self.table.scan(**scan_kwargs)

    def delete_item(self, **kwargs):
        """Delete a single item; the keyword arguments are used as the Key."""
        return self.table.delete_item(Key=kwargs)
850 |
851 |
def remove_nones(in_dict):
    """
    Recursively remove keys with a value of ``None`` from the ``in_dict`` collection

    Anything that does not behave like a mapping is returned unchanged.
    """
    try:
        return {
            key: remove_nones(val)
            for key, val in in_dict.items()
            if val is not None
        }
    except (ValueError, AttributeError):
        # not a mapping (or .items() yielded non-pairs) -- leave it as-is
        return in_dict
864 |
865 |
def get_expression(attr, op, value):
    """Build a condition expression by invoking the ``op`` attribute of ``attr`` with ``value``.

    ``attr`` is typically a boto3 Key/Attr object and ``op`` the name of one of
    its condition methods (eq, between, exists, ...).
    """
    op_func = getattr(attr, op)
    try:
        return op_func(value)
    except TypeError:
        # A TypeError most likely means the operation doesn't accept exactly one
        # argument (exists/not_exists take none; between takes two).  A value of
        # True means "call with no args"; an iterable value is splatted as the
        # argument list; anything else is a genuine error, so re-raise it.
        if value is True:
            return op_func()
        if isinstance(value, Iterable):
            return op_func(*value)
        raise
881 |
882 |
def Q(**mapping):
    """A Q object represents an AND'd together query using boto3's Attr object, based on a set of keyword arguments that
    support the full access to the operations (eq, ne, between, etc) as well as nested attributes.

    It can be used input to both scan operations as well as update conditions.
    """
    expression = None

    while mapping:
        raw_name, value = mapping.popitem()

        parts = raw_name.split("__")
        attr = Attr(parts.pop(0))
        op = "eq"

        while parts:
            if hasattr(attr, parts[0]):
                # the next part names a condition operation (eq, between, ...)
                op = parts.pop(0)
                break
            # otherwise it's a nested document field -- extend the attr name
            attr = Attr(".".join([attr.name, parts.pop(0)]))

        assert not parts, "Left over parts after parsing query attr"

        condition = get_expression(attr, op, value)
        try:
            expression = expression & condition
        except TypeError:
            # first condition: None & condition raises TypeError
            expression = condition

    return expression
915 |
916 |
class ReadIterator(six.Iterator):
    """ReadIterator provides an iterator object that wraps a model and a method (either scan or query).

    Since it is an object we can attach attributes and functions to it that are useful to the caller.

    .. code-block:: python

        # Scan through a model, one at a time. Don't do this!
        results = MyModel.scan().limit(1)
        for model in results:
            print model.id

        # The next time you call scan (or query) pass the .last attribute of your previous results
        # in as the last argument
        results = MyModel.scan().start(results.last).limit(1)
        for model in results:
            print model.id

        # ...

    :param model: The Model class to wrap
    :param \*args: Q objects, passed through to scan or query
    :param \*\*kwargs: filters, passed through to scan or query
    """

    # Subclasses set this to the table method used to fetch results ("scan" or "query")
    METHOD_NAME = None

    def __init__(self, model, *args, **kwargs):
        assert (
            self.METHOD_NAME
        ), "Improper use of ReadIterator, please use a subclass with a method name set"

        self.model = model
        self.args = args
        self.kwargs = kwargs

        self._partial = False  # build items as partial models (set by projections)
        self._recursive = False  # automatically follow LastEvaluatedKey pagination
        self.last = None  # LastEvaluatedKey from the most recent response
        self.resp = None  # raw boto3 response; fetched lazily on first __next__
        self.index = -1  # position within resp["Items"]; -1 == not started

        # Make sure the pass-through kwargs dict ("scan_kwargs"/"query_kwargs")
        # exists so the fluent methods below can write boto3 options into it
        self.dynamo_kwargs_key = "_".join([self.METHOD_NAME, "kwargs"])
        if self.dynamo_kwargs_key not in self.kwargs:
            self.kwargs[self.dynamo_kwargs_key] = {}
        self.dynamo_kwargs = self.kwargs[self.dynamo_kwargs_key]

    def __iter__(self):
        """We're the iterator"""
        return self

    def _get_resp(self):
        """Helper to get the response object from scan or query"""
        method = getattr(self.model.Table, self.METHOD_NAME)
        return method(*self.args, **self.kwargs)

    def __next__(self):
        """Called for each iteration of this object

        Returns the next raw item wrapped as a new model instance, transparently
        issuing a follow-up request when in recursive mode and the current page
        is exhausted.

        :raises StopIteration: when no more items remain (and, in recursive
            mode, no further pages exist)
        """
        # If we don't have a resp object, go get it
        if self.resp is None:
            self.resp = self._get_resp()

        # Store the last key from query
        self.last = self.resp.get("LastEvaluatedKey", None)

        # If a Limit is specified we must not operate in recursive mode
        if "Limit" in self.dynamo_kwargs and self._recursive:
            log.warning(
                "%s was invoked with both a limit and the recursive flag set. "
                "The recursive flag will be ignored",
                self.__class__.__name__,
            )
            self._recursive = False

        # Increment which record we're going to pull from the items
        self.index += 1

        if self.index == self.resp["Count"]:
            # If we have no more items them we're done as long as we're not in recursive mode
            # And if we are in recursive mode we're done if the resp didn't contain a last key
            if not self._recursive or self.last is None:
                raise StopIteration

            # Our last marker is not None and we are in recursive mode
            # Reset our response state and re-call next
            self.again()
            return self.__next__()

        # Grab the raw item from the response and return it as a new instance of our model
        raw = self.resp["Items"][self.index]
        return self.model.new_from_raw(raw, partial=self._partial)

    def limit(self, limit):
        """Set the limit value"""
        self.dynamo_kwargs["Limit"] = limit
        return self

    def start(self, last):
        """Set the last value"""
        self.dynamo_kwargs["ExclusiveStartKey"] = last
        return self

    def consistent(self):
        """Make this read a consistent one"""
        self.dynamo_kwargs["ConsistentRead"] = True
        return self

    def specific_attributes(self, attrs):
        """Return only specific attributes in the documents through a ProjectionExpression

        This is a list of attribute names. See the documentation for more info:
        https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.ProjectionExpressions.html
        """
        if "ExpressionAttributeNames" not in self.dynamo_kwargs:
            self.dynamo_kwargs["ExpressionAttributeNames"] = {}

        pe = []
        for attri, attr in enumerate(attrs):
            name_parts = []
            for parti, part in enumerate(attr.split(".")):
                # replace the attrs with expression attributes so we can use reserved names (like count)
                # convert names like child.sub -> to #pe1_1.#pe1_2
                pename = "#pe{}".format("_".join([str(attri), str(parti)]))
                self.dynamo_kwargs["ExpressionAttributeNames"][pename] = part
                name_parts.append(pename)
            pe.append(".".join(name_parts))

        self.dynamo_kwargs["ProjectionExpression"] = ", ".join(pe)
        # projected reads return partial documents, so build partial models
        self._partial = True
        return self

    def recursive(self):
        """Set the recursive value to True for this iterator"""
        self._recursive = True
        return self

    def partial(self, partial):
        """Set the partial value for this iterator, which is used when creating new items from the response.

        This is used by indexes"""
        self._partial = bool(partial)
        return self

    def count(self):
        """Return the count matching the current read

        This triggers a new request to the table when it is invoked.
        """
        # NOTE(review): Select=COUNT is written into the shared dynamo kwargs and
        # never removed, so iterating this object after calling count() will also
        # send a COUNT request (which returns no Items) -- confirm this is intended.
        self.dynamo_kwargs["Select"] = "COUNT"
        resp = self._get_resp()
        return resp["Count"]

    def again(self):
        """Call this to reset the iterator so that you can iterate over it again.

        If the previous invocation has a LastEvaluatedKey then this will resume from the next item. Otherwise it will
        re-do the previous invocation.
        """
        self.resp = None
        self.index = -1
        if self.last:
            return self.start(self.last)
        return self
1080 |
1081 |
class ScanIterator(ReadIterator):
    """ReadIterator that fetches results via the table's ``scan`` method"""

    METHOD_NAME = "scan"
1084 |
1085 |
class QueryIterator(ReadIterator):
    """ReadIterator that fetches results via the table's ``query`` method"""

    METHOD_NAME = "query"

    def reverse(self):
        """Return results from the query in reverse"""
        # ScanIndexForward=False tells Dynamo to traverse the range key descending
        self.dynamo_kwargs["ScanIndexForward"] = False
        return self
1093 |
--------------------------------------------------------------------------------
/dynamorm/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NerdWalletOSS/dynamorm/1270a85ae25a1cc4d1993bd64c752733c3e12072/dynamorm/types/__init__.py
--------------------------------------------------------------------------------
/dynamorm/types/_marshmallow.py:
--------------------------------------------------------------------------------
1 | import six
2 | from pkg_resources import parse_version
3 |
4 | from marshmallow import Schema as MarshmallowSchema
5 | from marshmallow.exceptions import MarshmallowError
6 | from marshmallow import fields, __version__ as marshmallow_version
7 |
8 | from .base import DynamORMSchema
9 | from ..exceptions import ValidationError
10 |
# Define different validation logic depending on the version of marshmallow we're using
if parse_version(marshmallow_version) >= parse_version("3.0.0a1"):

    def _validate(cls, obj, partial=False, native=False):
        """Validate using a Marshmallow v3+ schema

        ``native=True`` uses ``load`` (python-native values), otherwise ``dump``
        produces primitive values.  v3 raises on error, so convert any
        MarshmallowError into our own ValidationError.
        """
        try:
            if native:
                data = cls().load(obj, partial=partial, unknown="EXCLUDE")
            else:
                data = cls(partial=partial, unknown="EXCLUDE").dump(obj)
        except MarshmallowError as e:
            raise ValidationError(obj, cls.__name__, e)
        return data


else:

    def _validate(cls, obj, partial=False, native=False):
        """Validate using a Marshmallow 2.x schema

        2.x returns a ``(data, errors)`` tuple rather than raising, so any
        collected errors are converted into our ValidationError here.
        """
        if native:
            data, errors = cls().load(obj, partial=partial)
        else:
            data, errors = cls(partial=partial).dump(obj)
        if errors:
            raise ValidationError(obj, cls.__name__, errors)
        return data
37 |
38 |
class Schema(MarshmallowSchema, DynamORMSchema):
    """This is the base class for marshmallow based schemas"""

    @staticmethod
    def field_to_dynamo_type(field):
        """Given a marshmallow field object return the appropriate Dynamo type character"""
        if isinstance(field, fields.Raw):
            return "B"
        return "N" if isinstance(field, fields.Number) else "S"

    @classmethod
    def dynamorm_fields(cls):
        """Return the mapping of attribute name -> marshmallow field instance"""
        return cls().fields

    @classmethod
    def dynamorm_validate(cls, obj, partial=False, native=False):
        # delegate to the version-specific _validate helper for this marshmallow
        data = _validate(cls, obj, partial, native)

        # When asking for partial native objects (during model init) fill any
        # missing attribute with None so the instance always has every attribute
        # and partial saves can be tracked properly
        if partial and native:
            for name in cls().fields:
                data.setdefault(name, None)

        return data

    @staticmethod
    def base_schema_type():
        """Return the underlying marshmallow base schema class"""
        return MarshmallowSchema
72 |
--------------------------------------------------------------------------------
/dynamorm/types/_schematics.py:
--------------------------------------------------------------------------------
1 | from schematics.models import Model as SchematicsModel
2 | from schematics.exceptions import (
3 | ValidationError as SchematicsValidationError,
4 | ModelConversionError,
5 | )
6 | from schematics import types
7 |
8 | from .base import DynamORMSchema
9 | from ..exceptions import ValidationError
10 |
11 |
class Schema(SchematicsModel, DynamORMSchema):
    """This is the base class for schematics based schemas"""

    @staticmethod
    def field_to_dynamo_type(field):
        """Given a schematics field object return the appropriate Dynamo type character"""
        # XXX: Schematics does not currently have a "raw" type that would map to Dynamo's 'B' (binary) type.
        return "N" if isinstance(field, types.NumberType) else "S"

    @classmethod
    def dynamorm_fields(cls):
        """Return the mapping of attribute name -> schematics type instance"""
        return cls.fields

    @classmethod
    def dynamorm_validate(cls, obj, partial=False, native=False):
        # instantiate with validate=True so schematics raises on bad input,
        # then convert those failures into our own ValidationError
        try:
            inst = cls(obj, strict=False, partial=partial, validate=True)
        except (SchematicsValidationError, ModelConversionError) as e:
            raise ValidationError(obj, cls.__name__, e.messages)

        return inst.to_native() if native else inst.to_primitive()

    @staticmethod
    def base_schema_type():
        """Return the underlying schematics base model class"""
        return SchematicsModel
42 |
--------------------------------------------------------------------------------
/dynamorm/types/base.py:
--------------------------------------------------------------------------------
class DynamORMSchema(object):
    """This is the base class for the inner ``Schema`` class on Tables.

    It must define ``dynamorm_validate`` which runs validation in your desired serialization library,
    ``dynamorm_fields`` which returns a dictionary of key value pairs where keys are attributes and values are the
    type of the attribute, and ``field_to_dynamo_type`` which returns the dynamo type character for the input type.
    """

    @staticmethod
    def field_to_dynamo_type(field):
        """Returns the dynamo type character given the field."""
        raise NotImplementedError("Child class must implement field_to_dynamo_type")

    @classmethod
    def dynamorm_fields(cls):
        """Returns a dictionary of key value pairs where keys are attributes and values are type classes"""
        # bug fix: the message previously told users to implement the legacy
        # "dynamallow_fields" name, which no longer exists anywhere in the API
        raise NotImplementedError(
            "{0} class must implement dynamorm_fields".format(cls.__name__)
        )

    @classmethod
    def dynamorm_validate(cls, obj, partial=False, native=False):
        """Given a dictionary representing a blob from dynamo, this method will validate the blob given the desired
        validation library.

        If partial is true then the underlying validation library should allow for partial objects.

        If native is true then the underlying validation library should return a dictionary of native python values
        (i.e. datetime.datetime), otherwise it should return a dictionary of primitive values (i.e. a string
        representation of a date time value).

        On validation failure, this should raise ``dynamorm.exc.ValidationError``.
        """
        # bug fix: same legacy "dynamallow_validate" name corrected here
        raise NotImplementedError(
            "{0} class must implement dynamorm_validate".format(cls.__name__)
        )

    @staticmethod
    def base_schema_type():
        """Returns the base class used for schemas of this type"""
        raise NotImplementedError("Child class must implement base_schema_type")
42 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [wheel]
2 | universal=1
3 |
4 | [aliases]
5 | test=pytest
6 |
7 | [flake8]
8 | exclude=./build/*,./configs/*,*.egg-info/*,.eggs/*
9 | max_line_length=160
10 |
11 | [coverage:run]
12 | omit =
13 | dynamorm/types/base.py
14 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup

# Use the README as the long description shown on PyPI
with open("README.rst", "r") as readme_fd:
    long_description = readme_fd.read()

setup(
    name="dynamorm",
    version="0.11.0",
    description="DynamORM is a Python object & relation mapping library for Amazon's DynamoDB service.",
    long_description=long_description,
    author="Evan Borgstrom",
    author_email="evan@borgstrom.ca",
    url="https://github.com/NerdWalletOSS/DynamORM",
    license="Apache License Version 2.0",
    # Python 2.7 or 3.5+ (everything below 3.5 in the 3.x line is excluded)
    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4",
    install_requires=["blinker>=1.4,<2.0", "boto3>=1.3,<2.0", "six"],
    # serialization backends are optional extras -- users pick one explicitly
    extras_require={
        "marshmallow": ["marshmallow>=2.15.1,<4"],
        "schematics": ["schematics>=2.1.0,<3"],
    },
    packages=["dynamorm", "dynamorm.types"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Natural Language :: English",
        "Programming Language :: Python",
        "Topic :: Database",
        "Topic :: Internet",
        "Topic :: Software Development :: Libraries",
    ],
)
38 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NerdWalletOSS/dynamorm/1270a85ae25a1cc4d1993bd64c752733c3e12072/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import logging
3 | import os
4 | import time
5 |
6 | import pytest
7 | from dateutil import tz
8 |
9 | from dynamorm import (
10 | DynaModel,
11 | GlobalIndex,
12 | LocalIndex,
13 | ProjectAll,
14 | ProjectKeys,
15 | ProjectInclude,
16 | )
17 | from dynamorm import local
18 | from dynamorm.table import DynamoTable3
19 |
20 | log = logging.getLogger(__name__)
21 |
22 |
@pytest.fixture(scope="session", autouse=True)
def setup_logging():
    # autouse session fixture: configure root logging once for the whole test run
    logging.basicConfig(level=logging.INFO)
26 |
27 |
@pytest.fixture(scope="session")
def TestModel():
    """Provides a test model

    The serialization backend (marshmallow vs schematics) is selected via the
    SERIALIZATION_PKG environment variable, set by the test environment.
    """

    if os.environ.get("SERIALIZATION_PKG", "").startswith("marshmallow"):
        from marshmallow import fields

        # Stores datetimes in Dynamo as integer microseconds since the epoch.
        # NOTE(review): serialization uses time.mktime (local time) while
        # deserialization builds a tz-aware UTC datetime -- confirm intended.
        class DynamoTimestamp(fields.DateTime):
            default_error_messages = {"invalid": "Not a valid timestamp"}

            def _serialize(self, value, attr, obj, **kwargs):
                try:
                    value = time.mktime(value.timetuple())
                    return int(value * 1000000)
                except (ValueError, AttributeError):
                    self.fail("invalid")

            def _deserialize(self, value, attr, data, **kwargs):
                try:
                    return datetime.datetime.fromtimestamp(
                        float(value) / 1000000, tz=tz.tzutc()
                    )
                except TypeError:
                    # already a datetime (e.g. round-tripped value) -- pass through
                    if isinstance(value, datetime.datetime):
                        return value
                    self.fail("invalid")

        class TestModel(DynaModel):
            class Table:
                name = "peanut-butter"
                hash_key = "foo"
                range_key = "bar"
                read = 5
                write = 5

            class ByDate(LocalIndex):
                name = "by_date"
                hash_key = "foo"
                range_key = "when"
                projection = ProjectKeys()

            class ByBar(GlobalIndex):
                name = "bar"
                hash_key = "bar"
                read = 5
                write = 5
                projection = ProjectAll()

            class ByBaz(GlobalIndex):
                name = "baz"
                hash_key = "baz"
                range_key = "bar"
                read = 5
                write = 5
                projection = ProjectInclude("count")

            class Schema:
                foo = fields.String(required=True)
                bar = fields.String(required=True)
                baz = fields.String(required=True)
                count = fields.Integer()
                child = fields.Dict()
                things = fields.List(fields.String())
                when = fields.DateTime()
                created = DynamoTimestamp()

            def business_logic(self):
                return "http://art.lawver.net/funny/internet.jpg?foo={foo}&bar={bar}".format(
                    foo=self.foo, bar=self.bar
                )

    else:
        from schematics import types
        from schematics.types import compound
        from schematics.exceptions import ConversionError

        # schematics counterpart of DynamoTimestamp above: integer microseconds
        class DynamoTimestampType(types.TimestampType, types.NumberType):
            primitive_type = int
            native_type = datetime.datetime

            def to_primitive(self, value, context=None):
                value = time.mktime(value.timetuple())
                return self.primitive_type(value * 1000000)

            def to_native(self, value, context=None):
                try:
                    return datetime.datetime.fromtimestamp(
                        float(value) / 1000000, tz=tz.tzutc()
                    )
                except TypeError:
                    # already a datetime -- pass through unchanged
                    if isinstance(value, datetime.datetime):
                        return value
                    raise ConversionError("Not a valid timestamp")

        class TestModel(DynaModel):
            class Table:
                name = "peanut-butter"
                hash_key = "foo"
                range_key = "bar"
                read = 5
                write = 5

            class ByDate(LocalIndex):
                name = "by_date"
                hash_key = "foo"
                range_key = "when"
                projection = ProjectKeys()

            class ByBar(GlobalIndex):
                name = "bar"
                hash_key = "bar"
                read = 5
                write = 5
                projection = ProjectAll()

            class ByBaz(GlobalIndex):
                name = "baz"
                hash_key = "baz"
                range_key = "bar"
                read = 5
                write = 5
                projection = ProjectInclude("count")

            class Schema:
                foo = types.StringType(required=True)
                bar = types.StringType(required=True)
                baz = types.StringType(required=True)
                count = types.IntType()
                child = compound.DictType(types.BaseType)
                things = compound.ListType(types.StringType)
                when = types.DateTimeType()
                created = DynamoTimestampType()

            def business_logic(self):
                return "http://art.lawver.net/funny/internet.jpg?foo={foo}&bar={bar}".format(
                    foo=self.foo, bar=self.bar
                )

    return TestModel
167 |
168 |
@pytest.fixture(scope="function")
def TestModel_table(request, TestModel, dynamo_local):
    """Used with TestModel, creates and deletes the table around the test"""
    TestModel.Table.create_table()
    # finalizer guarantees the table is removed even when the test fails
    request.addfinalizer(TestModel.Table.delete)


@pytest.fixture(scope="function")
def TestModel_entries(TestModel, TestModel_table):
    """Used with TestModel, creates and deletes the table and populates entries"""
    # three items sharing hash_key "first" so queries return multiple results
    TestModel.put_batch(
        {
            "foo": "first",
            "bar": "one",
            "baz": "bbq",
            "count": 111,
            "child": {"sub": "one"},
        },
        {
            "foo": "first",
            "bar": "two",
            "baz": "wtf",
            "count": 222,
            "child": {"sub": "two"},
        },
        {
            "foo": "first",
            "bar": "three",
            "baz": "bbq",
            "count": 333,
            "child": {"sub": "three"},
        },
    )


@pytest.fixture(scope="function")
def TestModel_entries_xlarge(TestModel, TestModel_table):
    """Used with TestModel, creates and deletes the table and populates multiple pages of entries"""
    TestModel.put_batch(
        *[
            {"foo": "first", "bar": str(i), "baz": "bat" * 100}
            for i in range(
                4000
            )  # 1mb page is roughly 3300 items, so 4000 will be two pages.
        ]
    )
215 |
216 |
@pytest.fixture(scope="session")
def TestModelTwo():
    """Provides a test model without a range key"""

    # NOTE(review): this checks the env var with ``in`` while the TestModel
    # fixture uses ``startswith`` -- consider making the two checks consistent.
    if "marshmallow" in (os.getenv("SERIALIZATION_PKG") or ""):
        from marshmallow import fields

        class TestModelTwo(DynaModel):
            class Table:
                name = "peanut-butter"
                hash_key = "foo"
                read = 5
                write = 5

            class Schema:
                foo = fields.String(required=True)
                bar = fields.String()
                baz = fields.String()

    else:
        from schematics import types

        class TestModelTwo(DynaModel):
            class Table:
                name = "peanut-butter"
                hash_key = "foo"
                read = 5
                write = 5

            class Schema:
                foo = types.StringType(required=True)
                bar = types.StringType()
                baz = types.StringType()

    return TestModelTwo


@pytest.fixture(scope="function")
def TestModelTwo_table(request, TestModelTwo, dynamo_local):
    """Used with TestModelTwo, creates and deletes the table around the test"""
    TestModelTwo.Table.create_table()
    request.addfinalizer(TestModelTwo.Table.delete)
259 |
260 |
@pytest.fixture(scope="session")
def dynamo_local(request):
    """Connect to a local dynamo instance"""
    # NOTE(review): ``request`` is unused -- no finalizer shuts the local dynamo
    # down here; presumably it ends with the test process. Confirm.
    dynamo_local_dir = os.environ.get("DYNAMO_LOCAL", "build/dynamo-local")
    dynamo_local_ = local.DynamoLocal(dynamo_local_dir)
    # credential values are dummies; boto3 just needs *some* keys and a region
    DynamoTable3.get_resource(
        aws_access_key_id="anything",
        aws_secret_access_key="anything",
        region_name="us-west-2",
        endpoint_url="http://localhost:{port}".format(port=dynamo_local_.port),
    )
    return dynamo_local_
273 |
--------------------------------------------------------------------------------
/tests/test_local.py:
--------------------------------------------------------------------------------
1 | import os
2 | import socket
3 | import time
4 |
5 | from dynamorm.local import DynamoLocal
6 |
7 | DYNAMO_CONN_RETRIES = 15
8 | DYNAMO_CONN_SLEEP = 1
9 |
10 |
def test_shutdown_local_dynamo():
    """A local dynamo should accept connections until it is shut down."""
    dynamo = DynamoLocal(os.environ.get("DYNAMO_LOCAL", "build/dynamo-local"))

    # poll until the local dynamo starts listening (or we run out of retries)
    status = -1
    for _ in range(DYNAMO_CONN_RETRIES):
        status = _connect_to_port(dynamo.port)
        if status == 0:
            break
        time.sleep(DYNAMO_CONN_SLEEP)
    assert status == 0

    dynamo.shutdown()
    assert dynamo.dynamo_proc is None
    assert _connect_to_port(dynamo.port) != 0
24 |
25 |
26 | def _connect_to_port(port):
27 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
28 | try:
29 | result = sock.connect_ex(("127.0.0.1", port))
30 | finally:
31 | sock.close()
32 | return result
33 |
--------------------------------------------------------------------------------
/tests/test_model.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pytest
3 |
4 | from dynamorm.model import DynaModel
5 | from dynamorm.indexes import GlobalIndex, LocalIndex, ProjectAll, ProjectInclude
6 | from dynamorm.exceptions import (
7 | DynaModelException,
8 | HashKeyExists,
9 | InvalidSchemaField,
10 | MissingTableAttribute,
11 | ValidationError,
12 | )
13 |
14 |
def is_marshmallow():
    """True when the test run is configured to use the marshmallow serialization package."""
    pkg = os.environ.get("SERIALIZATION_PKG", "")
    return pkg.startswith("marshmallow")
17 |
18 |
19 | if is_marshmallow():
20 | from marshmallow.fields import String, Integer as Number, UUID
21 | from marshmallow import (
22 | validates,
23 | ValidationError as SchemaValidationError,
24 | Schema as BaseModel,
25 | )
26 | else:
27 | from schematics.exceptions import ValidationError as SchemaValidationError
28 | from schematics.types import (
29 | StringType as String,
30 | IntType as Number,
31 | UUIDType as UUID,
32 | )
33 | from schematics.models import Model as BaseModel
34 |
35 | try:
36 | from unittest.mock import MagicMock, call
37 | except ImportError:
38 | from mock import MagicMock, call
39 |
40 |
def test_missing_inner_classes():
    """Classes must have both a Table and Schema inner class"""
    # the exception is raised at class-definition time by DynaModel
    with pytest.raises(DynaModelException):

        class Model(DynaModel):
            pass


def test_missing_inner_schema_class():
    """Classes must have an inner Schema class"""
    with pytest.raises(DynaModelException):

        class Model(DynaModel):
            class Table:
                pass


def test_missing_inner_table_class():
    """Classes must have an inner Table class"""
    with pytest.raises(DynaModelException):

        class Model(DynaModel):
            class Schema:
                pass
65 |
66 |
def test_parent_inner_classes():
    """A subclass shares (does not copy) the inner classes defined on its parent model"""

    class Parent(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            read = 1
            write = 1

        class Schema:
            foo = String(required=True)

    class Child(Parent):
        pass

    # identity, not equality: the very same Table object is inherited
    assert Child.Table is Parent.Table
82 |
83 |
def test_table_validation():
    """Defining a model with missing table attributes should raise exceptions"""
    # a Table with only a name (no hash_key) fails at class-definition time
    with pytest.raises(MissingTableAttribute):

        class Model(DynaModel):
            class Table:
                name = "table"

            class Schema:
                foo = String(required=True)


def test_table_create_validation():
    """You cannot create a table that is missing read/write attrs"""
    # read without write
    with pytest.raises(MissingTableAttribute):

        class Model(DynaModel):
            class Table:
                name = "table"
                hash_key = "foo"
                read = 5

            class Schema:
                foo = String(required=True)

        Model.Table.create_table()

    # write without read
    with pytest.raises(MissingTableAttribute):

        class Model(DynaModel):
            class Table:
                name = "table"
                hash_key = "foo"
                write = 5

            class Schema:
                foo = String(required=True)

        Model.Table.create_table()

    # neither read nor write
    with pytest.raises(MissingTableAttribute):

        class Model(DynaModel):
            class Table:
                name = "table"
                hash_key = "foo"

            class Schema:
                foo = String(required=True)

        Model.Table.create_table()
135 |
136 |
def test_invalid_hash_key():
    """Defining a model where ``hash_key`` in Table points to an invalid field should raise InvalidSchemaField"""
    with pytest.raises(InvalidSchemaField):

        class Model(DynaModel):
            class Table:
                name = "table"
                hash_key = "foo"  # "foo" is not defined in the Schema below
                read = 1
                write = 1

            class Schema:
                bar = String(required=True)


def test_invalid_range_key():
    """Defining a model where ``range_key`` in Table points to an invalid field should raise InvalidSchemaField"""
    with pytest.raises(InvalidSchemaField):

        class Model(DynaModel):
            class Table:
                name = "table"
                hash_key = "foo"
                range_key = "bar"  # "bar" is not defined in the Schema below
                read = 1
                write = 1

            class Schema:
                foo = String(required=True)
                baz = String(required=True)
167 |
168 |
def test_number_hash_key(dynamo_local, request):
    """Test a number hash key and ensure the dynamo type gets set correctly"""

    class Model(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            read = 1
            write = 1

        class Schema:
            foo = Number(required=True)
            baz = String(required=True)

    Model.Table.create_table()
    request.addfinalizer(Model.Table.delete)

    model = Model(foo=1, baz="foo")
    # only the key attribute appears in attribute_definitions, typed "N" (number)
    assert model.Table.attribute_definitions == [
        {"AttributeName": "foo", "AttributeType": "N"}
    ]

    model.save()
192 |
193 |
def test_missing_field_validation():
    """A partially-constructed model fails validation on its missing required fields"""

    class Model(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            read = 1
            write = 1

        class Schema:
            foo = String(required=True)
            baz = String(required=True)

    # partial=True lets us build the instance without "baz"...
    model = Model(foo="foo", partial=True)
    # ...but an explicit validate() still reports the missing required field
    with pytest.raises(ValidationError):
        model.validate()

    try:
        model.validate()
    except ValidationError as exc:
        assert str(exc).startswith(
            "Validation failed for schema ModelSchema. Errors: {'baz'"
        )
216 |
217 |
def test_validation(dynamo_local):
    """Saving runs schema validation for keys, required fields, types and custom validators"""

    # build an equivalent max-5 validator for whichever backend is active
    if is_marshmallow():
        from marshmallow.validate import Range

        number_field = Number(validate=[Range(max=5)])
    else:
        number_field = Number(max_value=5)

    class Book(DynaModel):
        class Table:
            name = "books"
            hash_key = "id"
            read = 1
            write = 1

        class Schema:
            id = String(required=True)
            rank = number_field
            name = String(required=True)

    Book.Table.create_table()

    # ok
    b = Book(id="foo", rank=1, name="Foos Gold")
    b.save()

    # no hash key
    with pytest.raises(ValidationError):
        b = Book(rank=1, name="Foos Gold")
        b.save()

    # no required attribute
    with pytest.raises(ValidationError):
        b = Book(id="foo", rank=1)
        b.save()

    # bad type for attribute
    with pytest.raises(ValidationError):
        b = Book(id="foo2", rank="bar", name="Foos Gold")
        b.save()

    # bad semantics - fails custom validation for attribute
    with pytest.raises(ValidationError):
        b = Book(id="foo2", rank=10, name="Foos Gold")
        b.save()

    Book.Table.delete()
266 |
267 |
def test_index_setup():
    """Ensure our index objects are setup & transformed correctly by our meta class"""

    class Model(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            range_key = "bar"
            read = 1
            write = 1

        class Index(GlobalIndex):
            name = "test-idx"
            hash_key = "foo"
            range_key = "bar"
            projection = ProjectAll()

        class Schema:
            foo = String(required=True)
            bar = String(required=True)

    instance = Model(foo="hi", bar="there")

    # the metaclass registers the index under its name and wires it back to
    # both the table and the schema
    assert "test-idx" in instance.Table.indexes
    assert instance.Index.index is instance.Table.indexes["test-idx"]
    assert instance.Index.index.table is instance.Table
    assert instance.Index.index.schema is instance.Schema

    # read is an optional index parameter, so it is left as None at init time
    assert instance.Index.index.read is None
299 |
300 |
def test_invalid_indexes():
    """Ensure validation happens for indexes"""
    # every invalid definition should fail the same way for both index flavors
    for index_base in (GlobalIndex, LocalIndex):
        # an index without a hash_key is incomplete
        with pytest.raises(MissingTableAttribute):

            class ModelMissingHashKey(DynaModel):
                class Table:
                    name = "table"
                    hash_key = "foo"
                    range_key = "bar"
                    read = 1
                    write = 1

                class Index(index_base):
                    name = "test-idx"
                    range_key = "bar"
                    projection = ProjectAll()

                class Schema:
                    foo = String(required=True)
                    bar = String(required=True)

        # an index without a projection is incomplete
        with pytest.raises(MissingTableAttribute):

            class ModelMissingProjection(DynaModel):
                class Table:
                    name = "table"
                    hash_key = "foo"
                    range_key = "bar"
                    read = 1
                    write = 1

                class Index(index_base):
                    name = "test-idx"
                    hash_key = "foo"
                    range_key = "bar"

                class Schema:
                    foo = String(required=True)
                    bar = String(required=True)

        # an index range_key must name an existing schema field
        with pytest.raises(InvalidSchemaField):

            class ModelBadRangeKey(DynaModel):
                class Table:
                    name = "table"
                    hash_key = "foo"
                    range_key = "bar"
                    read = 1
                    write = 1

                class Index(index_base):
                    name = "test-idx"
                    hash_key = "foo"
                    range_key = "baz"
                    projection = ProjectAll()

                class Schema:
                    foo = String(required=True)
                    bar = String(required=True)

        # an index hash_key must name an existing schema field
        with pytest.raises(InvalidSchemaField):

            class ModelBadHashKey(DynaModel):
                class Table:
                    name = "table"
                    hash_key = "foo"
                    range_key = "bar"
                    read = 1
                    write = 1

                class Index(index_base):
                    name = "test-idx"
                    hash_key = "baz"
                    range_key = "bar"
                    projection = ProjectAll()

                class Schema:
                    foo = String(required=True)
                    bar = String(required=True)
385 |
386 |
def test_update_table(dynamo_local):
    """update_table should report the number of changes applied to bring an existing table in line."""

    class TableV1(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            range_key = "bar"
            read = 5
            write = 5
            stream = "NEW_AND_OLD_IMAGES"

        class Schema:
            foo = String(required=True)
            bar = String(required=True)
            baz = String(required=True)
            bbq = String(required=True)

    class TableV2(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            range_key = "bar"
            read = 10
            write = 10

        class Index1(GlobalIndex):
            name = "index1"
            hash_key = "baz"
            range_key = "bar"
            projection = ProjectAll()
            read = 5
            write = 5

        class Index2(GlobalIndex):
            name = "index2"
            hash_key = "bbq"
            range_key = "bar"
            projection = ProjectAll()
            read = 5
            write = 5

        class Schema:
            foo = String(required=True)
            bar = String(required=True)
            baz = String(required=True)
            bbq = String(required=True)

    class TableV3(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            range_key = "bar"
            read = 10
            write = 10
            stream = "NEW_IMAGE"

        class Index2(GlobalIndex):
            name = "index2"
            hash_key = "bbq"
            range_key = "bar"
            projection = ProjectAll()
            read = 5
            write = 5

        class Schema:
            foo = String(required=True)
            bar = String(required=True)
            baz = String(required=True)
            bbq = String(required=True)

    TableV1.Table.create_table()

    # updating to v2 should result in 4 changes:
    # * changing throughput
    # * adding index1
    # * adding index2
    # * removing stream
    assert TableV2.Table.update_table() == 4

    # updating to v3 should result in 2 changes:
    # * deleting index 1
    # * adding stream
    assert TableV3.Table.update_table() == 2

    # should now be a no-op
    assert TableV3.Table.update_table() == 0
472 |
473 |
def test_sparse_indexes(dynamo_local, request):
    """Querying a GSI with an INCLUDE projection should return only the matching item."""

    class MyModel(DynaModel):
        class Table:
            name = "mymodel"
            hash_key = "foo"
            read = 10
            write = 10

        class Index1(GlobalIndex):
            name = "index1"
            hash_key = "bar"
            read = 10
            write = 10
            projection = ProjectInclude("foo", "bar")

        class Schema:
            foo = String(required=True)
            bar = String(required=True)
            baz = String(required=True)
            bbq = String(required=True)

    MyModel.Table.create_table()
    # clean up after ourselves, like the other table-creating tests, so a
    # failure here doesn't leak the table into later tests
    request.addfinalizer(MyModel.Table.delete)

    MyModel.put_batch(
        {"foo": "1", "bar": "1", "baz": "1", "bbq": "1"},
        {"foo": "2", "bar": "2", "baz": "2", "bbq": "2"},
    )

    items = list(MyModel.Index1.query(bar="2"))
    assert len(items) == 1
    assert items[0].foo == "2"
504 |
505 |
def test_partial_save(TestModel, TestModel_entries, dynamo_local):
    """Partial saves should go through update_item with only the changed attributes."""

    def fetch_first():
        item = TestModel.get(foo="first", bar="one")
        item.put = MagicMock()
        item.update_item = MagicMock()
        return item

    # a non-partial save uses put, so update_item must never be called
    item = fetch_first()
    item.save()
    item.update_item.assert_not_called()

    # a partial save with no changes is a no-op; after a change, update_item
    # should only be called once despite save being called twice
    item = fetch_first()
    item.save(partial=True)

    item.baz = "changed"
    item.update_item.return_value = {"Attributes": {"baz": "changed"}}
    item.save(partial=True)
    item.put.assert_not_called()

    baz_update_call = call(
        # no conditions should be set
        conditions=None,
        # ReturnValues should ask for the updated values back
        update_item_kwargs={"ReturnValues": "UPDATED_NEW"},
        # the attribute we changed should be included
        baz="changed",
        # and so should the primary key
        foo="first",
        bar="one",
    )
    item.update_item.assert_has_calls([baz_update_call])

    # change a different attribute, and only it should be sent
    item.count = 999
    item.update_item.return_value = {"Attributes": {"count": 999}}
    item.save(partial=True)
    item.put.assert_not_called()

    count_update_call = call(
        conditions=None,
        update_item_kwargs={"ReturnValues": "UPDATED_NEW"},
        count=999,
        foo="first",
        bar="one",
    )
    item.update_item.assert_has_calls([baz_update_call, count_update_call])
555 |
556 |
def test_partial_save_with_return_all(TestModel, TestModel_entries, dynamo_local):
    """With return_all, a partial save should refresh unloaded attributes from Dynamo."""
    patched = TestModel(foo="first", bar="one", partial=True)
    # baz was never loaded locally...
    assert patched.baz is None
    patched.count = 12345
    patched.save(partial=True, return_all=True)
    # ...but the save round-trip populates it from the stored item
    assert patched.baz == "bbq"
563 |
564 |
def test_unique_save(TestModel, TestModel_entries, dynamo_local):
    """Saving with unique=True must fail on an existing key; a plain save overwrites."""
    original = TestModel(foo="first", bar="one", baz="uno")
    original.save()

    duplicate = TestModel(foo="first", bar="one", baz="uno")
    with pytest.raises(HashKeyExists):
        duplicate.save(unique=True)
    # a non-unique save of the same key succeeds
    duplicate.save()
573 |
574 |
def test_explicit_schema_parents():
    """Inner Schema classes should be able to have explicit parents"""

    class SuperMixin(BaseModel):
        bbq = String()

    # the two serialization backends declare custom validators differently,
    # so build an equivalent mixin for whichever backend is active
    if is_marshmallow():

        class Mixin(SuperMixin):
            is_mixin = True
            bar = String()

            @validates("bar")
            def validate_bar(self, value):
                if value != "bar":
                    raise SchemaValidationError("bar must be bar")

    else:

        class Mixin(SuperMixin):
            is_mixin = True
            bar = String()

            def validate_bar(self, data, value):
                if value != "bar":
                    raise SchemaValidationError("bar must be bar")

    class Model(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            read = 1
            write = 1

        class Schema(Mixin):
            foo = Number(required=True)
            baz = String(required=True)

    # non-field attributes and fields from the whole mixin chain are inherited
    assert Model.Schema.is_mixin is True
    assert list(sorted(Model.Schema.dynamorm_fields().keys())) == [
        "bar",
        "baz",
        "bbq",
        "foo",
    ]

    # the inherited custom validator should fire during construction
    with pytest.raises(ValidationError):
        Model(foo="foo", baz="baz", bar="not bar")
623 |
624 |
def test_schema_parents_mro():
    """Inner Schema classes should obey MRO (to test our schematics field pull up)"""

    class MixinTwo(BaseModel):
        bar = Number()

    class MixinOne(BaseModel):
        bar = String()

    class Model(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            read = 1
            write = 1

        class Schema(MixinOne, MixinTwo):
            foo = Number(required=True)
            baz = String(required=True)

    # MixinOne precedes MixinTwo in the MRO, so its String field must win
    fields = Model.Schema.dynamorm_fields()
    assert "bar" in fields
    assert isinstance(fields["bar"], String)
647 |
648 |
def test_model_mixin():
    """Fields declared on a schema mixin should merge with the schema's own fields."""

    class Mixin(BaseModel):
        bar = String()

    class MyModel(DynaModel):
        class Table:
            name = "table"
            hash_key = "foo"
            read = 1
            write = 1

        class Schema(Mixin):
            foo = String(required=True)

    # both the mixin field and the schema's own field must be present
    fields = MyModel.Schema.dynamorm_fields()
    for field_name in ("foo", "bar"):
        assert field_name in fields
        assert isinstance(fields[field_name], String)
667 |
668 |
def test_table_config(TestModel, dynamo_local):
    """Models with explicit resource_kwargs should get their own boto3 resource/region."""

    class MyModel(DynaModel):
        class Table:
            name = "mymodel"
            hash_key = "foo"
            read = 10
            write = 10

            resource_kwargs = {"region_name": "us-east-2"}

        class Schema:
            foo = String(required=True)

    class OtherModel(DynaModel):
        class Table:
            name = "othermodel"
            hash_key = "foo"
            read = 10
            write = 10

        class Schema:
            foo = String(required=True)

    # dynamo_local sets up the default table config to point to us-west-2
    # So any models, like TestModel, that don't specify a config end up pointing there
    assert TestModel.Table.resource.meta.client.meta.region_name == "us-west-2"

    # Our first model above has explicit resource kwargs, as such it should get a different resource with our explicitly
    # configured region name
    assert MyModel.Table.resource.meta.client.meta.region_name == "us-east-2"

    # OtherModel has no explicit config, so it should fall back to the default
    # region just like TestModel (it was previously defined but never asserted)
    assert OtherModel.Table.resource.meta.client.meta.region_name == "us-west-2"
699 |
700 |
def test_field_subclassing():
    """Fields that subclass a known field type should still be recognized as that type."""

    class SubclassedString(String):
        pass

    class SubSubclassedString(SubclassedString):
        pass

    class Mixin(BaseModel):
        foo = SubSubclassedString(required=True)

    class MyModel(DynaModel):
        class Table:
            name = "mymodel"
            hash_key = "foo"
            read = 10
            write = 10

        class Schema(Mixin):
            pass

    # a twice-subclassed field is still an instance of the base field type
    field = MyModel.Schema.dynamorm_fields()["foo"]
    assert isinstance(field, String)
722 |
723 |
def test_delete_normalized_keys(dynamo_local, request):
    """Keys must be normalized when deleting, not just when saving."""

    class Model(DynaModel):
        class Table:
            name = "delete_normalized"
            hash_key = "uuid"
            read = 10
            write = 10

        class Schema:
            uuid = UUID(required=True)
            foo = String()

    Model.Table.create_table()
    request.addfinalizer(Model.Table.delete)

    key = "cc1dea15-c359-455a-a53e-c0a7a31ee022"
    Model(uuid=key).save()

    # We originally did not normalize keys when calling delete, which would cause
    # this to fail with: TypeError: Unsupported type ""
    Model.get(uuid=key).delete()

    assert Model.get(uuid=key) is None
746 |
747 |
def test_query_with_id_and_recursive(TestModel, TestModel_entries_xlarge, dynamo_local):
    """Ensure that we don't raise a KeyCondition error when our query + recursive returns more than a page

    https://github.com/NerdWalletOSS/dynamorm/pull/63/
    """
    results = list(TestModel.query(foo="first").recursive())
    assert len(results) == 4000
754 |
--------------------------------------------------------------------------------
/tests/test_relationships.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import os
3 |
4 | import pytest
5 |
6 | from dynamorm.exceptions import ValidationError
7 | from dynamorm.model import DynaModel
8 | from dynamorm.indexes import GlobalIndex, ProjectKeys
9 | from dynamorm.relationships import OneToOne, OneToMany, ManyToOne
10 |
11 | if "marshmallow" in (os.getenv("SERIALIZATION_PKG") or ""):
12 | from marshmallow.fields import String, Integer as Number
13 | else:
14 | from schematics.types import StringType as String, IntType as Number
15 |
16 | try:
17 | from unittest.mock import MagicMock, call
18 | except ImportError:
19 | from mock import MagicMock, call
20 |
21 |
def test_one_to_one(dynamo_local, request):
    """Exercise the full OneToOne lifecycle: auto-create, save, replace, delete and partial save."""

    class Details(DynaModel):
        class Table:
            name = "details"
            hash_key = "thing_version"
            read = 1
            write = 1

        class Schema:
            thing_version = String(required=True)
            attr1 = String()
            attr2 = Number(required=True)
            # ... lots more attrs ...

    class Sparse(DynaModel):
        class Table:
            name = "sparse"
            hash_key = "thing"
            range_key = "version"
            read = 1
            write = 1

        class Schema:
            thing = String(required=True)
            version = Number(required=True)

        # the related Details item is keyed by "<thing>:<version>"
        details = OneToOne(
            Details,
            query=lambda sparse: dict(
                thing_version="{0}:{1}".format(sparse.thing, sparse.version)
            ),
            back_query=lambda details: dict(
                thing=details.thing_version.split(":")[0],
                version=details.thing_version.split(":")[1],
            ),
        )

    Details.Table.create_table()
    request.addfinalizer(Details.Table.delete)

    Sparse.Table.create_table()
    request.addfinalizer(Sparse.Table.delete)

    item = Sparse(thing="foo", version=1)

    # when accessing a one-to-one relationship that doesn't exist it will be automatically created
    item.details.attr1 = "this is attr1"

    # when saving an object with a one-to-one relationship both sides will be saved
    # when we call .save we should get a validation error from the pre_save signal since we're missing attr2
    with pytest.raises(ValidationError):
        item.save()

    assert Details.get(thing_version="foo:1", consistent=True) is None

    # set it, and the save should succeed
    item.details.attr2 = 1
    item.save()

    details = Details.get(thing_version="foo:1", consistent=True)
    assert details.attr1 == "this is attr1"

    # test replacing the details
    item.details = Details(attr1="new attr1", attr2=2, partial=True)
    item.save()

    details = Details.get(thing_version="foo:1")
    assert details.attr1 == "new attr1"
    assert details.attr2 == 2
    assert details.sparse.thing == "foo"

    # trying to set a value to the relationship that's not the type of our other model should raise a TypeError
    with pytest.raises(TypeError):
        item.details = 1

    # test deleting the details
    # reload the item first to ensure that the __delete__ method can successfully fetch not yet loaded relationships
    item = Sparse.get(thing="foo", version=1)
    del item.details
    assert Details.get(thing_version="foo:1", consistent=True) is None

    # also ensure that trying to delete a non-existent related object raises an error
    item = Sparse.get(thing="foo", version=1)
    with pytest.raises(AttributeError):
        del item.details

    # reload the item
    item = Sparse.get(thing="foo", version=1)
    item.details.attr1 = "this is attr1"
    item.details.attr2 = 1
    item.save()

    # change something on the details
    item.details.attr2 = 10

    # do a partial save on the item, and the details should use update_item to update themselves
    item.details.put = MagicMock()
    item.details.update_item = MagicMock()
    item.save(partial=True)
    item.details.put.assert_not_called()
    item.details.update_item.assert_has_calls(
        [
            call(
                conditions=None,
                update_item_kwargs={"ReturnValues": "UPDATED_NEW"},
                attr2=10,
                thing_version="foo:1",
            )
        ]
    )
133 |
def test_one_to_many(dynamo_local, request):
    """Exercise OneToMany/ManyToOne relationships across forum/thread/reply/user models."""

    class Reply(DynaModel):
        class Table:
            name = "replies"
            hash_key = "forum_thread"
            range_key = "created"
            read = 1
            write = 1

        class ByUser(GlobalIndex):
            name = "replies-by-user"
            hash_key = "user_name"
            range_key = "message"
            projection = ProjectKeys()
            read = 1
            write = 1

        class Schema:
            forum_thread = String(required=True)
            created = String(required=True)
            user_name = String(required=True)
            message = String()

    class User(DynaModel):
        class Table:
            name = "users"
            hash_key = "name"
            read = 1
            write = 1

        class Schema:
            name = String(required=True)

        # a user's replies are found through the replies-by-user GSI
        replies = OneToMany(
            Reply,
            index="ByUser",
            query=lambda user: dict(user_name=user.name),
            back_query=lambda reply: dict(name=reply.user_name),
        )

    class Thread(DynaModel):
        class Table:
            name = "threads"
            hash_key = "forum_name"
            range_key = "subject"
            read = 1
            write = 1

        class ByUser(GlobalIndex):
            name = "threads-by-user"
            hash_key = "user_name"
            range_key = "subject"
            projection = ProjectKeys()
            read = 1
            write = 1

        class Schema:
            forum_name = String(required=True)
            user_name = String(required=True)
            subject = String(required=True)

        user = ManyToOne(
            User,
            query=lambda thread: dict(name=thread.user_name),
            back_index="ByUser",
            back_query=lambda user: dict(user_name=user.name),
        )
        # a thread's replies share the composite "<forum>\n<subject>" hash key
        replies = OneToMany(
            Reply,
            query=lambda thread: dict(
                forum_thread="{0}\n{1}".format(thread.forum_name, thread.subject)
            ),
            back_query=lambda reply: dict(
                forum_name=reply.forum_thread.split("\n")[0],
                subject=reply.forum_thread.split("\n")[1],
            ),
        )

    class Forum(DynaModel):
        class Table:
            name = "forums"
            hash_key = "name"
            read = 1
            write = 1

        class Schema:
            name = String(required=True)

        threads = OneToMany(
            Thread,
            query=lambda forum: dict(forum_name=forum.name),
            back_query=lambda thread: dict(name=thread.forum_name),
        )

    User.Table.create_table()
    request.addfinalizer(User.Table.delete)

    Reply.Table.create_table()
    request.addfinalizer(Reply.Table.delete)

    Thread.Table.create_table()
    request.addfinalizer(Thread.Table.delete)

    Forum.Table.create_table()
    request.addfinalizer(Forum.Table.delete)

    alice = User(name="alice")
    alice.save()

    bob = User(name="bob")
    bob.save()

    general = Forum(name="general")
    general.save()
    assert len(general.threads) == 0

    # passing related model instances should populate the back-reference keys
    topic1 = Thread(forum=general, user=bob, subject="Topic #1")
    assert topic1.forum_name == "general"
    assert topic1.user_name == "bob"
    topic1.save()

    assert len(general.threads) == 1
    assert len(bob.threads) == 1

    assert [t.subject for t in bob.threads] == ["Topic #1"]

    assert len(bob.replies) == 0
    assert len(alice.replies) == 0

    reply1 = Reply(
        thread=topic1,
        user=bob,
        created=str(datetime.datetime.utcnow()),
        message="Reply #1",
    )
    reply1.save()

    reply2 = Reply(
        thread=topic1,
        user=alice,
        created=str(datetime.datetime.utcnow()),
        message="Reply #2",
    )
    reply2.save()

    # each user sees their own reply, keyed by the composite forum/thread key
    assert [r.forum_thread for r in bob.replies] == ["general\nTopic #1"]
    assert [r.forum_thread for r in alice.replies] == ["general\nTopic #1"]
281 |
--------------------------------------------------------------------------------
/tests/test_signals.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from dynamorm.model import DynaModel
4 | from dynamorm.signals import model_prepared
5 |
6 | if "marshmallow" in (os.getenv("SERIALIZATION_PKG") or ""):
7 | from marshmallow.fields import String
8 | else:
9 | from schematics.types import StringType as String
10 |
11 |
def test_model_prepared():
    """The model_prepared signal should fire exactly once per DynaModel subclass definition."""
    def receiver(model):
        received.append(model)

    received = []

    model_prepared.connect(receiver)

    # nothing has been defined since we connected
    assert len(received) == 0

    class SillyModel(DynaModel):
        class Table:
            name = "silly"
            hash_key = "silly"
            read = 1
            write = 1

        class Schema:
            silly = String(required=True)

    # defining the class fired the signal once, with the new model
    assert received == [SillyModel]
33 |
--------------------------------------------------------------------------------
/tests/test_table.py:
--------------------------------------------------------------------------------
1 | """These tests require dynamo local running"""
2 | import datetime
3 | import dateutil.tz
4 | import os
5 |
6 | from decimal import Decimal
7 |
8 | import pytest
9 |
10 | from dynamorm import Q
11 |
12 | from dynamorm.table import DynamoTable3, QueryIterator, ScanIterator
13 | from dynamorm.exceptions import (
14 | HashKeyExists,
15 | InvalidSchemaField,
16 | ValidationError,
17 | ConditionFailed,
18 | )
19 |
20 |
def is_marshmallow():
    """Return True when the configured serialization package is marshmallow."""
    serialization_pkg = os.environ.get("SERIALIZATION_PKG", "")
    return serialization_pkg.startswith("marshmallow")
23 |
24 |
def test_table_creation_deletion(TestModel, dynamo_local):
    """Creating, detecting and deleting tables should work"""
    # the table starts out absent, appears after create_table...
    assert not TestModel.Table.exists
    assert TestModel.Table.create_table()
    assert TestModel.Table.exists
    # ...and is gone again once deleted
    assert TestModel.Table.delete()
    assert not TestModel.Table.exists
32 |
33 |
def test_put_get(TestModel, TestModel_table, dynamo_local):
    """Putting and getting an item should work"""
    TestModel.put({"foo": "first", "bar": "one", "baz": "lol", "count": 123})

    fetched = TestModel.get(foo="first", bar="one")
    assert isinstance(fetched, TestModel)
    assert fetched.baz == "lol"
    assert fetched.count == 123
40 |
41 |
def test_put_remove_nones(TestModel, TestModel_table, dynamo_local, mocker):
    """Attributes that were never set should be stripped from the put_item payload."""
    # mock out the underlying table resource, we have to reach deep in to find it...
    mocker.patch.object(TestModel.Table.__class__, "_table")

    TestModel.put({"foo": "first", "bar": "one", "baz": "baz"})

    # "count" was never provided, so it must not appear in the stored item
    expected_item = {"foo": "first", "bar": "one", "baz": "baz"}
    TestModel.Table.__class__._table.put_item.assert_called_with(Item=expected_item)
51 |
52 |
def test_schema_change(TestModel, TestModel_table, dynamo_local):
    """Simulate a schema change and make sure we get the record correctly"""
    raw = {"foo": "1", "bar": "2", "bad_key": 10, "baz": "baz"}
    TestModel.Table.put(raw)

    fetched = TestModel.get(foo="1", bar="2")
    # the raw document is preserved as stored...
    assert fetched._raw == raw
    assert fetched.foo == "1"
    assert fetched.bar == "2"
    # ...but only schema-known fields become object attributes
    assert not hasattr(fetched, "bad_key")
62 |
63 |
def test_put_invalid_schema(TestModel, TestModel_table, dynamo_local):
    """Putting an invalid schema should raise a ``ValidationError``."""
    if is_marshmallow():
        pytest.skip("Marshmallow does marshalling and not validation when serializing")

    # "foo" must be a string, not a list
    with pytest.raises(ValidationError):
        TestModel.put({"foo": [1], "bar": "10"})
71 |
72 |
def test_put_batch(TestModel, TestModel_table, dynamo_local):
    """Batch putting items should work"""
    TestModel.put_batch(
        {"foo": "first", "bar": "two", "baz": "wtf", "count": 321},
        {"foo": "second", "bar": "one", "baz": "bbq", "count": 456},
    )

    fetched = TestModel.get(foo="second", bar="one")
    assert isinstance(fetched, TestModel)
    assert fetched.baz == "bbq"
    assert fetched.count == 456
82 |
83 |
def test_get_batch(TestModel, TestModel_entries, dynamo_local):
    """get_batch should fetch only the requested keys."""
    items = TestModel.get_batch(
        keys=({"foo": "first", "bar": "one"}, {"foo": "first", "bar": "three"}),
        attrs="bar",
    )

    bars = [item.bar for item in items]
    # only the two requested range keys come back
    assert "one" in bars
    assert "two" not in bars
    assert "three" in bars
94 |
95 |
def test_get_batch_invalid_field(TestModel):
    """Calling .get_batch on an invalid field should result in an exception"""
    # get_batch is lazy, so force iteration to trigger the field validation
    with pytest.raises(InvalidSchemaField):
        list(TestModel.get_batch(keys=({"invalid": "nope"},)))
100 |
101 |
def test_get_non_existant(TestModel, TestModel_table, dynamo_local):
    """Getting a non-existent item should return None"""
    missing = TestModel.get(foo="fifth", bar="derp")
    assert missing is None
105 |
106 |
def test_object_syntax(TestModel, TestModel_table, dynamo_local):
    """Putting (saving) an item using the object syntax should work"""
    instance = TestModel(foo="third", bar="three", baz="idk", count=7)
    instance.save()

    assert TestModel.get(foo="third", bar="three").baz == "idk"
113 |
114 |
def test_put_unique(TestModel, TestModel_table, dynamo_local):
    """Putting an item with a unique constraint should work"""
    TestModel.put({"foo": "third", "bar": "three", "baz": "fuu", "count": 8})
    assert TestModel.get(foo="third", bar="three").baz == "fuu"

    # a second put_unique with the same key must be rejected
    with pytest.raises(HashKeyExists):
        TestModel.put_unique({"foo": "third", "bar": "three", "baz": "waa", "count": 9})
123 |
124 |
def test_get_invalid_field(TestModel):
    """Calling .get on an invalid field should result in an exception"""
    # "bbq" is not part of the TestModel schema keys
    with pytest.raises(InvalidSchemaField):
        TestModel.get(bbq="wtf")
129 |
130 |
def test_count(TestModel, TestModel_entries, dynamo_local):
    """Test the raw query/scan functions to allow things like Counting"""
    query_total = QueryIterator(TestModel, foo="first").count()
    scan_total = ScanIterator(TestModel, count__lt=250).count()
    assert query_total == 3
    assert scan_total == 2
135 |
136 |
def test_query(TestModel, TestModel_entries, dynamo_local):
    """Querying should return the expected values"""
    hits = list(TestModel.query(foo="first"))
    assert len(hits) == 3

    # our table has a hash and range key, so our results are ordered based on the range key
    assert [hit.count for hit in hits] == [111, 333, 222]

    # get the results in the opposite order
    hits = list(TestModel.query(foo="first").reverse())
    assert hits[0].count == 222

    # querying on a non-key field is invalid
    with pytest.raises(InvalidSchemaField):
        list(TestModel.query(baz="bbq"))

    # a fully specified hash + range key yields exactly one item
    hits = list(TestModel.query(foo="first", bar="two"))
    assert len(hits) == 1
    assert hits[0].count == 222

    # range key conditions such as begins_with are supported
    hits = list(TestModel.query(foo="first", bar__begins_with="t"))
    assert len(hits) == 2

    # limit caps the number of results returned
    hits = list(TestModel.query(foo="first").limit(2))
    assert len(hits) == 2
    assert [hit.count for hit in hits] == [111, 333]
166 |
def test_query_filter(TestModel, TestModel_entries, dynamo_local):
    """Querying with non PK kwargs should return the expected values"""
    hits = list(TestModel.query(foo="first", count__gt=200))
    assert len(hits) == 2
    assert [hit.count for hit in hits] == [333, 222]

    # This is *ugly* since in py2 you need to pass the positional args first (for the non-PK filters)
    # and then the keyword args for the PK query.
    hits = list(
        TestModel.query(Q(count__gt=222) | Q(count__lt=222), ~Q(count=111), foo="first")
    )
    assert len(hits) == 1
180 |
181 |
def test_scan(TestModel, TestModel_entries, dynamo_local):
    """Scanning should return the expected values"""
    hits = list(TestModel.scan(count__gt=200))
    assert len(hits) == 2

    # our table has a hash and range key, so our results are ordered based on the range key
    assert [hit.count for hit in hits] == [333, 222]

    # nested (child document) attributes can be filtered via __ paths
    hits = list(TestModel.scan(child__sub="two"))
    assert len(hits) == 1
    assert hits[0].count == 222

    hits = list(TestModel.scan(child__sub__begins_with="t"))
    assert len(hits) == 2
    assert [hit.count for hit in hits] == [333, 222]

    # limit caps the number of results returned
    hits = list(TestModel.scan(count__gt=0).limit(2))
    assert len(hits) == 2
    assert [hit.count for hit in hits] == [111, 333]

    # not_exists matches only items that lack the attribute entirely
    TestModel.put({"foo": "no_child", "bar": "omg", "baz": "baz"})
    hits = list(TestModel.scan(child__not_exists=True))
    assert len(hits) == 1
    assert hits[0].foo == "no_child"

    with pytest.raises(TypeError):
        # Make sure we reject if the value isn't True
        list(TestModel.scan(baz__not_exists=False))
214 |
def test_scan_q(TestModel, TestModel_entries, dynamo_local):
    """Scans should accept Q objects, including OR-ed and negated combinations."""
    hits = list(TestModel.scan(Q(count__gt=222) | Q(count__lt=222)))
    assert len(hits) == 2

    # multiple positional Q arguments are AND-ed together
    hits = list(TestModel.scan(Q(count__gt=222) | Q(count__lt=222), ~Q(count=111)))
    assert len(hits) == 1
221 |
222 |
def test_update(TestModel, TestModel_entries, dynamo_local):
    """update() should change the attribute locally and persist it."""
    item = TestModel.get(foo="first", bar="two")
    assert item.baz == "wtf"
    item.update(baz="yay")
    assert item.baz == "yay"

    # re-fetch with a consistent read to confirm the change was persisted
    refetched = TestModel.get(foo="first", bar="two", consistent=True)
    assert refetched.baz == "yay"
231 |
232 |
def test_query_instead_of_get(TestModel, TestModel_entries, dynamo_local):
    """A fully-keyed query should return exactly the single matching item."""
    matches = list(TestModel.query(foo="first", bar="two"))
    assert len(matches) == 1
    assert matches[0].baz == "wtf"
238 |
239 |
def test_update_no_range(TestModelTwo, TestModelTwo_table, dynamo_local):
    """update() should also work on tables that only have a hash key."""
    TestModelTwo.put({"foo": "foo", "bar": "bar"})

    item = TestModelTwo.get(foo="foo")
    item.update(baz="illion")

    refetched = TestModelTwo.get(foo="foo", consistent=True)
    assert refetched.baz == "illion"
247 |
248 |
def test_update_conditions(TestModel, TestModel_entries, dynamo_local):
    """Every unsatisfied condition flavor should cause the update to fail."""

    def assert_update_fails(conditions):
        with pytest.raises(ConditionFailed):
            TestModel.update_item(
                # our hash & range key -- matches current
                foo="first",
                bar="two",
                # things to update
                baz="yay",
                # things to check
                conditions=conditions,
            )

    # none of these conditions match the stored item, so all must fail
    for failing_condition in (
        dict(baz="nope"),
        dict(count__ne=222),
        dict(count__gt=300),
        dict(count__gte=300),
        dict(count__lt=200),
        dict(count__lte=200),
        dict(count__between=[10, 20]),
        dict(count__in=[221, 223]),
        dict(count__not_exists=True),
        dict(things__exists=True),
        dict(count__type="S"),
        dict(baz__begins_with="nope"),
        dict(baz__contains="nope"),
    ):
        assert_update_fails(failing_condition)

    # Q objects, combined Q objects and lists of Q objects are supported too
    assert_update_fails(Q(count__gt=300) | Q(count__lt=200))
    assert_update_fails(Q(count__gt=200) & ~Q(count=222))
    assert_update_fails([Q(count__gt=200), ~Q(count=222)])
280 |
281 |
def test_update_validation(TestModel, TestModel_entries, dynamo_local):
    """Updating a field with a value of the wrong type raises ValidationError."""
    if is_marshmallow():
        pytest.skip("Marshmallow does marshalling and not validation when serializing")

    # baz is a string field, so a list value must be rejected
    with pytest.raises(ValidationError):
        TestModel.update_item(foo="first", bar="two", baz=["not a list"])
294 |
295 |
def test_update_invalid_fields(TestModel, TestModel_entries, dynamo_local):
    """Unknown attributes are rejected both as update targets and in conditions."""
    # updating a field that isn't part of the schema
    with pytest.raises(InvalidSchemaField):
        TestModel.update_item(foo="first", bar="two", unknown_attr="foo")

    # conditioning on an attribute the item doesn't have fails the condition
    with pytest.raises(ConditionFailed):
        TestModel.update_item(
            foo="first",
            bar="two",
            baz="foo",
            conditions=dict(unknown_attr="foo"),
        )
315 |
316 |
def test_schema_field_removed_update_return_all(
    TestModel, TestModel_table, dynamo_local
):
    """Simulate a schema change with field removed and make sure we can update the record
    and return all columns with new schema
    """
    # write through the raw table layer so the out-of-schema key gets stored
    TestModel.Table.put({"foo": "1", "bar": "2", "old_schema_key": 10, "baz": "baz"})

    item = TestModel.get(foo="1", bar="2")
    item.update(baz="bbs", return_all=True)

    assert (item.foo, item.bar, item.baz) == ("1", "2", "bbs")
    # Old unrecognized column should be dropped
    assert not hasattr(item, "old_schema_key")
333 |
334 |
def test_update_expressions(TestModel, TestModel_entries, dynamo_local):
    """Exercise the update-expression operators: SET, append, plus/minus,
    and if_not_exists."""

    def assert_unset(instance, attr):
        # marshmallow drops unset attributes entirely (AttributeError on
        # access); schematics leaves them as None
        if is_marshmallow():
            with pytest.raises(AttributeError):
                getattr(instance, attr)
        else:
            assert getattr(instance, attr) is None

    two = TestModel.get(foo="first", bar="two")
    assert two.child == {"sub": "two"}
    two.update(child={"foo": "bar"})
    assert two.child == {"foo": "bar"}

    assert_unset(two, "things")

    # Test the `append` operation.
    two.update(things=["foo"])
    assert two.things == ["foo"]
    two.update(things__append=["bar"])
    assert two.things == ["foo", "bar"]

    dt = datetime.datetime(2017, 7, 28, 16, 18, 15, 48, tzinfo=dateutil.tz.tzutc())
    two.update(created=dt)
    assert isinstance(two.created, datetime.datetime)

    # Test the `+` and `-` operators.
    assert two.count == 222
    two.update(count__plus=10)
    assert two.count == 232
    two.update(count__minus=2)
    assert two.count == 230

    # Test the `if_not_exists` operators.
    two.update(count__if_not_exists=1)
    assert two.count == 230

    six = TestModel(foo="sixth", bar="six", baz="baz")
    six.save()

    assert_unset(six, "count")
    six.update(count__if_not_exists=6)
    assert six.count == 6

    # XXX TODO: support REMOVE in a different function
380 |
381 |
@pytest.mark.usefixtures("TestModel_entries", "dynamo_local")
def test_update_expressions_nested_paths(TestModel):
    """Updates can address nested document paths via double-underscore syntax."""
    item = TestModel.get(foo="first", bar="two")
    assert item.child == {"sub": "two"}

    # Test nested path updates.
    item.update(child__foo={"bar": "thing", "baz": "thing"})
    assert item.child == {"sub": "two", "foo": {"bar": "thing", "baz": "thing"}}
    item.update(child__foo__bar="nothing")
    assert item.child == {"sub": "two", "foo": {"bar": "nothing", "baz": "thing"}}
    item.update(child__foo={"bar": "nothing"})
    assert item.child == {"sub": "two", "foo": {"bar": "nothing"}}

    # Test an operation (here, `if_not_exists`) on a nested path.
    item.update(child__foo__bar__if_not_exists="new-thing")
    assert item.child["foo"]["bar"] == "nothing"
    item.update(child__foo__baz__if_not_exists="new-thing")
    assert item.child["foo"]["baz"] == "new-thing"
400 |
401 |
def test_scan_iterator(TestModel, TestModel_entries_xlarge, dynamo_local, mocker):
    """ScanIterator is lazy and can resume from the last evaluated key."""
    try:
        mocker.spy(TestModel.Table.__class__, "scan")
    except TypeError:
        # pypy doesn't allow us to spy on the dynamic class, so we need to spy on the instance
        mocker.spy(TestModel.Table, "scan")

    iterator = ScanIterator(TestModel)
    # nothing is fetched until the iterator is actually consumed
    assert TestModel.Table.scan.call_count == 0

    first_page = list(iterator)
    assert len(first_page) == 3299
    assert TestModel.Table.scan.call_count == 1

    # resume from where the first scan stopped and collect the remainder
    remainder = list(ScanIterator(TestModel).start(iterator.last))
    assert len(remainder) == 701
    assert TestModel.Table.scan.call_count == 2
418 |
419 |
def test_scan_iterator_recursive(
    TestModel, TestModel_entries_xlarge, dynamo_local, mocker
):
    """recursive() follows pagination until the table is exhausted."""
    try:
        mocker.spy(TestModel.Table.__class__, "scan")
    except TypeError:
        # pypy doesn't allow us to spy on the dynamic class, so we need to spy on the instance
        mocker.spy(TestModel.Table, "scan")

    everything = list(ScanIterator(TestModel).recursive())

    # 4000 items take two scan pages to fetch completely
    assert TestModel.Table.scan.call_count == 2
    assert len(everything) == 4000
432 |
433 |
def test_specific_attributes(TestModel, TestModel_entries, dynamo_local):
    """A projection limits which (possibly nested) attributes come back."""
    wanted = ["foo", "bar", "count", "child.sub"]
    results = list(TestModel.query(foo="first").specific_attributes(wanted))

    # baz was not projected; the nested child.sub was
    assert results[0].baz is None
    assert results[0].child["sub"] == "one"
442 |
443 |
def test_overwrite(TestModel, TestModel_entries, dynamo_local):
    """Putting an existing hash+range should replace the old entry"""
    replacement = {
        "foo": "first",
        "bar": "one",
        "baz": "omg",
        "count": 999,
        "child": {"sub": "zero"},
    }
    TestModel.put(replacement)

    # still three items under the hash key -- nothing new was added
    assert TestModel.query(foo="first").count() == 3

    # ...but the existing item now carries the replacement values
    assert TestModel.get(foo="first", bar="one").count == 999
460 |
461 |
def test_save(TestModel, TestModel_table, dynamo_local):
    """save() persists both brand new instances and subsequent mutations."""
    instance = TestModel(foo="a", bar="b", baz="c", count=100)
    instance.save()

    stored = TestModel.get(foo="a", bar="b")
    assert (stored.foo, stored.bar, stored.count) == ("a", "b", 100)

    # mutate the in-memory instance and save again
    instance.count += 1
    instance.baz = "hello_world"
    instance.save()

    stored = TestModel.get(foo="a", bar="b")
    assert (stored.foo, stored.bar) == ("a", "b")
    assert stored.count == 101
    assert stored.baz == "hello_world"
478 |
479 |
def test_save_update(TestModel, TestModel_entries, dynamo_local):
    """Saving a fetched instance writes attribute changes back to the table."""
    item = TestModel.get(foo="first", bar="one")
    assert item.baz == "bbq"

    item.baz = "changed"
    item.save()

    assert TestModel.get(foo="first", bar="one").baz == "changed"
488 |
489 |
def test_consistent_read(TestModel, TestModel_entries, dynamo_local):
    """get(..., consistent=True) returns the most recently written value."""
    TestModel(foo="a", bar="b", baz="c", count=100).save()
    assert TestModel.get(foo="a", bar="b").count == 100

    # overwrite, then read back with a consistent read to see the new value
    TestModel(foo="a", bar="b", baz="c", count=200).save()
    assert TestModel.get(foo="a", bar="b", consistent=True).count == 200
501 |
502 |
def test_delete_with_hash_and_sort(TestModel, TestModel_table, dynamo_local):
    """delete() removes an item keyed by hash + range."""
    instance = TestModel(foo="d", bar="e", baz="f")
    instance.save()
    assert TestModel.get(foo="d", bar="e") is not None

    instance.delete()
    assert TestModel.get(foo="d", bar="e") is None
513 |
514 |
def test_delete_with_hash(TestModelTwo, TestModelTwo_table, dynamo_local):
    """delete() removes an item keyed by hash only."""
    instance = TestModelTwo(foo="q")
    instance.save()
    assert TestModelTwo.get(foo="q") is not None

    instance.delete()
    assert TestModelTwo.get(foo="q") is None
525 |
526 |
def test_native_types(TestModel, TestModel_table, dynamo_local):
    """Datetime values round-trip through Dynamo; bad values fail validation."""
    moment = datetime.datetime(2017, 7, 28, 16, 18, 15, 48, tzinfo=dateutil.tz.tzutc())

    payload = {
        "foo": "first",
        "bar": "one",
        "baz": "lol",
        "count": 123,
        "when": moment,
        "created": moment,
    }
    TestModel.put(payload)
    assert TestModel.get(foo="first", bar="one").when == moment

    # a dict is not coercible to a datetime, so the put must be rejected
    with pytest.raises(ValidationError):
        TestModel.put(dict(payload, created={"foo": 1}))
554 |
555 |
def test_indexes_query(TestModel, TestModel_entries, dynamo_local):
    """Secondary indexes are queryable through the model."""
    assert len(list(TestModel.ByBaz.query(baz="bbq"))) == 2

    narrowed = list(TestModel.ByBaz.query(baz="bbq", bar="one"))
    assert len(narrowed) == 1

    # we project count into the ByBaz index, but not when
    assert narrowed[0].count == 111
    assert narrowed[0].when is None

    # ByBar only has a hash_key not a range key
    assert len(list(TestModel.ByBar.query(bar="three"))) == 1
570 |
571 |
def test_indexes_scan(TestModel, TestModel_entries, dynamo_local):
    """Scanning either index returns every item in the table."""
    for index in (TestModel.ByBaz, TestModel.ByBar):
        assert len(list(index.scan())) == 3
578 |
579 |
def test_config():
    """resource_kwargs['config'] becomes the botocore client Config."""

    class TimeoutTable(DynamoTable3):
        resource_kwargs = {"region_name": "us-west-2", "config": {"connect_timeout": 1}}

    resource = TimeoutTable.get_resource()
    assert resource.meta.client.meta.config.connect_timeout == 1

    # unknown config keys should raise rather than be silently dropped
    class BadConfigTable(DynamoTable3):
        resource_kwargs = {"config": {"foo": True}}

    with pytest.raises(TypeError):
        BadConfigTable.get_resource()
592 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 | black
4 | py{2,3,py2,py3}-{schematics,marshmallow}
5 |
6 | skipsdist = True
7 | skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True}
8 |
9 | [testenv]
10 | basepython =
11 | py2: python2
12 | py27: python2.7
13 | py3: python3
14 | py35: python3.5
15 | py36: python3.6
16 | py37: python3.7
17 | py38: python3.8
18 | pypy2: pypy
19 | pypy3: pypy3
20 |
21 | deps =
22 | pytest
23 | pytest-mock
24 |
25 | codecov: codecov
26 | schematics: .[schematics]
27 | marshmallow: .[marshmallow]
28 |
29 | passenv =
30 | # See https://github.com/codecov/codecov-python/blob/5b9d539a6a09bc84501b381b563956295478651a/README.md#using-tox
31 | codecov: TOXENV
32 | codecov: CI
33 | codecov: TRAVIS TRAVIS_*
34 |
35 | setenv =
36 | PIP_DISABLE_PIP_VERSION_CHECK=1
37 | VIRTUALENV_NO_DOWNLOAD=1
38 |
39 | schematics: SERIALIZATION_PKG=schematics
40 | marshmallow: SERIALIZATION_PKG=marshmallow
41 |
42 | commands =
43 | schematics-!codecov: pytest -v -W ignore::schematics.deprecated.SchematicsDeprecationWarning {posargs:tests}
44 | marshmallow-!codecov: pytest -v {posargs:tests}
45 | schematics-codecov: coverage run --source=dynamorm "{envbindir}/pytest" -v -W ignore::schematics.deprecated.SchematicsDeprecationWarning {posargs:tests}
46 | marshmallow-codecov: coverage run --source=dynamorm "{envbindir}/pytest" -v {posargs:tests}
47 | codecov: codecov -e TOXENV
48 |
49 |
50 | [testenv:black]
51 | skip_install = True
52 |
53 | basepython =
54 | python3
55 |
56 | deps =
57 | black
58 |
59 | commands =
60 | black {posargs:--check .}
61 |
62 |
63 | [testenv:docs]
64 | skip_install = True
65 |
66 | whitelist_externals =
67 | touch
68 |
69 | basepython =
70 | python3
71 |
72 | deps =
73 | sphinx
74 | .[marshmallow,schematics]
75 |
76 | commands =
77 | sphinx-build -b html -d "{envtmpdir}/doctrees" docs docs/_build/html
78 | touch docs/_build/html/.nojekyll
79 |
--------------------------------------------------------------------------------