├── .coveragerc
├── .github
└── workflows
│ ├── backport.yml
│ ├── release.yaml
│ └── test.yaml
├── .gitignore
├── .readthedocs.yaml
├── LICENSE
├── MANIFEST.in
├── README.rst
├── bench
└── benchmark.py
├── docs
├── Makefile
├── api.rst
├── attributes.rst
├── awsaccess.rst
├── batch.rst
├── conditional.rst
├── conf.py
├── contributing.rst
├── examples.rst
├── index.rst
├── indexes.rst
├── local.rst
├── logging.rst
├── low_level.rst
├── optimistic_locking.rst
├── polymorphism.rst
├── quickstart.rst
├── rate_limited_operations.rst
├── release_notes.rst
├── requirements.txt
├── settings.rst
├── signals.rst
├── transaction.rst
├── tutorial.rst
├── updates.rst
├── upgrading.rst
├── upgrading_binary.rst
├── upgrading_unicodeset.rst
└── versioning.rst
├── examples
├── attributes.py
├── connection.py
├── indexes.py
├── model.py
├── office_model.py
├── optimistic_locking.py
├── table_connection.py
└── url_shortener
│ ├── README.rst
│ ├── shortener.py
│ └── templates
│ └── index.html
├── mypy.ini
├── pynamodb
├── __init__.py
├── _schema.py
├── _util.py
├── attributes.py
├── connection
│ ├── __init__.py
│ ├── _botocore_private.py
│ ├── base.py
│ └── table.py
├── constants.py
├── exceptions.py
├── expressions
│ ├── __init__.py
│ ├── condition.py
│ ├── operand.py
│ ├── projection.py
│ ├── update.py
│ └── util.py
├── indexes.py
├── models.py
├── pagination.py
├── py.typed
├── settings.py
├── signals.py
├── transactions.py
└── types.py
├── pytest.ini
├── requirements-dev.txt
├── setup.cfg
├── setup.py
├── tests
├── __init__.py
├── data.py
├── deep_eq.py
├── integration
│ ├── __init__.py
│ ├── base_integration_test.py
│ ├── binary_update_test.py
│ ├── conftest.py
│ ├── model_integration_test.py
│ ├── table_integration_test.py
│ ├── test_discriminator_index.py
│ └── test_transaction_integration.py
├── response.py
├── test_attributes.py
├── test_base_connection.py
├── test_binary_legacy_encoding.py
├── test_discriminator.py
├── test_exceptions.py
├── test_expressions.py
├── test_model.py
├── test_pagination.py
├── test_settings.py
├── test_signals.py
├── test_table_connection.py
└── test_transaction.py
└── typing_tests
├── __init__.py
├── attributes.py
├── models.py
└── transactions.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source = pynamodb/.
3 |
--------------------------------------------------------------------------------
/.github/workflows/backport.yml:
--------------------------------------------------------------------------------
1 | name: Backport merged pull request
2 | on:
3 | pull_request:
4 | types: [closed]
5 | permissions:
6 | contents: write # so it can comment
7 | pull-requests: write # so it can create pull requests
8 | jobs:
9 | backport:
10 | name: Backport pull request
11 | runs-on: ubuntu-latest
12 | # Don't run on closed unmerged pull requests
13 | if: github.event.pull_request.merged
14 | steps:
15 | - uses: actions/checkout@v3
16 | - name: Create backport pull requests
17 | uses: korthout/backport-action@v1
18 |
--------------------------------------------------------------------------------
/.github/workflows/release.yaml:
--------------------------------------------------------------------------------
1 | name: Release
2 |
3 | on:
4 | release:
5 | types: [published]
6 | push:
7 | branches: [master]
8 | pull_request:
9 | workflow_dispatch:
10 |
11 | jobs:
12 | deploy:
13 | runs-on: ubuntu-latest
14 | environment: release
15 | permissions:
16 | id-token: write
17 | steps:
18 | - uses: actions/checkout@v3
19 | - name: Set up Python
20 | uses: actions/setup-python@v4
21 | with:
22 | python-version: '3.x'
23 | - name: Install dependencies
24 | run: |
25 | python -m pip install --upgrade pip
26 | pip install setuptools wheel twine
27 | python -m pip install -e .[signals] -r requirements-dev.txt
28 |
29 | - name: Build packages
30 | run: |
31 | python setup.py sdist bdist_wheel
32 |
33 | - name: Publish to PyPI
34 | uses: pypa/gh-action-pypi-publish@release/v1
35 | if: ${{ github.event_name == 'release' }}
36 |
37 | - name: Publish to Test PyPI
38 | uses: pypa/gh-action-pypi-publish@release/v1
39 | if: ${{ github.event_name == 'workflow_dispatch' }}
40 | with:
41 | repository_url: https://test.pypi.org/legacy/
42 |
--------------------------------------------------------------------------------
/.github/workflows/test.yaml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | push:
5 | branches: [master]
6 | pull_request:
7 |
8 | jobs:
9 | test:
10 |
11 | runs-on: ubuntu-22.04
12 | strategy:
13 | matrix:
14 | python-version: ['3.7', '3.8', '3.9', '3.10', '3.11' ,'3.12', 'pypy-3.8']
15 |
16 | steps:
17 | - uses: actions/checkout@v2
18 | - name: Set up Python ${{ matrix.python-version }}
19 | uses: actions/setup-python@v2
20 | with:
21 | python-version: ${{ matrix.python-version }}
22 |
23 | - name: Install dependencies
24 | run: |
25 | python -m pip install --upgrade pip wheel
26 | python -m pip install -e .[signals] -r requirements-dev.txt
27 |
28 | - name: Run dynamodb_local
29 | run: |
30 | wget --quiet http://dynamodb-local.s3-website-us-west-2.amazonaws.com/dynamodb_local_latest.tar.gz -O /tmp/dynamodb_local_latest.tar.gz
31 | tar -xzf /tmp/dynamodb_local_latest.tar.gz -C /tmp
32 | java -Djava.library.path=/tmp/DynamoDBLocal_lib -jar /tmp/DynamoDBLocal.jar -inMemory -port 8000 &
33 |
34 | - name: Run tests
35 | run: |
36 | pytest --cov-report term-missing --cov=pynamodb tests
37 |
38 | - name: Upload coverage
39 | run: |
40 | coveralls --service=github
41 | env:
42 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
43 |           COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
44 | COVERALLS_PARALLEL: true
45 |
46 |
47 | mypy:
48 | runs-on: ubuntu-latest
49 |
50 | steps:
51 | - uses: actions/checkout@v2
52 | - name: Set up Python 3.8
53 | uses: actions/setup-python@v2
54 | with:
55 | python-version: 3.8
56 | - name: Install dependencies
57 | run: |
58 | python -m pip install --upgrade pip
59 | python -m pip install -e .[signals] -r requirements-dev.txt
60 | - name: Run mypy
61 | run: |
62 | mypy .
63 |
64 | build-docs:
65 | runs-on: ubuntu-latest
66 |
67 | steps:
68 | - uses: actions/checkout@v2
69 | - name: Set up Python 3.8
70 | uses: actions/setup-python@v2
71 | with:
72 | python-version: 3.8
73 | - name: Install dependencies
74 | run: |
75 | python -m pip install --upgrade pip
76 | python -m pip install -r docs/requirements.txt
77 | - name: Build docs
78 | run: |
79 | sphinx-build -W docs /tmp/docs-build
80 |
81 | finish:
82 | needs: test
83 | runs-on: ubuntu-latest
84 | steps:
85 | - name: Coveralls Finished
86 | uses: coverallsapp/github-action@master
87 | with:
88 | github-token: ${{ secrets.github_token }}
89 | parallel-finished: true
90 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See http://help.github.com/ignore-files/ for more about ignoring files.
2 | #
3 | # If you find yourself ignoring temporary files generated by your text editor
4 | # or operating system, you probably want to add a global ignore instead:
5 | # git config --global core.excludesfile ~/.gitignore_global
6 |
7 | # Ignore mac .DS_Store
8 | *.DS_Store
9 |
10 | # Ignore bundler config
11 | /.bundle
12 |
13 | # Ignore the default SQLite database.
14 | /db/*.sqlite3
15 |
16 | # Ignore all logfiles and tempfiles.
17 | /log/*.log
18 | /tmp
19 |
20 | # Ignore other unneeded files.
21 | doc/
22 | *.swp
23 | *~
24 | .project
25 | .DS_Store
26 | .idea
27 | *.egg-info
28 |
29 | # Ignore auto-save files
30 | [#]*[#]
31 | *~
32 |
33 | # Ignore virtualenv stuff
34 | venv
35 |
36 | # Some Python specific stuff
37 | *.py[cod]
38 |
39 | # C extensions
40 | #*.so
41 |
42 | # Installer logs
43 | pip-log.txt
44 |
45 | # Unit test / coverage reports
46 | build/
47 | .coverage
48 | cover/
49 | .tox
50 |
51 | # mypy
52 | .mypy_cache/
53 |
54 | # Translations
55 | *.mo
56 |
57 | # Mr Developer
58 | .mr.developer.cfg
59 | .project
60 | .pydevproject
61 |
62 | # PyCharm
63 | .idea/
64 | build/
65 |
66 | # Ignore Cache
67 | .cache/
68 |
69 | # Ignore built docs
70 | docs/_build/
71 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yaml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | version: 2
6 |
7 | build:
8 | os: ubuntu-22.04
9 | tools:
10 | python: "3.11"
11 |
12 | # Build documentation in the docs/ directory with Sphinx
13 | sphinx:
14 | configuration: docs/conf.py
15 |
16 | # Optionally declare the Python requirements required to build your docs
17 | python:
18 | install:
19 | - requirements: docs/requirements.txt
20 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 Jharrod LaFon
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include mypy.ini
3 | include requirements-*.txt
4 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | ========
2 | PynamoDB
3 | ========
4 |
5 | .. image:: https://img.shields.io/pypi/v/pynamodb.svg
6 | :target: https://pypi.python.org/pypi/pynamodb/
7 | .. image:: https://img.shields.io/conda/vn/conda-forge/pynamodb.svg
8 | :target: https://anaconda.org/conda-forge/pynamodb
9 | .. image:: https://github.com/pynamodb/PynamoDB/workflows/Tests/badge.svg
10 | :target: https://github.com/pynamodb/PynamoDB/actions
11 | .. image:: https://img.shields.io/coveralls/pynamodb/PynamoDB/master.svg
12 | :target: https://coveralls.io/github/pynamodb/PynamoDB
13 |
14 | A Pythonic interface for Amazon's `DynamoDB <https://aws.amazon.com/dynamodb/>`_.
15 |
16 | DynamoDB is a great NoSQL service provided by Amazon, but the API is verbose.
17 | PynamoDB presents you with a simple, elegant API.
18 |
19 | Useful links:
20 |
21 | * See the full documentation at https://pynamodb.readthedocs.io/
22 | * Ask questions in the `GitHub issues <https://github.com/pynamodb/PynamoDB/issues>`_
23 | * See release notes at https://pynamodb.readthedocs.io/en/latest/release_notes.html
24 |
25 | Installation
26 | ============
27 | From PyPi::
28 |
29 | $ pip install pynamodb
30 |
31 | From GitHub::
32 |
33 | $ pip install git+https://github.com/pynamodb/PynamoDB#egg=pynamodb
34 |
35 | From conda-forge::
36 |
37 | $ conda install -c conda-forge pynamodb
38 |
39 |
40 | Basic Usage
41 | ===========
42 |
43 | Create a model that describes your DynamoDB table.
44 |
45 | .. code-block:: python
46 |
47 | from pynamodb.models import Model
48 | from pynamodb.attributes import UnicodeAttribute
49 |
50 | class UserModel(Model):
51 | """
52 | A DynamoDB User
53 | """
54 | class Meta:
55 | table_name = "dynamodb-user"
56 | email = UnicodeAttribute(null=True)
57 | first_name = UnicodeAttribute(range_key=True)
58 | last_name = UnicodeAttribute(hash_key=True)
59 |
60 | PynamoDB allows you to create the table if needed (it must exist before you can use it!):
61 |
62 | .. code-block:: python
63 |
64 | UserModel.create_table(read_capacity_units=1, write_capacity_units=1)
65 |
66 | Create a new user:
67 |
68 | .. code-block:: python
69 |
70 | user = UserModel("John", "Denver")
71 | user.email = "djohn@company.org"
72 | user.save()
73 |
74 | Now, search your table for all users with a last name of 'Denver' and whose
75 | first name begins with 'J':
76 |
77 | .. code-block:: python
78 |
79 | for user in UserModel.query("Denver", UserModel.first_name.startswith("J")):
80 | print(user.first_name)
81 |
82 | Examples of ways to query your table with filter conditions:
83 |
84 | .. code-block:: python
85 |
86 | for user in UserModel.query("Denver", UserModel.email=="djohn@company.org"):
87 | print(user.first_name)
88 |
89 | Retrieve an existing user:
90 |
91 | .. code-block:: python
92 |
93 | try:
94 | user = UserModel.get("John", "Denver")
95 | print(user)
96 | except UserModel.DoesNotExist:
97 | print("User does not exist")
98 |
99 | Upgrade Warning
100 | ===============
101 |
102 | The behavior of 'UnicodeSetAttribute' has changed in backwards-incompatible ways
103 | as of the 1.6.0 and 3.0.1 releases of PynamoDB.
104 |
105 | See `UnicodeSetAttribute upgrade docs <https://pynamodb.readthedocs.io/en/latest/upgrading_unicodeset.html>`_
106 | for detailed instructions on how to safely perform the upgrade.
107 |
108 | Advanced Usage
109 | ==============
110 |
111 | Want to use indexes? No problem:
112 |
113 | .. code-block:: python
114 |
115 | from pynamodb.models import Model
116 | from pynamodb.indexes import GlobalSecondaryIndex, AllProjection
117 | from pynamodb.attributes import NumberAttribute, UnicodeAttribute
118 |
119 | class ViewIndex(GlobalSecondaryIndex):
120 | class Meta:
121 | read_capacity_units = 2
122 | write_capacity_units = 1
123 | projection = AllProjection()
124 | view = NumberAttribute(default=0, hash_key=True)
125 |
126 | class TestModel(Model):
127 | class Meta:
128 | table_name = "TestModel"
129 | forum = UnicodeAttribute(hash_key=True)
130 | thread = UnicodeAttribute(range_key=True)
131 | view = NumberAttribute(default=0)
132 | view_index = ViewIndex()
133 |
134 | Now query the index for all items with 0 views:
135 |
136 | .. code-block:: python
137 |
138 | for item in TestModel.view_index.query(0):
139 | print("Item queried from index: {0}".format(item))
140 |
141 | It's really that simple.
142 |
143 |
144 | Want to use DynamoDB local? Just add a ``host`` name attribute and specify your local server.
145 |
146 | .. code-block:: python
147 |
148 | from pynamodb.models import Model
149 | from pynamodb.attributes import UnicodeAttribute
150 |
151 | class UserModel(Model):
152 | """
153 | A DynamoDB User
154 | """
155 | class Meta:
156 | table_name = "dynamodb-user"
157 | host = "http://localhost:8000"
158 | email = UnicodeAttribute(null=True)
159 | first_name = UnicodeAttribute(range_key=True)
160 | last_name = UnicodeAttribute(hash_key=True)
161 |
162 | Want to enable streams on a table? Just add a ``stream_view_type`` name attribute and specify
163 | the type of data you'd like to stream.
164 |
165 | .. code-block:: python
166 |
167 | from pynamodb.models import Model
168 | from pynamodb.attributes import UnicodeAttribute
169 | from pynamodb.constants import STREAM_NEW_AND_OLD_IMAGE
170 |
171 | class AnimalModel(Model):
172 | """
173 | A DynamoDB Animal
174 | """
175 | class Meta:
176 | table_name = "dynamodb-user"
177 | host = "http://localhost:8000"
178 | stream_view_type = STREAM_NEW_AND_OLD_IMAGE
179 | type = UnicodeAttribute(null=True)
180 | name = UnicodeAttribute(range_key=True)
181 | id = UnicodeAttribute(hash_key=True)
182 |
183 | Features
184 | ========
185 |
186 | * Python >= 3.7 support
187 | * An ORM-like interface with query and scan filters
188 | * Compatible with DynamoDB Local
189 | * Supports the entire DynamoDB API
190 | * Support for Unicode, Binary, JSON, Number, Set, and UTC Datetime attributes
191 | * Support for Global and Local Secondary Indexes
192 | * Provides iterators for working with queries, scans, that are automatically paginated
193 | * Automatic pagination for bulk operations
194 | * Complex queries
195 | * Batch operations with automatic pagination
196 | * Iterators for working with Query and Scan operations
197 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 |
22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
23 |
24 | help:
25 | 	@echo "Please use \`make <target>' where <target> is one of"
26 | @echo " html to make standalone HTML files"
27 | @echo " dirhtml to make HTML files named index.html in directories"
28 | @echo " singlehtml to make a single large HTML file"
29 | @echo " pickle to make pickle files"
30 | @echo " json to make JSON files"
31 | @echo " htmlhelp to make HTML files and a HTML help project"
32 | @echo " qthelp to make HTML files and a qthelp project"
33 | @echo " devhelp to make HTML files and a Devhelp project"
34 | @echo " epub to make an epub"
35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
36 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
38 | @echo " text to make text files"
39 | @echo " man to make manual pages"
40 | @echo " texinfo to make Texinfo files"
41 | @echo " info to make Texinfo files and run them through makeinfo"
42 | @echo " gettext to make PO message catalogs"
43 | @echo " changes to make an overview of all changed/added/deprecated items"
44 | @echo " xml to make Docutils-native XML files"
45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
46 | @echo " linkcheck to check all external links for integrity"
47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
48 |
49 | clean:
50 | rm -rf $(BUILDDIR)/*
51 |
52 | html:
53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | @echo
55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 |
57 | dirhtml:
58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
59 | @echo
60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
61 |
62 | singlehtml:
63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
64 | @echo
65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
66 |
67 | pickle:
68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
69 | @echo
70 | @echo "Build finished; now you can process the pickle files."
71 |
72 | json:
73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
74 | @echo
75 | @echo "Build finished; now you can process the JSON files."
76 |
77 | htmlhelp:
78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
79 | @echo
80 | @echo "Build finished; now you can run HTML Help Workshop with the" \
81 | ".hhp project file in $(BUILDDIR)/htmlhelp."
82 |
83 | qthelp:
84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
85 | @echo
86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PynamoDB.qhcp"
89 | @echo "To view the help file:"
90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PynamoDB.qhc"
91 |
92 | devhelp:
93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
94 | @echo
95 | @echo "Build finished."
96 | @echo "To view the help file:"
97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/PynamoDB"
98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PynamoDB"
99 | @echo "# devhelp"
100 |
101 | epub:
102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
103 | @echo
104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
105 |
106 | latex:
107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
108 | @echo
109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
111 | "(use \`make latexpdf' here to do that automatically)."
112 |
113 | latexpdf:
114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
115 | @echo "Running LaTeX files through pdflatex..."
116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
118 |
119 | latexpdfja:
120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
121 | @echo "Running LaTeX files through platex and dvipdfmx..."
122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
124 |
125 | text:
126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
127 | @echo
128 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
129 |
130 | man:
131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
132 | @echo
133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
134 |
135 | texinfo:
136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
137 | @echo
138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
139 | @echo "Run \`make' in that directory to run these through makeinfo" \
140 | "(use \`make info' here to do that automatically)."
141 |
142 | info:
143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
144 | @echo "Running Texinfo files through makeinfo..."
145 | make -C $(BUILDDIR)/texinfo info
146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
147 |
148 | gettext:
149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
150 | @echo
151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
152 |
153 | changes:
154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
155 | @echo
156 | @echo "The overview file is in $(BUILDDIR)/changes."
157 |
158 | linkcheck:
159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
160 | @echo
161 | @echo "Link check complete; look for any errors in the above output " \
162 | "or in $(BUILDDIR)/linkcheck/output.txt."
163 |
164 | doctest:
165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
166 | @echo "Testing of doctests in the sources finished, look at the " \
167 | "results in $(BUILDDIR)/doctest/output.txt."
168 |
169 | xml:
170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
171 | @echo
172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
173 |
174 | pseudoxml:
175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
176 | @echo
177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
178 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | API
2 | ===
3 |
4 | High Level API
5 | --------------
6 |
7 | .. automodule:: pynamodb.models
8 | :members: Model
9 |
10 | .. automodule:: pynamodb.attributes
11 | :members:
12 |
13 | .. automodule:: pynamodb.indexes
14 | :members:
15 |
16 | .. automodule:: pynamodb.transactions
17 | :members:
18 |
19 | .. automodule:: pynamodb.pagination
20 | :members:
21 |
22 | Low Level API
23 | -------------
24 |
25 | .. automodule:: pynamodb.connection
26 | :members: Connection, TableConnection
27 |
28 | Exceptions
29 | ----------
30 |
31 | .. autoexception:: pynamodb.exceptions.PynamoDBException
32 | .. autoexception:: pynamodb.exceptions.PynamoDBConnectionError
33 | .. autoexception:: pynamodb.exceptions.DeleteError
34 | .. autoexception:: pynamodb.exceptions.QueryError
35 | .. autoexception:: pynamodb.exceptions.ScanError
36 | .. autoexception:: pynamodb.exceptions.PutError
37 | .. autoexception:: pynamodb.exceptions.UpdateError
38 | .. autoexception:: pynamodb.exceptions.GetError
39 | .. autoexception:: pynamodb.exceptions.TableError
40 | .. autoexception:: pynamodb.exceptions.TableDoesNotExist
41 | .. autoexception:: pynamodb.exceptions.DoesNotExist
42 | .. autoexception:: pynamodb.exceptions.TransactWriteError
43 | .. autoexception:: pynamodb.exceptions.TransactGetError
44 | .. autoexception:: pynamodb.exceptions.InvalidStateError
45 | .. autoexception:: pynamodb.exceptions.AttributeDeserializationError
46 | .. autoexception:: pynamodb.exceptions.AttributeNullError
47 | .. autoclass:: pynamodb.exceptions.CancellationReason
48 |
--------------------------------------------------------------------------------
/docs/awsaccess.rst:
--------------------------------------------------------------------------------
1 | AWS Access
2 | ==========
3 |
4 | PynamoDB uses botocore to interact with the DynamoDB API. Thus, any method of configuration supported by ``botocore`` works with PynamoDB.
5 | For local development the use of environment variables such as `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
6 | is probably preferable. You can of course use IAM users, as recommended by AWS. In addition
7 | `EC2 roles <https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html>`_ will work as well and
8 | would be recommended when running on EC2.
9 |
10 | As for the permissions granted via IAM, many tasks can be carried out by PynamoDB. So you should construct your
11 | policies as required, see the
12 | `DynamoDB <https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazondynamodb.html>`_ docs for more
13 | information.
14 |
15 | If for some reason you can't use conventional AWS configuration methods, you can set the credentials in the Model Meta class:
16 |
17 | .. code-block:: python
18 |
19 | from pynamodb.models import Model
20 |
21 | class MyModel(Model):
22 | class Meta:
23 | aws_access_key_id = 'my_access_key_id'
24 | aws_secret_access_key = 'my_secret_access_key'
25 | aws_session_token = 'my_session_token' # Optional, only for temporary credentials like those received when assuming a role
26 |
27 | Finally, see the `AWS CLI documentation <https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html>`_
28 | for more details on how to pass credentials to botocore.
--------------------------------------------------------------------------------
/docs/batch.rst:
--------------------------------------------------------------------------------
1 | Batch Operations
2 | ================
3 |
4 | Batch operations are supported using context managers, and iterators. The DynamoDB API has limits for each batch operation
5 | that it supports, but PynamoDB removes the need to implement your own grouping or pagination. Instead, it handles
6 | pagination for you automatically.
7 |
8 |
9 | .. note::
10 |
11 | DynamoDB limits batch write operations to 25 `PutRequests` and `DeleteRequests` combined. `PynamoDB` automatically
12 | groups your writes 25 at a time for you.
13 |
14 | Suppose that you have defined a `Thread` Model for the examples below.
15 |
16 | .. code-block:: python
17 |
18 | from pynamodb.models import Model
19 | from pynamodb.attributes import (
20 | UnicodeAttribute, NumberAttribute
21 | )
22 |
23 |
24 | class Thread(Model):
25 | class Meta:
26 | table_name = 'Thread'
27 |
28 | forum_name = UnicodeAttribute(hash_key=True)
29 | subject = UnicodeAttribute(range_key=True)
30 | views = NumberAttribute(default=0)
31 |
32 |
33 | Batch Writes
34 | ^^^^^^^^^^^^
35 |
36 | Here is an example using a context manager for a bulk write operation:
37 |
38 | .. code-block:: python
39 |
40 | with Thread.batch_write() as batch:
41 | items = [Thread('forum-{0}'.format(x), 'subject-{0}'.format(x)) for x in range(1000)]
42 | for item in items:
43 | batch.save(item)
44 |
45 | Batch Gets
46 | ^^^^^^^^^^
47 |
48 | Here is an example using an iterator for retrieving items in bulk:
49 |
50 | .. code-block:: python
51 |
52 | item_keys = [('forum-{0}'.format(x), 'subject-{0}'.format(x)) for x in range(1000)]
53 | for item in Thread.batch_get(item_keys):
54 | print(item)
55 |
56 | Query Filters
57 | ^^^^^^^^^^^^^
58 |
59 | You can query items from your table using a simple syntax:
60 |
61 | .. code-block:: python
62 |
63 | for item in Thread.query('ForumName', Thread.subject.startswith('mygreatprefix')):
64 | print("Query returned item {0}".format(item))
65 |
66 | Additionally, you can filter the results before they are returned using condition expressions:
67 |
68 | .. code-block:: python
69 |
70 | for item in Thread.query('ForumName', Thread.subject == 'Subject', Thread.views > 0):
71 | print("Query returned item {0}".format(item))
72 |
73 |
74 |
75 | Query filters use the condition expression syntax (see :ref:`conditions`).
76 |
77 | .. note::
78 |
79 | DynamoDB only allows the following conditions on range keys: `==`, `<`, `<=`, `>`, `>=`, `between`, and `startswith`.
80 | DynamoDB does not allow multiple conditions using range keys.
81 |
82 |
83 | Scan Filters
84 | ^^^^^^^^^^^^
85 |
86 | Scan filters have the same syntax as Query filters, but support all condition expressions:
87 |
88 | .. code-block:: python
89 |
90 | >>> for item in Thread.scan(Thread.forum_name.startswith('Prefix') & (Thread.views > 10)):
91 | print(item)
92 |
93 | Limiting results
94 | ^^^^^^^^^^^^^^^^
95 |
96 | Both `Scan` and `Query` results can be limited to a maximum number of items using the `limit` argument.
97 |
98 | .. code-block:: python
99 |
100 | for item in Thread.query('ForumName', Thread.subject.startswith('mygreatprefix'), limit=5):
101 | print("Query returned item {0}".format(item))
102 |
--------------------------------------------------------------------------------
/docs/conditional.rst:
--------------------------------------------------------------------------------
1 | .. _conditional_operations:
2 |
3 | Conditional Operations
4 | ======================
5 |
6 | Some DynamoDB operations support the inclusion of conditions. The user can supply a condition to be
7 | evaluated by DynamoDB before an item is modified (with save, update and delete) or before an item is included
8 | in the result (with query and scan). See the `official documentation <https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.ConditionExpressions.html>`_
9 | for more details.
10 |
11 | Suppose that you have defined a `Thread` Model for the examples below.
12 |
13 | .. code-block:: python
14 |
15 | from pynamodb.models import Model
16 |     from pynamodb.attributes import UnicodeAttribute, NumberAttribute, ListAttribute, MapAttribute
17 |
18 |
19 | class Thread(Model):
20 | class Meta:
21 | table_name = 'Thread'
22 |
23 | forum_name = UnicodeAttribute(hash_key=True)
24 | subject = UnicodeAttribute(range_key=True)
25 | views = NumberAttribute(default=0)
26 | authors = ListAttribute()
27 | properties = MapAttribute()
28 |
29 |
30 | .. _conditions:
31 |
32 | Condition Expressions
33 | ^^^^^^^^^^^^^^^^^^^^^
34 |
35 | PynamoDB supports creating condition expressions from attributes using a mix of built-in operators and method calls.
36 | Any value provided will be serialized using the serializer defined for that attribute.
37 | See the `comparison operator and function reference <https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.OperatorsAndFunctions.html>`_
38 | for more details.
39 |
40 | .. csv-table::
41 | :header: DynamoDB Condition, PynamoDB Syntax, Attribute Types, Example
42 |
43 | =, ==, Any, :code:`Thread.forum_name == 'Some Forum'`
44 | <>, !=, Any, :code:`Thread.forum_name != 'Some Forum'`
45 | <, <, "Binary, Number, String", :code:`Thread.views < 10`
46 | <=, <=, "Binary, Number, String", :code:`Thread.views <= 10`
47 | >, >, "Binary, Number, String", :code:`Thread.views > 10`
48 | >=, >=, "Binary, Number, String", :code:`Thread.views >= 10`
49 | BETWEEN, "between( `lower` , `upper` )", "Binary, Number, String", ":code:`Thread.views.between(1, 5)`"
50 | IN, is_in( `*values` ), "Binary, Number, String", ":code:`Thread.subject.is_in('Subject', 'Other Subject')`"
51 | attribute_exists ( `path` ), exists(), Any, :code:`Thread.forum_name.exists()`
52 | attribute_not_exists ( `path` ), does_not_exist(), Any, :code:`Thread.forum_name.does_not_exist()`
53 | "attribute_type ( `path` , `type` )", is_type(), Any, :code:`Thread.forum_name.is_type()`
54 | "begins_with ( `path` , `substr` )", startswith( `prefix` ), String, :code:`Thread.subject.startswith('Example')`
55 | "contains ( `path` , `operand` )", contains( `item` ), "Set, String", :code:`Thread.subject.contains('foobar')`
56 | size ( `path` ), size( `attribute` ), "Binary, List, Map, Set, String", :code:`size(Thread.subject) == 10`
57 | AND, &, Any, :code:`(Thread.views > 1) & (Thread.views < 5)`
58 | OR, \|, Any, :code:`(Thread.views < 1) | (Thread.views > 5)`
59 | NOT, ~, Any, :code:`~Thread.subject.contains('foobar')`
60 |
61 | Condition expressions using nested list and map attributes can be created with Python's item operator ``[]``.
62 |
63 | .. code-block:: python
64 |
65 | # Query for threads where 'properties' map contains key 'emoji'
66 | Thread.query(..., filter_condition=Thread.properties['emoji'].exists())
67 |
68 | # Query for threads where the first author's name contains "John"
69 | Thread.authors[0].contains("John")
70 |
71 | Conditions can be composited using ``&`` (AND) and ``|`` (OR) operators. For the ``&`` (AND) operator, the left-hand side
72 | operand can be ``None`` to allow easier chaining of filter conditions:
73 |
74 | .. code-block:: python
75 |
76 | condition = None
77 |
78 | if request.subject:
79 | condition &= Thread.subject.contains(request.subject)
80 |
81 | if request.min_views:
82 | condition &= Thread.views >= request.min_views
83 |
84 | results = Thread.query(..., filter_condition=condition)
85 |
86 | Conditioning on keys
87 | ^^^^^^^^^^^^^^^^^^^^
88 |
89 | When writing to a table (save, update, delete), an ``exists()`` condition on a key attribute
90 | ensures that the item already exists (under the given key) in the table before the operation.
91 | For example, a `save` or `update` would update an existing item, but fail if the item
92 | does not exist.
93 |
94 | Correspondingly, a ``does_not_exist()`` condition on a key ensures that the item
95 | does not exist. For example, a `save` with such a condition ensures that it's not
96 | overwriting an existing item.
97 |
98 | For models with a range key, conditioning ``exists()`` on either the hash key
99 | or the range key has the same effect. There is no way to condition on *some* item
100 | existing with the given hash key. For example:
101 |
102 | .. code-block:: python
103 |
104 | thread = Thread('DynamoDB', 'Using conditions')
105 |
106 | # This will fail if the item ('DynamoDB', 'Using conditions') does not exist,
107 | # even if the item ('DynamoDB', 'Using update expressions') does.
108 | thread.save(condition=Thread.forum_name.exists())
109 |
110 | # This will fail if the item ('DynamoDB', 'Using conditions') does not exist,
111 | # even if the item ('S3', 'Using conditions') does.
112 | thread.save(condition=Thread.subject.exists())
113 |
114 |
115 | Conditional Model.save
116 | ^^^^^^^^^^^^^^^^^^^^^^
117 |
118 | This example saves a `Thread` item, only if the item exists.
119 |
120 | .. code-block:: python
121 |
122 | thread_item = Thread('Existing Forum', 'Example Subject')
123 |
124 | # DynamoDB will only save the item if forum_name exists
125 | print(thread_item.save(Thread.forum_name.exists()))
126 |
127 | # You can specify multiple conditions
128 | print(thread_item.save(Thread.forum_name.exists() & Thread.subject.contains('foobar')))
129 |
130 |
131 | Conditional Model.update
132 | ^^^^^^^^^^^^^^^^^^^^^^^^
133 |
134 | This example will update a `Thread` item, if the `views` attribute is less than 5 *OR* greater than 10:
135 |
136 | .. code-block:: python
137 |
138 | thread_item.update(condition=(Thread.views < 5) | (Thread.views > 10))
139 |
140 |
141 | Conditional Model.delete
142 | ^^^^^^^^^^^^^^^^^^^^^^^^
143 |
144 | This example will delete the item, only if its `views` attribute is equal to 0.
145 |
146 | .. code-block:: python
147 |
148 | print(thread_item.delete(Thread.views == 0))
149 |
150 |
151 | Conditional Operation Failures
152 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
153 |
154 | You can check for conditional operation failures by inspecting the cause of the raised exception:
155 |
156 | .. code-block:: python
157 |
158 | try:
159 | thread_item.save(Thread.forum_name.exists())
160 | except PutError as e:
161 | if e.cause_response_code == "ConditionalCheckFailedException":
162 | raise ThreadDidNotExistError()
163 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | Contributing
2 | ============
3 |
4 | Pull requests are welcome, forking from the ``master`` branch. If you are new to GitHub, be sure and check out
5 | GitHub's `Hello World `_ tutorial.
6 |
7 |
8 | Environment Setup
9 | -----------------
10 |
11 | You'll need a python3 installation and a virtualenv. There are many ways to manage
12 | virtualenvs, but a minimal example is shown below.
13 |
14 | .. code-block:: bash
15 |
16 | $ virtualenv -p python3 venv && source venv/bin/activate
17 | $ pip install -e .[signals] -r requirements-dev.txt
18 |
19 |
20 | A java runtime is required to run the integration tests. After installing java, download and untar the
21 | mock dynamodb server like so:
22 |
23 | .. code-block:: bash
24 |
25 | $ wget --quiet http://dynamodb-local.s3-website-us-west-2.amazonaws.com/dynamodb_local_latest.tar.gz -O /tmp/dynamodb_local_latest.tar.gz
26 | $ tar -xzf /tmp/dynamodb_local_latest.tar.gz -C /tmp
27 |
28 | Note that you may want to place files somewhere other than ``/tmp``.
29 |
30 |
31 | Running Tests
32 | -------------
33 |
34 | After installing requirements in environment setup and ensuring your venv is activated, unit tests are run with:
35 |
36 | .. code-block:: bash
37 |
38 | $ pytest tests/ -k "not ddblocal"
39 |
40 |
41 | There are also a set of integration tests that require a local dynamodb server to be mocked.
42 |
43 | .. code-block:: bash
44 |
45 | $ java -Djava.library.path=/tmp/DynamoDBLocal_lib -jar /tmp/DynamoDBLocal.jar -inMemory -port 8000
46 | $ pytest tests/ # in another window
47 |
48 |
49 | Backwards Compatibility
50 | -----------------------
51 |
52 | Particular care should be paid to backwards compatibility when making any change in PynamoDB, especially
53 | with attributes and serialization/deserialization. Consider data written with an older version of the
54 | library and whether it can still be read after upgrading.
55 |
56 | Where possible, write logic to continue supporting older data for at least one major version to simplify
57 | the upgrade path. Where that's not possible, create a new version of the attribute with a different name
58 | and mark the old one as deprecated.
59 |
60 | Outside of data compatibility, follow the usual semver rules for API changes and limit breaking changes
61 | to a major release.
62 |
63 |
64 | The Scope of the Library
65 | ------------------------
66 |
67 | The purpose of this library is to provide a Pythonic ODM layer on top of DynamoDB to be used
68 | in server applications' runtime, i.e. to enable their various application logic and features.
69 | While striving for the library to be useful, we're also trying to "do one thing well". For this reason:
70 |
71 | - Database administration tasks are out of scope, and while PynamoDB has functions for
72 | operations like CreateTable, CreateIndex and DeleteTable, it's because they are useful
73 | for interacting with dynamodb-local and moto's DynamoDB backend from within test code.
74 |
75 | For this reason, features such as enabling PITR backups, restoring from such backups,
76 | updating indices, etc. are intentionally absent. For getting started and operating
77 | on a small scale, AWS Console and the AWS Command Line Interface (awscli) can be used.
78 | For larger scale, infrastructure provisioning by dedicated tools (such as CloudFormation
79 | or Terraform) would be vastly preferable over anything PynamoDB could offer.
80 |
81 | Per security best practices, we recommend running your application's runtime with an IAM role
82 | having the least privileges necessary for it to function (which likely excludes any database
83 | administration operations).
84 |
85 | - While the library aims to empower application developers, it steers away from high-level features
86 | which are not specific to DynamoDB. For example, a custom attribute which serializes UUIDs
87 | as strings is doubtlessly something many applications have had a need for, but as long as it doesn't
88 | exercise any core DynamoDB functionality (e.g. in the case of a UUID attribute, there isn't
89 | a dedicated DynamoDB data type or API feature for storing UUIDs), we would recommend relegating
90 | such functionality to auxiliary libraries. One such library is `pynamodb-attributes `_.
91 |
92 |
93 | Pull Requests
94 | -------------
95 |
96 | Pull requests should:
97 |
98 | #. Specify an accurate title and detailed description of the change
99 | #. Include thorough testing. Unit tests at a minimum, sometimes integration tests
100 | #. Add test coverage for new code (CI will verify the delta)
101 | #. Add type annotations to any code modified
102 | #. Write documentation for new features
103 | #. Maintain the existing code style (mostly PEP8) and patterns
104 |
105 |
106 | Changelog
107 | ---------
108 |
109 | Any non-trivial change should be documented in the
110 | `release notes `_.
111 | Please include sufficient detail in the PR description, which will be used by
112 | maintainers to populate the release notes.
113 |
114 |
115 | Documentation
116 | -------------
117 |
118 | Docs are built using `sphinx `_ and
119 | the latest are available on `readthedocs `_. A release
120 | of the `latest` tag (tracking master) happens automatically on merge via
121 | a GitHub webhook.
122 |
--------------------------------------------------------------------------------
/docs/examples.rst:
--------------------------------------------------------------------------------
1 | PynamoDB Examples
2 | =================
3 |
4 | A directory of examples is available with the PynamoDB source on `GitHub `__.
5 | The examples are configured to use ``http://localhost:8000`` as the DynamoDB endpoint. For information on how to run DynamoDB locally,
6 | see :ref:`local`.
7 |
8 | .. note::
9 |
10 | You should read the examples before executing them. They are configured to use ``http://localhost:8000`` by default, so
11 | that you can run them without actually consuming DynamoDB resources on AWS, and therefore not costing you any money.
12 |
13 | Install PynamoDB
14 | ^^^^^^^^^^^^^^^^
15 |
16 | Although you can install & run PynamoDB from GitHub, it's best to use a released version from PyPI::
17 |
18 | $ pip install pynamodb
19 |
20 |
21 | Getting the examples
22 | ^^^^^^^^^^^^^^^^^^^^
23 |
24 | You can clone the PynamoDB repository to get the examples::
25 |
26 | $ git clone https://github.com/pynamodb/PynamoDB.git
27 |
28 | Running the examples
29 | ^^^^^^^^^^^^^^^^^^^^
30 |
31 | Go into the examples directory::
32 |
33 | $ cd pynamodb/examples
34 |
35 | Configuring the examples
36 | ^^^^^^^^^^^^^^^^^^^^^^^^
37 |
38 | Each example is configured to use ``http://localhost:8000`` as the DynamoDB endpoint. You'll need
39 | to edit an example and either remove the ``host`` setting (causing PynamoDB to use a default), or
40 | specify your own.
41 |
42 | Running an example
43 | ^^^^^^^^^^^^^^^^^^
44 |
45 | Each example file can be executed as a script by a Python interpreter::
46 |
47 | $ python model.py
48 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | ..
2 |
3 |
4 | Welcome to PynamoDB's documentation!
5 | ====================================
6 |
7 | PynamoDB is a Pythonic interface to Amazon's DynamoDB. By using simple, yet powerful abstractions
8 | over the DynamoDB API, PynamoDB allows you to start developing immediately.
9 |
10 | Features
11 | ========
12 |
13 | * Python 3 support
14 | * Support for Unicode, Binary, JSON, Number, Set, and UTC Datetime attributes
15 | * Support for DynamoDB Local
16 | * Support for all of the DynamoDB API
17 | * Support for Global and Local Secondary Indexes
18 | * Batch operations with automatic pagination
19 | * Iterators for working with Query and Scan operations
20 | * `Fully tested `_
21 |
22 | Topics
23 | ======
24 |
25 | .. toctree::
26 | :maxdepth: 2
27 |
28 | quickstart
29 | tutorial
30 | indexes
31 | batch
32 | updates
33 | conditional
34 | polymorphism
35 | attributes
36 | transaction
37 | optimistic_locking
38 | rate_limited_operations
39 | local
40 | signals
41 | examples
42 | settings
43 | low_level
44 | awsaccess
45 | logging
46 | contributing
47 | release_notes
48 | versioning
49 | upgrading
50 |
51 | API docs
52 | ========
53 |
54 | .. toctree::
55 | :maxdepth: 2
56 |
57 | api
58 |
59 |
60 | Indices and tables
61 | ==================
62 |
63 | * :ref:`genindex`
64 | * :ref:`modindex`
65 | * :ref:`search`
66 |
--------------------------------------------------------------------------------
/docs/indexes.rst:
--------------------------------------------------------------------------------
1 | Index Queries
2 | ======================
3 |
4 | DynamoDB supports two types of indexes: global secondary indexes, and local secondary indexes.
5 | Indexes can make accessing your data more efficient, and should be used when appropriate. See
6 | `the documentation for more information `__.
7 |
8 | Index Settings
9 | ^^^^^^^^^^^^^^
10 |
11 | The ``Meta`` class is required with at least the ``projection`` class attribute to specify the projection type. For Global secondary indexes,
12 | the ``read_capacity_units`` and ``write_capacity_units`` also need to be provided. By default, PynamoDB will use the class attribute
13 | name that you provide on the model as the ``index_name`` used when making requests to the DynamoDB API. You can override the default
14 | name by providing the ``index_name`` class attribute in the ``Meta`` class of the index.
15 |
16 |
17 | Global Secondary Indexes
18 | ^^^^^^^^^^^^^^^^^^^^^^^^
19 |
20 | Indexes are defined as classes, just like models. Here is a simple index class:
21 |
22 | .. code-block:: python
23 |
24 | from pynamodb.indexes import GlobalSecondaryIndex, AllProjection
25 | from pynamodb.attributes import NumberAttribute
26 |
27 |
28 | class ViewIndex(GlobalSecondaryIndex):
29 | """
30 | This class represents a global secondary index
31 | """
32 | class Meta:
33 | # index_name is optional, but can be provided to override the default name
34 | index_name = 'foo-index'
35 | read_capacity_units = 2
36 | write_capacity_units = 1
37 | # All attributes are projected
38 | projection = AllProjection()
39 |
40 | # This attribute is the hash key for the index
41 | # Note that this attribute must also exist
42 | # in the model
43 | view = NumberAttribute(default=0, hash_key=True)
44 |
45 |
46 | Global indexes require you to specify the read and write capacity, as we have done
47 | in this example. Indexes are said to *project* attributes from the main table into the index.
48 | As such, there are three styles of projection in DynamoDB, and PynamoDB provides three corresponding
49 | projection classes.
50 |
51 | * :py:class:`AllProjection `: All attributes are projected.
52 | * :py:class:`KeysOnlyProjection `: Only the index and primary keys are projected.
53 | * :py:class:`IncludeProjection(attributes) `: Only the specified ``attributes`` are projected.
54 |
55 | We still need to attach the index to the model in order for us to use it. You define it as
56 | a class attribute on the model, as in this example:
57 |
58 | .. code-block:: python
59 |
60 | from pynamodb.models import Model
61 | from pynamodb.attributes import UnicodeAttribute, NumberAttribute
62 |
63 |
64 | class TestModel(Model):
65 | """
66 | A test model that uses a global secondary index
67 | """
68 | class Meta:
69 | table_name = 'TestModel'
70 | forum = UnicodeAttribute(hash_key=True)
71 | thread = UnicodeAttribute(range_key=True)
72 | view_index = ViewIndex()
73 | view = NumberAttribute(default=0)
74 |
75 |
76 | Local Secondary Indexes
77 | ^^^^^^^^^^^^^^^^^^^^^^^
78 |
79 | Local secondary indexes are defined just like global ones, but they inherit from ``LocalSecondaryIndex`` instead:
80 |
81 | .. code-block:: python
82 |
83 | from pynamodb.indexes import LocalSecondaryIndex, AllProjection
85 | from pynamodb.attributes import NumberAttribute, UnicodeAttribute
85 |
86 |
87 | class ViewIndex(LocalSecondaryIndex):
88 | """
89 | This class represents a local secondary index
90 | """
91 | class Meta:
92 | # All attributes are projected
93 | projection = AllProjection()
94 | forum = UnicodeAttribute(hash_key=True)
95 | view = NumberAttribute(range_key=True)
96 |
97 | Every local secondary index must meet the following conditions:
98 |
99 | - The partition key (hash key) is the same as that of its base table.
100 | - The sort key (range key) consists of exactly one scalar attribute. The range key can be any attribute.
101 | - The sort key (range key) of the base table is projected into the index, where it acts as a non-key attribute.
102 |
103 | Querying an index
104 | ^^^^^^^^^^^^^^^^^^
105 |
106 | Index queries use the same syntax as model queries. Continuing our example, we can query
107 | the ``view_index`` global secondary index simply by calling ``query``:
108 |
109 | .. code-block:: python
110 |
111 | for item in TestModel.view_index.query(1):
112 | print("Item queried from index: {0}".format(item))
113 |
114 | This example queries items from the table using the global secondary index, called ``view_index``, using
115 | a hash key value of 1 for the index. This would return all ``TestModel`` items that have a ``view`` attribute
116 | of value 1.
117 |
118 | Local secondary index queries have a similar syntax. They require a hash key, and can include conditions on the
119 | range key of the index. Here is an example that queries the index for values of ``view`` greater than zero:
120 |
121 | .. code-block:: python
122 |
123 | for item in TestModel.view_index.query('foo', TestModel.view > 0):
124 | print("Item queried from index: {0}".format(item.view))
125 |
126 |
127 | Pagination and last evaluated key
128 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
129 |
130 | The query returns a ``ResultIterator`` object that transparently paginates through results.
131 | To stop iterating and allow the caller to continue later on, use the ``last_evaluated_key`` property
132 | of the iterator:
133 |
134 | .. code-block:: python
135 |
136 | def iterate_over_page(last_evaluated_key = None):
137 | results = TestModel.view_index.query('foo', TestModel.view > 0,
138 | limit=10,
139 | last_evaluated_key=last_evaluated_key)
140 | for item in results:
141 | ...
142 | return results.last_evaluated_key
143 |
144 | The ``last_evaluated_key`` is effectively the key attributes of the last iterated item; the next returned items will be the items following it. For index queries, the returned ``last_evaluated_key`` will contain both the table's hash/range keys and the index's hash/range keys. This is due to the fact that DynamoDB indexes have no uniqueness constraint, i.e. the same hash/range pair can map to multiple items. For the example above, the ``last_evaluated_key`` will look like:
145 |
146 | .. code-block:: python
147 |
148 | {
149 | "forum": {"S": "..."},
150 | "thread": {"S": "..."},
151 | "view": {"N": "..."}
152 | }
153 |
--------------------------------------------------------------------------------
/docs/local.rst:
--------------------------------------------------------------------------------
1 | .. _local:
2 |
3 | Use PynamoDB Locally
4 | ====================
5 |
6 | Several DynamoDB compatible servers have been written for testing and debugging purposes. PynamoDB can be
7 | used with any server that provides the same API as DynamoDB.
8 |
9 | PynamoDB has been tested with two DynamoDB compatible servers, `DynamoDB Local `_
10 | and `dynalite `_.
11 |
12 | To use a local server, you need to set the ``host`` attribute on your ``Model``'s ``Meta`` class to the hostname and port
13 | that your server is listening on.
14 |
15 | .. note::
16 |
17 | Local implementations of DynamoDB such as DynamoDB Local or dynalite may not be fully featured
18 | (and I don't maintain either of those packages), so you may encounter errors or bugs with a
19 | local implementation that you would not encounter using DynamoDB.
20 |
21 |
22 | .. code-block:: python
23 |
24 | from pynamodb.models import Model
25 | from pynamodb.attributes import UnicodeAttribute
26 |
27 |
28 | class Thread(Model):
29 | class Meta:
30 | table_name = "Thread"
31 | host = "http://localhost:8000"
32 | forum_name = UnicodeAttribute(hash_key=True)
33 |
34 | Running dynalite
35 | ^^^^^^^^^^^^^^^^
36 |
37 | Make sure you have the Node Package Manager installed (see `npm instructions `_).
38 |
39 | Install dynalite::
40 |
41 | $ npm install -g dynalite
42 |
43 | Run dynalite::
44 |
45 | $ dynalite --port 8000
46 |
47 | That's it, you've got a DynamoDB compatible server running on port 8000.
48 |
49 |
50 | Running DynamoDB Local
51 | ^^^^^^^^^^^^^^^^^^^^^^
52 |
53 | DynamoDB local is a tool provided by Amazon that mocks the DynamoDB API, and uses a local file to
54 | store your data. You can use DynamoDB local with PynamoDB for testing, debugging, or offline development.
55 | For more information, you can read `Amazon's Announcement `_ and
56 | `Jeff Barr's blog post `_ about it.
57 |
58 | * Download the `latest version of DynamoDB Local `_.
59 | * Unpack the contents of the archive into a directory of your choice.
60 |
61 | DynamoDB local requires the `Java Runtime Environment `_ version 7. Make sure the JRE is installed before continuing.
62 |
63 | From the directory where you unpacked DynamoDB local, you can launch it like this:
64 |
65 | ::
66 |
67 | $ java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar
68 |
69 | Once the server has started, you should see output:
70 |
71 | ::
72 |
73 | $ java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar
74 | 2014-03-28 12:09:10.892:INFO:oejs.Server:jetty-8.1.12.v20130726
75 | 2014-03-28 12:09:10.943:INFO:oejs.AbstractConnector:Started SelectChannelConnector@0.0.0.0:8000
76 |
77 | Now DynamoDB local is running locally, listening on port 8000 by default.
78 |
79 |
80 |
81 |
--------------------------------------------------------------------------------
/docs/logging.rst:
--------------------------------------------------------------------------------
1 | Logging
2 | =======
3 |
4 | Logging in PynamoDB uses the standard Python logging facilities. PynamoDB is built on top of ``botocore`` which also
5 | uses standard Python logging facilities. Logging is quite verbose, so you may only wish to enable it for debugging purposes.
6 |
7 | Here is an example showing how to enable logging for PynamoDB:
8 |
9 | .. code-block:: python
10 |
11 | import logging
12 | from pynamodb.models import Model
13 | from pynamodb.attributes import (
14 | UnicodeAttribute, NumberAttribute
15 | )
16 |
17 | logging.basicConfig()
18 | log = logging.getLogger("pynamodb")
19 | log.setLevel(logging.DEBUG)
20 | log.propagate = True
21 |
22 | class Thread(Model):
23 | class Meta:
24 | table_name = 'Thread'
25 |
26 | forum_name = UnicodeAttribute(hash_key=True)
27 | subject = UnicodeAttribute(range_key=True)
28 | views = NumberAttribute(default=0)
29 |
30 | # Scan
31 | for item in Thread.scan():
32 | print(item)
33 |
--------------------------------------------------------------------------------
/docs/low_level.rst:
--------------------------------------------------------------------------------
1 | .. _low-level:
2 |
3 | Low Level API
4 | =============
5 |
6 | PynamoDB was designed with high level features in mind, but includes a fully featured low level API.
7 | Any operation can be performed with the low level API, and the higher level PynamoDB features were all
8 | written on top of it.
9 |
10 | Creating a connection
11 | ^^^^^^^^^^^^^^^^^^^^^
12 |
13 | Creating a connection is simple:
14 |
15 | .. code-block:: python
16 |
17 | from pynamodb.connection import Connection
18 |
19 | conn = Connection()
20 |
21 | You can specify a different DynamoDB url:
22 |
23 | .. code-block:: python
24 |
25 | conn = Connection(host='http://alternative-domain/')
26 |
27 | By default, PynamoDB will connect to the us-east-1 region, but you can specify a different one.
28 |
29 | .. code-block:: python
30 |
31 | conn = Connection(region='us-west-1')
32 |
33 |
34 | Modifying tables
35 | ^^^^^^^^^^^^^^^^
36 |
37 | You can easily list tables:
38 |
39 | .. code-block:: python
40 |
41 | >>> conn.list_tables()
42 | {u'TableNames': [u'Thread']}
43 |
44 | or delete a table:
45 |
46 | .. code-block:: python
47 |
48 | >>> conn.delete_table('Thread')
49 |
50 | If you want to change the capacity of a table, that can be done as well:
51 |
52 | .. code-block:: python
53 |
54 | >>> conn.update_table('Thread', read_capacity_units=20, write_capacity_units=20)
55 |
56 | You can create tables as well, although the syntax is verbose. You should really use the model API instead,
57 | but here is a low level example to demonstrate the point:
58 |
59 | .. code-block:: python
60 |
61 | kwargs = {
62 | 'write_capacity_units': 1,
63 | 'read_capacity_units': 1,
64 | 'attribute_definitions': [
65 | {
66 | 'attribute_type': 'S',
67 | 'attribute_name': 'key1'
68 | },
69 | {
70 | 'attribute_type': 'S',
71 | 'attribute_name': 'key2'
72 | }
73 | ],
74 | 'key_schema': [
75 | {
76 | 'key_type': 'HASH',
77 | 'attribute_name': 'key1'
78 | },
79 | {
80 | 'key_type': 'RANGE',
81 | 'attribute_name': 'key2'
82 | }
83 | ]
84 | }
85 | conn.create_table('table_name', **kwargs)
86 |
87 | You can also use `update_table` to change the Provisioned Throughput capacity of Global Secondary Indexes:
88 |
89 | .. code-block:: python
90 |
91 | >>> kwargs = {
92 | 'global_secondary_index_updates': [
93 | {
94 | 'index_name': 'index_name',
95 | 'read_capacity_units': 10,
96 | 'write_capacity_units': 10
97 | }
98 | ]
99 | }
100 | >>> conn.update_table('table_name', **kwargs)
101 |
102 | Modifying items
103 | ^^^^^^^^^^^^^^^
104 |
105 | The low level API can perform item operations too, such as getting an item:
106 |
107 | .. code-block:: python
108 |
109 | conn.get_item('table_name', 'hash_key', 'range_key')
110 |
111 | You can put items as well, specifying the keys and any other attributes:
112 |
113 | .. code-block:: python
114 |
115 | conn.put_item('table_name', 'hash_key', 'range_key', attributes={'key': 'value'})
116 |
117 | Deleting an item has similar syntax:
118 |
119 | .. code-block:: python
120 |
121 | conn.delete_item('table_name', 'hash_key', 'range_key')
122 |
123 |
--------------------------------------------------------------------------------
/docs/polymorphism.rst:
--------------------------------------------------------------------------------
1 | .. _polymorphism:
2 |
3 | Polymorphism
4 | ============
5 |
6 | PynamoDB supports polymorphism through the use of discriminators.
7 |
8 | A discriminator is a value that is written to DynamoDB that identifies the python class being stored.
9 |
10 | Discriminator Attributes
11 | ^^^^^^^^^^^^^^^^^^^^^^^^
12 |
13 | The discriminator value is stored using a special attribute, the DiscriminatorAttribute.
14 | Only a single DiscriminatorAttribute can be defined on a class.
15 |
16 | The discriminator value can be assigned to a class as part of the definition:
17 |
18 | .. code-block:: python
19 |
20 | class ParentClass(MapAttribute):
21 | cls = DiscriminatorAttribute()
22 |
23 | class ChildClass(ParentClass, discriminator='child'):
24 | pass
25 |
26 | Declaring the discriminator value as part of the class definition will automatically register the class with the discriminator attribute.
27 | A class can also be registered manually:
28 |
29 | .. code-block:: python
30 |
31 | class ParentClass(MapAttribute):
32 | cls = DiscriminatorAttribute()
33 |
34 | class ChildClass(ParentClass):
35 | pass
36 |
37 | ParentClass._cls.register_class(ChildClass, 'child')
38 |
39 | .. note::
40 |
41 | A class may be registered with a discriminator attribute multiple times.
42 | Only the first registered value is used during serialization;
43 | however, any registered value can be used to deserialize the class.
44 | This behavior is intended to facilitate migrations if discriminator values must be changed.
45 |
46 | .. warning::
47 |
48 | Discriminator values are written to DynamoDB.
49 | Changing the value after items have been saved to the database can result in deserialization failures.
50 | In order to read items with an old discriminator value, the old value must be manually registered.
51 |
52 |
53 | Model Discriminators
54 | ^^^^^^^^^^^^^^^^^^^^
55 |
56 | Model classes also support polymorphism through the use of discriminators.
57 | (Note: currently discriminator attributes cannot be used as the hash or range key of a table.)
58 |
59 | .. code-block:: python
60 |
61 | class ParentModel(Model):
62 | class Meta:
63 | table_name = 'polymorphic_table'
64 | id = UnicodeAttribute(hash_key=True)
65 | cls = DiscriminatorAttribute()
66 |
67 | class FooModel(ParentModel, discriminator='Foo'):
68 | foo = UnicodeAttribute()
69 |
70 | class BarModel(ParentModel, discriminator='Bar'):
71 | bar = UnicodeAttribute()
72 |
73 | BarModel(id='Hello', bar='World!').serialize()
74 | # {'id': {'S': 'Hello'}, 'cls': {'S': 'Bar'}, 'bar': {'S': 'World!'}}
75 | .. note::
76 |
77 | Read operations that are performed on a class that has a discriminator value are slightly modified to ensure that only instances of the class are returned.
78 | Query and scan operations transparently add a filter condition to ensure that only items with a matching discriminator value are returned.
79 | Get and batch get operations will raise a ``ValueError`` if the returned item(s) are not a subclass of the model being read.
80 |
--------------------------------------------------------------------------------
/docs/quickstart.rst:
--------------------------------------------------------------------------------
1 | Usage
2 | =====
3 |
4 | PynamoDB was written from scratch to be Pythonic, and supports the entire DynamoDB API.
5 |
6 | Creating a model
7 | ^^^^^^^^^^^^^^^^
8 |
9 | Let's create a simple model to describe users.
10 |
11 | ::
12 |
13 | from pynamodb.models import Model
14 | from pynamodb.attributes import UnicodeAttribute
15 |
16 | class UserModel(Model):
17 | """
18 | A DynamoDB User
19 | """
20 | class Meta:
21 | table_name = 'dynamodb-user'
22 | region = 'us-west-1'
23 | email = UnicodeAttribute(hash_key=True)
24 | first_name = UnicodeAttribute()
25 | last_name = UnicodeAttribute()
26 |
27 | Models are backed by DynamoDB tables. In this example, the model has a hash key attribute
28 | that stores the user's email address. Any attribute can be set as a hash key by including the argument
29 | `hash_key=True`. The `region` attribute is not required and, if omitted, the default
30 | `boto configuration search behavior
31 | `_
32 | will be used to determine the region.
33 |
34 | PynamoDB allows you to create the table:
35 |
36 | >>> UserModel.create_table(read_capacity_units=1, write_capacity_units=1)
37 |
38 | Now you can create a user in local memory:
39 |
40 | >>> user = UserModel('test@example.com', first_name='Samuel', last_name='Adams')
41 | dynamodb-user
42 |
43 | To write the user to DynamoDB, just call save:
44 |
45 | >>> user.save()
46 |
47 | You can see that the table count has changed:
48 |
49 | >>> UserModel.count()
50 | 1
51 |
52 | Attributes can be accessed and set normally:
53 |
54 | >>> user.email
55 | 'test@example.com'
56 | >>> user.email = 'foo-bar'
57 | >>> user.email
58 | 'foo-bar'
59 |
60 | Did another process update the user? We can refresh the user with data from DynamoDB::
61 |
62 | >>> user.refresh()
63 |
64 | Ready to delete the user?
65 |
66 | >>> user.delete()
67 |
68 | .. _changing-items:
69 |
70 | Changing items
71 | ^^^^^^^^^^^^^^
72 |
73 | Changing existing items in the database can be done using either
74 | `update()` or `save()`. There are important differences between the
75 | two.
76 |
77 | Use of `save()` looks like this::
78 |
79 | user = UserModel.get('test@example.com')
80 | user.first_name = 'Robert'
81 | user.save()
82 |
83 | Use of `update()` (in its simplest form) looks like this::
84 |
85 | user = UserModel.get('test@example.com')
86 | user.update(
87 | actions=[
88 | UserModel.first_name.set('Robert')
89 | ]
90 | )
91 |
92 | `save()` will entirely replace an object (it internally uses `PutItem
93 | <https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_PutItem.html>`_). As
94 | a consequence, even if you modify only one attribute prior to calling
95 | `save()`, the entire object is re-written. Any modifications done to
96 | the same user by other processes will be lost, even if made to other
97 | attributes that you did not change. To avoid this, use `update()` to
98 | perform more fine grained updates or see the
99 | :ref:`conditional_operations` for how to avoid race conditions
100 | entirely.
101 |
102 | Additionally, PynamoDB ignores attributes it does not know about when
103 | reading an object from the database. As a result, if the item in
104 | DynamoDB contains attributes not declared in your model, `save()` will
105 | cause those attributes to be deleted.
106 |
107 | In particular, performing a rolling upgrade of your application after
108 | having added an attribute is an example of such a situation. To avoid
109 | data loss, either avoid using `save()` or perform a multi-step update
110 | with the first step is to upgrade to a version that merely declares
111 | the attribute on the model without ever setting it to any value.
112 |
113 | Querying
114 | ^^^^^^^^
115 |
116 | `PynamoDB` provides an intuitive abstraction over the DynamoDB Query API.
117 | All of the Query API comparison operators are supported.
118 |
119 | Suppose you had a table with both a hash key that is the user's last name
120 | and a range key that is the user's first name:
121 |
122 | ::
123 |
124 | class UserModel(Model):
125 | """
126 | A DynamoDB User
127 | """
128 | class Meta:
129 | table_name = 'dynamodb-user'
130 | email = UnicodeAttribute()
131 | first_name = UnicodeAttribute(range_key=True)
132 | last_name = UnicodeAttribute(hash_key=True)
133 |
134 | Now, suppose that you want to search the table for users with a last name
135 | 'Smith', and first name that begins with the letter 'J':
136 |
137 | ::
138 |
139 | for user in UserModel.query('Smith', UserModel.first_name.startswith('J')):
140 | print(user.first_name)
141 |
142 | You can combine query terms:
143 |
144 | ::
145 |
146 | for user in UserModel.query('Smith', UserModel.first_name.startswith('J') | UserModel.email.contains('domain.com')):
147 | print(user)
148 |
149 |
150 | Counting Items
151 | ^^^^^^^^^^^^^^
152 |
153 | You can retrieve the count for queries by using the `count` method:
154 |
155 | ::
156 |
157 | print(UserModel.count('Smith', UserModel.first_name.startswith('J')))
158 |
159 |
160 | Counts also work for indexes:
161 |
162 | ::
163 |
164 | print(UserModel.custom_index.count('my_hash_key'))
165 |
166 |
167 | Alternatively, you can retrieve the table item count by calling the `count` method without filters:
168 |
169 | ::
170 |
171 | print(UserModel.count())
172 |
173 |
174 | Note that the first positional argument to `count()` is a `hash_key`. Although
175 | this argument can be `None`, filters must not be used when `hash_key` is `None`:
176 |
177 | ::
178 |
179 | # raises a ValueError
180 | print(UserModel.count(UserModel.first_name == 'John'))
181 |
182 | # returns count of only the matching users
183 | print(UserModel.count('my_hash_key', UserModel.first_name == 'John'))
184 |
185 |
186 | Batch Operations
187 | ^^^^^^^^^^^^^^^^
188 |
189 | `PynamoDB` provides context managers for batch operations.
190 |
191 | .. note::
192 |
193 | DynamoDB limits batch write operations to 25 `PutRequests` and `DeleteRequests` combined. `PynamoDB` automatically groups your writes 25 at a time for you.
194 |
195 | Let's create a whole bunch of users:
196 |
197 | ::
198 |
199 | with UserModel.batch_write() as batch:
200 | for i in range(100):
201 | batch.save(UserModel('user-{0}@example.com'.format(i), first_name='Samuel', last_name='Adams'))
202 |
203 | Now, suppose you want to retrieve all those users:
204 |
205 | ::
206 |
207 | user_keys = [('user-{0}@example.com'.format(i)) for i in range(100)]
208 | for item in UserModel.batch_get(user_keys):
209 | print(item)
210 |
211 | Perhaps you want to delete all these users:
212 |
213 | ::
214 |
215 | with UserModel.batch_write() as batch:
216 | items = [UserModel('user-{0}@example.com'.format(x)) for x in range(100)]
217 | for item in items:
218 | batch.delete(item)
219 |
--------------------------------------------------------------------------------
/docs/rate_limited_operations.rst:
--------------------------------------------------------------------------------
1 | Rate-Limited Operation
2 | ======================
3 |
4 | `Scan`, `Query` and `Count` operations can be rate-limited based on the consumed capacities returned from DynamoDB.
5 | Simply specify the `rate_limit` argument when calling these methods. Rate limited batch writes are not currently supported,
6 | but if you would like to see it in a future version, please add a feature request for it in Issues.
7 |
8 | .. note::
9 |
10 | Rate-limiting is only meant to slow operations down to conform to capacity limitations.
11 | Rate-limiting can not be used to speed operations up. Specifying a higher rate-limit that exceeds the possible
12 | writing speed allowed by the environment will not have any effect.
13 |
14 | Example Usage
15 | ^^^^^^^^^^^^^
16 |
17 | Suppose that you have defined a `User` Model for the examples below.
18 |
19 | .. code-block:: python
20 |
21 | from pynamodb.models import Model
22 | from pynamodb.attributes import (
23 | UnicodeAttribute
24 | )
25 |
26 |
27 | class User(Model):
28 | class Meta:
29 | table_name = 'Users'
30 |
31 | id = UnicodeAttribute(hash_key=True)
32 | name = UnicodeAttribute(range_key=True)
33 |
34 |
35 | Here is an example using `rate-limit` while scanning the `User` model
36 |
37 | .. code-block:: python
38 |
39 | # Using only 5 RCU per second
40 | for user in User.scan(rate_limit=5):
41 | print("User id: {}, name: {}".format(user.id, user.name))
42 |
43 |
44 | Query
45 | ^^^^^
46 |
47 | You can use `rate-limit` when querying items from your table:
48 |
49 | .. code-block:: python
50 |
51 | # Using only 15 RCU per second
52 | for user in User.query('id1', User.name.startswith('re'), rate_limit = 15):
53 | print("Query returned user {0}".format(user))
54 |
55 |
56 | Count
57 | ^^^^^
58 |
59 | You can use `rate-limit` when counting items in your table:
60 |
61 | .. code-block:: python
62 |
63 | # Using only 15 RCU per second
64 | count = User.count(rate_limit=15)
65 | print("Count : {}".format(count))
66 |
67 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | .[signals]
2 | sphinx>=5
3 | sphinx-rtd-theme==1.1.1
4 | sphinx-issues
5 |
--------------------------------------------------------------------------------
/docs/settings.rst:
--------------------------------------------------------------------------------
1 | .. _settings:
2 |
3 | Settings
4 | ========
5 |
6 | Settings reference
7 | ~~~~~~~~~~~~~~~~~~
8 |
9 |
10 | Here is a complete list of settings which control default PynamoDB behavior.
11 |
12 | connect_timeout_seconds
13 | -----------------------
14 |
15 | Default: ``15``
16 |
17 | The time in seconds till a ``ConnectTimeoutError`` is thrown when attempting to make a connection.
18 |
19 |
20 | read_timeout_seconds
21 | -----------------------
22 |
23 | Default: ``30``
24 |
25 | The time in seconds till a ``ReadTimeoutError`` is thrown when attempting to read from a connection.
26 |
27 |
28 | max_retry_attempts
29 | ------------------
30 |
31 | Default: ``3``
32 |
33 | The number of times to retry certain failed DynamoDB API calls. The most common cases eligible for
34 | retries include ``ProvisionedThroughputExceededException`` and ``5xx`` errors.
35 |
36 |
37 | region
38 | ------
39 |
40 | Default: ``"us-east-1"``
41 |
42 | The default AWS region to connect to.
43 |
44 |
45 | max_pool_connections
46 | --------------------
47 |
48 | Default: ``10``
49 |
50 | The maximum number of connections to keep in a connection pool.
51 |
52 |
53 | extra_headers
54 | --------------------
55 |
56 | Default: ``None``
57 |
58 | A dictionary of headers that should be added to every request. This is only useful
59 | when interfacing with DynamoDB through a proxy, where headers are stripped by the
60 | proxy before forwarding along. Failure to strip these headers before sending to AWS
61 | will result in an ``InvalidSignatureException`` due to request signing.
62 |
63 |
64 | host
65 | ------
66 |
67 | Default: automatically constructed by boto to account for region
68 |
69 | The URL endpoint for DynamoDB. This can be used to use a local implementation of DynamoDB such as DynamoDB Local or dynalite.
70 |
71 |
72 | retry_configuration
73 | -------------------
74 |
75 | Default: ``"LEGACY"``
76 |
77 | This controls the PynamoDB retry behavior. The default of ``"LEGACY"`` keeps the
78 | existing PynamoDB retry behavior. If set to ``None``, this will use botocore's default
79 | retry configuration discovery mechanism as documented
80 | `in boto3 <https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html>`_
81 | and
82 | `in the AWS SDK docs <https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html>`_.
83 | If set to a retry configuration dictionary as described
84 | `here <https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html>`_
85 | it will be used directly in the botocore client configuration.
86 |
87 | Overriding settings
88 | ~~~~~~~~~~~~~~~~~~~
89 |
90 | Default settings may be overridden by providing a Python module which exports the desired new values.
91 | Set the ``PYNAMODB_CONFIG`` environment variable to an absolute path to this module or write it to
92 | ``/etc/pynamodb/global_default_settings.py`` to have it automatically discovered.
93 |
94 |
--------------------------------------------------------------------------------
/docs/signals.rst:
--------------------------------------------------------------------------------
1 | Signals
2 | =======
3 | Starting with PynamoDB 3.1.0, there is support for signalling. This support is provided by the `blinker`_ library, which is not installed by default. In order to ensure blinker is installed, specify your PynamoDB requirement like so:
4 |
5 | ::
6 |
7 | pynamodb[signals]==<version>
8 |
9 | Signals allow certain senders to notify subscribers that something happened. PynamoDB currently sends signals before and after every DynamoDB API call.
10 |
11 | .. note::
12 |
13 | It is recommended to avoid business logic in signal callbacks, as this can have performance implications. To reinforce this, only the operation name and table name are available in the signal callback.
14 |
15 |
16 | Subscribing to Signals
17 | ----------------------
18 |
19 | PynamoDB fires two signal calls, `pre_dynamodb_send` before the network call and `post_dynamodb_send` after the network call to DynamoDB.
20 |
21 | The callback must take the following arguments:
22 |
23 | ================ ===========
24 | Arguments Description
25 | ================ ===========
26 | *sender* The object that fired that method.
27 | *operation_name* The string name of the DynamoDB action
28 | *table_name* The name of the table the operation is called upon.
29 | *req_uuid* A unique identifier so subscribers can correlate the before and after events.
30 | ================ ===========
31 |
32 | To subscribe to a signal, the user needs to import the signal object and connect your callback, like so.
33 |
34 | .. code:: python
35 |
36 | from pynamodb.signals import pre_dynamodb_send, post_dynamodb_send
37 |
38 | def record_pre_dynamodb_send(sender, operation_name, table_name, req_uuid):
39 | pre_recorded.append((operation_name, table_name, req_uuid))
40 |
41 | def record_post_dynamodb_send(sender, operation_name, table_name, req_uuid):
42 | post_recorded.append((operation_name, table_name, req_uuid))
43 |
44 | pre_dynamodb_send.connect(record_pre_dynamodb_send)
45 | post_dynamodb_send.connect(record_post_dynamodb_send)
46 |
47 | .. _blinker: https://pypi.org/project/blinker/
48 | .. _Dynamo action: https://github.com/pynamodb/PynamoDB/blob/cd705cc4e0e3dd365c7e0773f6bc02fe071a0631/
49 |
--------------------------------------------------------------------------------
/docs/updates.rst:
--------------------------------------------------------------------------------
1 | Update Operations
2 | =================
3 |
4 | The UpdateItem DynamoDB operation allows you to create or modify attributes of an item using an update expression.
5 | See the `official documentation <https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html>`_
6 | for more details.
7 |
8 | Suppose that you have defined a `Thread` Model for the examples below.
9 |
10 | .. code-block:: python
11 |
12 | from pynamodb.models import Model
13 | from pynamodb.attributes import (
14 | ListAttribute, UnicodeAttribute, UnicodeSetAttribute, NumberAttribute
15 | )
16 |
17 |
18 | class Thread(Model):
19 | class Meta:
20 | table_name = 'Thread'
21 |
22 | forum_name = UnicodeAttribute(hash_key=True)
23 | subjects = UnicodeSetAttribute(default=set)
24 | author = UnicodeAttribute(null=True)
25 | views = NumberAttribute(default=0)
26 | notes = ListAttribute(default=list)
27 |
28 |
29 | .. _updates:
30 |
31 | Update Expressions
32 | ^^^^^^^^^^^^^^^^^^
33 |
34 | PynamoDB supports creating update expressions from attributes using a mix of built-in operators and method calls.
35 | Any value provided will be serialized using the serializer defined for that attribute.
36 |
37 | .. csv-table::
38 | :header: DynamoDB Action / Operator, PynamoDB Syntax, Attribute Types, Example
39 |
40 | SET, set( `value` ), Any, :code:`Thread.views.set(10)`
41 | REMOVE, remove(), "Any", :code:`Thread.notes.remove()`
42 | REMOVE, remove(), "Element of List", :code:`Thread.notes[0].remove()`
43 | ADD, add( `number` ), "Number", ":code:`Thread.views.add(1)`"
44 | ADD, add( `set` ), "Set", ":code:`Thread.subjects.add({'A New Subject', 'Another New Subject'})`"
45 | DELETE, delete( `set` ), "Set", :code:`Thread.subjects.delete({'An Old Subject'})`
46 |
47 | The following expressions and functions can only be used in the context of the above actions:
48 |
49 | .. csv-table::
50 | :header: DynamoDB Action / Operator, PynamoDB Syntax, Attribute Types, Example
51 |
52 | `attr_or_value_1` \+ `attr_or_value_2`, `attr_or_value_1` \+ `attr_or_value_2`, "Number", :code:`Thread.views + 5`
53 | `attr_or_value_1` \- `attr_or_value_2`, `attr_or_value_1` \- `attr_or_value_2`, "Number", :code:`5 - Thread.views`
54 | "list_append( `attr` , `value` )", append( `value` ), "List", :code:`Thread.notes.append(['my last note'])`
55 | "list_append( `value` , `attr` )", prepend( `value` ), "List", :code:`Thread.notes.prepend(['my first note'])`
56 | "if_not_exists( `attr`, `value` )", `attr` | `value`, Any, :code:`Thread.forum_name | 'Default Forum Name'`
57 |
58 | ``set`` action
59 | ^^^^^^^^^^^^^^
60 |
61 | The ``set`` action is the simplest action as it overwrites any previously stored value:
62 |
63 | .. code-block:: python
64 |
65 | thread.update(actions=[
66 | Thread.views.set(10),
67 | ])
68 | assert thread.views == 10
69 |
70 | It can reference existing values (from this or other attributes) for arithmetics and concatenation:
71 |
72 | .. code-block:: python
73 |
74 | # Increment views by 5
75 | thread.update(actions=[
76 | Thread.views.set(Thread.views + 5)
77 | ])
78 |
79 | # Append 2 notes
80 | thread.update(actions=[
81 | Thread.notes.set(
82 | Thread.notes.append([
83 | 'my last note',
84 | 'p.s. no, really, this is my last note',
85 | ]),
86 | )
87 | ])
88 |
89 | # Prepend a note
90 | thread.update(actions=[
91 | Thread.notes.set(
92 | Thread.notes.prepend([
93 | 'my first note',
94 | ]),
95 | )
96 | ])
97 |
98 | # Set author to John Doe unless there's already one
99 | thread.update(actions=[
100 | Thread.author.set(Thread.author | 'John Doe')
101 | ])
102 |
103 | ``remove`` action
104 | ^^^^^^^^^^^^^^^^^
105 |
106 | The ``remove`` action unsets attributes:
107 |
108 | .. code-block:: python
109 |
110 | thread.update(actions=[
111 | Thread.views.remove(),
112 | ])
113 | assert thread.views == 0 # default value
114 |
115 | It can also be used to remove elements from a list attribute:
116 |
117 | .. code-block:: python
118 |
119 | # Remove the first note
120 | thread.update(actions=[
121 | Thread.notes[0].remove(),
122 | ])
123 |
124 |
125 | ``add`` action
126 | ^^^^^^^^^^^^^^
127 |
128 | Applying to (binary, number and string) set attributes, the ``add`` action adds elements to the set:
129 |
130 | .. code-block:: python
131 |
132 | # Add the subjects 'A New Subject' and 'Another New Subject'
133 | thread.update(actions=[
134 | Thread.subjects.add({'A New Subject', 'Another New Subject'})
135 | ])
136 |
137 | Applying to number attributes, the ``add`` action increments or decrements the number
138 | and is equivalent to a ``set`` action:
139 |
140 | .. code-block:: python
141 |
142 | # Increment views by 5
143 | thread.update(actions=[
144 | Thread.views.add(5),
145 | ])
146 | # Also increment views by 5
147 | thread.update(actions=[
148 | Thread.views.set(Thread.views + 5),
149 | ])
150 |
151 | ``delete`` action
152 | ^^^^^^^^^^^^^^^^^
153 |
154 | For set attributes, the ``delete`` action is the opposite of the ``add`` action:
155 |
156 | .. code-block:: python
157 |
158 | # Delete the subject 'An Old Subject'
159 | thread.update(actions=[
160 | Thread.subjects.delete({'An Old Subject'})
161 | ])
162 |
--------------------------------------------------------------------------------
/docs/upgrading.rst:
--------------------------------------------------------------------------------
1 | Upgrading
2 | =========
3 |
4 | This file complements the :ref:`release notes `, focusing on helping safe upgrades of the library
5 | in production scenarios.
6 |
7 | PynamoDB 5.x to 6.x
8 | -------------------
9 |
10 | BinaryAttribute is no longer double base64-encoded
11 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
12 |
13 | See :ref:`upgrading_binary` for details.
14 |
15 | PynamoDB 4.x to 5.x
16 | -------------------
17 |
18 | Null checks enforced where they weren't previously
19 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
20 |
21 | Previously null errors (persisting ``None`` into an attribute defined as ``null=False``) were ignored inside **nested** map attributes, e.g.
22 |
23 | .. code-block:: python
24 |
25 | from pynamodb.models import Model
26 | from pynamodb.attributes import ListAttribute, MapAttribute, UnicodeAttribute
27 |
28 | class Employee(MapAttribute):
29 | name = UnicodeAttribute(null=False)
30 |
31 | class Team(Model):
32 | employees = ListAttribute(of=Employee)
33 |
34 |
35 | team = Team()
36 | team.employees = [Employee(name=None)]
37 | team.save() # this will raise now
38 |
39 |
40 | Now these will result in an :py:class:`~pynamodb.exceptions.AttributeNullError` being raised.
41 |
42 | This was an unintentional breaking change introduced in 5.0.3.
43 |
44 | Empty values are now meaningful
45 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
46 |
47 | :py:class:`~pynamodb.attributes.UnicodeAttribute` and :py:class:`~pynamodb.attributes.BinaryAttribute` now support empty values (:pr:`830`)
48 |
49 | In previous versions, assigning an empty value would be akin to assigning ``None``: if the attribute was defined with ``null=True`` then it would be omitted, otherwise an error would be raised.
50 | DynamoDB `added support <https://aws.amazon.com/about-aws/whats-new/2020/05/amazon-dynamodb-now-supports-empty-values-for-non-key-string-and-binary-attributes-in-dynamodb-tables/>`_ for empty values
51 | for String and Binary attributes. This release of PynamoDB starts treating empty values like any other values. If existing code unintentionally assigns empty values to StringAttribute or BinaryAttribute,
52 | this may be a breaking change: for example, the code may rely on the fact that in previous versions empty strings would be "read back" as ``None`` values when reloaded from the database.
53 |
54 | No longer parsing date-time strings leniently
55 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
56 |
57 | :py:class:`~pynamodb.attributes.UTCDateTimeAttribute` now strictly requires the date string format ``'%Y-%m-%dT%H:%M:%S.%f%z'`` to ensure proper ordering.
58 | PynamoDB has always written values with this format but previously would accept reading other formats.
59 | Items written using other formats must be rewritten before upgrading.
60 |
61 | Removed functionality
62 | ~~~~~~~~~~~~~~~~~~~~~
63 |
64 | The following changes are breaking but are less likely to go unnoticed:
65 |
66 | * Python 2 is no longer supported. Python 3.6 or greater is now required.
67 | * Table backup functionality (``Model.dump[s]`` and ``Model.load[s]``) has been removed.
68 | * ``Model.query`` no longer converts unsupported range key conditions into filter conditions.
69 | * Internal attribute type constants are replaced with their "short" DynamoDB version (:pr:`827`)
70 | * Remove ``ListAttribute.remove_indexes`` (added in v4.3.2) and document usage of remove for list elements (:pr:`838`)
71 | * Remove ``pynamodb.connection.util.pythonic`` (:pr:`753`) and (:pr:`865`)
72 | * Remove ``ModelContextManager`` class (:pr:`861`)
73 |
74 | PynamoDB 3.x to 4.x
75 | -------------------
76 |
77 | Requests Removal
78 | ~~~~~~~~~~~~~~~~
79 |
80 | Given that ``botocore`` has moved to using ``urllib3`` directly for making HTTP requests, we'll be doing the same (via ``botocore``). This means the following:
81 |
82 | * The ``session_cls`` option is no longer supported.
83 | * The ``request_timeout_seconds`` parameter is no longer supported. ``connect_timeout_seconds`` and ``read_timeout_seconds`` are available instead.
84 |
85 | + Note that the timeouts for connection and read are now ``15`` and ``30`` seconds respectively. This represents a change from the previous ``60`` second combined ``requests`` timeout.
86 | * *Wrapped* exceptions (i.e ``exc.cause``) that were from ``requests.exceptions`` will now be comparable ones from ``botocore.exceptions`` instead.
87 |
88 | Key attribute types must match table
89 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
90 |
91 | The previous release would call `DescribeTable` to discover table metadata
92 | and would use the key types as defined in the DynamoDB table. This could obscure
93 | type mismatches e.g. where a table's hash key is a number (`N`) in DynamoDB,
94 | but defined in PynamoDB as a `UnicodeAttribute`.
95 |
96 | With this release, we're always using the PynamoDB model's definition
97 | of all attributes including the key attributes.
98 |
99 | Deprecation of old APIs
100 | ~~~~~~~~~~~~~~~~~~~~~~~
101 |
102 | Support for `Legacy Conditional Parameters <https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.html>`_ has been
103 | removed. See a complete list of affected ``Model`` methods below:
104 |
105 | .. list-table::
106 | :widths: 10 90
107 | :header-rows: 1
108 |
109 | * - Method
110 | - Changes
111 | * - ``update_item``
112 | - removed in favor of ``update``
113 | * - ``rate_limited_scan``
114 | - removed in favor of ``scan`` and ``ResultIterator``
115 | * - ``delete``
116 | - ``conditional_operator`` and ``**expected_values`` kwargs removed. Use ``condition`` instead.
117 | * - ``update``
118 | - ``attributes``, ``conditional_operator`` and ``**expected_values`` kwargs removed. Use ``actions`` and ``condition`` instead.
119 | * - ``save``
120 | - ``conditional_operator`` and ``**expected_values`` kwargs removed. Use ``condition`` instead.
121 | * - ``count``
122 | - ``**filters`` kwargs removed. Use ``range_key_condition``/``filter_condition`` instead.
123 | * - ``query``
124 | - ``conditional_operator`` and ``**filters`` kwargs removed. Use ``range_key_condition``/``filter_condition`` instead.
125 | * - ``scan``
126 | -
127 | - ``conditional_operator`` and ``**filters`` kwargs removed. Use ``filter_condition`` instead.
128 | - ``allow_rate_limited_scan_without_consumed_capacity`` was removed
129 |
130 |
131 | When upgrading, pay special attention to use of ``**filters`` and ``**expected_values``, as you'll need to check for arbitrary names that correspond to
132 | attribute names. Also keep an eye out for kwargs like ``user_id__eq=5`` or ``email__null=True``, which are no longer supported. A type check can help you catch cases like these.
133 |
134 | PynamoDB 2.x to 3.x
135 | --------------------
136 |
137 | Changes to UnicodeSetAttribute
138 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
139 |
140 | See :ref:`upgrading_unicodeset` for details.
141 |
--------------------------------------------------------------------------------
/docs/upgrading_unicodeset.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 |
3 | .. _upgrading_unicodeset:
4 |
5 | Upgrading UnicodeSetAttribute
6 | =============================
7 |
8 | .. warning::
9 |
10 | The behavior of 'UnicodeSetAttribute' has changed in backwards-incompatible ways
11 | as of the 1.6.0 and 3.0.1 releases of PynamoDB.
12 |
13 | The following steps can be used to safely update PynamoDB assuming that the data stored
14 | in the item's UnicodeSetAttribute is not JSON. If JSON is being stored, these steps will
15 | not work and a custom migration plan is required. Be aware that values such as numeric
16 | strings (i.e. "123") are valid JSON.
17 |
18 | When upgrading services that use PynamoDB with tables that contain UnicodeSetAttributes
19 | with a version < 1.6.0, first deploy version 1.5.4 to prepare the read path for the new
20 | serialization format.
21 |
22 | Once all services that read from the tables have been deployed, then deploy version 2.2.0
23 | and migrate your data using the provided convenience methods on the Model.
24 | (Note: these methods are only available in version 2.2.0)
25 |
26 | .. code-block:: python
27 |
28 | def get_save_kwargs(item):
29 | # any conditional args needed to ensure data does not get overwritten
30 | # for example if your item has a `version` attribute
31 | return {'version__eq': item.version}
32 |
33 | # Re-serialize all UnicodeSetAttributes in the table by scanning all items.
34 | # See documentation of fix_unicode_set_attributes for rate limiting options
35 | # to avoid exceeding provisioned capacity.
36 | Model.fix_unicode_set_attributes(get_save_kwargs)
37 |
38 | # Verify the migration is complete
39 | print("Migration Complete? " + str(not Model.needs_unicode_set_fix()))
40 |
41 | Once all data has been migrated then upgrade to a version >= 3.0.1.
42 |
--------------------------------------------------------------------------------
/docs/versioning.rst:
--------------------------------------------------------------------------------
1 | Versioning Scheme
2 | =================
3 |
4 | PynamoDB conforms to `PEP 440 <https://peps.python.org/pep-0440/>`__.
5 | Generally, PynamoDB uses `Semantic Versioning <https://semver.org/>`__, where the version number has
6 | the format:
7 |
8 | ``MAJOR`` . ``MINOR`` . ``PATCH``
9 |
10 | * The ``MAJOR`` version number changes when backward *incompatible* changes are introduced.
11 | * The ``MINOR`` version number changes when new features are added, but are backward compatible.
12 | * The ``PATCH`` version number changes when backward compatible bug fixes are added.
13 |
--------------------------------------------------------------------------------
/examples/attributes.py:
--------------------------------------------------------------------------------
1 | """
2 | A PynamoDB example using a custom attribute
3 | """
4 | import pickle
5 | from typing import Any
6 |
7 | from pynamodb.attributes import Attribute
8 | from pynamodb.attributes import UnicodeAttribute
9 | from pynamodb.constants import BINARY
10 | from pynamodb.models import Model
11 |
12 |
13 | class Color(object):
14 | """
15 | This class is used to demonstrate the PickleAttribute below
16 | """
17 | def __init__(self, name: str) -> None:
18 | self.name = name
19 |
20 |
21 | class PickleAttribute(Attribute[object]):
22 | """
23 | This class will serialize/deserialize any picklable Python object.
24 | The value will be stored as a binary attribute in DynamoDB.
25 | """
26 | attr_type = BINARY
27 |
28 | def serialize(self, value: Any) -> bytes:
29 | return pickle.dumps(value)
30 |
31 | def deserialize(self, value: Any) -> Any:
32 | return pickle.loads(value)
33 |
34 |
35 | class CustomAttributeModel(Model):
36 | """
37 | A model with a custom attribute
38 | """
39 | class Meta:
40 | host = 'http://localhost:8000'
41 | table_name = 'custom_attr'
42 | read_capacity_units = 1
43 | write_capacity_units = 1
44 |
45 | id = UnicodeAttribute(hash_key=True)
46 | obj = PickleAttribute()
47 |
48 |
49 | # Create the example table
50 | if not CustomAttributeModel.exists():
51 | CustomAttributeModel.create_table(wait=True)
52 |
53 |
54 | instance = CustomAttributeModel()
55 | instance.obj = Color('red')
56 | instance.id = 'red'
57 | instance.save()
58 |
59 | instance = CustomAttributeModel.get('red')
60 | print(instance.obj)
61 |
--------------------------------------------------------------------------------
/examples/connection.py:
--------------------------------------------------------------------------------
1 | """
2 | Examples using a connection
3 | """
4 | from pynamodb.connection import Connection
5 |
6 | # Get a connection
7 | conn = Connection(host='http://localhost:8000')
8 | print(conn)
9 |
10 | # List tables
11 | print(conn.list_tables())
12 |
13 | # Describe a table
14 | print(conn.describe_table('Thread'))
15 |
16 | # Get an item
17 | print(conn.get_item('Thread', 'hash-key', 'range-key'))
18 |
19 | # Put an item
20 | conn.put_item('Thread', 'hash-key', 'range-key', attributes={'forum_name': 'value', 'subject': 'value'})
21 |
22 | # Delete an item
23 | conn.delete_item('Thread', 'hash-key', 'range-key')
24 |
--------------------------------------------------------------------------------
/examples/indexes.py:
--------------------------------------------------------------------------------
1 | """
2 | Examples using DynamoDB indexes
3 | """
4 | import datetime
5 | from pynamodb.models import Model
6 | from pynamodb.indexes import GlobalSecondaryIndex, AllProjection, LocalSecondaryIndex
7 | from pynamodb.attributes import UnicodeAttribute, NumberAttribute, UTCDateTimeAttribute
8 |
9 |
10 | class ViewIndex(GlobalSecondaryIndex):
11 | """
12 | This class represents a global secondary index
13 | """
14 | class Meta:
15 | # You can override the index name by setting it below
16 | index_name = "viewIdx"
17 | read_capacity_units = 1
18 | write_capacity_units = 1
19 | # All attributes are projected
20 | projection = AllProjection()
21 | # This attribute is the hash key for the index
22 | # Note that this attribute must also exist
23 | # in the model
24 | view = NumberAttribute(default=0, hash_key=True)
25 |
26 |
27 | class TestModel(Model):
28 | """
29 | A test model that uses a global secondary index
30 | """
31 | class Meta:
32 | table_name = "TestModel"
33 | # Set host for using DynamoDB Local
34 | host = "http://localhost:8000"
35 | forum = UnicodeAttribute(hash_key=True)
36 | thread = UnicodeAttribute(range_key=True)
37 | view_index = ViewIndex()
38 | view = NumberAttribute(default=0)
39 |
40 | if not TestModel.exists():
41 | TestModel.create_table(read_capacity_units=1, write_capacity_units=1, wait=True)
42 |
43 | # Create an item
44 | test_item = TestModel('forum-example', 'thread-example')
45 | test_item.view = 1
46 | test_item.save()
47 |
48 | # Indexes can be queried easily using the index's hash key
49 | for test_item in TestModel.view_index.query(1):
50 | print("Item queried from index: {0}".format(test_item))
51 |
52 |
53 | class GamePlayerOpponentIndex(LocalSecondaryIndex):
54 | class Meta:
55 | read_capacity_units = 1
56 | write_capacity_units = 1
57 | table_name = "GamePlayerOpponentIndex"
58 | host = "http://localhost:8000"
59 | projection = AllProjection()
60 | player_id = UnicodeAttribute(hash_key=True)
61 | winner_id = UnicodeAttribute(range_key=True)
62 |
63 |
64 | class GameOpponentTimeIndex(GlobalSecondaryIndex):
65 | class Meta:
66 | read_capacity_units = 1
67 | write_capacity_units = 1
68 | table_name = "GameOpponentTimeIndex"
69 | host = "http://localhost:8000"
70 | projection = AllProjection()
71 | winner_id = UnicodeAttribute(hash_key=True)
72 | created_time = UnicodeAttribute(range_key=True)
73 |
74 |
75 | class GameModel(Model):
76 | class Meta:
77 | read_capacity_units = 1
78 | write_capacity_units = 1
79 | table_name = "GameModel"
80 | host = "http://localhost:8000"
81 | player_id = UnicodeAttribute(hash_key=True)
82 | created_time = UTCDateTimeAttribute(range_key=True)
83 | winner_id = UnicodeAttribute()
84 | loser_id = UnicodeAttribute(null=True)
85 |
86 | player_opponent_index = GamePlayerOpponentIndex()
87 | opponent_time_index = GameOpponentTimeIndex()
88 |
89 | if not GameModel.exists():
90 | GameModel.create_table(wait=True)
91 |
92 | # Create an item
93 | item = GameModel('1234', datetime.datetime.utcnow())
94 | item.winner_id = '5678'
95 | item.save()
96 |
97 | # Indexes can be queried easily using the index's hash key
98 | for item in GameModel.player_opponent_index.query('1234'):
99 | print("Item queried from index: {0}".format(item))
100 |
101 | # Count on an index
102 | print(GameModel.player_opponent_index.count('1234'))
103 |
--------------------------------------------------------------------------------
/examples/model.py:
--------------------------------------------------------------------------------
1 | """
2 | An example using Amazon's Thread example for motivation
3 |
4 | http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/SampleTablesAndData.html
5 | """
6 | import logging
7 | from pynamodb.models import Model
8 | from pynamodb.attributes import (
9 | ListAttribute, UnicodeAttribute, NumberAttribute, UnicodeSetAttribute, UTCDateTimeAttribute
10 | )
11 | from datetime import datetime
12 |
13 | logging.basicConfig()
14 | log = logging.getLogger("pynamodb")
15 | log.setLevel(logging.DEBUG)
16 | log.propagate = True
17 |
18 |
19 | class Thread(Model):
20 | class Meta:
21 | read_capacity_units = 1
22 | write_capacity_units = 1
23 | table_name = "Thread"
24 | host = "http://localhost:8000"
25 | forum_name = UnicodeAttribute(hash_key=True)
26 | subject = UnicodeAttribute(range_key=True)
27 | views = NumberAttribute(default=0)
28 | replies = NumberAttribute(default=0)
29 | answered = NumberAttribute(default=0)
30 | tags = UnicodeSetAttribute()
31 | last_post_datetime = UTCDateTimeAttribute(null=True)
32 | notes = ListAttribute(default=list) # type: ignore # todo: add ability for basic list types
33 |
34 |
35 | # Delete the table
36 | # print(Thread.delete_table())
37 |
38 | # Create the table
39 | if not Thread.exists():
40 | Thread.create_table(wait=True)
41 |
42 | # Create a thread
43 | thread_item = Thread(
44 | 'Some Forum',
45 | 'Some Subject',
46 | tags=['foo', 'bar'],
47 | last_post_datetime=datetime.now()
48 | )
49 |
50 | # try:
51 | # Thread.get('does not', 'exist')
52 | # except Thread.DoesNotExist:
53 | # pass
54 |
55 | # Save the thread
56 | thread_item.save()
57 |
58 | # Batch write operation
59 | with Thread.batch_write() as batch:
60 | threads = []
61 | for x in range(100):
62 | thread = Thread('forum-{0}'.format(x), 'subject-{0}'.format(x))
63 | thread.tags = {'tag1', 'tag2'}
64 | thread.last_post_datetime = datetime.now()
65 | threads.append(thread)
66 |
67 | for thread in threads:
68 | batch.save(thread)
69 |
70 | # Get table count
71 | print(Thread.count())
72 |
73 | # Count based on a filter
74 | print(Thread.count('forum-1'))
75 |
76 | # Batch get
77 | item_keys = [('forum-{0}'.format(x), 'subject-{0}'.format(x)) for x in range(100)]
78 | for item in Thread.batch_get(item_keys):
79 | print(item)
80 |
81 | # Scan
82 | for item in Thread.scan():
83 | print(item)
84 |
85 | # Query
86 | for item in Thread.query('forum-1', Thread.subject.startswith('subject')):
87 | print(item)
88 |
89 |
90 | print("-"*80)
91 |
92 |
93 | # A model that uses aliased attribute names
94 | class AliasedModel(Model):
95 | class Meta:
96 | table_name = "AliasedModel"
97 | host = "http://localhost:8000"
98 | forum_name = UnicodeAttribute(hash_key=True, attr_name='fn')
99 | subject = UnicodeAttribute(range_key=True, attr_name='s')
100 | views = NumberAttribute(default=0, attr_name='v')
101 | replies = NumberAttribute(default=0, attr_name='rp')
102 | answered = NumberAttribute(default=0, attr_name='an')
103 | tags = UnicodeSetAttribute(attr_name='t')
104 | last_post_datetime = UTCDateTimeAttribute(attr_name='lp')
105 |
106 | if not AliasedModel.exists():
107 | AliasedModel.create_table(read_capacity_units=1, write_capacity_units=1, wait=True)
108 |
109 | # Create an aliased model
110 | aliased_item = AliasedModel(
111 | 'Some Forum',
112 | 'Some Subject',
113 | tags=['foo', 'bar'],
114 | last_post_datetime=datetime.now()
115 | )
116 |
117 | # Save the aliased model
118 | aliased_item.save()
119 |
120 | # Batch write operation
121 | with AliasedModel.batch_write() as batch:
122 | aliased_items = []
123 | for x in range(100):
124 | aliased_item = AliasedModel('forum-{0}'.format(x), 'subject-{0}'.format(x))
125 | aliased_item.tags = {'tag1', 'tag2'}
126 | aliased_item.last_post_datetime = datetime.now()
127 | aliased_items.append(aliased_item)
128 |
129 | for aliased_item in aliased_items:
130 | batch.save(aliased_item)
131 |
132 | # Batch get
133 | item_keys = [('forum-{0}'.format(x), 'subject-{0}'.format(x)) for x in range(100)]
134 | for aliased_item in AliasedModel.batch_get(item_keys):
135 | print("Batch get item: {0}".format(aliased_item))
136 |
137 | # Scan
138 | for aliased_item in AliasedModel.scan():
139 | print("Scanned item: {0}".format(aliased_item))
140 |
141 | # Query
142 | for aliased_item in AliasedModel.query('forum-1', AliasedModel.subject.startswith('subject')):
143 | print("Query using aliased attribute: {0}".format(aliased_item))
144 |
145 | # Query with filters
146 | for item in Thread.query('forum-1', filter_condition=(Thread.views == 0) | (Thread.replies == 0)):
147 | print("Query result: {0}".format(item))
148 |
149 |
150 | # Scan with filters
151 | for item in Thread.scan(Thread.subject.startswith('subject') & (Thread.views == 0)):
152 | print("Scanned item: {0} {1}".format(item.subject, item.views))
153 |
154 | # Scan with null filter
155 | for item in Thread.scan(Thread.subject.startswith('subject') & Thread.last_post_datetime.does_not_exist()):
156 | print("Scanned item: {0} {1}".format(item.subject, item.views))
157 |
158 | # Conditionally save an item
159 | thread_item = Thread(
160 | 'Some Forum',
161 | 'Some Subject',
162 | tags=['foo', 'bar'],
163 | last_post_datetime=datetime.now()
164 | )
165 |
166 | # DynamoDB will only save the item if forum_name exists
167 | print(thread_item.save(Thread.forum_name.exists()))
168 |
169 | # DynamoDB will update the item, by adding 1 to the views attribute,
170 | # if the forum_name attribute equals 'Some Forum' or the subject attribute exists
171 | print(thread_item.update(
172 | actions=[
173 | Thread.views.add(1)
174 | ],
175 | condition=(
176 | (Thread.forum_name == 'Some Forum') | Thread.subject.exists()
177 | )
178 | ))
179 |
180 | # DynamoDB will atomically update the attributes `replies` (increase value by 1),
181 | # and `last_post_datetime` (set value to the current datetime)
182 | print(thread_item.update(actions=[
183 | Thread.replies.add(1),
184 | Thread.last_post_datetime.set(datetime.now()),
185 | ]))
186 |
187 | # Remove an item's attribute
188 | print(thread_item.update(actions=[
189 | Thread.tags.remove()
190 | ]))
191 |
192 | # Update list attribute
193 | print(thread_item.update(actions=[
194 | Thread.notes.set(
195 | Thread.notes.append(["new note"])
196 | )
197 | ]))
198 |
# DynamoDB will delete the item, only if the views attribute is equal to one
try:
    print(thread_item.delete(Thread.views == 1))
except Exception:
    # Best-effort: the conditional delete fails when views != 1.
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # are no longer swallowed.
    pass
204 |
205 | # Backup/restore example
206 | # Print the size of the table (note that this is async/eventually consistent)
207 | print("Table size: {}".format(Thread.describe_table().get('ItemCount')))
208 |
209 | # Optionally Delete all table items
210 | # Commented out for safety
211 | # for item in Thread.scan():
212 | # item.delete()
213 | print("Table size: {}".format(Thread.describe_table().get('ItemCount')))
214 |
--------------------------------------------------------------------------------
/examples/office_model.py:
--------------------------------------------------------------------------------
1 | from pynamodb.attributes import ListAttribute, MapAttribute, NumberAttribute, UnicodeAttribute
2 | from pynamodb.models import Model
3 |
4 |
5 | class Location(MapAttribute):
6 |
7 | lat = NumberAttribute(attr_name='latitude')
8 | lng = NumberAttribute(attr_name='longitude')
9 | name = UnicodeAttribute()
10 |
11 |
12 | class Person(MapAttribute):
13 |
14 | fname = UnicodeAttribute(attr_name='firstName')
15 | lname = UnicodeAttribute()
16 | age = NumberAttribute()
17 |
18 |
19 | class OfficeEmployeeMap(MapAttribute):
20 |
21 | office_employee_id = NumberAttribute()
22 | person = Person()
23 | office_location = Location()
24 |
25 |
26 | class Office(Model):
27 | class Meta:
28 | table_name = 'OfficeModel'
29 | host = "http://localhost:8000"
30 |
31 | office_id = NumberAttribute(hash_key=True)
32 | address = Location()
33 | employees = ListAttribute(of=OfficeEmployeeMap)
34 |
--------------------------------------------------------------------------------
/examples/optimistic_locking.py:
--------------------------------------------------------------------------------
1 | from contextlib import contextmanager
2 | from uuid import uuid4
3 | from botocore.client import ClientError
4 |
5 | from pynamodb.connection import Connection
6 | from pynamodb.attributes import ListAttribute, MapAttribute, UnicodeAttribute, VersionAttribute
7 | from pynamodb.exceptions import PutError, UpdateError, TransactWriteError, DeleteError, DoesNotExist
8 | from pynamodb.models import Model
9 | from pynamodb.transactions import TransactWrite
10 |
11 |
class OfficeEmployeeMap(MapAttribute):
    # Embedded map attribute describing one employee of an Office.
    office_employee_id = UnicodeAttribute()
    person = UnicodeAttribute()

    def __eq__(self, other):
        # Equality is by `person` only — enough for the list.remove() calls
        # in this example. NOTE(review): defining __eq__ without __hash__
        # makes instances unhashable; fine here since they only live in lists.
        return isinstance(other, OfficeEmployeeMap) and self.person == other.person
18 |
19 |
20 | class Office(Model):
21 | class Meta:
22 | read_capacity_units = 1
23 | write_capacity_units = 1
24 | table_name = 'Office'
25 | host = "http://localhost:8000"
26 | office_id = UnicodeAttribute(hash_key=True)
27 | employees = ListAttribute(of=OfficeEmployeeMap)
28 | name = UnicodeAttribute()
29 | version = VersionAttribute()
30 |
31 |
32 | if not Office.exists():
33 | Office.create_table(wait=True)
34 |
35 |
@contextmanager
def assert_condition_check_fails():
    """Assert the wrapped block fails a DynamoDB conditional check.

    Accepts either a plain write error (PutError/UpdateError/DeleteError caused
    by ConditionalCheckFailedException) or a TransactWriteError whose
    cancellation message mentions ConditionalCheckFailed. If the block
    completes without raising, an AssertionError is raised instead.
    """
    try:
        yield
    except (PutError, UpdateError, DeleteError) as e:
        assert isinstance(e.cause, ClientError)
        assert e.cause_response_code == "ConditionalCheckFailedException"
    except TransactWriteError as e:
        assert isinstance(e.cause, ClientError)
        assert e.cause_response_code == "TransactionCanceledException"
        assert e.cause_response_message is not None
        assert "ConditionalCheckFailed" in e.cause_response_message
    else:
        # No exception at all means the optimistic-locking guard did not fire.
        raise AssertionError("The version attribute conditional check should have failed.")
50 |
51 |
52 | justin = OfficeEmployeeMap(office_employee_id=str(uuid4()), person='justin')
53 | garrett = OfficeEmployeeMap(office_employee_id=str(uuid4()), person='garrett')
54 | office = Office(office_id=str(uuid4()), name="office 3", employees=[justin, garrett])
55 | office.save()
56 | assert office.version == 1
57 |
58 | # Get a second local copy of Office
59 | office_out_of_date = Office.get(office.office_id)
60 | # Add another employee and save the changes.
61 | office.employees.append(OfficeEmployeeMap(office_employee_id=str(uuid4()), person='lita'))
62 | office.save()
63 | # After a successful save or update operation the version is set or incremented locally so there's no need to refresh
64 | # between operations using the same local copy.
65 | assert office.version == 2
66 | assert office_out_of_date.version == 1
67 |
68 | # Condition check fails for update.
69 | with assert_condition_check_fails():
70 | office_out_of_date.update(actions=[Office.name.set('new office name')])
71 |
72 | # Condition check fails for save.
73 | office_out_of_date.employees.remove(garrett)
74 | with assert_condition_check_fails():
75 | office_out_of_date.save()
76 |
77 | # After refreshing the local copy the operation will succeed.
78 | office_out_of_date.refresh()
79 | office_out_of_date.employees.remove(garrett)
80 | office_out_of_date.save()
81 | assert office_out_of_date.version == 3
82 |
83 | # Condition check fails for delete.
84 | with assert_condition_check_fails():
85 | office.delete()
86 |
87 | # Example failed transactions.
88 | connection = Connection(host='http://localhost:8000')
89 |
90 | with assert_condition_check_fails(), TransactWrite(connection=connection) as transaction:
91 | transaction.save(Office(office.office_id, name='newer name', employees=[]))
92 |
93 | with assert_condition_check_fails(), TransactWrite(connection=connection) as transaction:
94 | transaction.update(
95 | Office(office.office_id, name='newer name', employees=[]),
96 | actions=[
97 | Office.name.set('Newer Office Name'),
98 | ]
99 | )
100 |
101 | with assert_condition_check_fails(), TransactWrite(connection=connection) as transaction:
102 | transaction.delete(Office(office.office_id, name='newer name', employees=[]))
103 |
104 | # Example successful transaction.
105 | office2 = Office(office_id=str(uuid4()), name="second office", employees=[justin])
106 | office2.save()
107 | assert office2.version == 1
108 | office3 = Office(office_id=str(uuid4()), name="third office", employees=[garrett])
109 | office3.save()
110 | assert office3.version == 1
111 |
112 | with TransactWrite(connection=connection) as transaction:
113 | transaction.condition_check(Office, office.office_id, condition=(Office.name.exists()))
114 | transaction.delete(office2)
115 | transaction.save(Office(office_id=str(uuid4()), name="new office", employees=[justin, garrett]))
116 | transaction.update(
117 | office3,
118 | actions=[
119 | Office.name.set('birdistheword'),
120 | ]
121 | )
122 |
123 | try:
124 | office2.refresh()
125 | except DoesNotExist:
126 | pass
127 | else:
128 | raise AssertionError(
129 | "This item should have been deleted, but no DoesNotExist "
130 | "exception was raised when attempting to refresh a local copy."
131 | )
132 |
133 | assert office.version == 2
134 | # The version attribute of items which are saved or updated in a transaction are updated automatically to match the
135 | # persisted value.
136 | assert office3.version == 2
137 | office.refresh()
138 | assert office.version == 3
139 |
--------------------------------------------------------------------------------
/examples/table_connection.py:
--------------------------------------------------------------------------------
1 | """
2 | Example use of the TableConnection API
3 | """
4 | from pynamodb.connection import TableConnection
5 |
6 | # Get a table connection
7 | table = TableConnection('Thread', host='http://localhost:8000')
8 |
9 | # If the table doesn't already exist, the rest of this example will not work.
10 |
11 | # Describe the table
12 | print(table.describe_table())
13 |
14 | # Get an item
15 | print(table.get_item('hash-key', 'range-key'))
16 |
17 | # Put an item
18 | table.put_item('hash-key', 'range-key', attributes={'forum_name': 'value'})
19 |
20 | # Delete an item
21 | table.delete_item('hash-key', 'range-key')
22 |
--------------------------------------------------------------------------------
/examples/url_shortener/README.rst:
--------------------------------------------------------------------------------
1 | ==========================
2 | A very short URL shortener
3 | ==========================
4 |
This is a very small implementation of a `URL shortener <https://en.wikipedia.org/wiki/URL_shortening>`_ powered by Flask and PynamoDB.
6 |
Try it for yourself in three easy steps, assuming you have `access to AWS <https://aws.amazon.com/>`_.
8 |
9 | Install Requirements
10 | ====================
11 | ::
12 |
13 | $ pip install flask pynamodb
14 |
15 | Run the server
16 | ==============
17 | ::
18 |
19 | $ python shortener.py
20 |
21 | Shorten URLs
22 | ============
23 |
Now you can navigate to `http://localhost:5000 <http://localhost:5000>`_ to start shortening URLs.
25 |
--------------------------------------------------------------------------------
/examples/url_shortener/shortener.py:
--------------------------------------------------------------------------------
1 | """
2 | A fully working url shortener example
3 | """
4 | import flask
5 | from hashlib import md5
6 | from base64 import b64encode
7 | from pynamodb.models import Model
8 | from pynamodb.indexes import GlobalSecondaryIndex, AllProjection
9 | from pynamodb.attributes import UnicodeAttribute
10 |
11 |
class Url(Model):
    """A shortened URL: the full URL is the hash key; ``code`` is the short id."""
    class Meta:
        table_name = "shortened-urls"
        host = "http://localhost:8000"  # DynamoDB Local endpoint

    # Secondary index enabling reverse lookup: short code -> full URL.
    class CodeIndex(GlobalSecondaryIndex):
        class Meta:
            read_capacity_units = 1
            write_capacity_units = 1
            projection = AllProjection()
        code = UnicodeAttribute(hash_key=True)

    url = UnicodeAttribute(hash_key=True)
    code = UnicodeAttribute()
    code_index = CodeIndex()

    def save(self, **kwargs):
        """
        Generates the shortened code before saving.

        The code is the base64 of the last 4 hex digits of the URL's MD5,
        with '=' padding dropped and '/' replaced by '_' to stay URL-safe.
        """
        self.code = b64encode(
            md5(self.url.encode('utf-8')).hexdigest()[-4:].encode('utf-8')
        ).decode('utf-8').replace('=', '').replace('/', '_')
        super(Url, self).save(**kwargs)
36 |
37 | app = flask.Flask(__name__)
38 | app.config.update(DEBUG=True)
39 |
40 |
@app.route('/')
def index():
    """Serve the landing page (templates/index.html) with the shorten form."""
    return flask.render_template("index.html")
44 |
45 |
# NOTE(review): the original decorator read `@app.route('/shorten/')` with a
# bare `url` parameter, which Flask would never populate. The `<path:url>`
# converter (almost certainly stripped during text extraction, since it looks
# like an HTML tag) is required for `url` to be captured from the path.
@app.route('/shorten/<path:url>')
def shorten(url):
    """Shorten ``url``, persist it, and return the generated short code."""
    model = Url(url)
    model.save()
    return flask.Response(model.code)
51 |
52 |
# NOTE(review): the `<code>` converter was likely stripped during extraction;
# without it this route collides with index() at '/'.
@app.route('/<code>')
def resolve(code):
    """Redirect to the URL registered under ``code``; abort with 404 if unknown.

    The original wrapped the lookup in try/except/finally where the ``finally``
    block returned ``model.url`` — that swallowed the 404 abort (a return in
    ``finally`` suppresses in-flight exceptions) and raised NameError when no
    match existed. The AttributeError fallback to ``__next__`` was also
    obsolete: the ``next()`` builtin works on any iterator in Python 3.
    """
    model = next(iter(Url.code_index.query(code)), None)
    if model is None:
        flask.abort(404)
    return flask.redirect(model.url)
64 |
if __name__ == "__main__":
    # Create the backing table on first run, then start the Flask dev server.
    if not Url.exists():
        print("Creating table...")
        Url.create_table(wait=True, read_capacity_units=1, write_capacity_units=1)
    app.run()
70 |
--------------------------------------------------------------------------------
/examples/url_shortener/templates/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 | A DynamoDB Powered URL Shortener
10 |
11 |
12 |
13 |
14 |
15 |
18 |
19 |
20 |
A URL shortener, powered by Python & DynamoDB
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | strict_optional = True
3 | check_untyped_defs = True
4 | warn_no_return = True
5 | ignore_missing_imports = True
6 | warn_unused_ignores = True
7 | warn_unused_configs = True
8 | warn_redundant_casts = True
9 | warn_incomplete_stub = True
10 | follow_imports = normal
11 | show_error_codes = True
12 |
13 | # Ignore errors in the docs/conf.py file
14 | [mypy-conf]
15 | ignore_errors = True
16 |
17 | # TODO: burn these down
18 | [mypy-tests.*]
19 | ignore_errors = True
20 |
21 | [mypy-benchmark]
22 | ignore_errors = True
23 |
--------------------------------------------------------------------------------
/pynamodb/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | PynamoDB Library
3 | ^^^^^^^^^^^^^^^^
4 |
5 | A simple abstraction over DynamoDB
6 |
7 | """
8 | __author__ = 'Jharrod LaFon'
9 | __license__ = 'MIT'
10 | __version__ = '6.1.0'
11 |
--------------------------------------------------------------------------------
/pynamodb/_schema.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from typing import Dict
3 | from typing import List
4 |
5 | if sys.version_info >= (3, 8):
6 | from typing import TypedDict
7 | else:
8 | from typing_extensions import TypedDict
9 |
10 | if sys.version_info >= (3, 11):
11 | from typing import NotRequired
12 | else:
13 | from typing_extensions import NotRequired
14 |
15 |
16 | class SchemaAttrDefinition(TypedDict):
17 | AttributeName: str
18 | AttributeType: str
19 |
20 |
21 | class KeySchema(TypedDict):
22 | AttributeName: str
23 | KeyType: str
24 |
25 |
26 | class Projection(TypedDict):
27 | ProjectionType: str
28 | NonKeyAttributes: NotRequired[List[str]]
29 |
30 |
31 | class IndexSchema(TypedDict):
32 | index_name: str
33 | key_schema: List[Dict[str, str]]
34 | projection: Dict[str, str]
35 | attribute_definitions: List[SchemaAttrDefinition]
36 |
37 |
38 | class ProvisionedThroughput(TypedDict, total=False):
39 | ReadCapacityUnits: int
40 | WriteCapacityUnits: int
41 |
42 |
43 | class GlobalSecondaryIndexSchema(IndexSchema):
44 | provisioned_throughput: ProvisionedThroughput
45 |
46 |
47 | class ModelSchema(TypedDict):
48 | attribute_definitions: List[SchemaAttrDefinition]
49 | key_schema: List[KeySchema]
50 | global_secondary_indexes: List[GlobalSecondaryIndexSchema]
51 | local_secondary_indexes: List[IndexSchema]
52 |
--------------------------------------------------------------------------------
/pynamodb/_util.py:
--------------------------------------------------------------------------------
1 | import json
2 | from base64 import b64decode
3 | from base64 import b64encode
4 | from typing import Any
5 | from typing import Dict
6 |
7 | from pynamodb.constants import BINARY
8 | from pynamodb.constants import BINARY_SET
9 | from pynamodb.constants import BOOLEAN
10 | from pynamodb.constants import LIST
11 | from pynamodb.constants import MAP
12 | from pynamodb.constants import NULL
13 | from pynamodb.constants import NUMBER
14 | from pynamodb.constants import NUMBER_SET
15 | from pynamodb.constants import STRING
16 | from pynamodb.constants import STRING_SET
17 |
18 |
def attr_value_to_simple_dict(attribute_value: Dict[str, Any], force: bool) -> Any:
    """Convert a type-tagged DynamoDB attribute value into a plain Python value.

    Containers (list/map) are converted recursively. Binary and set types have
    no lossless plain representation, so they are only converted when ``force``
    is true; otherwise a ValueError is raised.
    """
    attr_type, inner = next(iter(attribute_value.items()))
    if attr_type == LIST:
        return [attr_value_to_simple_dict(element, force) for element in inner]
    if attr_type == MAP:
        return {key: attr_value_to_simple_dict(element, force) for key, element in inner.items()}
    if attr_type == NULL:
        return None
    if attr_type in (BOOLEAN, STRING):
        # Booleans and strings pass through unchanged.
        return inner
    if attr_type == NUMBER:
        # DynamoDB numbers arrive as strings; parse via JSON to get int/float.
        return json.loads(inner)
    if attr_type == BINARY:
        if not force:
            raise ValueError("Binary attributes are not supported")
        return b64encode(inner).decode()
    if attr_type == BINARY_SET:
        if not force:
            raise ValueError("Binary set attributes are not supported")
        return [b64encode(element).decode() for element in inner]
    if attr_type == STRING_SET:
        if not force:
            raise ValueError("String set attributes are not supported")
        return inner
    if attr_type == NUMBER_SET:
        if not force:
            raise ValueError("Number set attributes are not supported")
        return [json.loads(element) for element in inner]
    raise ValueError("Unknown attribute type: {}".format(attr_type))
50 |
51 |
def simple_dict_to_attr_value(value: Any) -> Dict[str, Any]:
    """Convert a plain Python value into a type-tagged DynamoDB attribute value.

    Supports None, bool, int/float, str, list, and dict; any other type raises
    ValueError. Lists and dicts are converted recursively.
    """
    if value is None:
        return {NULL: True}
    # Identity checks (not isinstance) so that ints 0/1 fall through to NUMBER.
    if value is False or value is True:
        return {BOOLEAN: value}
    if isinstance(value, (int, float)):
        return {NUMBER: json.dumps(value)}
    if isinstance(value, str):
        return {STRING: value}
    if isinstance(value, list):
        return {LIST: [simple_dict_to_attr_value(element) for element in value]}
    if isinstance(value, dict):
        return {MAP: {key: simple_dict_to_attr_value(element) for key, element in value.items()}}
    raise ValueError("Unknown value type: {}".format(type(value).__name__))
66 |
67 |
68 | def _b64encode(b: bytes) -> str:
69 | return b64encode(b).decode()
70 |
71 |
def bin_encode_attr(attr: Dict[str, Any]) -> None:
    """Base64-encode, in place, every binary payload nested inside ``attr``.

    Recurses through maps and lists; at most one of the checked type tags is
    acted on per attribute value.
    """
    if BINARY in attr:
        attr[BINARY] = _b64encode(attr[BINARY])
    elif BINARY_SET in attr:
        attr[BINARY_SET] = [_b64encode(item) for item in attr[BINARY_SET]]
    elif MAP in attr:
        for nested in attr[MAP].values():
            bin_encode_attr(nested)
    elif LIST in attr:
        for nested in attr[LIST]:
            bin_encode_attr(nested)
83 |
84 |
def bin_decode_attr(attr: Dict[str, Any]) -> None:
    """Base64-decode, in place, every binary payload nested inside ``attr``.

    Inverse of bin_encode_attr; recurses through maps and lists.
    """
    if BINARY in attr:
        attr[BINARY] = b64decode(attr[BINARY])
    elif BINARY_SET in attr:
        attr[BINARY_SET] = [b64decode(item) for item in attr[BINARY_SET]]
    elif MAP in attr:
        for nested in attr[MAP].values():
            bin_decode_attr(nested)
    elif LIST in attr:
        for nested in attr[LIST]:
            bin_decode_attr(nested)
96 |
--------------------------------------------------------------------------------
/pynamodb/connection/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | PynamoDB lowest level connection
3 | """
4 |
5 | from pynamodb.connection.base import Connection
6 | from pynamodb.connection.table import TableConnection
7 |
8 |
9 | __all__ = [
10 | "Connection",
11 | "TableConnection",
12 | ]
13 |
--------------------------------------------------------------------------------
/pynamodb/connection/_botocore_private.py:
--------------------------------------------------------------------------------
1 | """
2 | Type-annotates the private botocore APIs that we're currently relying on.
3 | """
4 | from typing import Dict
5 |
6 | import botocore.client
7 | import botocore.credentials
8 | import botocore.endpoint
9 | import botocore.hooks
10 | import botocore.model
11 | import botocore.signers
12 |
13 |
14 | class BotocoreEndpointPrivate(botocore.endpoint.Endpoint):
15 | _event_emitter: botocore.hooks.HierarchicalEmitter
16 |
17 |
18 | class BotocoreRequestSignerPrivate(botocore.signers.RequestSigner):
19 | _credentials: botocore.credentials.Credentials
20 |
21 |
22 | class BotocoreBaseClientPrivate(botocore.client.BaseClient):
23 | _endpoint: BotocoreEndpointPrivate
24 | _request_signer: BotocoreRequestSignerPrivate
25 |
26 | def _make_api_call(
27 | self,
28 | operation_name: str,
29 | operation_kwargs: Dict,
30 | ) -> Dict:
31 | raise NotImplementedError
32 |
--------------------------------------------------------------------------------
/pynamodb/constants.py:
--------------------------------------------------------------------------------
1 | """
2 | Pynamodb constants
3 | """
4 | import sys
5 | if sys.version_info >= (3, 8):
6 | from typing import Final
7 | else:
8 | from typing_extensions import Final
9 |
10 | # Operations
11 | TRANSACT_WRITE_ITEMS = 'TransactWriteItems'
12 | TRANSACT_GET_ITEMS = 'TransactGetItems'
13 | BATCH_WRITE_ITEM = 'BatchWriteItem'
14 | DESCRIBE_TABLE = 'DescribeTable'
15 | BATCH_GET_ITEM = 'BatchGetItem'
16 | CREATE_TABLE = 'CreateTable'
17 | UPDATE_TABLE = 'UpdateTable'
18 | DELETE_TABLE = 'DeleteTable'
19 | LIST_TABLES = 'ListTables'
20 | UPDATE_ITEM = 'UpdateItem'
21 | DELETE_ITEM = 'DeleteItem'
22 | GET_ITEM = 'GetItem'
23 | PUT_ITEM = 'PutItem'
24 | QUERY = 'Query'
25 | SCAN = 'Scan'
26 |
27 | # Request Parameters
28 | RETURN_VALUES_ON_CONDITION_FAILURE = 'ReturnValuesOnConditionCheckFailure'
29 | GLOBAL_SECONDARY_INDEX_UPDATES = 'GlobalSecondaryIndexUpdates'
30 | RETURN_ITEM_COLL_METRICS = 'ReturnItemCollectionMetrics'
31 | EXCLUSIVE_START_TABLE_NAME = 'ExclusiveStartTableName'
32 | RETURN_CONSUMED_CAPACITY = 'ReturnConsumedCapacity'
33 | CLIENT_REQUEST_TOKEN = 'ClientRequestToken'
34 | COMPARISON_OPERATOR = 'ComparisonOperator'
35 | SCAN_INDEX_FORWARD = 'ScanIndexForward'
36 | ATTR_DEFINITIONS = 'AttributeDefinitions'
37 | TABLE_DESCRIPTION = 'TableDescription'
38 | UNPROCESSED_KEYS = 'UnprocessedKeys'
39 | UNPROCESSED_ITEMS = 'UnprocessedItems'
40 | CONSISTENT_READ = 'ConsistentRead'
41 | DELETE_REQUEST = 'DeleteRequest'
42 | TRANSACT_ITEMS = 'TransactItems'
43 | RETURN_VALUES = 'ReturnValues'
44 | REQUEST_ITEMS = 'RequestItems'
45 | ATTRS_TO_GET = 'AttributesToGet'
# Keys shared across DynamoDB request/response shapes
TABLE_STATUS = 'TableStatus'
TABLE_NAME = 'TableName'
KEY_SCHEMA = 'KeySchema'
ATTR_NAME: Final = 'AttributeName'
ATTR_TYPE: Final = 'AttributeType'
ITEM_COUNT = 'ItemCount'
CAMEL_COUNT = 'Count'
PUT_REQUEST = 'PutRequest'
INDEX_NAME = 'IndexName'
ATTRIBUTES = 'Attributes'
TABLE_KEY = 'Table'
RESPONSES = 'Responses'
# NOTE(review): 'RangeKey' is not a DynamoDB wire key like its neighbors — confirm internal use
RANGE_KEY = 'RangeKey'
KEY_TYPE: Final = 'KeyType'
UPDATE = 'Update'
SELECT = 'Select'
ACTIVE = 'ACTIVE'
LIMIT = 'Limit'
ITEMS = 'Items'
ITEM = 'Item'
TAGS = 'Tags'
KEYS = 'Keys'
UTC = 'UTC'
KEY = 'Key'
GET = 'Get'

# transaction operators
# (values intentionally overlap with the generic keys above, e.g. TRANSACT_GET == GET)
TRANSACT_CONDITION_CHECK = 'ConditionCheck'
TRANSACT_DELETE = 'Delete'
TRANSACT_GET = 'Get'
TRANSACT_PUT = 'Put'
TRANSACT_UPDATE = 'Update'

ACTION = 'Action'

# Response Parameters
SCANNED_COUNT = 'ScannedCount'

# Expression Parameters
CONDITION_EXPRESSION = 'ConditionExpression'
EXPRESSION_ATTRIBUTE_NAMES = 'ExpressionAttributeNames'
EXPRESSION_ATTRIBUTE_VALUES = 'ExpressionAttributeValues'
FILTER_EXPRESSION = 'FilterExpression'
KEY_CONDITION_EXPRESSION = 'KeyConditionExpression'
PROJECTION_EXPRESSION = 'ProjectionExpression'
UPDATE_EXPRESSION = 'UpdateExpression'

# Billing Modes
PAY_PER_REQUEST_BILLING_MODE = 'PAY_PER_REQUEST'
PROVISIONED_BILLING_MODE = 'PROVISIONED'
AVAILABLE_BILLING_MODES = [PROVISIONED_BILLING_MODE, PAY_PER_REQUEST_BILLING_MODE]

# Defaults
DEFAULT_ENCODING = 'utf-8'
# NOTE(review): hard-codes a '+0000' suffix rather than using %z — datetimes are
# presumably serialized as UTC; confirm before changing.
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f+0000'
SERVICE_NAME = 'dynamodb'
HTTP_OK = 200
HTTP_BAD_REQUEST = 400
DEFAULT_BILLING_MODE = PROVISIONED_BILLING_MODE

# Create Table arguments
PROVISIONED_THROUGHPUT = 'ProvisionedThroughput'
READ_CAPACITY_UNITS: Final = 'ReadCapacityUnits'
WRITE_CAPACITY_UNITS: Final = 'WriteCapacityUnits'
BILLING_MODE = 'BillingMode'

# Attribute Types
BINARY = 'B'
BINARY_SET = 'BS'
BOOLEAN = 'BOOL'
LIST = 'L'
MAP = 'M'
NULL = 'NULL'
NUMBER = 'N'
NUMBER_SET = 'NS'
STRING = 'S'
STRING_SET = 'SS'

ATTRIBUTE_TYPES = [BINARY, BINARY_SET, BOOLEAN, LIST, MAP, NULL, NUMBER, NUMBER_SET, STRING, STRING_SET]

# Constants needed for creating indexes
LOCAL_SECONDARY_INDEX = 'LocalSecondaryIndex'
LOCAL_SECONDARY_INDEXES = 'LocalSecondaryIndexes'
GLOBAL_SECONDARY_INDEX = 'GlobalSecondaryIndex'
GLOBAL_SECONDARY_INDEXES = 'GlobalSecondaryIndexes'
PROJECTION = 'Projection'
PROJECTION_TYPE = 'ProjectionType'
NON_KEY_ATTRIBUTES = 'NonKeyAttributes'
KEYS_ONLY = 'KEYS_ONLY'
ALL = 'ALL'
INCLUDE = 'INCLUDE'

# Constants for Dynamodb Streams
STREAM_VIEW_TYPE = 'StreamViewType'
STREAM_SPECIFICATION = 'StreamSpecification'
STREAM_ENABLED = 'StreamEnabled'
STREAM_NEW_IMAGE = 'NEW_IMAGE'
STREAM_OLD_IMAGE = 'OLD_IMAGE'
STREAM_NEW_AND_OLD_IMAGE = 'NEW_AND_OLD_IMAGES'
STREAM_KEYS_ONLY = 'KEYS_ONLY'

# Constants for updating a table's TTL
UPDATE_TIME_TO_LIVE = 'UpdateTimeToLive'
TIME_TO_LIVE_SPECIFICATION = 'TimeToLiveSpecification'
ENABLED = 'Enabled'

# These are constants used in the KeyConditionExpression parameter
# http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Query.html#DDB-Query-request-KeyConditionExpression
EXCLUSIVE_START_KEY = 'ExclusiveStartKey'
LAST_EVALUATED_KEY = 'LastEvaluatedKey'

# These are the valid select values for the Scan operation
# See: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Scan.html#DDB-Scan-request-Select
ALL_ATTRIBUTES = 'ALL_ATTRIBUTES'
ALL_PROJECTED_ATTRIBUTES = 'ALL_PROJECTED_ATTRIBUTES'
SPECIFIC_ATTRIBUTES = 'SPECIFIC_ATTRIBUTES'
COUNT = 'COUNT'
SELECT_VALUES = [ALL_ATTRIBUTES, ALL_PROJECTED_ATTRIBUTES, SPECIFIC_ATTRIBUTES, COUNT]

# These are the valid comparison operators for the Scan operation
# See: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Scan.html#DDB-Scan-request-ScanFilter
SEGMENT = 'Segment'
TOTAL_SEGMENTS = 'TotalSegments'

# These are constants used in the expected condition for PutItem
# See:
# http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_PutItem.html#DDB-PutItem-request-Expected
VALUE = 'Value'

# These are the valid ReturnConsumedCapacity values used in multiple operations
# See: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchGetItem.html#DDB-BatchGetItem-request-ReturnConsumedCapacity
CONSUMED_CAPACITY = 'ConsumedCapacity'
CAPACITY_UNITS = 'CapacityUnits'
INDEXES = 'INDEXES'
TOTAL = 'TOTAL'
NONE = 'NONE'
RETURN_CONSUMED_CAPACITY_VALUES = [INDEXES, TOTAL, NONE]

# These are the valid ReturnItemCollectionMetrics values used in multiple operations
# See: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html#DDB-BatchWriteItem-request-ReturnItemCollectionMetrics
SIZE = 'SIZE'
RETURN_ITEM_COLL_METRICS_VALUES = [SIZE, NONE]

# These are the valid ReturnValues values used in the PutItem operation
# See: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_PutItem.html#DDB-PutItem-request-ReturnValues
ALL_OLD = 'ALL_OLD'
UPDATED_OLD = 'UPDATED_OLD'
ALL_NEW = 'ALL_NEW'
UPDATED_NEW = 'UPDATED_NEW'
RETURN_VALUES_VALUES = [NONE, ALL_OLD, UPDATED_OLD, ALL_NEW, UPDATED_NEW]
RETURN_VALUES_ON_CONDITION_FAILURE_VALUES = [NONE, ALL_OLD]

# These are constants used in the AttributeUpdates parameter for UpdateItem
# See: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html#DDB-UpdateItem-request-AttributeUpdates
PUT = 'PUT'
DELETE = 'DELETE'
ADD = 'ADD'
# Page sizes for batch operations (match DynamoDB's documented per-call limits)
BATCH_GET_PAGE_LIMIT = 100
BATCH_WRITE_PAGE_LIMIT = 25

META_CLASS_NAME = "Meta"
REGION = "region"
HOST = "host"

--------------------------------------------------------------------------------
/pynamodb/expressions/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pynamodb/PynamoDB/2f8e8bb60474babd6ad558a91b0ad89a2c36d515/pynamodb/expressions/__init__.py
--------------------------------------------------------------------------------
/pynamodb/expressions/condition.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from typing import Dict
3 |
4 |
# match dynamo function syntax: size(path)
def size(path):
    """Wrap ``path`` in a ``_Size`` operand, mirroring DynamoDB's ``size(path)`` function syntax."""
    # Deferred import — presumably avoids a circular import with operand.py; confirm.
    from pynamodb.expressions.operand import _Size
    return _Size(path)
9 |
10 |
class Condition(object):
    """
    Base class for DynamoDB condition expressions.

    Subclasses supply a ``format_string`` template and pass an operator plus
    operand values to ``__init__``; ``serialize`` renders the expression text
    while registering attribute-name/value placeholders in the given dicts.
    """
    # Per-subclass template with an '{operator}' slot plus positional slots
    # ({0}, {1}, ...) for the serialized operands.
    format_string: str = ''

    def __init__(self, operator: str, *values) -> None:
        self.operator = operator
        self.values = values

    def __eq__(self, other: Any) -> bool:
        # Structural equality: same concrete type, same operator, and
        # pairwise-equal operands (compared via the operand's _equals_to).
        return (
            type(self) is type(other)
            and self.operator == other.operator
            and len(self.values) == len(other.values)
            and all(v1._equals_to(v2) for v1, v2 in zip(self.values, other.values))
        )

    def serialize(self, placeholder_names: Dict[str, str], expression_attribute_values: Dict[str, str]) -> str:
        # Serializing each operand populates the placeholder dictionaries as a side effect.
        values = [value.serialize(placeholder_names, expression_attribute_values) for value in self.values]
        return self.format_string.format(*values, operator=self.operator)

    def __and__(self, other):
        # `cond1 & cond2` builds a logical AND; reject non-Condition operands early.
        if not isinstance(other, Condition):
            raise TypeError("unsupported operand type(s) for &: '{}' and '{}'"
                            .format(self.__class__.__name__, other.__class__.__name__))
        return And(self, other)

    def __rand__(self, other):
        # special case 'None & condition' to enable better syntax for chaining
        if other is not None:
            raise TypeError("unsupported operand type(s) for &: '{}' and '{}'"
                            .format(other.__class__.__name__, self.__class__.__name__))
        return self

    def __or__(self, other):
        if not isinstance(other, Condition):
            raise TypeError("unsupported operand type(s) for |: '{}' and '{}'"
                            .format(self.__class__.__name__, other.__class__.__name__))
        return Or(self, other)

    def __invert__(self):
        # `~cond` builds a logical NOT.
        return Not(self)

    def __repr__(self) -> str:
        # Same template as serialize(), but with raw operand str() values
        # (no placeholder substitution).
        values = [str(value) for value in self.values]
        return self.format_string.format(*values, operator=self.operator)

    def __bool__(self):
        # Prevent users from accidentally comparing the condition object instead of the attribute instance
        raise TypeError("unsupported operand type(s) for bool: {}".format(self.__class__.__name__))
59 |
60 |
class Comparison(Condition):
    """Binary comparison condition between two operands: =, <>, <, <=, > or >=."""
    format_string = '{0} {operator} {1}'

    def __init__(self, operator, lhs, rhs):
        if operator not in ['=', '<>', '<', '<=', '>', '>=']:
            # Bug fix: the message previously interpolated the operator twice
            # ("{0} ... operator: {0}"), e.g. "== is not a valid comparison operator: =="
            raise ValueError("{0} is not a valid comparison operator".format(operator))
        super().__init__(operator, lhs, rhs)
68 |
69 |
class Between(Condition):
    """Range condition rendered as ``path BETWEEN lower AND upper``."""
    format_string = '{0} {operator} {1} AND {2}'

    def __init__(self, path, lower, upper):
        super().__init__('BETWEEN', path, lower, upper)
75 |
76 |
class In(Condition):
    """Membership condition rendered as ``path IN (v1, v2, ...)``."""

    def __init__(self, path, *values):
        super().__init__('IN', path, *values)
        # Operand 0 is the path; the values occupy placeholders 1..len(values).
        members = ', '.join('{%d}' % (index + 1) for index in range(len(values)))
        self.format_string = '{0} {operator} (' + members + ')'
82 |
83 |
class Exists(Condition):
    """Function condition rendered as ``attribute_exists(path)``."""
    format_string = '{operator} ({0})'

    def __init__(self, path):
        super().__init__('attribute_exists', path)
89 |
90 |
class NotExists(Condition):
    """Function condition rendered as ``attribute_not_exists(path)``."""
    format_string = '{operator} ({0})'

    def __init__(self, path):
        super().__init__('attribute_not_exists', path)
96 |
97 |
class IsType(Condition):
    """Function condition rendered as ``attribute_type(path, attr_type)``."""
    format_string = '{operator} ({0}, {1})'

    def __init__(self, path, attr_type):
        super().__init__('attribute_type', path, attr_type)
103 |
104 |
class BeginsWith(Condition):
    """Function condition rendered as ``begins_with(path, prefix)``."""
    format_string = '{operator} ({0}, {1})'

    def __init__(self, path, prefix):
        super().__init__('begins_with', path, prefix)
110 |
111 |
class Contains(Condition):
    """Function condition rendered as ``contains(path, operand)``."""
    format_string = '{operator} ({0}, {1})'

    def __init__(self, path, operand):
        super().__init__('contains', path, operand)
117 |
118 |
class And(Condition):
    """Logical conjunction, rendered parenthesized: ``(cond1 AND cond2)``."""
    format_string = '({0} {operator} {1})'

    def __init__(self, condition1: Condition, condition2: Condition) -> None:
        super().__init__('AND', condition1, condition2)
124 |
125 |
class Or(Condition):
    """Logical disjunction, rendered parenthesized: ``(cond1 OR cond2)``."""
    format_string = '({0} {operator} {1})'

    def __init__(self, condition1: Condition, condition2: Condition) -> None:
        super().__init__('OR', condition1, condition2)
131 |
132 |
class Not(Condition):
    """Logical negation, rendered parenthesized: ``(NOT cond)``."""
    format_string = '({operator} {0})'

    def __init__(self, condition: Condition) -> None:
        super().__init__('NOT', condition)
138 |
--------------------------------------------------------------------------------
/pynamodb/expressions/projection.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 | from typing import List
3 | from typing import Union
4 |
5 | from pynamodb.attributes import Attribute
6 | from pynamodb.expressions.operand import Path
7 | from pynamodb.expressions.util import substitute_names
8 |
9 |
def create_projection_expression(attributes_to_get, placeholders: Dict[str, str]) -> str:
    """
    Build a ProjectionExpression string from one attribute (or a list of them),
    registering attribute-name placeholders in ``placeholders`` as a side effect.
    """
    # Accept a single attribute by normalizing it into a one-element list.
    if not isinstance(attributes_to_get, list):
        attributes_to_get = [attributes_to_get]
    document_paths = (_get_document_path(attribute) for attribute in attributes_to_get)
    return ', '.join(substitute_names(path, placeholders) for path in document_paths)
15 |
16 |
def _get_document_path(attribute: Union[Attribute, Path, str]) -> List[str]:
    """Normalize any of the three accepted attribute forms into a list of path segments."""
    if isinstance(attribute, Attribute):
        # A model attribute contributes its (single) DynamoDB attribute name.
        return [attribute.attr_name]
    if isinstance(attribute, Path):
        # A Path already carries its segment list.
        return attribute.path
    # Plain strings use '.' as the map dereference separator.
    return attribute.split('.')
23 |
--------------------------------------------------------------------------------
/pynamodb/expressions/update.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from typing import Dict
3 | from typing import List
4 | from typing import Optional
5 | from typing import Sequence
6 | from typing import TYPE_CHECKING
7 |
8 | from pynamodb.constants import BINARY_SET
9 | from pynamodb.constants import NUMBER
10 | from pynamodb.constants import NUMBER_SET
11 | from pynamodb.constants import STRING_SET
12 |
13 | if TYPE_CHECKING:
14 | from pynamodb.expressions.operand import _Operand
15 | from pynamodb.expressions.operand import Path
16 | from pynamodb.expressions.operand import Value
17 |
18 |
class Action:
    """
    Base class for update-expression actions (SET / REMOVE / ADD / DELETE).

    Subclasses define ``format_string``; ``serialize`` fills its positional
    slots with the serialized operands stored in ``self.values``.
    """
    format_string: str = ''

    def __init__(self, *values: '_Operand') -> None:
        self.values = values

    def __eq__(self, other: Any) -> bool:
        # Structural equality: same action type and pairwise-equal operands.
        if type(self) is not type(other):
            return False
        if len(self.values) != len(other.values):
            return False
        return all(
            type(v1) is type(v2) and v1._equals_to(v2)
            for v1, v2 in zip(self.values, other.values)
        )

    def serialize(self, placeholder_names: Dict[str, str], expression_attribute_values: Dict[str, str]) -> str:
        # Serializing each operand registers its placeholders as a side effect.
        serialized = [value.serialize(placeholder_names, expression_attribute_values) for value in self.values]
        return self.format_string.format(*serialized)

    def __repr__(self) -> str:
        # Same template as serialize(), but with raw operand str() values.
        return self.format_string.format(*(str(value) for value in self.values))
44 |
45 |
class SetAction(Action):
    """
    The SET action adds an attribute to an item.
    """
    format_string = '{0} = {1}'

    def __init__(self, path: 'Path', value: '_Operand') -> None:
        # Operands: the target path and the value operand to assign to it.
        super().__init__(path, value)
54 |
55 |
class RemoveAction(Action):
    """
    The REMOVE action deletes an attribute from an item.
    """
    format_string = '{0}'

    def __init__(self, path: 'Path') -> None:
        # Single operand: the path of the attribute to remove.
        super().__init__(path)
64 |
65 |
class AddAction(Action):
    """
    The ADD action appends elements to a set or mathematically adds to a number attribute.
    """
    format_string = '{0} {1}'

    def __init__(self, path: 'Path', subset: 'Value') -> None:
        # ADD only applies to sets and numbers; validate both operands up front.
        path._type_check(BINARY_SET, NUMBER, NUMBER_SET, STRING_SET)
        subset._type_check(BINARY_SET, NUMBER, NUMBER_SET, STRING_SET)
        super().__init__(path, subset)
76 |
77 |
class DeleteAction(Action):
    """
    The DELETE action removes elements from a set.
    """
    format_string = '{0} {1}'

    def __init__(self, path: 'Path', subset: 'Value') -> None:
        # DELETE only applies to set types (not plain numbers); validate both operands.
        path._type_check(BINARY_SET, NUMBER_SET, STRING_SET)
        subset._type_check(BINARY_SET, NUMBER_SET, STRING_SET)
        super().__init__(path, subset)
88 |
89 |
class Update:
    """
    Collects update actions by type and serializes them into a single
    UpdateExpression string (SET ... REMOVE ... ADD ... DELETE ...).
    """

    def __init__(self, *actions: Action) -> None:
        self.set_actions: List[SetAction] = []
        self.remove_actions: List[RemoveAction] = []
        self.add_actions: List[AddAction] = []
        self.delete_actions: List[DeleteAction] = []
        for action in actions:
            self.add_action(action)

    def add_action(self, action: Action) -> None:
        # Route the action to the bucket matching its concrete type.
        if isinstance(action, SetAction):
            bucket: List[Any] = self.set_actions
        elif isinstance(action, RemoveAction):
            bucket = self.remove_actions
        elif isinstance(action, AddAction):
            bucket = self.add_actions
        elif isinstance(action, DeleteAction):
            bucket = self.delete_actions
        else:
            raise ValueError("unsupported action type: '{}'".format(action.__class__.__name__))
        bucket.append(action)

    def serialize(self, placeholder_names: Dict[str, str], expression_attribute_values: Dict[str, str]) -> Optional[str]:
        """Render the full update expression, or None if no actions were added."""
        clauses = []
        for keyword, actions in (
            ('SET', self.set_actions),
            ('REMOVE', self.remove_actions),
            ('ADD', self.add_actions),
            ('DELETE', self.delete_actions),
        ):
            clause = self._get_clause(keyword, actions, placeholder_names, expression_attribute_values)
            if clause is not None:
                clauses.append(clause)
        return ' '.join(clauses) or None

    @staticmethod
    def _get_clause(
        keyword: str,
        actions: Sequence[Action],
        placeholder_names: Dict[str, str],
        expression_attribute_values: Dict[str, str]
    ) -> Optional[str]:
        # One clause per keyword, e.g. "SET #0 = :0, #1 = :1"; None when empty.
        serialized = ', '.join(
            action.serialize(placeholder_names, expression_attribute_values) for action in actions
        )
        return keyword + ' ' + serialized if serialized else None
133 |
--------------------------------------------------------------------------------
/pynamodb/expressions/util.py:
--------------------------------------------------------------------------------
1 | import re
2 | from typing import Any
3 | from typing import Dict
4 | from typing import List
5 | from typing import Union
6 |
7 |
# Matches one path segment: an attribute name followed by zero or more
# list dereference operators, e.g. "bar[0][1]" -> ("bar", "[0][1]").
PATH_SEGMENT_REGEX = re.compile(r'([^\[\]]+)((?:\[\d+\])*)$')


def get_path_segments(document_path: Union[str, List[str]]) -> List[str]:
    """
    Splits a document path into nested elements using the map dereference operator (.)
    and returns the list of path segments (an attribute name and optional list dereference operators ([n]).
    If the document path is already a list of path segments, a new copy is returned.

    https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.Attributes.html

    Note: callers depend upon the returned list being a copy so that it may be safely mutated
    """
    return document_path.split('.') if isinstance(document_path, str) else list(document_path)


def substitute_names(document_path: Union[str, List[str]], placeholders: Dict[str, str]) -> str:
    """
    Replaces all attribute names in the given document path with placeholders.
    Stores the placeholders in the given dictionary.

    :param document_path: list of path segments (an attribute name and optional list dereference operators)
    :param placeholders: a dictionary to store mappings from attribute names to expression attribute name placeholders
    :raises ValueError: if any segment is not a valid attribute name / dereference

    For example: given the document_path for some attribute "baz", that is the first element of a list attribute "bar",
    that itself is a map element of "foo" (i.e. ['foo', 'bar[0]', 'baz']) and an empty placeholders dictionary,
    `substitute_names` will return "#0.#1[0].#2" and placeholders will contain {"foo": "#0", "bar": "#1", "baz": "#2"}
    """
    path_segments = get_path_segments(document_path)
    # Keep the raw path for error reporting before segments are replaced in-place below.
    # Bug fix: the error message previously joined `document_path` directly; for a
    # string path that iterates characters ('.'.join("foo") -> "f.o.o").
    raw_path = '.'.join(path_segments)
    for idx, segment in enumerate(path_segments):
        match = PATH_SEGMENT_REGEX.match(segment)
        if not match:
            raise ValueError('{} is not a valid document path'.format(raw_path))
        name, indexes = match.groups()
        if name in placeholders:
            placeholder = placeholders[name]
        else:
            placeholder = '#' + str(len(placeholders))
            placeholders[name] = placeholder
        path_segments[idx] = placeholder + indexes
    return '.'.join(path_segments)
49 |
50 |
def get_value_placeholder(value: Any, expression_attribute_values: Dict[str, str]) -> str:
    """
    Register ``value`` under a fresh ``:N`` placeholder (N = current dict size)
    and return that placeholder string.
    """
    placeholder = ':{}'.format(len(expression_attribute_values))
    expression_attribute_values[placeholder] = value
    return placeholder
55 |
--------------------------------------------------------------------------------
/pynamodb/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pynamodb/PynamoDB/2f8e8bb60474babd6ad558a91b0ad89a2c36d515/pynamodb/py.typed
--------------------------------------------------------------------------------
/pynamodb/settings.py:
--------------------------------------------------------------------------------
1 | import importlib.util
2 | import logging
3 | import os
4 | import warnings
5 | from os import getenv
6 |
7 | from typing import Any
8 |
log = logging.getLogger(__name__)

# Fallback values used when the override file does not provide a setting.
default_settings_dict = {
    'connect_timeout_seconds': 15,
    'read_timeout_seconds': 30,
    'max_retry_attempts': 3,
    'region': None,
    'max_pool_connections': 10,
    'extra_headers': None,
    'retry_configuration': 'LEGACY'
}

# Path to an optional Python file supplying global setting overrides;
# relocatable via the PYNAMODB_CONFIG environment variable.
OVERRIDE_SETTINGS_PATH = getenv('PYNAMODB_CONFIG', '/etc/pynamodb/global_default_settings.py')
22 |
23 |
24 | def _load_module(name, path):
25 | # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
26 | spec = importlib.util.spec_from_file_location(name, path)
27 | module = importlib.util.module_from_spec(spec) # type: ignore
28 | spec.loader.exec_module(module) # type: ignore
29 | return module
30 |
31 |
# Load the optional override module once at import time; fall back to an
# empty dict (so getattr/hasattr lookups simply find nothing).
override_settings = {}
if os.path.isfile(OVERRIDE_SETTINGS_PATH):
    override_settings = _load_module('__pynamodb_override_settings__', OVERRIDE_SETTINGS_PATH)
    if hasattr(override_settings, 'session_cls') or hasattr(override_settings, 'request_timeout_seconds'):
        # Bug fix: the warning previously said `request_timeout_second` (missing the
        # trailing "s"), which did not match the option name checked above.
        warnings.warn("The `session_cls` and `request_timeout_seconds` options are no longer supported")
    log.info('Override settings for pynamo available {}'.format(OVERRIDE_SETTINGS_PATH))
else:
    log.info('Override settings for pynamo not available {}'.format(OVERRIDE_SETTINGS_PATH))
    log.info('Using Default settings value')
41 |
42 |
def get_settings_value(key: str) -> Any:
    """
    Fetches the value from the override file.
    If the value is not present, falls back to default_settings_dict,
    and finally to None for unknown keys.
    """
    try:
        return getattr(override_settings, key)
    except AttributeError:
        pass
    return default_settings_dict.get(key)
55 |
--------------------------------------------------------------------------------
/pynamodb/signals.py:
--------------------------------------------------------------------------------
1 | """
2 | Implements signals based on blinker if available, otherwise
3 | falls silently back to a noop.
4 |
5 | This implementation was taken from Flask:
6 | https://github.com/pallets/flask/blob/master/flask/signals.py
7 | """
# Flipped to True below if blinker imports successfully.
signals_available = False
9 |
10 |
class _FakeNamespace(object):
    """Stand-in for blinker.Namespace whose `signal` factory returns no-op signals."""
    def signal(self, name, doc=None):
        return _FakeSignal(name, doc)
14 |
15 |
16 | class _FakeSignal(object):
17 | """
18 | If blinker is unavailable, create a fake class with the same
19 | interface that allows sending of signals but will fail with an
20 | error on anything else. Instead of doing anything on send, it
21 | will just ignore the arguments and do nothing instead.
22 | """
23 |
24 | def __init__(self, name, doc=None):
25 | self.name = name
26 | self.__doc__ = doc
27 |
28 | def _fail(self, *args, **kwargs):
29 | raise RuntimeError('signalling support is unavailable '
30 | 'because the blinker library is '
31 | 'not installed.')
32 |
33 | send = lambda *a, **kw: None # noqa
34 | connect = disconnect = has_receivers_for = receivers_for = \
35 | temporarily_connected_to = _fail
36 | del _fail
37 |
38 |
try:
    from blinker import Namespace
    signals_available = True
except ImportError:  # pragma: no cover
    # blinker is optional: fall back to no-op signals (which raise only on
    # connect/disconnect and similar operations, never on send).
    Namespace = _FakeNamespace  # type:ignore

# The namespace for code signals. If you are not PynamoDB code, do
# not put signals in here. Create your own namespace instead.
_signals = Namespace()

# Signals presumably emitted around DynamoDB requests elsewhere in the
# codebase (see the connection module) — named pre/post send.
pre_dynamodb_send = _signals.signal('pre_dynamodb_send')
post_dynamodb_send = _signals.signal('post_dynamodb_send')
51 |
--------------------------------------------------------------------------------
/pynamodb/transactions.py:
--------------------------------------------------------------------------------
1 | from typing import Tuple, TypeVar, Type, Any, List, Optional, Dict, Union, Text, Generic
2 |
3 | from pynamodb.connection import Connection
4 | from pynamodb.constants import ITEM, RESPONSES
5 | from pynamodb.expressions.condition import Condition
6 | from pynamodb.expressions.update import Action
7 | from pynamodb.models import Model, _ModelFuture, _KeyType
8 |
9 | _M = TypeVar('_M', bound=Model)
10 | _TTransaction = TypeVar('_TTransaction', bound='Transaction')
11 |
12 |
class Transaction:

    """
    Base class for a type of transaction operation
    """

    def __init__(self, connection: Connection, return_consumed_capacity: Optional[str] = None) -> None:
        # Connection used by subclasses to issue the transact_* call on commit.
        self._connection = connection
        self._return_consumed_capacity = return_consumed_capacity

    def _commit(self):
        # Subclasses implement the actual service call here.
        raise NotImplementedError()

    def __enter__(self: _TTransaction) -> _TTransaction:
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Commit only when the `with` block exits without an exception;
        # on error the accumulated operations are silently dropped.
        if exc_type is None and exc_val is None and exc_tb is None:
            self._commit()
32 |
33 |
class TransactGet(Transaction):
    """
    Accumulates get requests and resolves them into _ModelFuture placeholders
    when committed via the connection's transact_get_items call.
    """

    # Raw per-item responses from the last commit; None until committed.
    _results: Optional[List] = None

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        self._get_items: List[Dict] = []
        self._futures: List[_ModelFuture] = []
        super(TransactGet, self).__init__(*args, **kwargs)

    def get(self, model_cls: Type[_M], hash_key: _KeyType, range_key: Optional[_KeyType] = None) -> _ModelFuture[_M]:
        """
        Adds the operation arguments for an item to list of models to get
        returns a _ModelFuture object as a placeholder

        :param model_cls: model class of the item to fetch
        :param hash_key: the item's hash key
        :param range_key: the item's range key, if any
        :return: a future resolved with the fetched data once the transaction commits
        """
        operation_kwargs = model_cls.get_operation_kwargs_from_class(hash_key, range_key=range_key)
        model_future = _ModelFuture(model_cls)
        self._futures.append(model_future)
        self._get_items.append(operation_kwargs)
        return model_future

    @staticmethod
    def _update_futures(futures: List[_ModelFuture], results: List) -> None:
        # Results are positional: the i-th response corresponds to the i-th future.
        for model, data in zip(futures, results):
            model.update_with_raw_data(data.get(ITEM))

    def _commit(self) -> Any:
        # Issue all accumulated gets as one transactional read.
        response = self._connection.transact_get_items(
            get_items=self._get_items,
            return_consumed_capacity=self._return_consumed_capacity
        )

        results = response[RESPONSES]
        self._results = results
        self._update_futures(self._futures, results)
        return response
74 |
75 |
class TransactWrite(Transaction):
    """
    Builds a write transaction (condition checks, deletes, puts, updates)
    and commits it via the connection's transact_write_items call.
    """

    def __init__(
        self,
        client_request_token: Optional[str] = None,
        return_item_collection_metrics: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        super(TransactWrite, self).__init__(**kwargs)
        # Forwarded verbatim to transact_write_items on commit.
        self._client_request_token: Optional[str] = client_request_token
        self._return_item_collection_metrics = return_item_collection_metrics
        # One operation-kwargs dict per requested operation, grouped by type.
        self._condition_check_items: List[Dict] = []
        self._delete_items: List[Dict] = []
        self._put_items: List[Dict] = []
        self._update_items: List[Dict] = []
        # Models whose local version attribute is bumped after a successful commit.
        self._models_for_version_attribute_update: List[Any] = []

    def condition_check(self, model_cls: Type[_M], hash_key: _KeyType, range_key: Optional[_KeyType] = None, condition: Optional[Condition] = None):
        # A condition check without a condition is meaningless; fail fast.
        if condition is None:
            raise TypeError('`condition` cannot be None')
        operation_kwargs = model_cls.get_operation_kwargs_from_class(
            hash_key,
            range_key=range_key,
            condition=condition
        )
        self._condition_check_items.append(operation_kwargs)

    def delete(self, model: _M, condition: Optional[Condition] = None, *, add_version_condition: bool = True) -> None:
        # Note: deletes are not registered for a local version attribute update.
        operation_kwargs = model.get_delete_kwargs_from_instance(
            condition=condition,
            add_version_condition=add_version_condition,
        )
        self._delete_items.append(operation_kwargs)

    def save(self, model: _M, condition: Optional[Condition] = None, return_values: Optional[str] = None) -> None:
        operation_kwargs = model.get_save_kwargs_from_instance(
            condition=condition,
            return_values_on_condition_failure=return_values
        )
        self._put_items.append(operation_kwargs)
        self._models_for_version_attribute_update.append(model)

    def update(self, model: _M, actions: List[Action], condition: Optional[Condition] = None,
               return_values: Optional[str] = None,
               *,
               add_version_condition: bool = True) -> None:
        operation_kwargs = model.get_update_kwargs_from_instance(
            actions=actions,
            condition=condition,
            return_values_on_condition_failure=return_values,
            add_version_condition=add_version_condition,
        )
        self._update_items.append(operation_kwargs)
        self._models_for_version_attribute_update.append(model)

    def _commit(self) -> Any:
        # Issue all accumulated operations as one transactional write.
        response = self._connection.transact_write_items(
            condition_check_items=self._condition_check_items,
            delete_items=self._delete_items,
            put_items=self._put_items,
            update_items=self._update_items,
            client_request_token=self._client_request_token,
            return_consumed_capacity=self._return_consumed_capacity,
            return_item_collection_metrics=self._return_item_collection_metrics,
        )
        # Keep in-memory version attributes consistent with what was written.
        for model in self._models_for_version_attribute_update:
            model.update_local_version_attribute()
        return response
144 |
--------------------------------------------------------------------------------
/pynamodb/types.py:
--------------------------------------------------------------------------------
1 | """
2 | Types used in pynamodb
3 | """
4 |
5 | STRING = 'S'
6 | NUMBER = 'N'
7 | BINARY = 'B'
8 | HASH = 'HASH'
9 | RANGE = 'RANGE'
10 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | markers =
    ddblocal: requires a mock dynamodb server running on localhost:8000
4 | env =
5 | AWS_ACCESS_KEY_ID=1
6 | AWS_SECRET_ACCESS_KEY=2
7 | AWS_DEFAULT_REGION=us-east-1
8 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | pytest>=6
2 | pytest-env
3 | pytest-mock
4 | freezegun
5 |
6 | # only used in CI
7 | coveralls
8 | mypy==1.2.0
9 | typing-extensions==4.5.0
10 | pytest-cov
11 | blinker==1.6.2
12 |
13 | # used for type-checking
14 | botocore-stubs
15 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [wheel]
2 | universal = False
3 |
4 | [metadata]
5 | license_files = LICENSE
6 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, find_packages


install_requires = [
    'botocore>=1.12.54',
    'typing-extensions>=4; python_version<"3.11"',
]

# Read the long description up front inside a context manager.
# Bug fix: the previous inline `open('README.rst').read()` left the file
# handle to be closed only by the garbage collector (ResourceWarning).
with open('README.rst') as readme_file:
    long_description = readme_file.read()

setup(
    name='pynamodb',
    version=__import__('pynamodb').__version__,
    packages=find_packages(exclude=('examples', 'tests', 'typing_tests', 'tests.integration',)),
    url='http://jlafon.io/pynamodb.html',
    project_urls={
        'Source': 'https://github.com/pynamodb/PynamoDB',
    },
    author='Jharrod LaFon',
    author_email='jlafon@eyesopen.com',
    description='A Pythonic Interface to DynamoDB',
    long_description=long_description,
    long_description_content_type='text/x-rst',
    zip_safe=False,
    license='MIT',
    keywords='python dynamodb amazon',
    python_requires=">=3.7",
    install_requires=install_requires,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Programming Language :: Python',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: 3.11',
        'Programming Language :: Python :: 3.12',
        'License :: OSI Approved :: MIT License',
    ],
    extras_require={
        'signals': ['blinker>=1.3,<2.0'],
    },
    package_data={'pynamodb': ['py.typed']},
)
45 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pynamodb/PynamoDB/2f8e8bb60474babd6ad558a91b0ad89a2c36d515/tests/__init__.py
--------------------------------------------------------------------------------
/tests/deep_eq.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2010-2013 Samuel Sutch [samuel.sutch@gmail.com]
2 | #
3 | # Permission is hereby granted, free of charge, to any person obtaining a copy
4 | # of this software and associated documentation files (the "Software"), to deal
5 | # in the Software without restriction, including without limitation the rights
6 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | # copies of the Software, and to permit persons to whom the Software is
8 | # furnished to do so, subject to the following conditions:
9 | #
10 | # The above copyright notice and this permission notice shall be included in
11 | # all copies or substantial portions of the Software.
12 | #
13 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | # THE SOFTWARE.
20 |
21 | import datetime, time, functools, operator
22 |
default_fudge = datetime.timedelta(seconds=0, microseconds=0, days=0)


def deep_eq(_v1, _v2, datetime_fudge=default_fudge, _assert=False):
    """
    Tests for deep equality between two python data structures, recursing
    into sub-structures if necessary. Works with all python types including
    iterators and generators. This function was dreamt up to test API
    responses but could be used for anything. Be careful. With deeply nested
    structures you may blow the stack.

    Options:
      datetime_fudge => a datetime.timedelta; when comparing datetimes,
                        values whose difference is within this delta are
                        considered equal
      _assert        => when truthy, raise an AssertionError as soon as two
                        values do not match, instead of returning False
                        (very useful in combination with pdb)

    Doctests included:

    >>> x1, y1 = ({'a': 'b'}, {'a': 'b'})
    >>> deep_eq(x1, y1)
    True
    >>> x2, y2 = ({'a': 'b'}, {'b': 'a'})
    >>> deep_eq(x2, y2)
    False
    >>> x3, y3 = ({'a': {'b': 'c'}}, {'a': {'b': 'c'}})
    >>> deep_eq(x3, y3)
    True
    >>> x4, y4 = ({'c': 't', 'a': {'b': 'c'}}, {'a': {'b': 'n'}, 'c': 't'})
    >>> deep_eq(x4, y4)
    False
    >>> x5, y5 = ({'a': [1,2,3]}, {'a': [1,2,3]})
    >>> deep_eq(x5, y5)
    True
    >>> x6, y6 = ({'a': [1,'b',8]}, {'a': [2,'b',8]})
    >>> deep_eq(x6, y6)
    False
    >>> x7, y7 = ('a', 'a')
    >>> deep_eq(x7, y7)
    True
    >>> x8, y8 = (['p','n',['asdf']], ['p','n',['asdf']])
    >>> deep_eq(x8, y8)
    True
    >>> x9, y9 = (['p','n',['asdf',['omg']]], ['p', 'n', ['asdf',['nowai']]])
    >>> deep_eq(x9, y9)
    False
    >>> x10, y10 = (1, 2)
    >>> deep_eq(x10, y10)
    False
    >>> deep_eq((str(p) for p in range(10)), (str(p) for p in range(10)))
    True
    >>> str(deep_eq(range(4), range(4)))
    'True'
    >>> deep_eq(range(100), range(100))
    True
    >>> deep_eq(range(2), range(5))
    False
    >>> import datetime
    >>> from datetime import datetime as dt
    >>> d1, d2 = (dt.now(), dt.now() + datetime.timedelta(seconds=4))
    >>> deep_eq(d1, d2)
    False
    >>> deep_eq(d1, d2, datetime_fudge=datetime.timedelta(seconds=5))
    True
    """
    _deep_eq = functools.partial(deep_eq, datetime_fudge=datetime_fudge,
                                 _assert=_assert)

    def _check_assert(result, a, b, reason=''):
        # In assert mode, fail loudly at the first mismatch; otherwise just
        # propagate the boolean outcome up the recursion.
        if _assert and not result:
            assert 0, "an assertion has failed in deep_eq ({}) {} != {}".format(
                reason, str(a), str(b))
        return result

    def _deep_dict_eq(d1, d2):
        k1, k2 = (sorted(d1.keys()), sorted(d2.keys()))
        if k1 != k2:  # keys should be exactly equal
            return _check_assert(False, k1, k2, "keys")
        # Every value must itself compare deep-equal.
        return _check_assert(all(_deep_eq(d1[k], d2[k]) for k in k1),
                             d1, d2, "dictionaries")

    def _deep_iter_eq(l1, l2):
        if len(l1) != len(l2):
            return _check_assert(False, l1, l2, "lengths")
        return _check_assert(all(_deep_eq(v1, v2) for v1, v2 in zip(l1, l2)),
                             l1, l2, "iterables")

    def op(a, b):
        # isinstance (rather than an exact type() check) so datetime
        # subclasses also get the fudge treatment.
        if isinstance(a, datetime.datetime) and isinstance(b, datetime.datetime):
            # mktime(timetuple()) deliberately drops sub-second precision,
            # matching the original comparison granularity.
            t1, t2 = (time.mktime(a.timetuple()), time.mktime(b.timetuple()))
            # Bug fix: total_seconds() honors the days/microseconds
            # components of the fudge; the old `.seconds` attribute silently
            # ignored the days component (timedelta(days=1).seconds == 0).
            return _check_assert(
                abs(t1 - t2) <= abs(datetime_fudge.total_seconds()),
                a, b, "dates")
        return _check_assert(operator.eq(a, b), a, b, "values")

    c1, c2 = (_v1, _v2)

    # guard against strings because they are iterable and their
    # elements yield iterables infinitely.
    # I N C E P T I O N
    if not isinstance(_v1, str):
        if isinstance(_v1, dict):
            op = _deep_dict_eq
        else:
            try:
                c1, c2 = (list(iter(_v1)), list(iter(_v2)))
            except TypeError:
                # Not iterable: fall back to plain value comparison.
                c1, c2 = _v1, _v2
            else:
                op = _deep_iter_eq

    return op(c1, c2)
142 |
--------------------------------------------------------------------------------
/tests/integration/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pynamodb/PynamoDB/2f8e8bb60474babd6ad558a91b0ad89a2c36d515/tests/integration/__init__.py
--------------------------------------------------------------------------------
/tests/integration/base_integration_test.py:
--------------------------------------------------------------------------------
1 | """
2 | Runs tests against dynamodb
3 | """
4 | import time
5 |
6 | from pynamodb.connection import Connection
7 | from pynamodb.constants import PROVISIONED_THROUGHPUT, READ_CAPACITY_UNITS
8 | from pynamodb.expressions.condition import BeginsWith, NotExists
9 | from pynamodb.expressions.operand import Path, Value
10 | from pynamodb.exceptions import TableDoesNotExist
11 | from pynamodb.types import STRING, HASH, RANGE, NUMBER
12 |
13 | import pytest
14 |
15 |
@pytest.mark.ddblocal
def test_connection_integration(ddb_url):
    """End-to-end smoke test of the low-level Connection API against a local
    DynamoDB instance: table lifecycle (create/describe/update), single-item
    CRUD, batch write/get, query, scan, and table deletion.
    """
    table_name = 'pynamodb-ci-connection'

    # For use with a fake dynamodb connection
    # See: http://aws.amazon.com/dynamodb/developer-resources/
    conn = Connection(host=ddb_url)

    print(conn)
    print("conn.describe_table...")
    table = None
    try:
        table = conn.describe_table(table_name)
    except TableDoesNotExist:
        # Table absent on this instance: create it with one GSI and one LSI.
        # Note the index sub-dicts intentionally use raw DynamoDB casing
        # (KeyType/AttributeName) while the top-level params are snake_case.
        params = {
            'read_capacity_units': 1,
            'write_capacity_units': 1,
            'attribute_definitions': [
                {
                    'attribute_type': STRING,
                    'attribute_name': 'Forum'
                },
                {
                    'attribute_type': STRING,
                    'attribute_name': 'Thread'
                },
                {
                    'attribute_type': STRING,
                    'attribute_name': 'AltKey'
                },
                {
                    'attribute_type': NUMBER,
                    'attribute_name': 'number'
                }
            ],
            'key_schema': [
                {
                    'key_type': HASH,
                    'attribute_name': 'Forum'
                },
                {
                    'key_type': RANGE,
                    'attribute_name': 'Thread'
                }
            ],
            'global_secondary_indexes': [
                {
                    'index_name': 'alt-index',
                    'key_schema': [
                        {
                            'KeyType': 'HASH',
                            'AttributeName': 'AltKey'
                        }
                    ],
                    'projection': {
                        'ProjectionType': 'KEYS_ONLY'
                    },
                    'provisioned_throughput': {
                        'ReadCapacityUnits': 1,
                        'WriteCapacityUnits': 1,
                    }
                }
            ],
            'local_secondary_indexes': [
                {
                    'index_name': 'view-index',
                    'key_schema': [
                        {
                            'KeyType': 'HASH',
                            'AttributeName': 'Forum'
                        },
                        {
                            'KeyType': 'RANGE',
                            'AttributeName': 'AltKey'
                        }
                    ],
                    'projection': {
                        'ProjectionType': 'KEYS_ONLY'
                    }
                }
            ]
        }
        print("conn.create_table...")
        conn.create_table(table_name, **params)

    # Wait for the table to become visible, then wait out CREATING.
    # NOTE(review): if describe_table raises TableDoesNotExist again here,
    # the loop propagates it rather than retrying — presumably DynamoDB
    # Local makes the table visible immediately after create_table; confirm.
    while table is None:
        time.sleep(1)
        table = conn.describe_table(table_name)

    while table['TableStatus'] == 'CREATING':
        time.sleep(2)
        table = conn.describe_table(table_name)
    print("conn.list_tables")
    conn.list_tables()
    print("conn.update_table...")

    # Bump read capacity by one, then wait for the table to go ACTIVE.
    conn.update_table(
        table_name,
        read_capacity_units=table.get(PROVISIONED_THROUGHPUT).get(READ_CAPACITY_UNITS) + 1,
        write_capacity_units=2
    )

    table = conn.describe_table(table_name)

    while table['TableStatus'] != 'ACTIVE':
        time.sleep(2)
        table = conn.describe_table(table_name)

    # Single-item round trip: conditional put, get, delete.
    print("conn.put_item")
    conn.put_item(
        table_name,
        'item1-hash',
        range_key='item1-range',
        attributes={'foo': {'S': 'bar'}},
        condition=NotExists(Path('Forum')),
    )
    conn.get_item(
        table_name,
        'item1-hash',
        range_key='item1-range'
    )
    conn.delete_item(
        table_name,
        'item1-hash',
        range_key='item1-range'
    )

    # Batch-write ten items into one forum, then read them back.
    items = []
    for i in range(10):
        items.append(
            {"Forum": "FooForum", "Thread": "thread-{}".format(i)}
        )
    print("conn.batch_write_items...")
    conn.batch_write_item(
        table_name,
        put_items=items
    )
    print("conn.batch_get_items...")
    data = conn.batch_get_item(  # noqa: F841 -- result intentionally unchecked
        table_name,
        items
    )
    print("conn.query...")
    conn.query(
        table_name,
        "FooForum",
        range_key_condition=(BeginsWith(Path('Thread'), Value('thread'))),
    )
    print("conn.scan...")
    conn.scan(
        table_name,
    )
    print("conn.delete_table...")
    conn.delete_table(table_name)
170 |
--------------------------------------------------------------------------------
/tests/integration/binary_update_test.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from pynamodb.attributes import UnicodeAttribute, BinaryAttribute, BinarySetAttribute
4 | from pynamodb.models import Model
5 |
6 |
@pytest.mark.ddblocal
@pytest.mark.parametrize('legacy_encoding', [False, True])
def test_binary_attribute_update(legacy_encoding: bool, ddb_url: str) -> None:
    """A BinaryAttribute round-trips through save/get and a set() update,
    for both the legacy and the current binary encoding.

    Renamed from ``test_binary_set_attribute_update``: this module defined
    two functions under that name, so this (non-set) test was shadowed by
    the later definition and never collected by pytest.
    """
    class DataModel(Model):
        class Meta:
            # One table per encoding so the parametrized runs don't collide.
            table_name = f'binary_attr_update__legacy_{legacy_encoding}'
            host = ddb_url
        pkey = UnicodeAttribute(hash_key=True)
        data = BinaryAttribute(legacy_encoding=legacy_encoding)

    DataModel.create_table(read_capacity_units=1, write_capacity_units=1, wait=True)
    # Non-UTF-8 bytes — presumably chosen to exercise binary-encoding edge
    # cases; confirm against the attribute serializers.
    data = b'\x00hey\xfb'
    pkey = 'pkey'
    DataModel(pkey, data=data).save()
    m = DataModel.get(pkey)
    assert m.data == data

    new_data = b'\xff'
    m.update(actions=[DataModel.data.set(new_data)])
    # The local instance is expected to reflect the new value after update().
    assert new_data == m.data
27 |
28 |
@pytest.mark.ddblocal
@pytest.mark.parametrize('legacy_encoding', [False, True])
def test_binary_set_attribute_update(legacy_encoding: bool, ddb_url: str) -> None:
    """A BinarySetAttribute round-trips through save/get and a set() update,
    for both the legacy and the current binary encoding.
    """
    class DataModel(Model):
        class Meta:
            # One table per encoding so the parametrized runs don't collide.
            table_name = f'binary_set_attr_update__legacy_{legacy_encoding}'
            host = ddb_url
        pkey = UnicodeAttribute(hash_key=True)
        data = BinarySetAttribute(legacy_encoding=legacy_encoding)

    DataModel.create_table(read_capacity_units=1, write_capacity_units=1, wait=True)
    # Non-UTF-8 bytes — presumably chosen to exercise binary-encoding edge
    # cases; confirm against the attribute serializers.
    data = {b'\x00hey\xfb', b'\x00beautiful\xfb'}
    pkey = 'pkey'
    DataModel(pkey, data=data).save()
    m = DataModel.get(pkey)
    assert m.data == data

    new_data = {b'\xff'}
    m.update(actions=[DataModel.data.set(new_data)])
    # The local instance is expected to reflect the new value after update().
    assert new_data == m.data
49 |
--------------------------------------------------------------------------------
/tests/integration/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 |
5 |
@pytest.fixture(scope='module')
def ddb_url():
    """Obtain the URL of a local DynamoDB instance.

    Meant for use with something like DynamoDB Local:

    http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.html

    The instance must be set up "out of band"; we merely assume it exists
    at http://localhost:8000 or at the URL given by the
    PYNAMODB_INTEGRATION_TEST_DDB_URL environment variable.
    """
    # environ.get with a default collapses the original getenv-then-check
    # into one expression; an unset variable yields the localhost default,
    # while any set value (even an empty string) is honored, as before.
    return os.environ.get("PYNAMODB_INTEGRATION_TEST_DDB_URL", "http://localhost:8000")
20 |
--------------------------------------------------------------------------------
/tests/integration/model_integration_test.py:
--------------------------------------------------------------------------------
1 | """
2 | Integration tests for the model API
3 | """
4 |
5 | from datetime import datetime
6 |
7 | from pynamodb.models import Model
8 | from pynamodb.indexes import GlobalSecondaryIndex, AllProjection, LocalSecondaryIndex
9 | from pynamodb.attributes import (
10 | UnicodeAttribute, BinaryAttribute, UTCDateTimeAttribute, NumberSetAttribute, NumberAttribute,
11 | VersionAttribute)
12 |
13 | import pytest
14 |
15 |
class LSIndex(LocalSecondaryIndex):
    """
    A model for the local secondary index
    """
    class Meta:
        # Project all table attributes into the index.
        projection = AllProjection()
    # Shares the table's hash key; re-keys the range on `view`.
    forum = UnicodeAttribute(hash_key=True)
    view = NumberAttribute(range_key=True)
24 |
25 |
class GSIndex(GlobalSecondaryIndex):
    """
    A model for the secondary index
    """
    class Meta:
        # Project all table attributes into the index.
        projection = AllProjection()
        # GSIs carry their own provisioned throughput.
        read_capacity_units = 2
        write_capacity_units = 1
    epoch = UTCDateTimeAttribute(hash_key=True)
35 |
36 |
@pytest.mark.ddblocal
def test_model_integration(ddb_url):
    """End-to-end smoke test of the Model API against a local DynamoDB:
    table lifecycle, save/refresh, batch write/get, queries (table, LSI,
    GSI), scan, and conditional/versioned updates.
    """
    class TestModel(Model):
        """
        A model for testing
        """
        class Meta:
            region = 'us-east-1'
            table_name = 'pynamodb-ci'
            host = ddb_url
        forum = UnicodeAttribute(hash_key=True)
        thread = UnicodeAttribute(range_key=True)
        view = NumberAttribute(default=0)
        view_index = LSIndex()
        epoch_index = GSIndex()
        epoch = UTCDateTimeAttribute(default=datetime.now)
        content = BinaryAttribute(null=True, legacy_encoding=False)
        scores = NumberSetAttribute()
        version = VersionAttribute()

    # Start from a clean slate so stale rows don't affect assertions.
    if TestModel.exists():
        TestModel.delete_table()
    TestModel.create_table(read_capacity_units=1, write_capacity_units=1, wait=True)

    obj = TestModel('1', '2')
    obj.save()
    obj.refresh()
    obj = TestModel('foo', 'bar')
    obj.save()
    # Constructed but deliberately never saved.
    TestModel('foo2', 'bar2')
    obj3 = TestModel('setitem', 'setrange', scores={1, 2.1})
    obj3.save()
    obj3.refresh()

    with TestModel.batch_write() as batch:
        items = [TestModel('hash-{}'.format(x), '{}'.format(x)) for x in range(10)]
        for item in items:
            batch.save(item)

    # Bug fix: these keys must match the items written above, whose range
    # key is simply str(x); the previous 'thread-{}' range keys matched no
    # existing items, so batch_get exercised nothing.
    item_keys = [('hash-{}'.format(x), '{}'.format(x)) for x in range(10)]

    for item in TestModel.batch_get(item_keys):
        print(item)

    for item in TestModel.query('setitem', TestModel.thread.startswith('set')):
        print("Query Item {}".format(item))

    with TestModel.batch_write() as batch:
        items = [TestModel('hash-{}'.format(x), '{}'.format(x)) for x in range(10)]
        for item in items:
            print("Batch delete")
            batch.delete(item)

    for item in TestModel.scan():
        print("Scanned item: {}".format(item))

    tstamp = datetime.now()
    query_obj = TestModel('query_forum', 'query_thread')
    query_obj.forum = 'foo'
    query_obj.save()
    query_obj.update([TestModel.view.add(1)])
    for item in TestModel.epoch_index.query(tstamp):
        print("Item queried from index: {}".format(item))

    for item in TestModel.view_index.query('foo', TestModel.view > 0):
        print("Item queried from index: {}".format(item.view))

    # Setting a set attribute to empty clears it: it reads back as None.
    query_obj.update([TestModel.scores.set([])])
    query_obj.refresh()
    assert query_obj.scores is None

    print(query_obj.update([TestModel.view.add(1)], condition=TestModel.forum.exists()))
    TestModel.delete_table()
111 |
112 |
def test_can_inherit_version_attribute(ddb_url) -> None:
    """A VersionAttribute may be inherited, but a model may not declare a
    second one; only class definitions run here, so no DynamoDB calls occur.
    """

    class TestModelA(Model):
        """
        A model for testing
        """

        class Meta:
            region = 'us-east-1'
            table_name = 'pynamodb-ci-a'
            host = ddb_url

        forum = UnicodeAttribute(hash_key=True)
        thread = UnicodeAttribute(range_key=True)
        scores = NumberAttribute()
        version = VersionAttribute()

    # Inheriting the parent's single version attribute is fine.
    class TestModelB(TestModelA):
        class Meta:
            region = 'us-east-1'
            table_name = 'pynamodb-ci-b'
            host = ddb_url

    # Declaring a second VersionAttribute on a subclass must fail at class
    # definition time.
    with pytest.raises(ValueError) as e:
        class TestModelC(TestModelA):
            class Meta:
                region = 'us-east-1'
                table_name = 'pynamodb-ci-c'
                host = ddb_url

            version_invalid = VersionAttribute()
    assert str(e.value) == 'The model has more than one Version attribute: version, version_invalid'
145 |
--------------------------------------------------------------------------------
/tests/integration/table_integration_test.py:
--------------------------------------------------------------------------------
1 | """
2 | Run tests against dynamodb using the table abstraction
3 | """
4 | import time
5 | from pynamodb.constants import PROVISIONED_THROUGHPUT, READ_CAPACITY_UNITS
6 | from pynamodb.connection import TableConnection
7 | from pynamodb.expressions.condition import BeginsWith, NotExists
8 | from pynamodb.expressions.operand import Path, Value
9 | from pynamodb.exceptions import TableDoesNotExist
10 | from pynamodb.types import STRING, HASH, RANGE, NUMBER
11 |
12 | import pytest
13 |
14 |
@pytest.mark.ddblocal
def test_table_integration(ddb_url):
    """End-to-end smoke test of the TableConnection abstraction against a
    local DynamoDB: table lifecycle, item CRUD, batch operations, query,
    scan, and table deletion.  Mirrors test_connection_integration, but the
    table name is bound once at construction instead of per call.
    """
    table_name = 'pynamodb-ci-table'

    # For use with a fake dynamodb connection
    # See: http://aws.amazon.com/dynamodb/developer-resources/
    conn = TableConnection(table_name, host=ddb_url)
    print(conn)

    print("conn.describe_table...")
    table = None
    try:
        table = conn.describe_table()
    except TableDoesNotExist:
        # Table absent on this instance: create it with one GSI and one LSI.
        # Note the index sub-dicts intentionally use raw DynamoDB casing
        # (KeyType/AttributeName) while the top-level params are snake_case.
        params = {
            'read_capacity_units': 1,
            'write_capacity_units': 1,
            'attribute_definitions': [
                {
                    'attribute_type': STRING,
                    'attribute_name': 'Forum'
                },
                {
                    'attribute_type': STRING,
                    'attribute_name': 'Thread'
                },
                {
                    'attribute_type': STRING,
                    'attribute_name': 'AltKey'
                },
                {
                    'attribute_type': NUMBER,
                    'attribute_name': 'number'
                }
            ],
            'key_schema': [
                {
                    'key_type': HASH,
                    'attribute_name': 'Forum'
                },
                {
                    'key_type': RANGE,
                    'attribute_name': 'Thread'
                }
            ],
            'global_secondary_indexes': [
                {
                    'index_name': 'alt-index',
                    'key_schema': [
                        {
                            'KeyType': 'HASH',
                            'AttributeName': 'AltKey'
                        }
                    ],
                    'projection': {
                        'ProjectionType': 'KEYS_ONLY'
                    },
                    'provisioned_throughput': {
                        'ReadCapacityUnits': 1,
                        'WriteCapacityUnits': 1,
                    }
                }
            ],
            'local_secondary_indexes': [
                {
                    'index_name': 'view-index',
                    'key_schema': [
                        {
                            'KeyType': 'HASH',
                            'AttributeName': 'Forum'
                        },
                        {
                            'KeyType': 'RANGE',
                            'AttributeName': 'AltKey'
                        }
                    ],
                    'projection': {
                        'ProjectionType': 'KEYS_ONLY'
                    }
                }
            ]
        }
        print("conn.create_table...")
        conn.create_table(**params)

    # Wait for the table to become visible, then wait out CREATING.
    # NOTE(review): if describe_table raises TableDoesNotExist again here,
    # the loop propagates it rather than retrying — presumably DynamoDB
    # Local makes the table visible immediately after create_table; confirm.
    while table is None:
        time.sleep(2)
        table = conn.describe_table()
    while table['TableStatus'] == 'CREATING':
        time.sleep(5)
        print(table['TableStatus'])
        table = conn.describe_table()
    print("conn.update_table...")

    # Bump read capacity by one, then wait for the table to go ACTIVE.
    conn.update_table(
        read_capacity_units=table.get(PROVISIONED_THROUGHPUT).get(READ_CAPACITY_UNITS) + 1,
        write_capacity_units=2
    )

    table = conn.describe_table()
    while table['TableStatus'] != 'ACTIVE':
        time.sleep(2)
        table = conn.describe_table()

    # Single-item round trip: conditional put, get, delete.
    print("conn.put_item")
    conn.put_item(
        'item1-hash',
        range_key='item1-range',
        attributes={'foo': {'S': 'bar'}},
        condition=NotExists(Path('Forum')),
    )
    conn.get_item(
        'item1-hash',
        range_key='item1-range'
    )
    conn.delete_item(
        'item1-hash',
        range_key='item1-range'
    )

    # Batch-write ten items into one forum, then read them back.
    items = []
    for i in range(10):
        items.append(
            {"Forum": "FooForum", "Thread": "thread-{}".format(i)}
        )
    print("conn.batch_write_items...")
    conn.batch_write_item(
        put_items=items
    )
    print("conn.batch_get_items...")
    data = conn.batch_get_item(  # noqa: F841 -- result intentionally unchecked
        items
    )
    print("conn.query...")
    conn.query(
        "FooForum",
        range_key_condition=(BeginsWith(Path('Thread'), Value('thread'))),
    )
    print("conn.scan...")
    conn.scan()
    print("conn.delete_table...")
    conn.delete_table()
157 |
--------------------------------------------------------------------------------
/tests/integration/test_discriminator_index.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import pynamodb.exceptions
4 | from pynamodb.attributes import DiscriminatorAttribute
5 | from pynamodb.attributes import DynamicMapAttribute
6 | from pynamodb.attributes import ListAttribute
7 | from pynamodb.attributes import MapAttribute
8 | from pynamodb.attributes import NumberAttribute
9 | from pynamodb.attributes import UnicodeAttribute
10 | from pynamodb.indexes import AllProjection
11 | from pynamodb.models import Model
12 | from pynamodb.indexes import GlobalSecondaryIndex
13 |
14 |
class TestDiscriminatorIndex:
    """Integration tests for GSIs declared on discriminator child models."""

    @pytest.mark.ddblocal
    def test_create_table(self, ddb_url):
        """Indexes declared only on child models are included when the
        parent creates the table, and index queries return child instances.
        """
        class ParentModel(Model, discriminator='Parent'):
            class Meta:
                host = ddb_url
                table_name = 'discriminator_index_test'
                read_capacity_units = 1
                write_capacity_units = 1

            hash_key = UnicodeAttribute(hash_key=True)
            cls = DiscriminatorAttribute()

        class ChildIndex(GlobalSecondaryIndex):
            class Meta:
                index_name = 'child_index'
                projection = AllProjection()
                read_capacity_units = 1
                write_capacity_units = 1

            index_key = UnicodeAttribute(hash_key=True)

        class ChildModel1(ParentModel, discriminator='Child1'):
            child_index = ChildIndex()
            index_key = UnicodeAttribute()

        # Multiple child models can share the same index
        class ChildModel2(ParentModel, discriminator='Child2'):
            child_index = ChildIndex()
            index_key = UnicodeAttribute()

        # What's important to notice is that the child_index is not defined on the parent class.
        # We're running `create_table` on the ParentModel, and expect it to know about child models
        # (through the discriminator association) and include all child models' indexes
        # during table creation.
        ParentModel.create_table(read_capacity_units=1, write_capacity_units=1, wait=True)

        model = ChildModel1()
        model.hash_key = 'hash_key1'
        model.index_key = 'bar'
        model.save()

        model = ChildModel2()
        model.hash_key = 'hash_key2'
        model.index_key = 'baz'
        model.save()

        # Querying each child's index yields an instance of that child class.
        model = next(ChildModel1.child_index.query('bar'))
        assert isinstance(model, ChildModel1)

        model = next(ChildModel2.child_index.query('baz'))
        assert isinstance(model, ChildModel2)

    @pytest.mark.ddblocal
    def test_create_table__incompatible_indexes(self, ddb_url):
        """Child indexes that reuse an attribute name with conflicting types
        make table creation fail.
        """
        class ParentModel(Model, discriminator='Parent'):
            class Meta:
                host = ddb_url
                table_name = 'discriminator_index_test__incompatible_indexes'
                read_capacity_units = 1
                write_capacity_units = 1

            hash_key = UnicodeAttribute(hash_key=True)
            cls = DiscriminatorAttribute()

        class ChildIndex1(GlobalSecondaryIndex):
            class Meta:
                index_name = 'child_index1'
                projection = AllProjection()
                read_capacity_units = 1
                write_capacity_units = 1

            index_key = UnicodeAttribute(hash_key=True)

        class ChildIndex2(GlobalSecondaryIndex):
            class Meta:
                index_name = 'child_index2'
                projection = AllProjection()
                read_capacity_units = 1
                write_capacity_units = 1

            # Intentionally a different type from ChildIndex1.index_key
            index_key = NumberAttribute(hash_key=True)

        # noinspection PyUnusedLocal
        class ChildModel1(ParentModel, discriminator='Child1'):
            child_index = ChildIndex1()
            index_key = UnicodeAttribute()

        # noinspection PyUnusedLocal
        class ChildModel2(ParentModel, discriminator='Child2'):
            child_index = ChildIndex2()
            index_key = UnicodeAttribute()

        # Unlike `test_create_table`, we expect this to fail because the child indexes
        # attempt to use the same attribute name for different types, thus the resulting table's
        # AttributeDefinitions would have the same attribute appear twice with conflicting types.
        with pytest.raises(pynamodb.exceptions.TableError, match="Cannot have two attributes with the same name"):
            ParentModel.create_table(read_capacity_units=1, write_capacity_units=1, wait=True)
115 |
--------------------------------------------------------------------------------
/tests/response.py:
--------------------------------------------------------------------------------
1 | """
2 | Mock response
3 | """
4 | from urllib3 import HTTPResponse
5 |
6 |
class MockResponse(HTTPResponse):
    """A canned urllib3 ``HTTPResponse`` for tests.

    Carries a preset status code and body content instead of data read from
    a real socket.
    """

    def __init__(self, status_code=None, content='Empty'):
        super().__init__()
        self.reason = 'Test Response'
        self.status_code = status_code
        self._content = content
16 |
17 |
class HttpOK(MockResponse):
    """A mocked response reporting HTTP status 200 (OK)."""

    def __init__(self, content=None):
        super().__init__(status_code=200, content=content)
24 |
--------------------------------------------------------------------------------
/tests/test_binary_legacy_encoding.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from pynamodb.attributes import BinaryAttribute
4 | from pynamodb.attributes import MapAttribute
5 | from pynamodb.models import Model
6 |
7 |
def test_legacy_encoding__model() -> None:
    # Merely defining the model must not raise: legacy encoding is accepted
    # on a top-level Model attribute.
    class _(Model):
        binary = BinaryAttribute(legacy_encoding=True)
11 |
12 |
def test_legacy_encoding__map_attribute() -> None:
    # Inside a MapAttribute, a legacy-encoded BinaryAttribute is rejected at
    # class definition time with a ValueError mentioning 'legacy_encoding'.
    with pytest.raises(ValueError, match='legacy_encoding'):
        class _(MapAttribute):
            binary = BinaryAttribute(legacy_encoding=True)
17 |
--------------------------------------------------------------------------------
/tests/test_discriminator.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from pynamodb.attributes import DiscriminatorAttribute
4 | from pynamodb.attributes import DynamicMapAttribute
5 | from pynamodb.attributes import ListAttribute
6 | from pynamodb.attributes import MapAttribute
7 | from pynamodb.attributes import NumberAttribute
8 | from pynamodb.attributes import UnicodeAttribute
9 | from pynamodb.models import Model
10 |
11 |
def class_name(cls):
    """Discriminator callable: use the class's own name as its stored value.

    Defined with ``def`` rather than a named lambda (PEP 8 E731); callers
    still see the same module-level callable ``class_name``.
    """
    return cls.__name__
13 |
14 |
class TypedValue(MapAttribute):
    # Discriminator is stored in DynamoDB under the attribute name 'cls'.
    _cls = DiscriminatorAttribute(attr_name = 'cls')
    name = UnicodeAttribute()


class NumberValue(TypedValue, discriminator=class_name):
    # Registered under the value returned by class_name ('NumberValue').
    value = NumberAttribute()


class StringValue(TypedValue, discriminator=class_name):
    # Registered under the value returned by class_name ('StringValue').
    value = UnicodeAttribute()


class RenamedValue(TypedValue, discriminator='custom_name'):
    # Registered under an explicit string instead of the class name.
    value = UnicodeAttribute()


class DiscriminatorTestModel(Model, discriminator='Parent'):
    class Meta:
        host = 'http://localhost:8000'
        table_name = 'test'
    hash_key = UnicodeAttribute(hash_key=True)
    value = TypedValue()
    values = ListAttribute(of=TypedValue)
    # Model-level discriminator, serialized under the attribute name 'type'.
    type = DiscriminatorAttribute()


class ChildModel(DiscriminatorTestModel, discriminator='Child'):
    # Overrides the parent's map-typed 'value' with a plain string.
    value = UnicodeAttribute()


class DynamicSubclassedMapAttribute(DynamicMapAttribute):
    # A dynamic map with one statically declared attribute.
    string_attr = UnicodeAttribute()


class DynamicMapDiscriminatorTestModel(Model, discriminator='Parent'):
    class Meta:
        host = 'http://localhost:8000'
        table_name = 'test'
    hash_key = UnicodeAttribute(hash_key=True)
    value = DynamicSubclassedMapAttribute(default=dict)
    type = DiscriminatorAttribute()


class DynamicMapDiscriminatorChildTestModel(DynamicMapDiscriminatorTestModel, discriminator='Child'):
    # Overrides the parent's dynamic-map 'value' with a plain string.
    value = UnicodeAttribute()
61 |
62 |
class TestDiscriminatorAttribute:
    """Unit tests for DiscriminatorAttribute on map attributes."""

    def test_serialize(self):
        """Each nested value serializes with its registered discriminator."""
        dtm = DiscriminatorTestModel()
        dtm.hash_key = 'foo'
        dtm.value = StringValue(name='foo', value='Hello')
        dtm.values = [NumberValue(name='bar', value=5), RenamedValue(name='baz', value='World')]
        assert dtm.serialize() == {
            'hash_key': {'S': 'foo'},
            'type': {'S': 'Parent'},
            'value': {'M': {'cls': {'S': 'StringValue'}, 'name': {'S': 'foo'}, 'value': {'S': 'Hello'}}},
            'values': {'L': [
                {'M': {'cls': {'S': 'NumberValue'}, 'name': {'S': 'bar'}, 'value': {'N': '5'}}},
                {'M': {'cls': {'S': 'custom_name'}, 'name': {'S': 'baz'}, 'value': {'S': 'World'}}}
            ]}
        }

    def test_deserialize(self):
        """Deserialization dispatches to the class registered for each
        stored 'cls' value."""
        item = {
            'hash_key': {'S': 'foo'},
            'type': {'S': 'Parent'},
            'value': {'M': {'cls': {'S': 'StringValue'}, 'name': {'S': 'foo'}, 'value': {'S': 'Hello'}}},
            'values': {'L': [
                {'M': {'cls': {'S': 'NumberValue'}, 'name': {'S': 'bar'}, 'value': {'N': '5'}}},
                {'M': {'cls': {'S': 'custom_name'}, 'name': {'S': 'baz'}, 'value': {'S': 'World'}}}
            ]}
        }
        dtm = DiscriminatorTestModel.from_raw_data(item)
        assert dtm.hash_key == 'foo'
        assert dtm.value.value == 'Hello'
        assert dtm.values[0].value == 5
        assert dtm.values[1].value == 'World'

    def test_condition_expression(self):
        """Comparing a discriminator to a class serializes the class's
        registered discriminator value."""
        condition = DiscriminatorTestModel.value._cls == RenamedValue
        placeholder_names, expression_attribute_values = {}, {}
        expression = condition.serialize(placeholder_names, expression_attribute_values)
        assert expression == "#0.#1 = :0"
        assert placeholder_names == {'value': '#0', 'cls': '#1'}
        assert expression_attribute_values == {':0': {'S': 'custom_name'}}

    def test_multiple_discriminator_values(self):
        """A class may be registered under more than one discriminator value."""
        class TestAttribute(MapAttribute, discriminator='new_value'):
            cls = DiscriminatorAttribute()

        TestAttribute.cls.register_class(TestAttribute, 'old_value')

        # ensure the first registered value is used during serialization
        assert TestAttribute.cls.get_discriminator(TestAttribute) == 'new_value'
        assert TestAttribute.cls.serialize(TestAttribute) == 'new_value'

        # ensure the second registered value can be used to deserialize
        assert TestAttribute.cls.deserialize('old_value') == TestAttribute
        assert TestAttribute.cls.deserialize('new_value') == TestAttribute

    def test_multiple_discriminator_classes(self):
        """Re-using an already-registered discriminator value is an error."""
        with pytest.raises(ValueError):
            # fail when attempting to register a class with an existing discriminator value
            class RenamedValue2(TypedValue, discriminator='custom_name'):
                pass
123 |
class TestDiscriminatorModel:
    """Unit tests for DiscriminatorAttribute on models."""

    def test_serialize(self):
        """A child model serializes with its own discriminator value."""
        cm = ChildModel()
        cm.hash_key = 'foo'
        cm.value = 'bar'
        cm.values = []
        assert cm.serialize() == {
            'hash_key': {'S': 'foo'},
            'type': {'S': 'Child'},
            'value': {'S': 'bar'},
            'values': {'L': []}
        }

    def test_deserialize(self):
        """Deserializing through the parent class returns the child class
        named by the stored 'type' value."""
        item = {
            'hash_key': {'S': 'foo'},
            'type': {'S': 'Child'},
            'value': {'S': 'bar'},
            'values': {'L': []}
        }
        cm = DiscriminatorTestModel.from_raw_data(item)
        assert isinstance(cm, ChildModel)
        assert cm.hash_key == 'foo'
        assert cm.value == 'bar'
149 |
150 |
class TestDynamicDiscriminatorModel:
    """Unit tests for discriminators combined with DynamicMapAttribute."""

    def test_serialize_parent(self):
        """Declared and ad-hoc attributes of the dynamic map both serialize."""
        m = DynamicMapDiscriminatorTestModel()
        m.hash_key = 'foo'
        m.value.string_attr = 'foostr'
        # Attribute never declared on the map class; set dynamically.
        m.value.bar_attribute = 3
        assert m.serialize() == {
            'hash_key': {'S': 'foo'},
            'type': {'S': 'Parent'},
            'value': {'M': {'string_attr': {'S': 'foostr'}, 'bar_attribute': {'N': '3'}}},
        }

    def test_deserialize_parent(self):
        """Declared and ad-hoc keys are both restored from raw data."""
        item = {
            'hash_key': {'S': 'foo'},
            'type': {'S': 'Parent'},
            'value': {
                'M': {'string_attr': {'S': 'foostr'}, 'bar_attribute': {'N': '3'}}
            }
        }
        m = DynamicMapDiscriminatorTestModel.from_raw_data(item)
        assert m.hash_key == 'foo'
        assert m.value
        assert m.value.string_attr == 'foostr'
        assert m.value.bar_attribute == 3

    def test_serialize_child(self):
        """The child's overriding plain-string 'value' serializes as 'S'."""
        m = DynamicMapDiscriminatorChildTestModel()
        m.hash_key = 'foo'
        m.value = 'string val'
        assert m.serialize() == {
            'hash_key': {'S': 'foo'},
            'type': {'S': 'Child'},
            'value': {'S': 'string val'}
        }

    def test_deserialize_child(self):
        """Child items deserialize with the overridden string attribute."""
        item = {
            'hash_key': {'S': 'foo'},
            'type': {'S': 'Child'},
            'value': {'S': 'string val'}
        }
        m = DynamicMapDiscriminatorChildTestModel.from_raw_data(item)
        assert m.hash_key == 'foo'
        assert m.value == 'string val'
197 |
--------------------------------------------------------------------------------
/tests/test_exceptions.py:
--------------------------------------------------------------------------------
1 | from botocore.exceptions import ClientError
2 |
3 | from pynamodb.exceptions import PynamoDBException, PutError
4 |
5 |
def test_get_cause_response_code():
    """The error code of the underlying ClientError is exposed via cause_response_code."""
    client_error = ClientError(
        error_response={'Error': {'Code': 'hello'}},
        operation_name='test',
    )
    error = PutError(cause=client_error)
    assert error.cause_response_code == 'hello'
18 |
19 |
def test_get_cause_response_code__no_code():
    """Without an underlying cause, cause_response_code is None."""
    assert PutError().cause_response_code is None
23 |
24 |
def test_get_cause_response_message():
    """The message of the underlying ClientError is exposed via cause_response_message."""
    client_error = ClientError(
        error_response={'Error': {'Message': 'hiya'}},
        operation_name='test',
    )
    error = PutError(cause=client_error)
    assert error.cause_response_message == 'hiya'
37 |
38 |
def test_get_cause_response_message__no_message():
    """Without an underlying cause, cause_response_message is None."""
    assert PutError().cause_response_message is None
42 |
43 |
# Subclass with a class-level default message; used by the test below to
# verify the default is not clobbered when no message is passed.
class PynamoDBTestError(PynamoDBException):
    msg = "Test message"
46 |
47 |
def test_subclass_message_is_not_overwritten_with_none():
    """Instantiating with no message keeps the subclass-level default msg."""
    error = PynamoDBTestError()
    assert error.msg == "Test message"
50 |
--------------------------------------------------------------------------------
/tests/test_pagination.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from pynamodb.pagination import RateLimiter
3 |
4 |
class MockTime():
    """Deterministic stand-in for the time module used by RateLimiter tests."""

    def __init__(self):
        self.current_time = 0.0  # virtual clock, in seconds since start

    def increment_time(self, amount):
        """Advance the virtual clock, simulating elapsed wall time."""
        self.current_time += amount

    def sleep(self, amount):
        # A "sleep" just advances the virtual clock instantly.
        self.increment_time(amount)

    def time(self):
        """Return the current virtual timestamp."""
        return self.current_time
17 |
18 |
def test_rate_limiter_exceptions():
    """RateLimiter rejects non-positive rates, both at construction and on assignment."""
    for bad_rate in (0, -1):
        with pytest.raises(ValueError):
            RateLimiter(bad_rate)

    for bad_rate in (0, -1):
        with pytest.raises(ValueError):
            limiter = RateLimiter(10)
            limiter.rate_limit = bad_rate
33 |
34 |
def test_basic_rate_limiting():
    """At 0.1 ops/sec the limiter enforces 10s between operations."""
    clock = MockTime()
    limiter = RateLimiter(0.1, clock)

    # Run 100 operations, each taking 1 simulated second.
    for _ in range(100):
        limiter.acquire()
        clock.increment_time(1)
        limiter.consume(1)

    # The first acquire waits nothing, so there are (100 - 1) * 10 = 990 seconds
    # of enforced delay, plus the final 1-second operation itself.
    assert clock.time() == 991.0
49 |
50 |
def test_basic_rate_limiting_small_increment():
    """Operations shorter than the 10s spacing still end up 10s apart."""
    clock = MockTime()
    limiter = RateLimiter(0.1, clock)

    # Run 100 operations, each taking 2 simulated seconds.
    for _ in range(100):
        limiter.acquire()
        clock.increment_time(2)
        limiter.consume(1)

    # The first acquire waits nothing, so there are (100 - 1) * 10 = 990 seconds
    # of enforced delay, plus the final 2-second operation itself.
    assert clock.time() == 992.0
65 |
66 |
def test_basic_rate_limiting_large_increment():
    """Operations longer than the required spacing are never throttled."""
    clock = MockTime()
    limiter = RateLimiter(0.1, clock)

    # Run 100 operations, each taking 11 simulated seconds — already longer
    # than the 10-second minimum spacing implied by a 0.1/sec rate.
    for _ in range(100):
        limiter.acquire()
        clock.increment_time(11)
        limiter.consume(1)

    # Rate limiting adds no extra waiting: 100 * 11 = 1100 seconds total.
    assert clock.time() == 1100.0
80 |
--------------------------------------------------------------------------------
/tests/test_settings.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import patch
2 |
3 | import pytest
4 | from importlib import reload
5 |
6 | import pynamodb.settings
7 |
8 |
@pytest.mark.parametrize('settings_str', [
    "session_cls = object()",
    "request_timeout_seconds = 5",
])
def test_override_old_attributes(settings_str, tmpdir):
    """Reloading settings with a removed option present warns exactly once."""
    settings_file = tmpdir.join("pynamodb_settings.py")
    settings_file.write(settings_str)

    with patch.dict('os.environ', {'PYNAMODB_CONFIG': str(settings_file)}):
        with pytest.warns(UserWarning) as caught:
            reload(pynamodb.settings)
        assert len(caught) == 1
        assert 'options are no longer supported' in str(caught[0].message)
22 |
23 |
def test_default_settings():
    """Pin the default settings to catch unintentionally breaking changes."""
    expected_defaults = {
        'connect_timeout_seconds': 15,
        'read_timeout_seconds': 30,
        'max_retry_attempts': 3,
        'region': None,
        'max_pool_connections': 10,
        'extra_headers': None,
        'retry_configuration': 'LEGACY'
    }
    assert pynamodb.settings.default_settings_dict == expected_defaults
37 |
--------------------------------------------------------------------------------
/tests/test_signals.py:
--------------------------------------------------------------------------------
1 | import unittest.mock
2 | import pytest
3 |
4 | from pynamodb.connection import Connection
5 | from pynamodb.signals import _FakeNamespace
6 | from pynamodb.signals import pre_dynamodb_send, post_dynamodb_send
7 |
8 | try:
9 | import blinker
10 | except ImportError:
11 | blinker = None
12 |
# Dotted path patched in each test below to stub out the low-level API call.
PATCH_METHOD = 'pynamodb.connection.Connection._make_api_call'
14 |
15 |
@unittest.mock.patch(PATCH_METHOD)
@unittest.mock.patch('pynamodb.connection.base.uuid')
def test_signal(mock_uuid, mock_req):
    """Both pre and post signals fire with the operation name, table and request id."""
    UUID = '123-abc'
    pre_recorded = []
    post_recorded = []

    # Receiver parameter names must match the keywords the signals send with.
    def record_pre_dynamodb_send(sender, operation_name, table_name, req_uuid):
        pre_recorded.append((operation_name, table_name, req_uuid))

    def record_post_dynamodb_send(sender, operation_name, table_name, req_uuid):
        post_recorded.append((operation_name, table_name, req_uuid))

    pre_dynamodb_send.connect(record_pre_dynamodb_send)
    post_dynamodb_send.connect(record_post_dynamodb_send)
    try:
        mock_uuid.uuid4.return_value = UUID
        mock_req.return_value = {'TableDescription': {'TableName': 'table', 'TableStatus': 'Creating'}}
        connection = Connection()
        connection.dispatch('CreateTable', {'TableName': 'MyTable'})
        assert pre_recorded[0] == ('CreateTable', 'MyTable', UUID)
        assert post_recorded[0] == ('CreateTable', 'MyTable', UUID)
    finally:
        # Always disconnect so receivers don't leak into other tests.
        pre_dynamodb_send.disconnect(record_pre_dynamodb_send)
        post_dynamodb_send.disconnect(record_post_dynamodb_send)
41 |
42 |
@unittest.mock.patch(PATCH_METHOD)
@unittest.mock.patch('pynamodb.connection.base.uuid')
def test_signal_exception_pre_signal(mock_uuid, mock_req):
    """A receiver raising in the pre-send signal does not prevent the post signal."""
    UUID = '123-abc'
    post_recorded = []

    def record_pre_dynamodb_send(sender, operation_name, table_name, req_uuid):
        # Simulate a broken subscriber; dispatch should still proceed.
        raise ValueError()

    def record_post_dynamodb_send(sender, operation_name, table_name, req_uuid):
        post_recorded.append((operation_name, table_name, req_uuid))

    pre_dynamodb_send.connect(record_pre_dynamodb_send)
    post_dynamodb_send.connect(record_post_dynamodb_send)
    try:
        mock_uuid.uuid4.return_value = UUID
        mock_req.return_value = {'TableDescription': {'TableName': 'table', 'TableStatus': 'Creating'}}
        connection = Connection()
        connection.dispatch('CreateTable', {'TableName': 'MyTable'})
        assert post_recorded[0] == ('CreateTable', 'MyTable', UUID)
    finally:
        pre_dynamodb_send.disconnect(record_pre_dynamodb_send)
        post_dynamodb_send.disconnect(record_post_dynamodb_send)
66 |
67 |
@unittest.mock.patch(PATCH_METHOD)
@unittest.mock.patch('pynamodb.connection.base.uuid')
def test_signal_exception_post_signal(mock_uuid, mock_req):
    """A receiver raising in the post-send signal does not break dispatch."""
    UUID = '123-abc'
    pre_recorded = []

    def record_pre_dynamodb_send(sender, operation_name, table_name, req_uuid):
        pre_recorded.append((operation_name, table_name, req_uuid))

    def record_post_dynamodb_send(sender, operation_name, table_name, req_uuid):
        # Simulate a broken subscriber; dispatch should still succeed.
        raise ValueError()

    pre_dynamodb_send.connect(record_pre_dynamodb_send)
    post_dynamodb_send.connect(record_post_dynamodb_send)
    try:
        mock_uuid.uuid4.return_value = UUID
        mock_req.return_value = {'TableDescription': {'TableName': 'table', 'TableStatus': 'Creating'}}
        connection = Connection()
        connection.dispatch('CreateTable', {'TableName': 'MyTable'})
        assert pre_recorded[0] == ('CreateTable', 'MyTable', UUID)
    finally:
        pre_dynamodb_send.disconnect(record_pre_dynamodb_send)
        post_dynamodb_send.disconnect(record_post_dynamodb_send)
91 |
92 |
def test_fake_signals():
    """Without blinker, fake signals refuse connections but ignore sends."""
    namespace = _FakeNamespace()
    fake_signal = namespace.signal('pre_dynamodb_send')
    with pytest.raises(RuntimeError):
        fake_signal.connect(lambda x: x)
    # Sending on the fake signal is a silent no-op.
    fake_signal.send(object, operation_name="UPDATE", table_name="TEST", req_uuid="something")
99 |
--------------------------------------------------------------------------------
/tests/test_transaction.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from pynamodb.attributes import NumberAttribute, UnicodeAttribute, VersionAttribute
3 |
4 | from pynamodb.connection import Connection
5 | from pynamodb.connection.base import MetaTable
6 | from pynamodb.constants import TABLE_KEY
7 | from pynamodb.transactions import Transaction, TransactGet, TransactWrite
8 | from pynamodb.models import Model
9 | from tests.test_base_connection import PATCH_METHOD
10 |
11 | from unittest.mock import patch
12 |
13 |
class MockModel(Model):
    """Minimal model used to exercise transaction serialization."""
    class Meta:
        table_name = 'mock'

    mock_hash = NumberAttribute(hash_key=True)
    mock_range = NumberAttribute(range_key=True)
    mock_toot = UnicodeAttribute(null=True)
    # Version attribute — the transaction tests below expect implicit
    # attribute_not_exists conditions on it for new items.
    mock_version = VersionAttribute()
22 |
23 |
# DescribeTable-style payload registered as connection metadata so key
# serialization for MockModel works without any network call.
MOCK_TABLE_DESCRIPTOR = {
    "Table": {
        "TableName": "mock",
        "KeySchema": [
            {
                "AttributeName": "mock_hash",
                "KeyType": "HASH"
            },
            {
                "AttributeName": "mock_range",
                "KeyType": "RANGE"
            }
        ],
        "AttributeDefinitions": [
            {
                "AttributeName": "mock_hash",
                "AttributeType": "N"
            },
            {
                "AttributeName": "mock_range",
                "AttributeType": "N"
            }
        ]
    }
}
49 |
50 |
class TestTransaction:
    """Tests for the abstract Transaction base class."""

    def test_commit__not_implemented(self):
        """The base class leaves _commit to subclasses."""
        transaction = Transaction(connection=Connection())
        with pytest.raises(NotImplementedError):
            transaction._commit()
57 |
58 |
class TestTransactGet:

    def test_commit(self, mocker):
        """A get inside the context manager becomes one transact_get_items call on exit."""
        connection = Connection()
        connection.add_meta_table(MetaTable(MOCK_TABLE_DESCRIPTOR[TABLE_KEY]))

        transact_get_spy = mocker.patch.object(connection, 'transact_get_items')

        with TransactGet(connection=connection) as transaction:
            transaction.get(MockModel, 1, 2)

        expected_get_items = [{
            'Key': {'mock_hash': {'N': '1'}, 'mock_range': {'N': '2'}},
            'TableName': 'mock'
        }]
        transact_get_spy.assert_called_once_with(
            get_items=expected_get_items,
            return_consumed_capacity=None
        )
74 |
75 |
class TestTransactWrite:
    """Tests for TransactWrite request construction and commit behavior."""

    def test_condition_check__no_condition(self):
        """condition_check requires a condition; passing None must raise TypeError."""
        with pytest.raises(TypeError):
            with TransactWrite(connection=Connection()) as transaction:
                transaction.condition_check(MockModel, hash_key=1, condition=None)

    def test_commit(self, mocker):
        """All queued operations are flushed as a single transact_write_items call on exit."""
        connection = Connection()
        mock_connection_transact_write = mocker.patch.object(connection, 'transact_write_items')
        with TransactWrite(connection=connection) as t:
            t.condition_check(MockModel, 1, 3, condition=(MockModel.mock_hash.does_not_exist()))
            t.delete(MockModel(2, 4))
            t.save(MockModel(3, 5))
            t.update(MockModel(4, 6), actions=[MockModel.mock_toot.set('hello')], return_values='ALL_OLD')

        expected_condition_checks = [{
            'ConditionExpression': 'attribute_not_exists (#0)',
            'ExpressionAttributeNames': {'#0': 'mock_hash'},
            'Key': {'mock_hash': {'N': '1'}, 'mock_range': {'N': '3'}},
            'TableName': 'mock'}
        ]
        # Delete/put/update payloads carry an implicit condition on mock_version,
        # added because MockModel declares a VersionAttribute (optimistic locking).
        expected_deletes = [{
            'ConditionExpression': 'attribute_not_exists (#0)',
            'ExpressionAttributeNames': {'#0': 'mock_version'},
            'Key': {'mock_hash': {'N': '2'}, 'mock_range': {'N': '4'}},
            'TableName': 'mock'
        }]
        expected_puts = [{
            'ConditionExpression': 'attribute_not_exists (#0)',
            'ExpressionAttributeNames': {'#0': 'mock_version'},
            'Item': {'mock_hash': {'N': '3'}, 'mock_range': {'N': '5'}, 'mock_version': {'N': '1'}},
            'TableName': 'mock'
        }]
        # The update also sets mock_version to 1 alongside the requested action.
        expected_updates = [{
            'ConditionExpression': 'attribute_not_exists (#0)',
            'TableName': 'mock',
            'Key': {'mock_hash': {'N': '4'}, 'mock_range': {'N': '6'}},
            'ReturnValuesOnConditionCheckFailure': 'ALL_OLD',
            'UpdateExpression': 'SET #1 = :0, #0 = :1',
            'ExpressionAttributeNames': {'#0': 'mock_version', '#1': 'mock_toot'},
            'ExpressionAttributeValues': {':0': {'S': 'hello'}, ':1': {'N': '1'}}
        }]
        mock_connection_transact_write.assert_called_once_with(
            condition_check_items=expected_condition_checks,
            delete_items=expected_deletes,
            put_items=expected_puts,
            update_items=expected_updates,
            client_request_token=None,
            return_consumed_capacity=None,
            return_item_collection_metrics=None
        )
128 |
--------------------------------------------------------------------------------
/typing_tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pynamodb/PynamoDB/2f8e8bb60474babd6ad558a91b0ad89a2c36d515/typing_tests/__init__.py
--------------------------------------------------------------------------------
/typing_tests/attributes.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | from typing import Any
3 |
4 | from typing_extensions import assert_type
5 |
6 |
def test_number_attribute() -> None:
    """NumberAttribute is the descriptor on the class, float on instance access."""
    from pynamodb.attributes import NumberAttribute
    from pynamodb.models import Model

    class MyModel(Model):
        my_attr = NumberAttribute()

    assert_type(MyModel.my_attr, NumberAttribute)
    assert_type(MyModel().my_attr, float)
16 |
17 |
def test_unicode_attribute() -> None:
    """UnicodeAttribute is the descriptor on the class, str on instance access."""
    from pynamodb.attributes import UnicodeAttribute
    from pynamodb.models import Model

    class MyModel(Model):
        my_attr = UnicodeAttribute()

    assert_type(MyModel.my_attr, UnicodeAttribute)
    assert_type(MyModel().my_attr, str)
27 |
28 |
def test_map_attribute() -> None:
    """MapAttribute subclasses keep their own type on both class and instance
    access; leaf attributes inside a map resolve like top-level ones."""
    from pynamodb.attributes import MapAttribute, UnicodeAttribute
    from pynamodb.models import Model

    class MySubMap(MapAttribute):
        s = UnicodeAttribute()

    class MyMap(MapAttribute):
        m2 = MySubMap()

    class MyModel(Model):
        m1 = MyMap()

    assert_type(MyModel.m1, MyMap)
    assert_type(MyModel().m1, MyMap)
    assert_type(MyModel.m1.m2, MySubMap)
    assert_type(MyModel().m1.m2, MySubMap)
    assert_type(MyModel.m1.m2.s, str)
    assert_type(MyModel().m1.m2.s, str)

    assert_type(MyMap.m2, MySubMap)
    assert_type(MyMap().m2, MySubMap)

    assert_type(MySubMap.s, UnicodeAttribute)
    assert_type(MySubMap().s, str)
54 |
55 |
def test_list_attribute() -> None:
    """ListAttribute(of=...) propagates the element type; untyped lists stay Any."""
    from pynamodb.attributes import ListAttribute, MapAttribute, UnicodeAttribute
    from pynamodb.models import Model

    class MyMap(MapAttribute):
        my_sub_attr = UnicodeAttribute()

    class MyModel(Model):
        my_list = ListAttribute(of=MyMap)
        my_untyped_list = ListAttribute()  # type: ignore[var-annotated]

    assert_type(MyModel.my_list, ListAttribute[MyMap])
    assert_type(MyModel().my_list, list[MyMap])
    assert_type(MyModel().my_list[0].my_sub_attr, str)

    # Untyped lists are not well-supported yet
    assert_type(MyModel().my_untyped_list[0].my_sub_attr, Any)
73 |
--------------------------------------------------------------------------------
/typing_tests/models.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | from typing import Any
3 |
4 | from typing_extensions import assert_type
5 |
6 |
def test_model_count() -> None:
    """Model.count is typed to return an int."""
    from pynamodb.models import Model
    from pynamodb.expressions.operand import Path

    class MyModel(Model):
        pass

    assert_type(MyModel.count('hash', Path('a').between(1, 3)), int)
15 |
16 |
def test_model_query() -> None:
    """Model.query accepts condition objects, and both conditions are optional."""
    from pynamodb.attributes import NumberAttribute
    from pynamodb.models import Model

    class MyModel(Model):
        my_attr = NumberAttribute()

    # test conditions
    MyModel.query(123, range_key_condition=(MyModel.my_attr == 5), filter_condition=(MyModel.my_attr == 5))

    # test conditions are optional
    MyModel.query(123, range_key_condition=None, filter_condition=None)
29 |
30 |
def test_pagination() -> None:
    """Iterating a query yields model instances; last_evaluated_key is a raw item dict."""
    from pynamodb.attributes import NumberAttribute
    from pynamodb.models import Model

    class MyModel(Model):
        my_attr = NumberAttribute()

    result_iterator = MyModel.query(123)
    for model in result_iterator:
        assert_type(model, MyModel)
    if result_iterator.last_evaluated_key:
        assert_type(result_iterator.last_evaluated_key['my_attr'], dict[str, Any])
43 |
44 |
def test_model_update() -> None:
    """Numeric update expressions type-check; mixing in strings is rejected."""
    from pynamodb.attributes import NumberAttribute, UnicodeAttribute
    from pynamodb.models import Model

    class MyModel(Model):
        my_attr = NumberAttribute()
        my_str_attr = UnicodeAttribute()

    my_model = MyModel()
    my_model.update(actions=[
        # test update expressions
        MyModel.my_attr.set(MyModel.my_attr + 123),
        MyModel.my_attr.set(123 + MyModel.my_attr),
        MyModel.my_attr.set(MyModel.my_attr - 123),
        MyModel.my_attr.set(123 - MyModel.my_attr),
        MyModel.my_attr.set(MyModel.my_attr | 123),
    ])

    # Assigning string-typed values to a number attribute must be a type error.
    _ = MyModel.my_attr.set('foo')  # type:ignore[arg-type]
    _ = MyModel.my_attr.set(MyModel.my_str_attr)  # type:ignore[arg-type]
65 |
66 |
def test_paths() -> None:
    """Indexing a list attribute produces a Path; comparisons produce Comparison conditions."""
    import pynamodb.expressions.operand
    import pynamodb.expressions.condition
    from pynamodb.attributes import ListAttribute, MapAttribute, UnicodeAttribute
    from pynamodb.models import Model

    class MyMap(MapAttribute):
        my_sub_attr = UnicodeAttribute()

    class MyModel(Model):
        my_list = ListAttribute(of=MyMap)
        my_map = MyMap()

    assert_type(MyModel.my_list[0], pynamodb.expressions.operand.Path)
    assert_type(MyModel.my_list[0] == MyModel(), pynamodb.expressions.condition.Comparison)
    # the following string indexing is not type checked - not by mypy nor in runtime
    assert_type(MyModel.my_list[0]['my_sub_attr'] == 'foobar', pynamodb.expressions.condition.Comparison)
    assert_type(MyModel.my_map == 'foobar', pynamodb.expressions.condition.Comparison)
85 |
86 |
def test_index_query_scan() -> None:
    """Untyped index query/scan results are Any; GlobalSecondaryIndex['MyModel']
    parameterization makes them yield MyModel."""
    from pynamodb.attributes import NumberAttribute
    from pynamodb.models import Model
    from pynamodb.indexes import GlobalSecondaryIndex
    from pynamodb.pagination import ResultIterator

    class UntypedIndex(GlobalSecondaryIndex):
        bar = NumberAttribute(hash_key=True)

    class TypedIndex(GlobalSecondaryIndex['MyModel']):
        bar = NumberAttribute(hash_key=True)

    class MyModel(Model):
        foo = NumberAttribute(hash_key=True)
        bar = NumberAttribute()

    # NOTE(review): the indexes are accessed below as MyModel.<name> although
    # assigned to locals here — presumably only the static types matter; confirm.
    untyped_index = UntypedIndex()
    typed_index = TypedIndex()

    # Ensure old code keeps working
    untyped_query_result: ResultIterator = MyModel.untyped_index.query(123)
    assert_type(next(untyped_query_result), Any)

    # Allow users to specify which model their indices return
    typed_query_result: ResultIterator[MyModel] = MyModel.typed_index.query(123)
    assert_type(next(typed_query_result), MyModel)

    # Ensure old code keeps working
    untyped_scan_result = MyModel.untyped_index.scan()
    assert_type(next(untyped_scan_result), Any)

    # Allow users to specify which model their indices return
    typed_scan_result = MyModel.typed_index.scan()
    assert_type(next(typed_scan_result), MyModel)
121 |
122 |
def test_map_attribute_derivation() -> None:
    """Deriving from MapAttribute together with object must type-check."""
    from pynamodb.attributes import MapAttribute

    class MyMap(MapAttribute, object):
        pass
128 |
129 |
def test_is_in() -> None:
    """is_in accepts varargs of the attribute's value type only."""
    from pynamodb.models import Model
    from pynamodb.attributes import UnicodeAttribute

    class MyModel(Model):
        attr = UnicodeAttribute()

    _ = MyModel.attr.is_in('foo', 'bar')
    # Wrong element type and passing a list instead of varargs are type errors.
    _ = MyModel.attr.is_in(123)  # type:ignore[arg-type]
    _ = MyModel.attr.is_in(['foo', 'bar'])  # type:ignore[arg-type]
140 |
141 |
def test_append() -> None:
    """append/prepend take a list of elements, not a bare element."""
    from pynamodb.models import Model
    from pynamodb.attributes import ListAttribute, NumberAttribute

    class MyModel(Model):
        attr = ListAttribute(of=NumberAttribute)

    MyModel.attr.append(42)  # type:ignore[arg-type]
    MyModel.attr.append([42])
    MyModel.attr.prepend(42)  # type:ignore[arg-type]
    MyModel.attr.prepend([42])
153 |
--------------------------------------------------------------------------------
/typing_tests/transactions.py:
--------------------------------------------------------------------------------
1 | from typing_extensions import assert_type
2 |
3 |
def test_transact_write() -> None:
    """TransactWrite used as a context manager yields itself."""
    from pynamodb.transactions import TransactWrite
    with TransactWrite() as tx:
        assert_type(tx, TransactWrite)
8 |
9 |
def test_transact_get() -> None:
    """TransactGet.get returns a _ModelFuture parameterized by the requested model."""
    from pynamodb.transactions import TransactGet
    from pynamodb.models import Model, _ModelFuture

    class FirstModel(Model):
        pass

    class SecondModel(Model):
        pass

    with TransactGet() as tx:
        assert_type(tx, TransactGet)
        assert_type(tx.get(FirstModel, "pk"), _ModelFuture[FirstModel])
        assert_type(tx.get(SecondModel, "pk"), _ModelFuture[SecondModel])

        second_model_instance_future = tx.get(SecondModel, "pk")

    # Resolving the future yields the model it was requested with; assigning to
    # a different model type is a type error.
    assert_type(second_model_instance_future.get(), SecondModel)
    _first_model_instance: FirstModel = second_model_instance_future.get()  # type:ignore[assignment]
29 |
--------------------------------------------------------------------------------