├── .coveragerc
├── .flake8
├── .github
│   └── workflows
│       ├── branches.yml
│       ├── develop.yml
│       └── master.yml
├── .gitignore
├── LICENSE
├── MANIFEST.in
├── README.md
├── boto3_batch_utils
│   ├── Base.py
│   ├── Cloudwatch.py
│   ├── Dynamodb.py
│   ├── Kinesis.py
│   ├── SQS.py
│   ├── __init__.py
│   ├── constants.py
│   └── utils.py
├── requirements.txt
├── resources
│   └── developer_docs.md
├── setup.cfg
├── setup.py
└── tests
    ├── integration_tests
    │   ├── __init__.py
    │   ├── cloudwatch
    │   │   ├── __init__.py
    │   │   └── test_cloudwatch.py
    │   ├── dynamo
    │   │   ├── __init__.py
    │   │   └── test_dynamo.py
    │   ├── kinesis
    │   │   ├── __init__.py
    │   │   └── test_kinesis.py
    │   ├── large_messages.py
    │   └── sqs
    │       ├── __init__.py
    │       ├── test_sqs_fifo.py
    │       └── test_sqs_standard.py
    └── unit_tests
        ├── __init__.py
        ├── test_base_dispatcher.py
        ├── test_cloudwatch.py
        ├── test_dynamodb.py
        ├── test_kinesis.py
        ├── test_sqs.py
        └── test_utils.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = true
3 | source = boto3_batch_utils
4 |
5 | [report]
6 | fail_under = 85
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length=120
3 | exclude = venv/*, tests/*, docs/*
4 | max-complexity = 7
5 | count=true
6 | show-source=true
7 | statistics=true
--------------------------------------------------------------------------------
/.github/workflows/branches.yml:
--------------------------------------------------------------------------------
1 | name: Feature Branch Pipeline
2 | on:
3 | push:
4 | branches:
5 | - '*' # matches every branch
6 | - '*/*' # matches every branch containing a single '/'
7 | - '!master' # excludes master
8 | - '!develop' # excludes develop
9 | - '!gh-pages' # excludes docs branch
10 |
11 | jobs:
12 | linting:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v2
16 | - name: Set up Python 3.7
17 | uses: actions/setup-python@v1
18 | with:
19 | python-version: 3.7
20 | - name: Install dependencies
21 | run: |
22 | python -m pip install --upgrade pip
23 | pip install -r requirements.txt
24 | pip install flake8
25 | - name: Lint with flake8
26 | run: |
27 | flake8 .
28 |
29 | unitTests:
30 | runs-on: ubuntu-latest
31 | steps:
32 | - uses: actions/checkout@v2
33 | - name: Set up Python 3.7
34 | uses: actions/setup-python@v1
35 | with:
36 | python-version: 3.7
37 | - name: Install dependencies
38 | run: |
39 | python -m pip install --upgrade pip
40 | pip install -r requirements.txt
41 | pip install nose
42 | pip install coverage
43 | - name: Test with nose and coverage
44 | run: |
45 | nosetests tests/unit_tests --with-coverage --cover-min-percentage 85 --cover-package=boto3_batch_utils
46 |
47 | integrationTests:
48 | runs-on: ubuntu-latest
49 | steps:
50 | - uses: actions/checkout@v2
51 | - name: Set up Python 3.7
52 | uses: actions/setup-python@v1
53 | with:
54 | python-version: 3.7
55 | - name: Install dependencies
56 | run: |
57 | python -m pip install --upgrade pip
58 | pip install -r requirements.txt
59 | pip install nose
60 | - name: Test with nose
61 | run: |
62 | nosetests tests/integration_tests
63 |
--------------------------------------------------------------------------------
/.github/workflows/develop.yml:
--------------------------------------------------------------------------------
1 | name: Develop Pipeline
2 | on:
3 | push:
4 | branches:
5 | - 'develop'
6 |
7 | jobs:
8 | linting:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v2
12 | - name: Set up Python 3.7
13 | uses: actions/setup-python@v1
14 | with:
15 | python-version: 3.7
16 | - name: Install dependencies
17 | run: |
18 | python -m pip install --upgrade pip
19 | pip install -r requirements.txt
20 | pip install flake8
21 | - name: Lint with flake8
22 | run: |
23 | flake8 .
24 |
25 | unitTests:
26 | runs-on: ubuntu-latest
27 | strategy:
28 | matrix:
29 | python: [3.6, 3.7, 3.8]
30 | steps:
31 | - uses: actions/checkout@v2
32 | - name: Set up Python ${{ matrix.python }}
33 | uses: actions/setup-python@v1
34 | with:
35 | python-version: ${{ matrix.python }}
36 | - name: Install dependencies
37 | run: |
38 | python -m pip install --upgrade pip
39 | pip install -r requirements.txt
40 | pip install nose
41 | pip install coverage
42 | - name: Test with nose and coverage
43 | run: |
44 | nosetests tests/unit_tests --with-coverage --cover-min-percentage 85 --cover-package=boto3_batch_utils
45 |
46 | integrationTests:
47 | runs-on: ubuntu-latest
48 | steps:
49 | - uses: actions/checkout@v2
50 | - name: Set up Python 3.7
51 | uses: actions/setup-python@v1
52 | with:
53 | python-version: 3.7
54 | - name: Install dependencies
55 | run: |
56 | python -m pip install --upgrade pip
57 | pip install -r requirements.txt
58 | pip install nose
59 | - name: Test with nose
60 | run: |
61 | nosetests tests/integration_tests
62 |
--------------------------------------------------------------------------------
/.github/workflows/master.yml:
--------------------------------------------------------------------------------
1 | name: Master Pipeline
2 | on:
3 | push:
4 | branches:
5 | - 'master'
6 |
7 | jobs:
8 | linting:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v2
12 | - name: Set up Python 3.7
13 | uses: actions/setup-python@v1
14 | with:
15 | python-version: 3.7
16 | - name: Install dependencies
17 | run: |
18 | python -m pip install --upgrade pip
19 | pip install -r requirements.txt
20 | pip install flake8
21 | - name: Lint with flake8
22 | run: |
23 | flake8 .
24 |
25 | unitTests:
26 | runs-on: ubuntu-latest
27 | steps:
28 | - uses: actions/checkout@v2
29 | - name: Set up Python 3.7
30 | uses: actions/setup-python@v1
31 | with:
32 | python-version: 3.7
33 | - name: Install dependencies
34 | run: |
35 | python -m pip install --upgrade pip
36 | pip install -r requirements.txt
37 | pip install nose
38 | pip install coverage
39 | - name: Test with nose and coverage
40 | run: |
41 | nosetests tests/unit_tests --with-coverage --cover-min-percentage 85 --cover-package=boto3_batch_utils
42 |
43 | integrationTests:
44 | runs-on: ubuntu-latest
45 | steps:
46 | - uses: actions/checkout@v2
47 | - name: Set up Python 3.7
48 | uses: actions/setup-python@v1
49 | with:
50 | python-version: 3.7
51 | - name: Install dependencies
52 | run: |
53 | python -m pip install --upgrade pip
54 | pip install -r requirements.txt
55 | pip install nose
56 | - name: Test with nose
57 | run: |
58 | nosetests tests/integration_tests
59 |
60 | deploy:
61 | needs: [linting, unitTests, integrationTests]
62 | runs-on: ubuntu-latest
63 | steps:
64 | - uses: actions/checkout@v2
65 | - name: Set up Python
66 | uses: actions/setup-python@v1
67 | with:
68 | python-version: '3.7'
69 | - name: Install dependencies
70 | run: |
71 | python -m pip install --upgrade pip
72 | pip install setuptools wheel twine
73 | - name: Build and Publish
74 | env:
75 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
76 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
77 | run: |
78 | python setup.py sdist bdist_wheel
79 | twine upload dist/*
80 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
103 | .idea/
104 |
105 | .coverage
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 |
14 | The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works. By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 |
20 | When we speak of free software, we are referring to freedom, not
21 | price. Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 |
27 | Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 |
32 | A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate. Many developers of free software are heartened and
36 | encouraged by the resulting cooperation. However, in the case of
37 | software used on network servers, this result may fail to come about.
38 | The GNU General Public License permits making a modified version and
39 | letting the public access it on a server without ever releasing its
40 | source code to the public.
41 |
42 | The GNU Affero General Public License is designed specifically to
43 | ensure that, in such cases, the modified source code becomes available
44 | to the community. It requires the operator of a network server to
45 | provide the source code of the modified version running there to the
46 | users of that server. Therefore, public use of a modified version, on
47 | a publicly accessible server, gives the public access to the source
48 | code of the modified version.
49 |
50 | An older license, called the Affero General Public License and
51 | published by Affero, was designed to accomplish similar goals. This is
52 | a different license, not a version of the Affero GPL, but Affero has
53 | released a new version of the Affero GPL which permits relicensing under
54 | this license.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | TERMS AND CONDITIONS
60 |
61 | 0. Definitions.
62 |
63 | "This License" refers to version 3 of the GNU Affero General Public License.
64 |
65 | "Copyright" also means copyright-like laws that apply to other kinds of
66 | works, such as semiconductor masks.
67 |
68 | "The Program" refers to any copyrightable work licensed under this
69 | License. Each licensee is addressed as "you". "Licensees" and
70 | "recipients" may be individuals or organizations.
71 |
72 | To "modify" a work means to copy from or adapt all or part of the work
73 | in a fashion requiring copyright permission, other than the making of an
74 | exact copy. The resulting work is called a "modified version" of the
75 | earlier work or a work "based on" the earlier work.
76 |
77 | A "covered work" means either the unmodified Program or a work based
78 | on the Program.
79 |
80 | To "propagate" a work means to do anything with it that, without
81 | permission, would make you directly or secondarily liable for
82 | infringement under applicable copyright law, except executing it on a
83 | computer or modifying a private copy. Propagation includes copying,
84 | distribution (with or without modification), making available to the
85 | public, and in some countries other activities as well.
86 |
87 | To "convey" a work means any kind of propagation that enables other
88 | parties to make or receive copies. Mere interaction with a user through
89 | a computer network, with no transfer of a copy, is not conveying.
90 |
91 | An interactive user interface displays "Appropriate Legal Notices"
92 | to the extent that it includes a convenient and prominently visible
93 | feature that (1) displays an appropriate copyright notice, and (2)
94 | tells the user that there is no warranty for the work (except to the
95 | extent that warranties are provided), that licensees may convey the
96 | work under this License, and how to view a copy of this License. If
97 | the interface presents a list of user commands or options, such as a
98 | menu, a prominent item in the list meets this criterion.
99 |
100 | 1. Source Code.
101 |
102 | The "source code" for a work means the preferred form of the work
103 | for making modifications to it. "Object code" means any non-source
104 | form of a work.
105 |
106 | A "Standard Interface" means an interface that either is an official
107 | standard defined by a recognized standards body, or, in the case of
108 | interfaces specified for a particular programming language, one that
109 | is widely used among developers working in that language.
110 |
111 | The "System Libraries" of an executable work include anything, other
112 | than the work as a whole, that (a) is included in the normal form of
113 | packaging a Major Component, but which is not part of that Major
114 | Component, and (b) serves only to enable use of the work with that
115 | Major Component, or to implement a Standard Interface for which an
116 | implementation is available to the public in source code form. A
117 | "Major Component", in this context, means a major essential component
118 | (kernel, window system, and so on) of the specific operating system
119 | (if any) on which the executable work runs, or a compiler used to
120 | produce the work, or an object code interpreter used to run it.
121 |
122 | The "Corresponding Source" for a work in object code form means all
123 | the source code needed to generate, install, and (for an executable
124 | work) run the object code and to modify the work, including scripts to
125 | control those activities. However, it does not include the work's
126 | System Libraries, or general-purpose tools or generally available free
127 | programs which are used unmodified in performing those activities but
128 | which are not part of the work. For example, Corresponding Source
129 | includes interface definition files associated with source files for
130 | the work, and the source code for shared libraries and dynamically
131 | linked subprograms that the work is specifically designed to require,
132 | such as by intimate data communication or control flow between those
133 | subprograms and other parts of the work.
134 |
135 | The Corresponding Source need not include anything that users
136 | can regenerate automatically from other parts of the Corresponding
137 | Source.
138 |
139 | The Corresponding Source for a work in source code form is that
140 | same work.
141 |
142 | 2. Basic Permissions.
143 |
144 | All rights granted under this License are granted for the term of
145 | copyright on the Program, and are irrevocable provided the stated
146 | conditions are met. This License explicitly affirms your unlimited
147 | permission to run the unmodified Program. The output from running a
148 | covered work is covered by this License only if the output, given its
149 | content, constitutes a covered work. This License acknowledges your
150 | rights of fair use or other equivalent, as provided by copyright law.
151 |
152 | You may make, run and propagate covered works that you do not
153 | convey, without conditions so long as your license otherwise remains
154 | in force. You may convey covered works to others for the sole purpose
155 | of having them make modifications exclusively for you, or provide you
156 | with facilities for running those works, provided that you comply with
157 | the terms of this License in conveying all material for which you do
158 | not control copyright. Those thus making or running the covered works
159 | for you must do so exclusively on your behalf, under your direction
160 | and control, on terms that prohibit them from making any copies of
161 | your copyrighted material outside their relationship with you.
162 |
163 | Conveying under any other circumstances is permitted solely under
164 | the conditions stated below. Sublicensing is not allowed; section 10
165 | makes it unnecessary.
166 |
167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168 |
169 | No covered work shall be deemed part of an effective technological
170 | measure under any applicable law fulfilling obligations under article
171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
172 | similar laws prohibiting or restricting circumvention of such
173 | measures.
174 |
175 | When you convey a covered work, you waive any legal power to forbid
176 | circumvention of technological measures to the extent such circumvention
177 | is effected by exercising rights under this License with respect to
178 | the covered work, and you disclaim any intention to limit operation or
179 | modification of the work as a means of enforcing, against the work's
180 | users, your or third parties' legal rights to forbid circumvention of
181 | technological measures.
182 |
183 | 4. Conveying Verbatim Copies.
184 |
185 | You may convey verbatim copies of the Program's source code as you
186 | receive it, in any medium, provided that you conspicuously and
187 | appropriately publish on each copy an appropriate copyright notice;
188 | keep intact all notices stating that this License and any
189 | non-permissive terms added in accord with section 7 apply to the code;
190 | keep intact all notices of the absence of any warranty; and give all
191 | recipients a copy of this License along with the Program.
192 |
193 | You may charge any price or no price for each copy that you convey,
194 | and you may offer support or warranty protection for a fee.
195 |
196 | 5. Conveying Modified Source Versions.
197 |
198 | You may convey a work based on the Program, or the modifications to
199 | produce it from the Program, in the form of source code under the
200 | terms of section 4, provided that you also meet all of these conditions:
201 |
202 | a) The work must carry prominent notices stating that you modified
203 | it, and giving a relevant date.
204 |
205 | b) The work must carry prominent notices stating that it is
206 | released under this License and any conditions added under section
207 | 7. This requirement modifies the requirement in section 4 to
208 | "keep intact all notices".
209 |
210 | c) You must license the entire work, as a whole, under this
211 | License to anyone who comes into possession of a copy. This
212 | License will therefore apply, along with any applicable section 7
213 | additional terms, to the whole of the work, and all its parts,
214 | regardless of how they are packaged. This License gives no
215 | permission to license the work in any other way, but it does not
216 | invalidate such permission if you have separately received it.
217 |
218 | d) If the work has interactive user interfaces, each must display
219 | Appropriate Legal Notices; however, if the Program has interactive
220 | interfaces that do not display Appropriate Legal Notices, your
221 | work need not make them do so.
222 |
223 | A compilation of a covered work with other separate and independent
224 | works, which are not by their nature extensions of the covered work,
225 | and which are not combined with it such as to form a larger program,
226 | in or on a volume of a storage or distribution medium, is called an
227 | "aggregate" if the compilation and its resulting copyright are not
228 | used to limit the access or legal rights of the compilation's users
229 | beyond what the individual works permit. Inclusion of a covered work
230 | in an aggregate does not cause this License to apply to the other
231 | parts of the aggregate.
232 |
233 | 6. Conveying Non-Source Forms.
234 |
235 | You may convey a covered work in object code form under the terms
236 | of sections 4 and 5, provided that you also convey the
237 | machine-readable Corresponding Source under the terms of this License,
238 | in one of these ways:
239 |
240 | a) Convey the object code in, or embodied in, a physical product
241 | (including a physical distribution medium), accompanied by the
242 | Corresponding Source fixed on a durable physical medium
243 | customarily used for software interchange.
244 |
245 | b) Convey the object code in, or embodied in, a physical product
246 | (including a physical distribution medium), accompanied by a
247 | written offer, valid for at least three years and valid for as
248 | long as you offer spare parts or customer support for that product
249 | model, to give anyone who possesses the object code either (1) a
250 | copy of the Corresponding Source for all the software in the
251 | product that is covered by this License, on a durable physical
252 | medium customarily used for software interchange, for a price no
253 | more than your reasonable cost of physically performing this
254 | conveying of source, or (2) access to copy the
255 | Corresponding Source from a network server at no charge.
256 |
257 | c) Convey individual copies of the object code with a copy of the
258 | written offer to provide the Corresponding Source. This
259 | alternative is allowed only occasionally and noncommercially, and
260 | only if you received the object code with such an offer, in accord
261 | with subsection 6b.
262 |
263 | d) Convey the object code by offering access from a designated
264 | place (gratis or for a charge), and offer equivalent access to the
265 | Corresponding Source in the same way through the same place at no
266 | further charge. You need not require recipients to copy the
267 | Corresponding Source along with the object code. If the place to
268 | copy the object code is a network server, the Corresponding Source
269 | may be on a different server (operated by you or a third party)
270 | that supports equivalent copying facilities, provided you maintain
271 | clear directions next to the object code saying where to find the
272 | Corresponding Source. Regardless of what server hosts the
273 | Corresponding Source, you remain obligated to ensure that it is
274 | available for as long as needed to satisfy these requirements.
275 |
276 | e) Convey the object code using peer-to-peer transmission, provided
277 | you inform other peers where the object code and Corresponding
278 | Source of the work are being offered to the general public at no
279 | charge under subsection 6d.
280 |
281 | A separable portion of the object code, whose source code is excluded
282 | from the Corresponding Source as a System Library, need not be
283 | included in conveying the object code work.
284 |
285 | A "User Product" is either (1) a "consumer product", which means any
286 | tangible personal property which is normally used for personal, family,
287 | or household purposes, or (2) anything designed or sold for incorporation
288 | into a dwelling. In determining whether a product is a consumer product,
289 | doubtful cases shall be resolved in favor of coverage. For a particular
290 | product received by a particular user, "normally used" refers to a
291 | typical or common use of that class of product, regardless of the status
292 | of the particular user or of the way in which the particular user
293 | actually uses, or expects or is expected to use, the product. A product
294 | is a consumer product regardless of whether the product has substantial
295 | commercial, industrial or non-consumer uses, unless such uses represent
296 | the only significant mode of use of the product.
297 |
298 | "Installation Information" for a User Product means any methods,
299 | procedures, authorization keys, or other information required to install
300 | and execute modified versions of a covered work in that User Product from
301 | a modified version of its Corresponding Source. The information must
302 | suffice to ensure that the continued functioning of the modified object
303 | code is in no case prevented or interfered with solely because
304 | modification has been made.
305 |
306 | If you convey an object code work under this section in, or with, or
307 | specifically for use in, a User Product, and the conveying occurs as
308 | part of a transaction in which the right of possession and use of the
309 | User Product is transferred to the recipient in perpetuity or for a
310 | fixed term (regardless of how the transaction is characterized), the
311 | Corresponding Source conveyed under this section must be accompanied
312 | by the Installation Information. But this requirement does not apply
313 | if neither you nor any third party retains the ability to install
314 | modified object code on the User Product (for example, the work has
315 | been installed in ROM).
316 |
317 | The requirement to provide Installation Information does not include a
318 | requirement to continue to provide support service, warranty, or updates
319 | for a work that has been modified or installed by the recipient, or for
320 | the User Product in which it has been modified or installed. Access to a
321 | network may be denied when the modification itself materially and
322 | adversely affects the operation of the network or violates the rules and
323 | protocols for communication across the network.
324 |
325 | Corresponding Source conveyed, and Installation Information provided,
326 | in accord with this section must be in a format that is publicly
327 | documented (and with an implementation available to the public in
328 | source code form), and must require no special password or key for
329 | unpacking, reading or copying.
330 |
331 | 7. Additional Terms.
332 |
333 | "Additional permissions" are terms that supplement the terms of this
334 | License by making exceptions from one or more of its conditions.
335 | Additional permissions that are applicable to the entire Program shall
336 | be treated as though they were included in this License, to the extent
337 | that they are valid under applicable law. If additional permissions
338 | apply only to part of the Program, that part may be used separately
339 | under those permissions, but the entire Program remains governed by
340 | this License without regard to the additional permissions.
341 |
342 | When you convey a copy of a covered work, you may at your option
343 | remove any additional permissions from that copy, or from any part of
344 | it. (Additional permissions may be written to require their own
345 | removal in certain cases when you modify the work.) You may place
346 | additional permissions on material, added by you to a covered work,
347 | for which you have or can give appropriate copyright permission.
348 |
349 | Notwithstanding any other provision of this License, for material you
350 | add to a covered work, you may (if authorized by the copyright holders of
351 | that material) supplement the terms of this License with terms:
352 |
353 | a) Disclaiming warranty or limiting liability differently from the
354 | terms of sections 15 and 16 of this License; or
355 |
356 | b) Requiring preservation of specified reasonable legal notices or
357 | author attributions in that material or in the Appropriate Legal
358 | Notices displayed by works containing it; or
359 |
360 | c) Prohibiting misrepresentation of the origin of that material, or
361 | requiring that modified versions of such material be marked in
362 | reasonable ways as different from the original version; or
363 |
364 | d) Limiting the use for publicity purposes of names of licensors or
365 | authors of the material; or
366 |
367 | e) Declining to grant rights under trademark law for use of some
368 | trade names, trademarks, or service marks; or
369 |
370 | f) Requiring indemnification of licensors and authors of that
371 | material by anyone who conveys the material (or modified versions of
372 | it) with contractual assumptions of liability to the recipient, for
373 | any liability that these contractual assumptions directly impose on
374 | those licensors and authors.
375 |
376 | All other non-permissive additional terms are considered "further
377 | restrictions" within the meaning of section 10. If the Program as you
378 | received it, or any part of it, contains a notice stating that it is
379 | governed by this License along with a term that is a further
380 | restriction, you may remove that term. If a license document contains
381 | a further restriction but permits relicensing or conveying under this
382 | License, you may add to a covered work material governed by the terms
383 | of that license document, provided that the further restriction does
384 | not survive such relicensing or conveying.
385 |
386 | If you add terms to a covered work in accord with this section, you
387 | must place, in the relevant source files, a statement of the
388 | additional terms that apply to those files, or a notice indicating
389 | where to find the applicable terms.
390 |
391 | Additional terms, permissive or non-permissive, may be stated in the
392 | form of a separately written license, or stated as exceptions;
393 | the above requirements apply either way.
394 |
395 | 8. Termination.
396 |
397 | You may not propagate or modify a covered work except as expressly
398 | provided under this License. Any attempt otherwise to propagate or
399 | modify it is void, and will automatically terminate your rights under
400 | this License (including any patent licenses granted under the third
401 | paragraph of section 11).
402 |
403 | However, if you cease all violation of this License, then your
404 | license from a particular copyright holder is reinstated (a)
405 | provisionally, unless and until the copyright holder explicitly and
406 | finally terminates your license, and (b) permanently, if the copyright
407 | holder fails to notify you of the violation by some reasonable means
408 | prior to 60 days after the cessation.
409 |
410 | Moreover, your license from a particular copyright holder is
411 | reinstated permanently if the copyright holder notifies you of the
412 | violation by some reasonable means, this is the first time you have
413 | received notice of violation of this License (for any work) from that
414 | copyright holder, and you cure the violation prior to 30 days after
415 | your receipt of the notice.
416 |
417 | Termination of your rights under this section does not terminate the
418 | licenses of parties who have received copies or rights from you under
419 | this License. If your rights have been terminated and not permanently
420 | reinstated, you do not qualify to receive new licenses for the same
421 | material under section 10.
422 |
423 | 9. Acceptance Not Required for Having Copies.
424 |
425 | You are not required to accept this License in order to receive or
426 | run a copy of the Program. Ancillary propagation of a covered work
427 | occurring solely as a consequence of using peer-to-peer transmission
428 | to receive a copy likewise does not require acceptance. However,
429 | nothing other than this License grants you permission to propagate or
430 | modify any covered work. These actions infringe copyright if you do
431 | not accept this License. Therefore, by modifying or propagating a
432 | covered work, you indicate your acceptance of this License to do so.
433 |
434 | 10. Automatic Licensing of Downstream Recipients.
435 |
436 | Each time you convey a covered work, the recipient automatically
437 | receives a license from the original licensors, to run, modify and
438 | propagate that work, subject to this License. You are not responsible
439 | for enforcing compliance by third parties with this License.
440 |
441 | An "entity transaction" is a transaction transferring control of an
442 | organization, or substantially all assets of one, or subdividing an
443 | organization, or merging organizations. If propagation of a covered
444 | work results from an entity transaction, each party to that
445 | transaction who receives a copy of the work also receives whatever
446 | licenses to the work the party's predecessor in interest had or could
447 | give under the previous paragraph, plus a right to possession of the
448 | Corresponding Source of the work from the predecessor in interest, if
449 | the predecessor has it or can get it with reasonable efforts.
450 |
451 | You may not impose any further restrictions on the exercise of the
452 | rights granted or affirmed under this License. For example, you may
453 | not impose a license fee, royalty, or other charge for exercise of
454 | rights granted under this License, and you may not initiate litigation
455 | (including a cross-claim or counterclaim in a lawsuit) alleging that
456 | any patent claim is infringed by making, using, selling, offering for
457 | sale, or importing the Program or any portion of it.
458 |
459 | 11. Patents.
460 |
461 | A "contributor" is a copyright holder who authorizes use under this
462 | License of the Program or a work on which the Program is based. The
463 | work thus licensed is called the contributor's "contributor version".
464 |
465 | A contributor's "essential patent claims" are all patent claims
466 | owned or controlled by the contributor, whether already acquired or
467 | hereafter acquired, that would be infringed by some manner, permitted
468 | by this License, of making, using, or selling its contributor version,
469 | but do not include claims that would be infringed only as a
470 | consequence of further modification of the contributor version. For
471 | purposes of this definition, "control" includes the right to grant
472 | patent sublicenses in a manner consistent with the requirements of
473 | this License.
474 |
475 | Each contributor grants you a non-exclusive, worldwide, royalty-free
476 | patent license under the contributor's essential patent claims, to
477 | make, use, sell, offer for sale, import and otherwise run, modify and
478 | propagate the contents of its contributor version.
479 |
480 | In the following three paragraphs, a "patent license" is any express
481 | agreement or commitment, however denominated, not to enforce a patent
482 | (such as an express permission to practice a patent or covenant not to
483 | sue for patent infringement). To "grant" such a patent license to a
484 | party means to make such an agreement or commitment not to enforce a
485 | patent against the party.
486 |
487 | If you convey a covered work, knowingly relying on a patent license,
488 | and the Corresponding Source of the work is not available for anyone
489 | to copy, free of charge and under the terms of this License, through a
490 | publicly available network server or other readily accessible means,
491 | then you must either (1) cause the Corresponding Source to be so
492 | available, or (2) arrange to deprive yourself of the benefit of the
493 | patent license for this particular work, or (3) arrange, in a manner
494 | consistent with the requirements of this License, to extend the patent
495 | license to downstream recipients. "Knowingly relying" means you have
496 | actual knowledge that, but for the patent license, your conveying the
497 | covered work in a country, or your recipient's use of the covered work
498 | in a country, would infringe one or more identifiable patents in that
499 | country that you have reason to believe are valid.
500 |
501 | If, pursuant to or in connection with a single transaction or
502 | arrangement, you convey, or propagate by procuring conveyance of, a
503 | covered work, and grant a patent license to some of the parties
504 | receiving the covered work authorizing them to use, propagate, modify
505 | or convey a specific copy of the covered work, then the patent license
506 | you grant is automatically extended to all recipients of the covered
507 | work and works based on it.
508 |
509 | A patent license is "discriminatory" if it does not include within
510 | the scope of its coverage, prohibits the exercise of, or is
511 | conditioned on the non-exercise of one or more of the rights that are
512 | specifically granted under this License. You may not convey a covered
513 | work if you are a party to an arrangement with a third party that is
514 | in the business of distributing software, under which you make payment
515 | to the third party based on the extent of your activity of conveying
516 | the work, and under which the third party grants, to any of the
517 | parties who would receive the covered work from you, a discriminatory
518 | patent license (a) in connection with copies of the covered work
519 | conveyed by you (or copies made from those copies), or (b) primarily
520 | for and in connection with specific products or compilations that
521 | contain the covered work, unless you entered into that arrangement,
522 | or that patent license was granted, prior to 28 March 2007.
523 |
524 | Nothing in this License shall be construed as excluding or limiting
525 | any implied license or other defenses to infringement that may
526 | otherwise be available to you under applicable patent law.
527 |
528 | 12. No Surrender of Others' Freedom.
529 |
530 | If conditions are imposed on you (whether by court order, agreement or
531 | otherwise) that contradict the conditions of this License, they do not
532 | excuse you from the conditions of this License. If you cannot convey a
533 | covered work so as to satisfy simultaneously your obligations under this
534 | License and any other pertinent obligations, then as a consequence you may
535 | not convey it at all. For example, if you agree to terms that obligate you
536 | to collect a royalty for further conveying from those to whom you convey
537 | the Program, the only way you could satisfy both those terms and this
538 | License would be to refrain entirely from conveying the Program.
539 |
540 | 13. Remote Network Interaction; Use with the GNU General Public License.
541 |
542 | Notwithstanding any other provision of this License, if you modify the
543 | Program, your modified version must prominently offer all users
544 | interacting with it remotely through a computer network (if your version
545 | supports such interaction) an opportunity to receive the Corresponding
546 | Source of your version by providing access to the Corresponding Source
547 | from a network server at no charge, through some standard or customary
548 | means of facilitating copying of software. This Corresponding Source
549 | shall include the Corresponding Source for any work covered by version 3
550 | of the GNU General Public License that is incorporated pursuant to the
551 | following paragraph.
552 |
553 | Notwithstanding any other provision of this License, you have
554 | permission to link or combine any covered work with a work licensed
555 | under version 3 of the GNU General Public License into a single
556 | combined work, and to convey the resulting work. The terms of this
557 | License will continue to apply to the part which is the covered work,
558 | but the work with which it is combined will remain governed by version
559 | 3 of the GNU General Public License.
560 |
561 | 14. Revised Versions of this License.
562 |
563 | The Free Software Foundation may publish revised and/or new versions of
564 | the GNU Affero General Public License from time to time. Such new versions
565 | will be similar in spirit to the present version, but may differ in detail to
566 | address new problems or concerns.
567 |
568 | Each version is given a distinguishing version number. If the
569 | Program specifies that a certain numbered version of the GNU Affero General
570 | Public License "or any later version" applies to it, you have the
571 | option of following the terms and conditions either of that numbered
572 | version or of any later version published by the Free Software
573 | Foundation. If the Program does not specify a version number of the
574 | GNU Affero General Public License, you may choose any version ever published
575 | by the Free Software Foundation.
576 |
577 | If the Program specifies that a proxy can decide which future
578 | versions of the GNU Affero General Public License can be used, that proxy's
579 | public statement of acceptance of a version permanently authorizes you
580 | to choose that version for the Program.
581 |
582 | Later license versions may give you additional or different
583 | permissions. However, no additional obligations are imposed on any
584 | author or copyright holder as a result of your choosing to follow a
585 | later version.
586 |
587 | 15. Disclaimer of Warranty.
588 |
589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597 |
598 | 16. Limitation of Liability.
599 |
600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608 | SUCH DAMAGES.
609 |
610 | 17. Interpretation of Sections 15 and 16.
611 |
612 | If the disclaimer of warranty and limitation of liability provided
613 | above cannot be given local legal effect according to their terms,
614 | reviewing courts shall apply local law that most closely approximates
615 | an absolute waiver of all civil liability in connection with the
616 | Program, unless a warranty or assumption of liability accompanies a
617 | copy of the Program in return for a fee.
618 |
619 | END OF TERMS AND CONDITIONS
620 |
621 | How to Apply These Terms to Your New Programs
622 |
623 | If you develop a new program, and you want it to be of the greatest
624 | possible use to the public, the best way to achieve this is to make it
625 | free software which everyone can redistribute and change under these terms.
626 |
627 | To do so, attach the following notices to the program. It is safest
628 | to attach them to the start of each source file to most effectively
629 | state the exclusion of warranty; and each file should have at least
630 | the "copyright" line and a pointer to where the full notice is found.
631 |
632 | <one line to give the program's name and a brief idea of what it does.>
633 | Copyright (C) <year>  <name of author>
634 |
635 | This program is free software: you can redistribute it and/or modify
636 | it under the terms of the GNU Affero General Public License as published
637 | by the Free Software Foundation, either version 3 of the License, or
638 | (at your option) any later version.
639 |
640 | This program is distributed in the hope that it will be useful,
641 | but WITHOUT ANY WARRANTY; without even the implied warranty of
642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643 | GNU Affero General Public License for more details.
644 |
645 | You should have received a copy of the GNU Affero General Public License
646 | along with this program. If not, see <https://www.gnu.org/licenses/>.
647 |
648 | Also add information on how to contact you by electronic and paper mail.
649 |
650 | If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source. For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code. There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 |
658 | You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <https://www.gnu.org/licenses/>.
662 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.md
2 | include LICENSE
3 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
3 | [](https://pyup.io/repos/github/g-farrow/boto3_batch_utils/)
4 |
5 | Boto3 Batch Utils
6 | =================
7 | This library offers functionality to assist in writing records to AWS services in batches, where your data is not
8 | naturally batched. This helps to achieve significant efficiencies when interacting with those AWS services, as
9 | batch writes are often much more efficient than individual writes.
10 |
11 | [Documentation](https://g-farrow.github.io/boto3_batch_utils/)
12 |
13 | # Installation
14 | The package can be installed using `pip`:
15 | ```
16 | pip install boto3-batch-utils
17 | ```
18 |
19 | You may install a specific version of the package:
20 | ```
21 | pip install boto3-batch-utils==3.0.0
22 | ```
23 |
24 | ### Boto3 and Configuration
25 | Boto3 Batch Utils is an abstraction around AWS' Boto3 library. `boto3` is a dependency and will be installed
26 | automatically, if it is not already present.
27 |
28 | You will need to configure your AWS credentials and roles in exactly the same way as you would if using `boto3`
29 | directly.
30 |
31 | For more information on `boto3` configuration, refer to the AWS documentation
32 | [here](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html).
33 |
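
Because each dispatcher simply forwards its construction keyword arguments to the underlying `boto3` client or
resource (see `Base.py` later in this repository), any standard `boto3` client option can be supplied when the
dispatcher is created. A minimal sketch, using the `CloudwatchBatchDispatcher` defined below and an assumed region:

```python
from boto3_batch_utils.Cloudwatch import CloudwatchBatchDispatcher

# Keyword arguments are passed straight through to boto3.client('cloudwatch', ...)
cw = CloudwatchBatchDispatcher('MyApp/Metrics', region_name='eu-west-1')
```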
34 | # Concepts
35 | The library is very simple to use: initialise a client, submit the payloads you want to transmit, and finally tell
36 | the client to flush anything left in its backlog.
37 |
38 | To use the package you do not need to care how the payloads are batched up and sent to their target service; the
39 | package takes care of this for you. This allows you to utilise the significant efficiencies of `boto3`'s batch
40 | send/put/write methods, without the headaches of error handling and batch sizes.
41 |
42 | Each of the supported services has its own dispatcher client, and each exposes the same two methods. Interacting
43 | with each of the various service clients is therefore similar and follows the same 3 steps (see the sketch below):
44 | * **Initialise**: Instantiate the batch dispatcher, passing in the required configuration.
45 | * **submit_payload**: pass in a payload (e.g. a single message, metric etc).
46 | * **flush_payloads**: send all payloads in the backlog.
47 |
48 | > If you are using `boto3-batch-utils` in AWS Lambda, you should call `.flush_payloads()` at the end of every
49 | > invocation.
50 |
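A minimal sketch of these three steps, using the Cloudwatch dispatcher defined in
`boto3_batch_utils/Cloudwatch.py` (the namespace and metric name are placeholders):

```python
from boto3_batch_utils.Cloudwatch import CloudwatchBatchDispatcher

# Initialise: instantiate the dispatcher with its required configuration
cw = CloudwatchBatchDispatcher('MyApp/Metrics', max_batch_size=20)

# submit_metric: queue up individual records; a batch is dispatched
# automatically whenever max_batch_size records are waiting
for _ in range(25):
    cw.submit_metric('EventProcessed', value=1)

# flush_payloads: send whatever remains in the backlog; returns any records
# which could not be delivered after the dispatcher's built-in retries
failed = cw.flush_payloads()
if failed:
    print(f"{len(failed)} records could not be delivered")
```
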
51 | # Documentation
52 |
53 | Full documentation is available here: [boto3-batch-utils Docs](https://g-farrow.github.io/boto3_batch_utils/)
54 |
55 | # Contributing
56 | For more information about contributing to this project, please refer to the
57 | [Developer documentation](resources/developer_docs.md).
58 |
59 | # Acknowledgements
60 | [ ~ Dependencies scanned by PyUp.io ~ ]
--------------------------------------------------------------------------------
/boto3_batch_utils/Base.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import boto3
3 | from botocore.exceptions import ClientError
4 |
5 | from boto3_batch_utils.utils import chunks, get_byte_size_of_dict_or_list
6 |
7 | logger = logging.getLogger('boto3-batch-utils')
8 |
9 |
10 | _boto3_interface_type_mapper = {
11 | 'dynamodb': 'resource',
12 | 'kinesis': 'client',
13 | 'sqs': 'client',
14 | 'cloudwatch': 'client'
15 | }
16 |
17 |
18 | class BaseDispatcher:
19 |
20 | def __init__(self, aws_service: str, batch_dispatch_method: str, individual_dispatch_method: str = None,
21 | max_batch_size: int = 1, **kwargs: dict):
22 | """
23 | :param aws_service: str - name of the AWS service; used to construct the boto3 client/resource which
24 | shall be called to dispatch each payload
25 | :param batch_dispatch_method: str - name of the method to be called when attempting to dispatch multiple
26 | items in a payload
27 | :param individual_dispatch_method: str - name of the method to be called when attempting to dispatch an
28 | individual item to the subject
29 | :param max_batch_size: int - maximum size of a payload batch to be sent to the target; must not exceed
30 | the maximum batch size permitted by the target service (checked by
31 | _validate_initialisation)
32 | :param kwargs: dict - keyword arguments passed to the boto3 client/resource during its creation
33 | """
34 | self.aws_service_name = aws_service
35 | self.aws_service_args = kwargs or {}
36 | self._aws_service = None
37 | self.batch_dispatch_method = batch_dispatch_method
38 | self._batch_dispatch_method = None
39 | self.individual_dispatch_method = individual_dispatch_method
40 | self._individual_dispatch_method = None
41 | self.max_batch_size = max_batch_size
42 | self._aws_service_batch_max_payloads = None
43 | self._aws_service_message_max_bytes = None
44 | self._aws_service_batch_max_bytes = None
45 | self._batch_payload_wrapper = {}
46 | self._batch_payload = None
47 | self._batch_payload_wrapper_byte_size = get_byte_size_of_dict_or_list(self._batch_payload_wrapper) - 2
48 | # Remove 2 bytes for the `[]` which exists in the wrapper and the batch itself, therefore duplicated
49 | self.unprocessed_items = []
50 | logger.debug(f"Batch dispatch initialised: {self.aws_service_name}")
51 |
52 | def _validate_initialisation(self):
53 | """
54 | Ensure that all the initialised values and attributes are valid
55 | """
56 | if self._aws_service_batch_max_payloads and self.max_batch_size > self._aws_service_batch_max_payloads:
57 | raise ValueError(f"Requested max_batch_size '{self.max_batch_size}' exceeds the {self.aws_service_name} "
58 | f"maximum")
59 |
60 | def submit_payload(self, payload: dict):
61 | """ Submit a metric ready to be batched up and sent to Cloudwatch """
62 | self._validate_payload_byte_size(payload)
63 | self._prevent_batch_bytes_overload(payload)
64 | self._append_payload_to_current_batch(payload)
65 | logger.debug(f"Payload has been added to the {self.aws_service_name} dispatcher payload list: {payload}")
66 | self._flush_payload_selector()
67 |
68 | def _validate_payload_byte_size(self, payload):
69 | """ Validate that the payload is within the byte size limit for the AWS service """
70 | payload_byte_size = get_byte_size_of_dict_or_list(payload)
71 | if payload_byte_size > self._aws_service_message_max_bytes:
72 | raise ValueError(f"Submitted payload ({payload_byte_size} bytes) exceeds the maximum payload size "
73 | f"({self._aws_service_message_max_bytes} bytes) for {self.aws_service_name}")
74 |
75 | def _prevent_batch_bytes_overload(self, payload: dict):
76 | """ Check that adding appending the payload to the exiting batch does not overload the batch byte limit """
77 | current_batch_payload_byte_size = get_byte_size_of_dict_or_list(self._batch_payload)
78 | current_batch_payload_byte_size += self._batch_payload_wrapper_byte_size
79 | payload_byte_size = get_byte_size_of_dict_or_list(payload)
80 | if (current_batch_payload_byte_size + payload_byte_size) > self._aws_service_batch_max_bytes:
81 | logger.debug(f"Adding payload ({payload_byte_size} bytes) to the existing batch "
82 | f"({current_batch_payload_byte_size} bytes) would exceed the batch limit for "
83 | f"{self.aws_service_name}, calling flush_payloads")
84 | self.flush_payloads()
85 |
86 | def _append_payload_to_current_batch(self, payload):
87 | """ Append the payload to the service specific batch structure """
88 | self._batch_payload.append(payload)
89 |
90 | def _flush_payload_selector(self):
91 | """ Decide whether or not to flush the payload (usually used following a payload submission) """
92 | logger.debug(f"Payload list now contains '{len(self._batch_payload)}' payloads, "
93 | f"max batch size is '{self.max_batch_size}'")
94 | if len(self._batch_payload) >= self.max_batch_size:
95 | logger.debug("Max batch size has been reached, flushing the payload list contents")
96 | self.flush_payloads()
97 | else:
98 | logger.debug(f"Max batch size of {self.max_batch_size} for {self.aws_service_name} "
99 | "has not yet been reached, continuing")
100 |
101 | def flush_payloads(self) -> list:
102 | """ Push all payloads in the payload list to the subject """
103 | logger.debug(f"{self.aws_service_name} payload list has {len(self._batch_payload)} entries")
104 | self._initialise_aws_client()
105 | if self._batch_payload:
106 | logger.debug(f"Preparing to send {len(self._batch_payload)} records to {self.aws_service_name}")
107 | batch_list = list(chunks(self._batch_payload, self.max_batch_size))
108 | logger.debug(f"Payload list split into {len(batch_list)} batches")
109 | for batch in batch_list:
110 | self._batch_send_payloads(batch)
111 | self._batch_payload = []
112 | else:
113 | logger.info(f"No payloads to flush to {self.aws_service_name}")
114 | return self.unprocessed_items
115 |
116 | def _initialise_aws_client(self):
117 | """
118 | Initialise client/resource for the AWS service
119 | """
120 | if not self._aws_service:
121 | self._aws_service = getattr(boto3, _boto3_interface_type_mapper[self.aws_service_name])(
122 | self.aws_service_name, **self.aws_service_args)
123 | self._batch_dispatch_method = getattr(self._aws_service, str(self.batch_dispatch_method))
124 | if self.individual_dispatch_method:
125 | self._individual_dispatch_method = getattr(self._aws_service, self.individual_dispatch_method)
126 | else:
127 | self._individual_dispatch_method = None
128 | logger.debug("AWS/Boto3 Client is now initialised")
129 |
130 | def _batch_send_payloads(self, batch: (list, dict), retry: int = 4):
131 | """ Attempt to send a single batch of payloads to the subject """
132 | logger.debug(f"Sending batch type {type(batch)} payloads to {self.aws_service_name}")
133 | try:
134 | if isinstance(batch, dict):
135 | response = self._batch_dispatch_method(**batch)
136 | self._process_batch_send_response(response)
137 | else:
138 | response = self._batch_dispatch_method(batch)
139 | self._process_batch_send_response(response)
140 | logger.debug(f"Batch send response: {response}")
141 | except ClientError as e:
142 | if retry > 0:
143 | logger.warning(f"{self.aws_service_name} batch send has caused an error, "
144 | f"retrying send ({retry} retries remaining): {str(e)}")
145 | logger.debug(f"Failed batch: (type: {type(batch)}) {batch}")
146 | self._batch_send_payloads(batch, retry=retry-1)
147 | else:
148 | self._unpack_failed_batch_to_unprocessed_items(batch)
149 |
150 | def _process_batch_send_response(self, response):
151 | """ Process the response data from a batch put request """
152 | pass
153 |
154 | def _unpack_failed_batch_to_unprocessed_items(self, batch: (dict, list)):
155 | """ Process a failed batch and unpack the items into the unprocessed items list """
156 | pass
157 |
158 | def _send_individual_payload(self, payload: (dict, str), retry: int = 4):
159 | """ Send an individual payload to the subject """
160 | logger.debug(f"Attempting to send individual payload ({retry} retries left): {payload}")
161 | try:
162 | if isinstance(payload, dict):
163 | logger.debug("Submitting payload as keyword args")
164 | self._individual_dispatch_method(**payload)
165 | else:
166 | logger.debug("Submitting payload as arg")
167 | self._individual_dispatch_method(payload)
168 | except ClientError as e:
169 | if retry:
170 | logger.debug("Individual send attempt has failed, retrying")
171 | self._send_individual_payload(payload, retry-1)
172 | else:
173 | logger.error(f"Individual send attempt has failed, no more retries remaining: {e}")
174 | self.unprocessed_items.append(self._unpack_individual_failed_payload(payload))
175 |
176 | def _unpack_individual_failed_payload(self, payload):
177 | """ Extract the record from a constructed payload """
178 | return payload
179 |
--------------------------------------------------------------------------------
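
The byte-limit guard above (`_prevent_batch_bytes_overload`) measures the JSON-serialised size of the pending batch plus its wrapper and flushes early if appending the next payload would breach the service limit. A minimal, self-contained sketch of the same check, using a made-up limit and payloads rather than the library's real constants:

```python
import json

BATCH_MAX_BYTES = 1024  # illustrative limit, not one of the real service constants


def byte_size(obj) -> int:
    # Same idea as utils.get_byte_size_of_dict_or_list: the size of the JSON form
    return len(json.dumps(obj).encode('utf-8'))


batch = []
for payload in ({'id': i, 'body': 'x' * 200} for i in range(10)):
    if byte_size(batch) + byte_size(payload) > BATCH_MAX_BYTES:
        print(f"Flushing {len(batch)} payloads before appending the next one")
        batch = []  # in the dispatcher this is a call to flush_payloads()
    batch.append(payload)
```
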
/boto3_batch_utils/Cloudwatch.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from datetime import datetime
3 |
4 | from boto3_batch_utils.Base import BaseDispatcher
5 | from boto3_batch_utils import constants
6 |
7 |
8 | logger = logging.getLogger('boto3-batch-utils')
9 |
10 |
11 | def cloudwatch_dimension(name: str, value: (str, int)):
12 | """ Structure for forming aCloudwatch dimension """
13 | return {'Name': str(name), 'Value': str(value)}
14 |
15 |
16 | class CloudwatchBatchDispatcher(BaseDispatcher):
17 | """
18 | Manage the batch 'put' of Cloudwatch metrics
19 | """
20 |
21 | def __init__(self, namespace: str, max_batch_size: int = 20, **kwargs: dict):
22 | self.namespace = namespace
23 | super().__init__('cloudwatch', batch_dispatch_method='put_metric_data', max_batch_size=max_batch_size, **kwargs)
24 | self._aws_service_batch_max_payloads = constants.CLOUDWATCH_BATCH_MAX_PAYLOADS
25 | self._aws_service_message_max_bytes = constants.CLOUDWATCH_MESSAGE_MAX_BYTES
26 | self._aws_service_batch_max_bytes = constants.CLOUDWATCH_BATCH_MAX_BYTES
27 | self._batch_payload_wrapper = {'Namespace': self.namespace, 'MetricData': []}
28 | self._batch_payload = []
29 | self._validate_initialisation()
30 |
31 | def __str__(self):
32 | return f"CloudwatchBatchDispatcher::{self.namespace}"
33 |
34 | def submit_metric(self, metric_name: str, value: int, timestamp: datetime = None,
35 | dimensions: (dict, list) = None, unit: str = 'Count'):
36 | """ Submit a metric ready to be batched up and sent to Cloudwatch """
37 | payload = {
38 | 'MetricName': metric_name,
39 | 'Timestamp': timestamp or datetime.now(),
40 | 'Value': value,
41 | 'Unit': unit
42 | }
43 | logger.debug(f"Payload submitted to {self.aws_service_name} dispatcher: {payload}")
44 | if dimensions:
45 | payload['Dimensions'] = dimensions if isinstance(dimensions, list) else [dimensions]
46 | super().submit_payload(payload)
47 |
48 | def _batch_send_payloads(self, batch: dict = None, **kwargs):
49 | """ Attempt to send a single batch of metrics to Cloudwatch """
50 | if 'retry' in kwargs:
51 | super()._batch_send_payloads(batch, kwargs['retry'])
52 | else:
53 | super()._batch_send_payloads({'Namespace': self.namespace, 'MetricData': batch})
54 |
--------------------------------------------------------------------------------
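
A minimal usage sketch for the Cloudwatch dispatcher, based on the signatures above; the namespace, metric name and dimension values are placeholders, and a configured boto3 environment (credentials and region) is assumed:

```python
from boto3_batch_utils import CloudwatchBatchDispatcher, cloudwatch_dimension

dispatcher = CloudwatchBatchDispatcher(namespace='MyApp')  # placeholder namespace

# Each submission is buffered; a full batch is flushed automatically
dispatcher.submit_metric(
    metric_name='ProcessedRecords',
    value=12,
    dimensions=cloudwatch_dimension('Environment', 'prod')
)

# Send whatever is still buffered; returns any unprocessed items
unprocessed = dispatcher.flush_payloads()
```
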
/boto3_batch_utils/Dynamodb.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from botocore.exceptions import ClientError
3 |
4 | from boto3_batch_utils.Base import BaseDispatcher
5 | from boto3_batch_utils.utils import convert_floats_in_dict_to_decimals
6 | from boto3_batch_utils import constants
7 |
8 |
9 | logger = logging.getLogger('boto3-batch-utils')
10 |
11 |
12 | class DynamoBatchDispatcher(BaseDispatcher):
13 | """
14 | Control the submission of writes to DynamoDB
15 | """
16 |
17 | def __init__(self, dynamo_table_name: str, partition_key: str, sort_key: str = None,
18 | partition_key_data_type: type = str, max_batch_size: int = 25, **kwargs: dict):
19 | self.dynamo_table_name = dynamo_table_name
20 | self.partition_key = partition_key
21 | self.sort_key = sort_key
22 | self.partition_key_data_type = partition_key_data_type
23 | self._dynamo_table = None
24 | super().__init__('dynamodb', batch_dispatch_method='batch_write_item', max_batch_size=max_batch_size, **kwargs)
25 | self._aws_service_batch_max_payloads = constants.DYNAMODB_BATCH_MAX_PAYLOADS
26 | self._aws_service_message_max_bytes = constants.DYNAMODB_MESSAGE_MAX_BYTES
27 | self._aws_service_batch_max_bytes = constants.DYNAMODB_BATCH_MAX_BYTES
28 | self._batch_payload_wrapper = {'RequestItems': {self.dynamo_table_name: []}}
29 | self._batch_payload = []
30 | self._validate_initialisation()
31 |
32 | def __str__(self):
33 | return f"DynamoBatchDispatcher::{self.dynamo_table_name}"
34 |
35 | def submit_payload(self, payload, partition_key_location: str = None):
36 | """
37 | Submit a record ready for batch sending to DynamoDB
38 | """
39 | logger.debug(f"Payload submitted to {self.aws_service_name} dispatcher: {payload}")
40 | if partition_key_location:
41 | payload[self.partition_key] = self.partition_key_data_type(payload[partition_key_location])
42 | if self._check_payload_is_unique(payload):
43 | super().submit_payload({
44 | "PutRequest": {
45 | "Item": convert_floats_in_dict_to_decimals(payload)
46 | }
47 | })
48 | else:
49 | logger.warning("The candidate payload has a primary_partition_key which already exists in the "
50 | f"payload_list: {payload}")
51 |
52 | def _check_payload_is_unique(self, payload: dict) -> bool:
53 | """
54 | Check that a payload is unique, according to the partition key (and sort key if applicable)
55 | """
56 | logger.debug("Checking if the payload already exists in the existing batch")
57 | if self.sort_key:
58 | return self._check_payload_is_unique_by_partition_key_and_sort_key(payload)
59 | else:
60 | return self._check_payload_is_unique_by_partition_key(payload)
61 |
62 | def _check_payload_is_unique_by_partition_key_and_sort_key(self, payload: dict) -> bool:
63 | """
64 | Use the partition key AND sort key within the submitted payload to determine the payload's uniqueness,
65 | compared to existing payloads in the batch
66 | """
67 | logger.debug("Checking if the partition key, sort key combination already exists in the existing batch")
68 | if any(
69 | (d['PutRequest']['Item'][self.partition_key] == payload[self.partition_key] and
70 | d['PutRequest']['Item'][self.sort_key] == payload[self.sort_key])
71 | for d in self._batch_payload
72 | ):
73 | logger.debug("This payload has already been submitted")
74 | return False
75 | else:
76 | logger.debug("This payload is unique")
77 | return True
78 |
79 | def _check_payload_is_unique_by_partition_key(self, payload: dict) -> bool:
80 | """
81 | Use the partition key within the submitted payload to determine the payload's uniqueness, compared to existing
82 | payloads in the batch
83 | """
84 | logger.debug("Checking if the partition key already exists in the existing batch")
85 | if any(d['PutRequest']['Item'][self.partition_key] == payload[self.partition_key]
86 | for d in self._batch_payload):
87 | logger.debug("This payload has already been submitted")
88 | return False
89 | else:
90 | logger.debug("This payload is unique")
91 | return True
92 |
93 | def _initialise_aws_client(self):
94 | """
95 | Initialise client/resource for the AWS service
96 | """
97 | super()._initialise_aws_client()
98 | if not self._dynamo_table:
99 | self._dynamo_table = self._aws_service.Table(self.dynamo_table_name)
100 | logger.debug(f"DynamoDB Table Client '{self.dynamo_table_name}' is now initialised")
101 |
102 | def _batch_send_payloads(self, batch: dict = None, **kwargs):
103 | """
104 | Submit the batch to DynamoDB
105 | """
106 | if 'retry' in kwargs:
107 | super()._batch_send_payloads(batch, kwargs['retry'])
108 | else:
109 | super()._batch_send_payloads({'RequestItems': {self.dynamo_table_name: batch}})
110 |
111 | def _process_batch_send_response(self, response: dict):
112 | """
113 | Parse the response from a batch_write call, handle any failures as required.
114 | :param response: Response JSON from a batch_write_item request
115 | """
116 | unprocessed_items = response['UnprocessedItems']
117 | if unprocessed_items:
118 | logger.warning(f"Batch write failed to write some items, "
119 | f"{len(unprocessed_items[self.dynamo_table_name])} were rejected")
120 | for item in unprocessed_items[self.dynamo_table_name]:
121 | if 'PutRequest' in item:
122 | self._send_individual_payload(item['PutRequest']['Item'])
123 | else:
124 | raise TypeError("Individual write type is not supported")
125 |
126 | def _unpack_failed_batch_to_unprocessed_items(self, batch: dict):
127 | """ Extract all records from the attempted batch payload """
128 | extracted_payloads = [pl['PutRequest']['Item'] for pl in batch['RequestItems'][self.dynamo_table_name]]
129 | self.unprocessed_items = self.unprocessed_items + extracted_payloads
130 |
131 | def _send_individual_payload(self, payload: dict, retry: int = 4):
132 | """
133 | Write an individual record to Dynamo
134 | :param payload: JSON representation of a new record to write to the Dynamo table
135 | """
136 | logger.debug(f"Attempting to send individual payload ({retry} retries left): {payload}")
137 | try:
138 | self._dynamo_table.put_item(Item=payload)
139 | except ClientError as e:
140 | if retry:
141 | logger.debug(f"Individual send attempt has failed, retrying: {str(e)}")
142 | self._send_individual_payload(payload, retry - 1)
143 | else:
144 | logger.error(f"Individual send attempt has failed, no more retries remaining: {str(e)}")
145 | logger.debug(f"Failed payload: {payload}")
146 | self.unprocessed_items.append(payload)
147 |
--------------------------------------------------------------------------------
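
A minimal usage sketch for the Dynamo dispatcher; the table and key names are placeholders and a configured boto3 environment is assumed. Note the de-duplication behaviour implemented by `_check_payload_is_unique` above:

```python
from boto3_batch_utils import DynamoBatchDispatcher

dispatcher = DynamoBatchDispatcher('my_table', partition_key='id')  # placeholder names

dispatcher.submit_payload({'id': 'abc-123', 'score': 0.75})  # floats become Decimals
dispatcher.submit_payload({'id': 'abc-123', 'score': 0.80})  # same partition key: skipped with a warning

unprocessed = dispatcher.flush_payloads()
```
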
/boto3_batch_utils/Kinesis.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from json import dumps, loads
3 | from copy import deepcopy
4 | from uuid import uuid4
5 |
6 | from boto3_batch_utils.Base import BaseDispatcher
7 | from boto3_batch_utils.utils import DecimalEncoder
8 | from boto3_batch_utils import constants
9 |
10 |
11 | logger = logging.getLogger('boto3-batch-utils')
12 |
13 |
14 | class KinesisBatchDispatcher(BaseDispatcher):
15 | """
16 | Manage the batch 'put' of Kinesis records
17 | """
18 |
19 | def __init__(self, stream_name: str, partition_key_identifier: str = None, max_batch_size: int = 250,
20 | **kwargs: dict):
21 | self.stream_name = stream_name
22 | self.partition_key_identifier = partition_key_identifier
23 | self.batch_in_progress = []
24 | super().__init__('kinesis', batch_dispatch_method='put_records', individual_dispatch_method='put_record',
25 | max_batch_size=max_batch_size, **kwargs)
26 | self._aws_service_batch_max_payloads = constants.KINESIS_BATCH_MAX_PAYLOADS
27 | self._aws_service_message_max_bytes = constants.KINESIS_MESSAGE_MAX_BYTES
28 | self._aws_service_batch_max_bytes = constants.KINESIS_BATCH_MAX_BYTES
29 | self._batch_payload_wrapper = {'StreamName': self.stream_name, 'Records': []}
30 | self._batch_payload = []
31 | self._validate_initialisation()
32 |
33 | def __str__(self):
34 | return f"KinesisBatchDispatcher::{self.stream_name}"
35 |
36 | def submit_payload(self, payload: dict):
37 | """ Submit a metric ready to be batched up and sent to Kinesis """
38 | logger.debug(f"Payload submitted to {self.aws_service_name} dispatcher: {payload}")
39 | constructed_payload = {
40 | 'Data': dumps(payload, cls=DecimalEncoder),
41 | 'PartitionKey': f'{payload[self.partition_key_identifier] if self.partition_key_identifier else uuid4()}'
42 | }
43 | super().submit_payload(constructed_payload)
44 |
45 | def _batch_send_payloads(self, batch: (list, dict) = None, **kwargs):
46 | """ Attempt to send a single batch of metrics to Kinesis """
47 | self.batch_in_progress = batch
48 | if isinstance(batch, list):
49 | batch = {'StreamName': self.stream_name, 'Records': batch}
50 | if 'retry' in kwargs:
51 | super()._batch_send_payloads(batch, kwargs['retry'])
52 | else:
53 | super()._batch_send_payloads(batch)
54 |
55 | def _process_batch_send_response(self, response: dict):
56 | """
57 | Process the response from a put_records call and handle any failed records
58 | :param response: Response from the AWS service
59 | """
60 | logger.debug(f"Processing response: {response}")
61 | if "Records" in response:
62 | if response["FailedRecordCount"] == 0:
63 | logger.info(f"{len(self.batch_in_progress)} records successfully batch "
64 | f"sent to Kinesis::{self.stream_name}")
65 | self.batch_in_progress = None
66 | return
67 | else:
68 | logger.info(f"Failed payloads detected ({response['FailedRecordCount']}), processing errors...")
69 | self._process_failed_payloads(response)
70 |
71 | def _process_failed_payloads(self, response: dict, retry=3):
72 | """ Process the contents of a Put Records response when it contains failed records """
73 | failed_records = self._get_index_of_failed_record(response)
74 | if failed_records:
75 | logger.debug(f"Failed Records: {response['FailedRecordCount']}")
76 | batch_of_problematic_records = []
77 | for r in failed_records:
78 | batch_of_problematic_records.append(self.batch_in_progress[r])
79 | if len(failed_records) <= 2:
80 | for payload in batch_of_problematic_records:
81 | self._send_individual_payload(deepcopy(payload))
82 | else:
83 | self._batch_send_payloads(batch_of_problematic_records, retry=retry)
84 | self.batch_in_progress = None
85 |
86 | @staticmethod
87 | def _get_index_of_failed_record(response: dict) -> list:
88 | """ Parse the response object and identify which records failed and return an array of their index positions
89 | within the batch """
90 | i = 0
91 | failed_records = []
92 | for r in response["Records"]:
93 | logger.debug(f"Response: {r}")
94 | if "ErrorCode" in r:
95 | logger.warning(f"Payload failed to be sent to Kinesis. Message content: {r}")
96 | failed_records.append(i)
97 | i += 1
98 | return failed_records
99 |
100 | def _unpack_failed_batch_to_unprocessed_items(self, batch: dict):
101 | """ Extract all records from the attempted batch payload """
102 | extracted_payloads = [self._unpack_individual_failed_payload(pl) for pl in batch['Records']]
103 | self.unprocessed_items = self.unprocessed_items + extracted_payloads
104 |
105 | def _send_individual_payload(self, payload: dict, retry: int = 4):
106 | """ Send an individual payload to Kinesis """
107 | _payload = payload
108 | _payload['StreamName'] = self.stream_name
109 | super()._send_individual_payload(_payload, retry)
110 |
111 | def _unpack_individual_failed_payload(self, payload):
112 | """ Extract the record from a constructed payload """
113 | return loads(payload['Data'])
114 |
--------------------------------------------------------------------------------
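
A minimal usage sketch for the Kinesis dispatcher; the stream name is a placeholder and a configured boto3 environment is assumed. When `partition_key_identifier` is omitted, a random `uuid4` partition key is generated per record:

```python
from boto3_batch_utils import KinesisBatchDispatcher

dispatcher = KinesisBatchDispatcher('my_stream', partition_key_identifier='id')

# The payload is JSON-encoded into 'Data' and keyed on payload['id']
dispatcher.submit_payload({'id': 'abc-123', 'event': 'created'})

unprocessed = dispatcher.flush_payloads()
```
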
/boto3_batch_utils/SQS.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from uuid import uuid4
3 | from json import dumps, loads
4 |
5 | from boto3_batch_utils.Base import BaseDispatcher
6 | from boto3_batch_utils.utils import DecimalEncoder
7 | from boto3_batch_utils import constants
8 |
9 | logger = logging.getLogger('boto3-batch-utils')
10 |
11 |
12 | class SQSBaseBatchDispatcher(BaseDispatcher):
13 |
14 | def __init__(self, queue_name, max_batch_size=10, **kwargs: dict):
15 | self.queue_name = queue_name
16 | self.queue_url = None
17 | self.batch_in_progress = None
18 | self.fifo_queue = False
19 | super().__init__('sqs', batch_dispatch_method='send_message_batch', individual_dispatch_method='send_message',
20 | max_batch_size=max_batch_size, **kwargs)
21 | self._aws_service_batch_max_payloads = constants.SQS_MAX_BATCH_PAYLOADS
22 | self._aws_service_message_max_bytes = constants.SQS_MESSAGE_MAX_BYTES
23 | self._aws_service_batch_max_bytes = constants.SQS_BATCH_MAX_BYTES
24 | self._batch_payload_wrapper = {'QueueUrl': self.queue_url, 'Entries': []}
25 | self._batch_payload = []
26 | self._validate_initialisation()
27 |
28 | def _batch_send_payloads(self, batch: (dict, list) = None, **kwargs):
29 | """ Attempt to send a single batch of records to SQS """
30 | if not self.queue_url:
31 | self.queue_url = self._aws_service.get_queue_url(QueueName=self.queue_name)['QueueUrl']
32 | self.batch_in_progress = batch
33 | if 'retry' in kwargs:
34 | super()._batch_send_payloads(batch, kwargs['retry'])
35 | else:
36 | super()._batch_send_payloads({'Entries': batch, 'QueueUrl': self.queue_url})
37 |
38 | def _process_batch_send_response(self, response: dict):
39 | """ Process the response data from a batch put request """
40 | logger.debug(f"Processing response: {response}")
41 | if "Failed" in response:
42 | logger.info(f"Failed payloads detected ({len(response['Failed'])}), processing errors...")
43 | for failed_payload_response in response['Failed']:
44 | logger.debug(f"Message failed with following error: {failed_payload_response['Message']}")
45 | if failed_payload_response['SenderFault']:
46 | logger.warning(f"Message failed to send due to user error "
47 | f"({failed_payload_response['SenderFault']}): {failed_payload_response['Message']}")
48 | for payload in self.batch_in_progress:
49 | if payload['Id'] == failed_payload_response['Id']:
50 | self._send_individual_payload(payload)
51 | self.batch_in_progress = None
52 |
53 | def _unpack_failed_batch_to_unprocessed_items(self, batch: dict):
54 | """ Extract all records from the attempted batch payload """
55 | extracted_payloads = [self._unpack_individual_failed_payload(pl) for pl in batch['Entries']]
56 | self.unprocessed_items = self.unprocessed_items + extracted_payloads
57 |
58 |     def _unpack_individual_failed_payload(self, payload: dict):
59 |         """ Extract the original record from a constructed SQS payload """
60 | return loads(payload['MessageBody'])
61 |
62 |
63 | class SQSBatchDispatcher(SQSBaseBatchDispatcher):
64 | """
65 | Manage the batch 'send' of SQS messages
66 | """
67 |
68 | def __init__(self, queue_name, max_batch_size=10, **kwargs):
69 | super().__init__(queue_name, max_batch_size, **kwargs)
70 | self.fifo_queue = False
71 |
72 | def __str__(self):
73 | return f"SQSBatchDispatcher::{self.queue_name}"
74 |
75 | def submit_payload(self, payload: dict, message_id: str = None, delay_seconds: int = None):
76 | """ Submit a record ready to be batched up and sent to SQS """
77 | logger.debug(f"Payload submitted to SQS dispatcher: {payload}")
78 | message_id = message_id or uuid4().hex
79 | if not any(d["Id"] == message_id for d in self._batch_payload):
80 | constructed_payload = {
81 | 'Id': message_id,
82 | 'MessageBody': dumps(payload, cls=DecimalEncoder)
83 | }
84 | if isinstance(delay_seconds, int):
85 | constructed_payload['DelaySeconds'] = delay_seconds
86 | logger.debug(f"SQS payload constructed: {constructed_payload}")
87 | super().submit_payload(constructed_payload)
88 | else:
89 | logger.warning(f"Message with message_id ({message_id}) already exists in the batch, skipping...")
90 |
91 | def _send_individual_payload(self, payload: dict, retry: int = 4):
92 | """ Send an individual record to SQS """
93 | kwargs = {'QueueUrl': self.queue_url, 'MessageBody': payload['MessageBody']}
94 | if payload.get('DelaySeconds'):
95 | kwargs['DelaySeconds'] = payload['DelaySeconds']
96 | super()._send_individual_payload(kwargs, retry)
97 |
98 |
99 | class SQSFifoBatchDispatcher(SQSBaseBatchDispatcher):
100 |
101 | def __init__(self, queue_name, max_batch_size=10, **kwargs):
102 | super().__init__(queue_name, max_batch_size, **kwargs)
103 | self.fifo_queue = True
104 |
105 | def __str__(self):
106 | return f"SQSFifoBatchDispatcher::{self.queue_name}"
107 |
108 | def submit_payload(self, payload: dict, message_id: str = None, message_group_id: str = 'unset',
109 | message_deduplication_id: str = None):
110 | """ Submit a record ready to be batched up and sent to SQS """
111 | logger.debug(f"Payload submitted to SQS FIFO dispatcher: {payload}")
112 | constructed_payload = {
113 | 'Id': message_id or uuid4().hex,
114 | 'MessageBody': dumps(payload, cls=DecimalEncoder),
115 | 'MessageGroupId': message_group_id
116 | }
117 | message_is_duplicate = any(
118 | d.get('MessageDeduplicationId', "not_used") == message_deduplication_id or d["Id"] == message_id
119 | for d in self._batch_payload
120 | )
121 | if message_is_duplicate:
122 | logger.warning(f"Message with message_id ({message_id}) already exists in the batch, skipping...")
123 | return
124 | if message_deduplication_id:
125 | constructed_payload['MessageDeduplicationId'] = message_deduplication_id
126 | logger.debug(f"SQS FIFO payload constructed: {constructed_payload}")
127 | super().submit_payload(constructed_payload)
128 |
129 | def _send_individual_payload(self, payload: dict, retry: int = 4):
130 | """ Send an individual record to SQS """
131 | kwargs = {
132 | 'QueueUrl': self.queue_url,
133 | **payload
134 | }
135 | super()._send_individual_payload(kwargs, retry)
136 |
--------------------------------------------------------------------------------
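
A minimal usage sketch for the two SQS dispatchers; the queue names are placeholders and a configured boto3 environment is assumed (the queue URL is resolved from the queue name on first send):

```python
from boto3_batch_utils import SQSBatchDispatcher, SQSFifoBatchDispatcher

# Standard queue: optional per-message delay
standard = SQSBatchDispatcher('my-queue')
standard.submit_payload({'event': 'created'}, delay_seconds=30)
standard.flush_payloads()

# FIFO queue: group and deduplication identifiers
fifo = SQSFifoBatchDispatcher('my-queue.fifo')
fifo.submit_payload({'event': 'created'}, message_group_id='group-1',
                    message_deduplication_id='abc-123')
fifo.flush_payloads()
```
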
/boto3_batch_utils/__init__.py:
--------------------------------------------------------------------------------
1 | from boto3_batch_utils.Cloudwatch import CloudwatchBatchDispatcher, cloudwatch_dimension
2 | from boto3_batch_utils.Dynamodb import DynamoBatchDispatcher
3 | from boto3_batch_utils.Kinesis import KinesisBatchDispatcher
4 | from boto3_batch_utils.SQS import SQSBatchDispatcher, SQSFifoBatchDispatcher
5 |
6 | __all__ = [
7 | 'CloudwatchBatchDispatcher',
8 | 'cloudwatch_dimension',
9 | 'DynamoBatchDispatcher',
10 | 'KinesisBatchDispatcher',
11 | 'SQSBatchDispatcher',
12 | 'SQSFifoBatchDispatcher'
13 | ]
14 |
15 | __version__ = '5.1.0'
16 |
--------------------------------------------------------------------------------
/boto3_batch_utils/constants.py:
--------------------------------------------------------------------------------
1 | CLOUDWATCH_BATCH_MAX_PAYLOADS = 150
2 | CLOUDWATCH_MESSAGE_MAX_BYTES = 40960
3 | CLOUDWATCH_BATCH_MAX_BYTES = 40960
4 | CLOUDWATCH_MAX_DIMENSIONS_PER_METRIC = 10
5 |
6 | DYNAMODB_BATCH_MAX_PAYLOADS = 25
7 | DYNAMODB_MESSAGE_MAX_BYTES = 400000
8 | DYNAMODB_BATCH_MAX_BYTES = 16000000
9 |
10 | KINESIS_BATCH_MAX_PAYLOADS = 500
11 | KINESIS_MESSAGE_MAX_BYTES = 1000000
12 | KINESIS_BATCH_MAX_BYTES = 5000000
13 |
14 | SQS_MAX_BATCH_PAYLOADS = 10
15 | SQS_MESSAGE_MAX_BYTES = 262144
16 | SQS_BATCH_MAX_BYTES = 262144
17 |
18 | boto3_interface_type_mapper = {
19 | 'dynamodb': 'resource',
20 | 'kinesis': 'client',
21 | 'sqs': 'client',
22 | 'cloudwatch': 'client'
23 | }
24 |
--------------------------------------------------------------------------------
/boto3_batch_utils/utils.py:
--------------------------------------------------------------------------------
1 | from decimal import Decimal
2 | from json import JSONEncoder
3 | import logging
4 | import json
5 | from datetime import date, datetime
6 |
7 |
8 | logger = logging.getLogger('boto3-batch-utils')
9 |
10 |
11 | def chunks(array, chunk_size):
12 | """
13 | Yield successive chunks of a given list, as per chunk size
14 | :param array: Array - Array to be chunked up
15 | :param chunk_size: Int - Size of chunks required
16 |     :return: Generator - Yields successive chunks of the array
17 | """
18 | for i in range(0, len(array), chunk_size):
19 | yield array[i:i + chunk_size]
20 |
21 |
22 | def convert_floats_in_list_to_decimals(array, level=0):
23 | for i in array:
24 | logger.debug(f"Parsing list item for decimals (level: {level}): {i}")
25 | if isinstance(i, float):
26 | array[array.index(i)] = Decimal(str(i))
27 | elif isinstance(i, dict):
28 | array[array.index(i)] = convert_floats_in_dict_to_decimals(i, level=level+1)
29 | elif isinstance(i, list):
30 | array[array.index(i)] = convert_floats_in_list_to_decimals(i, level=level+1)
31 | return array
32 |
33 |
34 | def convert_floats_in_dict_to_decimals(record, level=0):
35 | """
36 | Floats are not valid object types for Dynamo, they must be converted to Decimals
37 | :param record:
38 | """
39 | new_record = {}
40 | logger.debug(f"Processing dict (level: {level}): {record}")
41 | for k, v in record.items():
42 | logger.debug(f"Parsing attribute '{k}' for decimals: {v} ({type(v)})")
43 | if isinstance(v, float):
44 | new_record[k] = Decimal(str(v))
45 | elif isinstance(v, dict):
46 | new_record[k] = convert_floats_in_dict_to_decimals(v, level=level+1)
47 | logger.debug(f"New dict returned: {new_record[k]}")
48 | elif isinstance(v, list):
49 | new_record[k] = convert_floats_in_list_to_decimals(v, level=level+1)
50 | else:
51 | new_record[k] = v
52 | logger.debug(f"New dict: {new_record}")
53 | return new_record
54 |
55 |
56 | class DecimalEncoder(JSONEncoder):
57 | """
58 |     Helper class to replace Decimal objects with floats or ints during JSON conversion.
59 | """
60 | def default(self, o):
61 | if isinstance(o, Decimal):
62 | if o % 1 > 0:
63 | return float(o)
64 | else:
65 | return int(o)
66 | return super().default(o)
67 |
68 |
69 | def default(o):
70 | if isinstance(o, (date, datetime)):
71 | return o.isoformat()
72 |
73 |
74 | def get_byte_size_of_string(string: str) -> int:
75 | """
76 | Return the number of bytes of a string
77 | """
78 | return len(string.encode('utf-8'))
79 |
80 |
81 | def get_byte_size_of_dict_or_list(d: (dict, list)) -> int:
82 |     """
83 |     Return the number of bytes of a dict or list, measured on its JSON serialisation
84 | """
85 | return get_byte_size_of_string(json.dumps(d, default=default))
86 |
--------------------------------------------------------------------------------
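
A short round trip through the two Decimal helpers above: floats are converted to `Decimal` so the record is valid for DynamoDB, and `DecimalEncoder` turns them back into plain JSON numbers:

```python
from decimal import Decimal
from json import dumps

from boto3_batch_utils.utils import convert_floats_in_dict_to_decimals, DecimalEncoder

record = {'id': 1, 'ratio': 0.25, 'nested': {'count': 3, 'values': [1.5, 2]}}

dynamo_safe = convert_floats_in_dict_to_decimals(record)
assert isinstance(dynamo_safe['ratio'], Decimal)

# Whole-number Decimals are emitted as ints, fractional ones as floats
print(dumps(dynamo_safe, cls=DecimalEncoder))
# {"id": 1, "ratio": 0.25, "nested": {"count": 3, "values": [1.5, 2]}}
```
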
/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3>=1.12.28
--------------------------------------------------------------------------------
/resources/developer_docs.md:
--------------------------------------------------------------------------------
1 | # Developer Documentation
2 | ## Linting
3 | Linting is checked by [flake8](https://flake8.pycqa.org/en/latest/). This can be run from the root of the repository
4 | by running `flake8 .`.
5 |
6 | Flake8 refers to configuration within `.flake8`
7 |
8 | ## Coverage
9 | Code coverage is checked by the [Coverage](https://coverage.readthedocs.io/en/coverage-5.0.3/) library.
10 |
11 | Coverage is configured within `.coveragerc`.
12 |
13 | ## Unit Tests
14 | Unit tests are stored within `/tests/unit_tests`. Tests are run using the nose tool: `nosetests tests/unit_tests`
15 |
16 | Unit test coverage must be a minimum of 85%.
17 |
18 | ## Integration Tests
19 | Integration tests are stored within `tests/integration_tests`. Tests are run using the nose tool:
20 | `nosetests tests/integration_tests`
21 |
22 | ## Versions
23 | The project follows Semantic Versioning in the format `X.X.X`:
24 | * **Major**: Release contains removed functionality or other breaking changes
25 | * **Minor**: Functionality has been changed but changes are not breaking
26 | * **Patch**: A defect fix or other small change which will not adversely affect users
27 |
28 | The tool [`python-semantic-release`](https://github.com/relekang/python-semantic-release) is used to manage version
29 | numbers.
30 |
31 | Version number is stored and maintained in `boto3_batch_utils/__init__.py:__version__`. Versions are managed on the
32 | `develop` branch only:
33 | * Code changes are made to a feature branch or `develop`.
34 | * Once ready, `master` is merged back into `develop`.
35 | * Version is increased using `semantic-release version` (which is configured in `setup.cfg`).
36 | * Pull Request made from `develop` branch to `master`.
37 |
38 | ## Automation, CI/CD
39 | Automation is achieved using GitHub Actions.
40 |
41 | ### All Branches
42 | Linting, unit tests and integration tests are all run automatically upon a `push`.
43 |
44 | [PyUp](https://pyup.io/) is used for dependency checking. This is also automated via GitHub Actions.
45 |
46 | ### Master (Publishing)
47 | In _addition_ to the above, the `master` branch also automatically packages and publishes the library to
48 | [PyPi](https://pypi.org/project/boto3-batch-utils/). It does so using the assigned version (see above).
49 |
50 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [semantic_release]
2 | version_variable = boto3_batch_utils/__init__.py:__version__
3 | upload_to_pypi = False
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | from setuptools import setup, find_packages
4 |
5 | ROOT = os.path.dirname(__file__)
6 | VERSION_RE = re.compile(r'''__version__ = ['"]([0-9.]+)['"]''')
7 |
8 | requires = [
9 | "boto3>=1.11.13"
10 | ]
11 |
12 |
13 | def get_version():
14 | init = open(os.path.join(ROOT, 'boto3_batch_utils', '__init__.py')).read()
15 | return VERSION_RE.search(init).group(1)
16 |
17 |
18 | def readme():
19 | with open('README.md') as f:
20 | return f.read()
21 |
22 |
23 | setup(name='boto3_batch_utils',
24 | description='A Client for managing batch interactions with AWS services',
25 | url='https://g-farrow.github.io/boto3_batch_utils/',
26 |       license='GNU AGPL v3',
27 | author='Greg Farrow',
28 | author_email='greg.farrow1@gmail.com',
29 | packages=find_packages(include=['boto3_batch_utils*']),
30 | version=get_version(),
31 | keywords='aws boto3 kinesis dynamo dynamodb sqs fifo batch',
32 | long_description=readme(),
33 | long_description_content_type='text/markdown',
34 | include_package_data=True,
35 | install_requires=requires)
36 |
--------------------------------------------------------------------------------
/tests/integration_tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/g-farrow/boto3_batch_utils/6dba31d6e5cc135a178fd507f2fbdce5f36b804a/tests/integration_tests/__init__.py
--------------------------------------------------------------------------------
/tests/integration_tests/cloudwatch/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/g-farrow/boto3_batch_utils/6dba31d6e5cc135a178fd507f2fbdce5f36b804a/tests/integration_tests/cloudwatch/__init__.py
--------------------------------------------------------------------------------
/tests/integration_tests/cloudwatch/test_cloudwatch.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock, call
3 |
4 | from datetime import datetime
5 |
6 | from boto3_batch_utils import CloudwatchBatchDispatcher, cloudwatch_dimension
7 |
8 |
9 | @patch('boto3_batch_utils.Base.boto3', Mock())
10 | class TestCloudwatch(TestCase):
11 |
12 | def test_more_than_one_batch_small_messages(self):
13 | cw_client = CloudwatchBatchDispatcher(namespace='namey_name', max_batch_size=5)
14 |
15 | mock_boto3 = Mock()
16 | cw_client._aws_service = mock_boto3
17 | cw_client._batch_dispatch_method = Mock()
18 |
19 | for i in range(0, 6):
20 | cw_client.submit_metric(metric_name='metricky', value=i+1, timestamp=datetime(2020, 2, 2, 1, 1, 1))
21 |
22 | cw_client.flush_payloads()
23 |
24 | self.assertEqual(2, cw_client._batch_dispatch_method.call_count)
25 | cw_client._batch_dispatch_method.assert_has_calls([
26 | call(**{'MetricData': [
27 | {'MetricName': 'metricky', 'Timestamp': datetime(2020, 2, 2, 1, 1, 1), 'Value': 1, 'Unit': 'Count'},
28 | {'MetricName': 'metricky', 'Timestamp': datetime(2020, 2, 2, 1, 1, 1), 'Value': 2, 'Unit': 'Count'},
29 | {'MetricName': 'metricky', 'Timestamp': datetime(2020, 2, 2, 1, 1, 1), 'Value': 3, 'Unit': 'Count'},
30 | {'MetricName': 'metricky', 'Timestamp': datetime(2020, 2, 2, 1, 1, 1), 'Value': 4, 'Unit': 'Count'},
31 | {'MetricName': 'metricky', 'Timestamp': datetime(2020, 2, 2, 1, 1, 1), 'Value': 5, 'Unit': 'Count'},
32 | ], 'Namespace': 'namey_name'}),
33 | call(**{'MetricData': [
34 | {'MetricName': 'metricky', 'Timestamp': datetime(2020, 2, 2, 1, 1, 1), 'Value': 6, 'Unit': 'Count'}
35 | ], 'Namespace': 'namey_name'})
36 | ])
37 |
--------------------------------------------------------------------------------
/tests/integration_tests/dynamo/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/g-farrow/boto3_batch_utils/6dba31d6e5cc135a178fd507f2fbdce5f36b804a/tests/integration_tests/dynamo/__init__.py
--------------------------------------------------------------------------------
/tests/integration_tests/dynamo/test_dynamo.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock, call
3 |
4 | from botocore.exceptions import ClientError
5 |
6 | from boto3_batch_utils import DynamoBatchDispatcher
7 |
8 |
9 | @patch('boto3_batch_utils.Base.boto3', Mock())
10 | class TestDynamo(TestCase):
11 |
12 | def test_more_than_one_batch_small_messages(self):
13 | dy_client = DynamoBatchDispatcher(dynamo_table_name='test_table', partition_key='id', max_batch_size=5)
14 |
15 | mock_boto3 = Mock()
16 | dy_client._aws_service = mock_boto3
17 | dy_client._batch_dispatch_method = Mock(return_value={'UnprocessedItems': {}})
18 |
19 | for i in range(0, 6):
20 | dy_client.submit_payload({"id": f"abc{i}", "message": True})
21 |
22 | dy_client.flush_payloads()
23 |
24 | self.assertEqual(2, dy_client._batch_dispatch_method.call_count)
25 |
26 | def test_batch_of_10_failed_first_time_messages(self):
27 | dy_client = DynamoBatchDispatcher(dynamo_table_name='test_table', partition_key='m_id', max_batch_size=10)
28 |
29 | mock_boto3 = Mock()
30 | dy_client._aws_service = mock_boto3
31 | dy_client._dynamo_table = Mock()
32 |
33 | test_payloads = [
34 | {'m_id': 1, 'message': 'message contents 1'},
35 | {'m_id': 2, 'message': 'message contents 2'},
36 | {'m_id': 3, 'message': 'message contents 3'},
37 | {'m_id': 4, 'message': 'message contents 4'},
38 | {'m_id': 5, 'message': 'message contents 5'},
39 | {'m_id': 6, 'message': 'message contents 6'},
40 | {'m_id': 7, 'message': 'message contents 7'},
41 | {'m_id': 8, 'message': 'message contents 8'},
42 | {'m_id': 9, 'message': 'message contents 9'},
43 | {'m_id': 10, 'message': 'message contents 10'}
44 | ]
45 |
46 | failure_response = {'UnprocessedItems': {'test_table': []}}
47 | for pl in test_payloads:
48 | failure_response['UnprocessedItems']['test_table'].append({'PutRequest': {'Item': pl}})
49 |
50 | dy_client._batch_dispatch_method = Mock(side_effect=[failure_response, True, True])
51 |
52 | for test_payload in test_payloads:
53 | dy_client.submit_payload(test_payload)
54 |
55 | dy_client.flush_payloads()
56 | dy_client._batch_dispatch_method.assert_called_once_with(
57 | **{'RequestItems': {'test_table': [{'PutRequest': {'Item': pl}} for pl in test_payloads]}}
58 | )
59 | dy_client._dynamo_table.put_item.assert_has_calls([
60 | call(**{'Item': {'m_id': 1, 'message': 'message contents 1'}}),
61 | call(**{'Item': {'m_id': 2, 'message': 'message contents 2'}}),
62 | call(**{'Item': {'m_id': 3, 'message': 'message contents 3'}}),
63 | call(**{'Item': {'m_id': 4, 'message': 'message contents 4'}}),
64 | call(**{'Item': {'m_id': 5, 'message': 'message contents 5'}}),
65 | call(**{'Item': {'m_id': 6, 'message': 'message contents 6'}}),
66 | call(**{'Item': {'m_id': 7, 'message': 'message contents 7'}}),
67 | call(**{'Item': {'m_id': 8, 'message': 'message contents 8'}}),
68 | call(**{'Item': {'m_id': 9, 'message': 'message contents 9'}}),
69 | call(**{'Item': {'m_id': 10, 'message': 'message contents 10'}}),
70 | ], any_order=True)
71 |
72 | def test_exception_individual_send_unprocessed_items_are_returned(self):
73 | dy_client = DynamoBatchDispatcher(dynamo_table_name='test_table', partition_key='m_id', max_batch_size=10)
74 | mock_client_error = ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "Dynamo")
75 | mock_boto3 = Mock()
76 | dy_client._aws_service = mock_boto3
77 | dy_client._dynamo_table = Mock()
78 | dy_client._dynamo_table.put_item = Mock(side_effect=[mock_client_error for _ in range(0, 50)])
79 |
80 | test_payloads = [
81 | {'m_id': 1, 'message': 'message contents 1'},
82 | {'m_id': 2, 'message': 'message contents 2'},
83 | {'m_id': 3, 'message': 'message contents 3'},
84 | {'m_id': 4, 'message': 'message contents 4'},
85 | {'m_id': 5, 'message': 'message contents 5'}
86 | ]
87 |
88 | failure_response = {'UnprocessedItems': {'test_table': []}}
89 | for pl in test_payloads:
90 | failure_response['UnprocessedItems']['test_table'].append({'PutRequest': {'Item': pl}})
91 | dy_client._batch_dispatch_method = Mock(side_effect=[failure_response, failure_response, failure_response,
92 | failure_response, failure_response])
93 |
94 | for test_payload in test_payloads:
95 | dy_client.submit_payload(test_payload)
96 |
97 | response = dy_client.flush_payloads()
98 |
99 | dy_client._batch_dispatch_method.assert_called_once_with(
100 | **{'RequestItems': {'test_table': [{'PutRequest': {'Item': pl}} for pl in test_payloads]}}
101 | )
102 | dy_client._dynamo_table.put_item.assert_has_calls([
103 | call(**{'Item': {'m_id': 1, 'message': 'message contents 1'}}),
104 | call(**{'Item': {'m_id': 1, 'message': 'message contents 1'}}),
105 | call(**{'Item': {'m_id': 1, 'message': 'message contents 1'}}),
106 | call(**{'Item': {'m_id': 1, 'message': 'message contents 1'}}),
107 | call(**{'Item': {'m_id': 1, 'message': 'message contents 1'}}),
108 | call(**{'Item': {'m_id': 2, 'message': 'message contents 2'}}),
109 | call(**{'Item': {'m_id': 2, 'message': 'message contents 2'}}),
110 | call(**{'Item': {'m_id': 2, 'message': 'message contents 2'}}),
111 | call(**{'Item': {'m_id': 2, 'message': 'message contents 2'}}),
112 | call(**{'Item': {'m_id': 2, 'message': 'message contents 2'}}),
113 | call(**{'Item': {'m_id': 3, 'message': 'message contents 3'}}),
114 | call(**{'Item': {'m_id': 3, 'message': 'message contents 3'}}),
115 | call(**{'Item': {'m_id': 3, 'message': 'message contents 3'}}),
116 | call(**{'Item': {'m_id': 3, 'message': 'message contents 3'}}),
117 | call(**{'Item': {'m_id': 3, 'message': 'message contents 3'}}),
118 | call(**{'Item': {'m_id': 4, 'message': 'message contents 4'}}),
119 | call(**{'Item': {'m_id': 4, 'message': 'message contents 4'}}),
120 | call(**{'Item': {'m_id': 4, 'message': 'message contents 4'}}),
121 | call(**{'Item': {'m_id': 4, 'message': 'message contents 4'}}),
122 | call(**{'Item': {'m_id': 4, 'message': 'message contents 4'}}),
123 | call(**{'Item': {'m_id': 5, 'message': 'message contents 5'}}),
124 | call(**{'Item': {'m_id': 5, 'message': 'message contents 5'}}),
125 | call(**{'Item': {'m_id': 5, 'message': 'message contents 5'}}),
126 | call(**{'Item': {'m_id': 5, 'message': 'message contents 5'}}),
127 | call(**{'Item': {'m_id': 5, 'message': 'message contents 5'}})
128 | ], any_order=True)
129 | self.assertEqual(test_payloads, response)
130 |
131 | def test_exception_batch_send_unprocessed_items_are_returned(self):
132 | dy_client = DynamoBatchDispatcher(dynamo_table_name='test_table', partition_key='m_id', max_batch_size=10)
133 | mock_client_error = ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "Dynamo")
134 | mock_boto3 = Mock()
135 | dy_client._aws_service = mock_boto3
136 | dy_client._dynamo_table = Mock()
137 | dy_client._dynamo_table.put_item = Mock()
138 |
139 | test_payloads = [
140 | {'m_id': 1, 'message': 'message contents 1'},
141 | {'m_id': 2, 'message': 'message contents 2'},
142 | {'m_id': 3, 'message': 'message contents 3'},
143 | {'m_id': 4, 'message': 'message contents 4'},
144 | {'m_id': 5, 'message': 'message contents 5'}
145 | ]
146 |
147 | dy_client._batch_dispatch_method = Mock(side_effect=[mock_client_error, mock_client_error, mock_client_error,
148 | mock_client_error, mock_client_error])
149 |
150 | for test_payload in test_payloads:
151 | dy_client.submit_payload(test_payload)
152 |
153 | response = dy_client.flush_payloads()
154 |
155 | dy_client._batch_dispatch_method.assert_has_calls([
156 | call(**{'RequestItems': {'test_table': [{'PutRequest': {'Item': pl}} for pl in test_payloads]}}),
157 | call(**{'RequestItems': {'test_table': [{'PutRequest': {'Item': pl}} for pl in test_payloads]}}),
158 | call(**{'RequestItems': {'test_table': [{'PutRequest': {'Item': pl}} for pl in test_payloads]}}),
159 | call(**{'RequestItems': {'test_table': [{'PutRequest': {'Item': pl}} for pl in test_payloads]}}),
160 | call(**{'RequestItems': {'test_table': [{'PutRequest': {'Item': pl}} for pl in test_payloads]}})
161 | ])
162 | dy_client._dynamo_table.put_item.assert_not_called()
163 | self.assertEqual(test_payloads, response)
164 |
--------------------------------------------------------------------------------
/tests/integration_tests/kinesis/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/g-farrow/boto3_batch_utils/6dba31d6e5cc135a178fd507f2fbdce5f36b804a/tests/integration_tests/kinesis/__init__.py
--------------------------------------------------------------------------------
/tests/integration_tests/kinesis/test_kinesis.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock, call
3 |
4 | from botocore.exceptions import ClientError
5 |
6 | from boto3_batch_utils import KinesisBatchDispatcher
7 |
8 |
9 | @patch('boto3_batch_utils.Base.boto3', Mock())
10 | class TestKinesis(TestCase):
11 |
12 | def test_more_than_one_batch_small_messages(self):
13 | kinesis_client = KinesisBatchDispatcher(stream_name='test_stream', max_batch_size=30)
14 |
15 | mock_boto3 = Mock()
16 | kinesis_client._aws_service = mock_boto3
17 | kinesis_client._batch_dispatch_method = Mock(return_value={'FailedRecordCount': 0})
18 |
19 | test_payload = {'Id': 123, 'message': True}
20 |
21 | for _ in range(0, 31):
22 | kinesis_client.submit_payload(test_payload)
23 |
24 | kinesis_client.flush_payloads()
25 |
26 | self.assertEqual(2, kinesis_client._batch_dispatch_method.call_count)
27 |
28 | def test_batch_of_10_failed_first_time_messages(self):
29 | kinesis_client = KinesisBatchDispatcher(stream_name='test_stream', partition_key_identifier='m_id')
30 |
31 | mock_boto3 = Mock()
32 | kinesis_client._aws_service = mock_boto3
33 |
34 | test_payloads = [
35 | {'m_id': 1, 'message': 'message contents 1'},
36 | {'m_id': 2, 'message': 'message contents 2'},
37 | {'m_id': 3, 'message': 'message contents 3'},
38 | {'m_id': 4, 'message': 'message contents 4'},
39 | {'m_id': 5, 'message': 'message contents 5'},
40 | {'m_id': 6, 'message': 'message contents 6'},
41 | {'m_id': 7, 'message': 'message contents 7'},
42 | {'m_id': 8, 'message': 'message contents 8'},
43 | {'m_id': 9, 'message': 'message contents 9'},
44 | {'m_id': 10, 'message': 'message contents 10'}
45 | ]
46 |
47 | # All records fail in first attempt
48 | failure_response_1 = {
49 | 'FailedRecordCount': 10,
50 | 'Records': [
51 | {'m_id': 1, 'message': 'message contents 1', 'ErrorCode': 'badness'},
52 | {'m_id': 2, 'message': 'message contents 2', 'ErrorCode': 'badness'},
53 | {'m_id': 3, 'message': 'message contents 3', 'ErrorCode': 'badness'},
54 | {'m_id': 4, 'message': 'message contents 4', 'ErrorCode': 'badness'},
55 | {'m_id': 5, 'message': 'message contents 5', 'ErrorCode': 'badness'},
56 | {'m_id': 6, 'message': 'message contents 6', 'ErrorCode': 'badness'},
57 | {'m_id': 7, 'message': 'message contents 7', 'ErrorCode': 'badness'},
58 | {'m_id': 8, 'message': 'message contents 8', 'ErrorCode': 'badness'},
59 | {'m_id': 9, 'message': 'message contents 9', 'ErrorCode': 'badness'},
60 | {'m_id': 10, 'message': 'message contents 10', 'ErrorCode': 'badness'}
61 | ],
62 | 'EncryptionType': 'NONE'
63 | }
64 |
65 | # first 5 succeed, second 5 fail
66 | failure_response_2 = {
67 | 'FailedRecordCount': 5,
68 | 'Records': [
69 | {'m_id': 1, 'message': 'message contents 1'},
70 | {'m_id': 2, 'message': 'message contents 2'},
71 | {'m_id': 3, 'message': 'message contents 3'},
72 | {'m_id': 4, 'message': 'message contents 4'},
73 | {'m_id': 5, 'message': 'message contents 5'},
74 | {'m_id': 6, 'message': 'message contents 6', 'ErrorCode': 'badness'},
75 | {'m_id': 7, 'message': 'message contents 7', 'ErrorCode': 'badness'},
76 | {'m_id': 8, 'message': 'message contents 8', 'ErrorCode': 'badness'},
77 | {'m_id': 9, 'message': 'message contents 9', 'ErrorCode': 'badness'},
78 | {'m_id': 10, 'message': 'message contents 10', 'ErrorCode': 'badness'}
79 | ],
80 | 'EncryptionType': 'NONE'
81 | }
82 |
83 | # 6, 7 and 8 succeed, 9 and 10 fail
84 | failure_response_3 = {
85 | 'FailedRecordCount': 2,
86 | 'Records': [
87 | {'m_id': 6, 'message': 'message contents 6'},
88 | {'m_id': 7, 'message': 'message contents 7'},
89 | {'m_id': 8, 'message': 'message contents 8'},
90 | {'m_id': 9, 'message': 'message contents 9', 'ErrorCode': 'badness'},
91 | {'m_id': 10, 'message': 'message contents 10', 'ErrorCode': 'badness'}
92 | ],
93 | 'EncryptionType': 'NONE'
94 | }
95 |
96 | kinesis_client._batch_dispatch_method = Mock(side_effect=[failure_response_1, failure_response_2,
97 | failure_response_3])
98 | kinesis_client._individual_dispatch_method = Mock()
99 |
100 | for test_payload in test_payloads:
101 | kinesis_client.submit_payload(test_payload)
102 |
103 | kinesis_client.flush_payloads()
104 |
105 | kinesis_client._batch_dispatch_method.assert_has_calls([
106 | call(**{'Records': [
107 | {'Data': '{"m_id": 1, "message": "message contents 1"}', 'PartitionKey': '1'},
108 | {'Data': '{"m_id": 2, "message": "message contents 2"}', 'PartitionKey': '2'},
109 | {'Data': '{"m_id": 3, "message": "message contents 3"}', 'PartitionKey': '3'},
110 | {'Data': '{"m_id": 4, "message": "message contents 4"}', 'PartitionKey': '4'},
111 | {'Data': '{"m_id": 5, "message": "message contents 5"}', 'PartitionKey': '5'},
112 | {'Data': '{"m_id": 6, "message": "message contents 6"}', 'PartitionKey': '6'},
113 | {'Data': '{"m_id": 7, "message": "message contents 7"}', 'PartitionKey': '7'},
114 | {'Data': '{"m_id": 8, "message": "message contents 8"}', 'PartitionKey': '8'},
115 | {'Data': '{"m_id": 9, "message": "message contents 9"}', 'PartitionKey': '9'},
116 | {'Data': '{"m_id": 10, "message": "message contents 10"}', 'PartitionKey': '10'}
117 | ], 'StreamName': 'test_stream'}),
118 | call(**{'Records': [
119 | {'Data': '{"m_id": 6, "message": "message contents 6"}', 'PartitionKey': '6'},
120 | {'Data': '{"m_id": 7, "message": "message contents 7"}', 'PartitionKey': '7'},
121 | {'Data': '{"m_id": 8, "message": "message contents 8"}', 'PartitionKey': '8'},
122 | {'Data': '{"m_id": 9, "message": "message contents 9"}', 'PartitionKey': '9'},
123 | {'Data': '{"m_id": 10, "message": "message contents 10"}', 'PartitionKey': '10'}
124 | ], 'StreamName': 'test_stream'})
125 | ])
126 | kinesis_client._individual_dispatch_method.assert_has_calls([
127 | call(Data='{"m_id": 9, "message": "message contents 9"}', PartitionKey='9', StreamName='test_stream'),
128 | call(Data='{"m_id": 10, "message": "message contents 10"}', PartitionKey='10', StreamName='test_stream')
129 | ])
130 |
131 | def test_batch_write_throws_exceptions(self):
132 | kinesis_client = KinesisBatchDispatcher(stream_name='test_stream', partition_key_identifier='m_id')
133 | mock_client_error = ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "Dynamo")
134 | mock_boto3 = Mock()
135 | kinesis_client._aws_service = mock_boto3
136 |
137 | test_payloads = [
138 | {'m_id': 1, 'message': 'message contents 1'},
139 | {'m_id': 2, 'message': 'message contents 2'},
140 | {'m_id': 3, 'message': 'message contents 3'},
141 | {'m_id': 4, 'message': 'message contents 4'},
142 | {'m_id': 5, 'message': 'message contents 5'}
143 | ]
144 |
145 | kinesis_client._batch_dispatch_method = Mock(side_effect=[mock_client_error, mock_client_error,
146 | mock_client_error, mock_client_error,
147 | mock_client_error])
148 | kinesis_client._individual_dispatch_method = Mock()
149 |
150 | for test_payload in test_payloads:
151 | kinesis_client.submit_payload(test_payload)
152 |
153 | kinesis_client.flush_payloads()
154 |
155 | kinesis_client._batch_dispatch_method.assert_has_calls([
156 | call(**{'Records': [
157 | {'Data': '{"m_id": 1, "message": "message contents 1"}', 'PartitionKey': '1'},
158 | {'Data': '{"m_id": 2, "message": "message contents 2"}', 'PartitionKey': '2'},
159 | {'Data': '{"m_id": 3, "message": "message contents 3"}', 'PartitionKey': '3'},
160 | {'Data': '{"m_id": 4, "message": "message contents 4"}', 'PartitionKey': '4'},
161 | {'Data': '{"m_id": 5, "message": "message contents 5"}', 'PartitionKey': '5'}
162 | ], 'StreamName': 'test_stream'}),
163 | call(**{'Records': [
164 | {'Data': '{"m_id": 1, "message": "message contents 1"}', 'PartitionKey': '1'},
165 | {'Data': '{"m_id": 2, "message": "message contents 2"}', 'PartitionKey': '2'},
166 | {'Data': '{"m_id": 3, "message": "message contents 3"}', 'PartitionKey': '3'},
167 | {'Data': '{"m_id": 4, "message": "message contents 4"}', 'PartitionKey': '4'},
168 | {'Data': '{"m_id": 5, "message": "message contents 5"}', 'PartitionKey': '5'}
169 | ], 'StreamName': 'test_stream'}),
170 | call(**{'Records': [
171 | {'Data': '{"m_id": 1, "message": "message contents 1"}', 'PartitionKey': '1'},
172 | {'Data': '{"m_id": 2, "message": "message contents 2"}', 'PartitionKey': '2'},
173 | {'Data': '{"m_id": 3, "message": "message contents 3"}', 'PartitionKey': '3'},
174 | {'Data': '{"m_id": 4, "message": "message contents 4"}', 'PartitionKey': '4'},
175 | {'Data': '{"m_id": 5, "message": "message contents 5"}', 'PartitionKey': '5'}
176 | ], 'StreamName': 'test_stream'})
177 | ])
178 | kinesis_client._individual_dispatch_method.assert_not_called()
179 | self.assertEqual(test_payloads, kinesis_client.unprocessed_items)
180 |
181 | def test_individual_write_throws_exceptions(self):
182 | mock_client_error = ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "Dynamo")
183 |
184 | kinesis_client = KinesisBatchDispatcher(stream_name='test_stream', partition_key_identifier='m_id')
185 |
186 | mock_boto3 = Mock()
187 | kinesis_client._aws_service = mock_boto3
188 |
189 | test_payloads = [
190 | {'m_id': 1, 'message': 'message contents 1'},
191 | {'m_id': 2, 'message': 'message contents 2'}
192 | ]
193 |
194 | # All records fail in first attempt
195 | failure_response = {
196 | 'FailedRecordCount': 2,
197 | 'Records': [
198 | {'m_id': 1, 'message': 'message contents 1', 'ErrorCode': 'badness'},
199 | {'m_id': 2, 'message': 'message contents 2', 'ErrorCode': 'badness'}
200 | ],
201 | 'EncryptionType': 'NONE'
202 | }
203 |
204 | kinesis_client._batch_dispatch_method = Mock(side_effect=[failure_response])
205 | kinesis_client._individual_dispatch_method = Mock(side_effect=[mock_client_error, mock_client_error,
206 | mock_client_error, mock_client_error,
207 | mock_client_error, mock_client_error,
208 | mock_client_error, mock_client_error,
209 | mock_client_error, mock_client_error])
210 |
211 | for test_payload in test_payloads:
212 | kinesis_client.submit_payload(test_payload)
213 |
214 | kinesis_client.flush_payloads()
215 |
216 | kinesis_client._batch_dispatch_method.assert_called_once_with(
217 | **{'Records': [
218 | {'Data': '{"m_id": 1, "message": "message contents 1"}', 'PartitionKey': '1'},
219 | {'Data': '{"m_id": 2, "message": "message contents 2"}', 'PartitionKey': '2'}
220 | ], 'StreamName': 'test_stream'}
221 | )
222 | kinesis_client._individual_dispatch_method.assert_has_calls([
223 | call(Data='{"m_id": 1, "message": "message contents 1"}', PartitionKey='1', StreamName='test_stream'),
224 | call(Data='{"m_id": 1, "message": "message contents 1"}', PartitionKey='1', StreamName='test_stream'),
225 | call(Data='{"m_id": 1, "message": "message contents 1"}', PartitionKey='1', StreamName='test_stream'),
226 | call(Data='{"m_id": 1, "message": "message contents 1"}', PartitionKey='1', StreamName='test_stream'),
227 | call(Data='{"m_id": 1, "message": "message contents 1"}', PartitionKey='1', StreamName='test_stream'),
228 | call(Data='{"m_id": 2, "message": "message contents 2"}', PartitionKey='2', StreamName='test_stream'),
229 | call(Data='{"m_id": 2, "message": "message contents 2"}', PartitionKey='2', StreamName='test_stream'),
230 | call(Data='{"m_id": 2, "message": "message contents 2"}', PartitionKey='2', StreamName='test_stream'),
231 | call(Data='{"m_id": 2, "message": "message contents 2"}', PartitionKey='2', StreamName='test_stream'),
232 | call(Data='{"m_id": 2, "message": "message contents 2"}', PartitionKey='2', StreamName='test_stream')
233 | ])
234 | self.assertEqual(test_payloads, kinesis_client.unprocessed_items)
235 |
--------------------------------------------------------------------------------
/tests/integration_tests/large_messages.py:
--------------------------------------------------------------------------------
1 | from boto3_batch_utils.utils import get_byte_size_of_dict_or_list
2 |
3 |
4 | def create_dict_of_specific_byte_size(initial_dict: dict, desired_byte_size: int):
5 | new_dict = initial_dict
6 | new_dict['extra_content_to_achieve_required_byte_size'] = ""
7 | initial_byte_size = get_byte_size_of_dict_or_list(initial_dict)
8 | additional_required_bytes = desired_byte_size - initial_byte_size
9 | for _ in range(0, additional_required_bytes):
10 | new_dict['extra_content_to_achieve_required_byte_size'] += "x"
11 | return new_dict
12 |
--------------------------------------------------------------------------------
/tests/integration_tests/sqs/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/g-farrow/boto3_batch_utils/6dba31d6e5cc135a178fd507f2fbdce5f36b804a/tests/integration_tests/sqs/__init__.py
--------------------------------------------------------------------------------
/tests/integration_tests/sqs/test_sqs_fifo.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock, call
3 |
4 | from botocore.exceptions import ClientError
5 |
6 | from boto3_batch_utils import SQSFifoBatchDispatcher
7 |
8 | from .. import large_messages
9 |
10 |
11 | @patch('boto3_batch_utils.Base.boto3', Mock())
12 | class TestSqsFifo(TestCase):
13 |
14 | def test_more_than_one_batch_small_messages(self):
15 | sqs_client = SQSFifoBatchDispatcher(queue_name='test_standard_queue')
16 |
17 | mock_boto3 = Mock()
18 | sqs_client._aws_service = mock_boto3
19 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
20 | sqs_client._batch_dispatch_method = Mock(return_value={'hello!': True})
21 |
22 | test_payload = {'message': True}
23 |
24 | for _ in range(0, 11):
25 | sqs_client.submit_payload(test_payload)
26 | sqs_client.flush_payloads()
27 |
28 | self.assertEqual(2, sqs_client._batch_dispatch_method.call_count)
29 |
30 | def test_one_oversized_message(self):
31 | sqs_client = SQSFifoBatchDispatcher(queue_name='test_standard_queue')
32 |
33 | mock_boto3 = Mock()
34 | sqs_client._aws_service = mock_boto3
35 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
36 | sqs_client._batch_dispatch_method = Mock(return_value={'hello!': True})
37 |
38 | # An additional 92 bytes are required within the submit method
39 | test_payload = large_messages.create_dict_of_specific_byte_size({}, 262144 - 91)
40 |
41 | with self.assertRaises(ValueError) as context:
42 | sqs_client.submit_payload(test_payload)
43 | sqs_client.flush_payloads()
44 |
45 | self.assertIn('exceeds the maximum payload size', str(context.exception))
46 |
47 | def test_one_oversized_message_with_deduplication_id(self):
48 | sqs_client = SQSFifoBatchDispatcher(queue_name='test_standard_queue')
49 |
50 | mock_boto3 = Mock()
51 | sqs_client._aws_service = mock_boto3
52 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
53 | sqs_client._batch_dispatch_method = Mock(return_value={'hello!': True})
54 |
55 |         # With a 1-char deduplication id the submit method adds 123 bytes, so (262144 - 122) is 1 byte over the limit
56 | test_payload = large_messages.create_dict_of_specific_byte_size({}, 262144 - 122)
57 |
58 | with self.assertRaises(ValueError) as context:
59 | sqs_client.submit_payload(test_payload, message_deduplication_id="1")
60 |         sqs_client.flush_payloads()
61 |
62 | self.assertIn('exceeds the maximum payload size', str(context.exception))
63 |
64 | def test_more_than_one_batch_large_messages(self):
65 |         sqs_client = SQSFifoBatchDispatcher(queue_name='test_fifo_queue')
66 |
67 | mock_boto3 = Mock()
68 | sqs_client._aws_service = mock_boto3
69 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
70 | sqs_client._batch_dispatch_method = Mock(return_value={'hello!': True})
71 |
72 |         # The submit method adds 96 bytes of overhead, so a payload of (262144 - 96) bytes exactly fills the limit
73 | test_payload = large_messages.create_dict_of_specific_byte_size({}, 262144 - 96)
74 |
75 | for _ in range(0, 2):
76 | sqs_client.submit_payload(test_payload)
77 | sqs_client.flush_payloads()
78 |
79 | self.assertEqual(2, sqs_client._batch_dispatch_method.call_count)
80 |
81 | def test_batch_of_10_failed_first_time_messages(self):
82 |         sqs_client = SQSFifoBatchDispatcher(queue_name='test_fifo_queue')
83 |
84 | mock_boto3 = Mock()
85 | sqs_client._aws_service = mock_boto3
86 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
87 |
88 | failure_response = {
89 | 'Failed': [
90 | {'Id': x, 'Message': 'it failed', 'SenderFault': True} for x in range(1, 11)
91 | ]
92 | }
93 |
94 | sqs_client._batch_dispatch_method = Mock(side_effect=[failure_response, True, True])
95 | sqs_client._individual_dispatch_method = Mock()
96 |
97 | test_payloads = [
98 | {'m_id': 1, 'message': 'message contents 1'},
99 | {'m_id': 2, 'message': 'message contents 2'},
100 | {'m_id': 3, 'message': 'message contents 3'},
101 | {'m_id': 4, 'message': 'message contents 4'},
102 | {'m_id': 5, 'message': 'message contents 5'},
103 | {'m_id': 6, 'message': 'message contents 6'},
104 | {'m_id': 7, 'message': 'message contents 7'},
105 | {'m_id': 8, 'message': 'message contents 8'},
106 | {'m_id': 9, 'message': 'message contents 9'},
107 | {'m_id': 10, 'message': 'message contents 10'}
108 | ]
109 |
110 | for test_payload in test_payloads:
111 | sqs_client.submit_payload(test_payload, message_id=test_payload['m_id'])
112 | sqs_client.flush_payloads()
113 |
114 | sqs_client._batch_dispatch_method.assert_called_once()
115 | sqs_client._individual_dispatch_method.assert_has_calls([
116 | call(**{'Id': 1, 'MessageBody': '{"m_id": 1, "message": "message contents 1"}',
117 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'}),
118 | call(**{'Id': 2, 'MessageBody': '{"m_id": 2, "message": "message contents 2"}',
119 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'}),
120 | call(**{'Id': 3, 'MessageBody': '{"m_id": 3, "message": "message contents 3"}',
121 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'}),
122 | call(**{'Id': 4, 'MessageBody': '{"m_id": 4, "message": "message contents 4"}',
123 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'}),
124 | call(**{'Id': 5, 'MessageBody': '{"m_id": 5, "message": "message contents 5"}',
125 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'}),
126 | call(**{'Id': 6, 'MessageBody': '{"m_id": 6, "message": "message contents 6"}',
127 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'}),
128 | call(**{'Id': 7, 'MessageBody': '{"m_id": 7, "message": "message contents 7"}',
129 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'}),
130 | call(**{'Id': 8, 'MessageBody': '{"m_id": 8, "message": "message contents 8"}',
131 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'}),
132 | call(**{'Id': 9, 'MessageBody': '{"m_id": 9, "message": "message contents 9"}',
133 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'}),
134 | call(**{'Id': 10, 'MessageBody': '{"m_id": 10, "message": "message contents 10"}',
135 | 'MessageGroupId': 'unset', 'QueueUrl': 'test_queue_url'})
136 | ], any_order=True)
137 |
138 | def test_batch_write_throws_exceptions(self):
139 |         sqs_client = SQSFifoBatchDispatcher(queue_name='test_fifo_queue')
140 |         mock_client_error = ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "SQS")
141 | mock_boto3 = Mock()
142 | sqs_client._aws_service = mock_boto3
143 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
144 |
145 | test_payloads = [
146 | {'m_id': 1, 'message': 'message contents 1'},
147 | {'m_id': 2, 'message': 'message contents 2'},
148 | {'m_id': 3, 'message': 'message contents 3'},
149 | {'m_id': 4, 'message': 'message contents 4'},
150 | {'m_id': 5, 'message': 'message contents 5'}
151 | ]
152 |
153 | sqs_client._batch_dispatch_method = Mock(side_effect=[mock_client_error, mock_client_error, mock_client_error,
154 | mock_client_error, mock_client_error])
155 | sqs_client._individual_dispatch_method = Mock()
156 |
157 | for test_payload in test_payloads:
158 | sqs_client.submit_payload(test_payload, message_id=test_payload['m_id'])
159 |
160 | sqs_client.flush_payloads()
161 |
162 | sqs_client._batch_dispatch_method.assert_has_calls([
163 | call(**{'Entries': [
164 | {'Id': 1, 'MessageBody': '{"m_id": 1, "message": "message contents 1"}',
165 | 'MessageGroupId': 'unset'},
166 | {'Id': 2, 'MessageBody': '{"m_id": 2, "message": "message contents 2"}',
167 | 'MessageGroupId': 'unset'},
168 | {'Id': 3, 'MessageBody': '{"m_id": 3, "message": "message contents 3"}',
169 | 'MessageGroupId': 'unset'},
170 | {'Id': 4, 'MessageBody': '{"m_id": 4, "message": "message contents 4"}',
171 | 'MessageGroupId': 'unset'},
172 | {'Id': 5, 'MessageBody': '{"m_id": 5, "message": "message contents 5"}',
173 | 'MessageGroupId': 'unset'}
174 | ], 'QueueUrl': 'test_queue_url'})
175 |             for _ in range(0, 5)  # 5 attempts: 1 initial + 4 retries
176 | ])
177 | sqs_client._individual_dispatch_method.assert_not_called()
178 | self.assertEqual(test_payloads, sqs_client.unprocessed_items)
179 |
180 | def test_individual_write_throws_exceptions(self):
181 |         sqs_client = SQSFifoBatchDispatcher(queue_name='test_fifo_queue')
182 |
183 | mock_client_error = ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "SQS")
184 |
185 | mock_boto3 = Mock()
186 | sqs_client._aws_service = mock_boto3
187 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
188 |
189 | test_payloads = [
190 | {'m_id': 1, 'message': 'message contents 1'},
191 | {'m_id': 2, 'message': 'message contents 2'}
192 | ]
193 |
194 | # All records fail in first attempt
195 | failure_response = {
196 | 'Failed': [
197 | {'Id': 1, 'SenderFault': True, 'Code': 'green cross', 'Message': 'badness'},
198 | {'Id': 2, 'SenderFault': True, 'Code': 'green cross', 'Message': 'badness'}
199 | ],
200 | 'EncryptionType': 'NONE'
201 | }
202 |
203 | sqs_client._batch_dispatch_method = Mock(side_effect=[failure_response])
204 | sqs_client._individual_dispatch_method = Mock(side_effect=[mock_client_error, mock_client_error,
205 | mock_client_error, mock_client_error,
206 | mock_client_error, mock_client_error,
207 | mock_client_error, mock_client_error,
208 | mock_client_error, mock_client_error])
209 |
210 | for test_payload in test_payloads:
211 | sqs_client.submit_payload(test_payload, message_id=test_payload['m_id'])
212 |
213 | sqs_client.flush_payloads()
214 |
215 | sqs_client._batch_dispatch_method.assert_called_once_with(
216 | **{'Entries': [
217 | {'Id': 1, 'MessageBody': '{"m_id": 1, "message": "message contents 1"}', 'MessageGroupId': 'unset'},
218 | {'Id': 2, 'MessageBody': '{"m_id": 2, "message": "message contents 2"}', 'MessageGroupId': 'unset'}
219 | ], 'QueueUrl': 'test_queue_url'}
220 | )
221 | sqs_client._individual_dispatch_method.assert_has_calls([
222 | call(Id=1, MessageBody='{"m_id": 1, "message": "message contents 1"}', MessageGroupId='unset',
223 | QueueUrl='test_queue_url'),
224 | call(Id=1, MessageBody='{"m_id": 1, "message": "message contents 1"}', MessageGroupId='unset',
225 | QueueUrl='test_queue_url'),
226 | call(Id=1, MessageBody='{"m_id": 1, "message": "message contents 1"}', MessageGroupId='unset',
227 | QueueUrl='test_queue_url'),
228 | call(Id=1, MessageBody='{"m_id": 1, "message": "message contents 1"}', MessageGroupId='unset',
229 | QueueUrl='test_queue_url'),
230 | call(Id=1, MessageBody='{"m_id": 1, "message": "message contents 1"}', MessageGroupId='unset',
231 | QueueUrl='test_queue_url'),
232 | call(Id=2, MessageBody='{"m_id": 2, "message": "message contents 2"}', MessageGroupId='unset',
233 | QueueUrl='test_queue_url'),
234 | call(Id=2, MessageBody='{"m_id": 2, "message": "message contents 2"}', MessageGroupId='unset',
235 | QueueUrl='test_queue_url'),
236 | call(Id=2, MessageBody='{"m_id": 2, "message": "message contents 2"}', MessageGroupId='unset',
237 | QueueUrl='test_queue_url'),
238 | call(Id=2, MessageBody='{"m_id": 2, "message": "message contents 2"}', MessageGroupId='unset',
239 | QueueUrl='test_queue_url'),
240 | call(Id=2, MessageBody='{"m_id": 2, "message": "message contents 2"}', MessageGroupId='unset',
241 | QueueUrl='test_queue_url')
242 | ])
243 | self.assertEqual(test_payloads, sqs_client.unprocessed_items)
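
The overhead figures asserted above (92 bytes for a plain message, 123 with a 1-character deduplication id) come from the wrapper fields the FIFO dispatcher adds around each body. A hedged sketch of how one could inspect that wrapper, using the entry shape these tests assert; the exact byte counts remain an implementation detail of the library:

    import json

    # Entry shape copied from the assertions above; counts are indicative only
    entry = {'Id': '', 'MessageBody': '', 'MessageGroupId': 'unset'}
    print(len(json.dumps(entry).encode('utf-8')))  # wrapper bytes around an empty body

    entry['MessageDeduplicationId'] = '1'
    print(len(json.dumps(entry).encode('utf-8')))  # grows when a dedup id is supplied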
--------------------------------------------------------------------------------
/tests/integration_tests/sqs/test_sqs_standard.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock, call
3 |
4 | from botocore.exceptions import ClientError
5 |
6 | from boto3_batch_utils import SQSBatchDispatcher
7 |
8 | from .. import large_messages
9 |
10 |
11 | @patch('boto3_batch_utils.Base.boto3', Mock())
12 | class TestSqsStandard(TestCase):
13 |
14 | def test_more_than_one_batch_small_messages(self):
15 | sqs_client = SQSBatchDispatcher(queue_name='test_standard_queue')
16 |
17 | mock_boto3 = Mock()
18 | sqs_client._aws_service = mock_boto3
19 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
20 | sqs_client._batch_dispatch_method = Mock(return_value={'hello!': True})
21 |
22 | test_payload = {"message": True}
23 |
24 | for _ in range(0, 11):
25 | sqs_client.submit_payload(test_payload)
26 |
27 | sqs_client.flush_payloads()
28 |
29 | self.assertEqual(2, sqs_client._batch_dispatch_method.call_count)
30 |
31 | def test_one_oversized_message(self):
32 | sqs_client = SQSBatchDispatcher(queue_name='test_standard_queue')
33 |
34 | mock_boto3 = Mock()
35 | sqs_client._aws_service = mock_boto3
36 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
37 | sqs_client._batch_dispatch_method = Mock(return_value={'hello!': True})
38 |
39 |         # The submit method adds 65 bytes of overhead, so a payload of (262144 - 64) bytes is 1 byte over the limit
40 | test_payload = large_messages.create_dict_of_specific_byte_size({}, 262144 - 64)
41 |
42 | with self.assertRaises(ValueError) as context:
43 | sqs_client.submit_payload(test_payload)
44 |
45 | sqs_client.flush_payloads()
46 |
47 | self.assertIn('exceeds the maximum payload size', str(context.exception))
48 |
49 | def test_more_than_one_batch_large_messages(self):
50 | sqs_client = SQSBatchDispatcher(queue_name='test_standard_queue')
51 |
52 | mock_boto3 = Mock()
53 | sqs_client._aws_service = mock_boto3
54 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
55 | sqs_client._batch_dispatch_method = Mock(return_value={'hello!': True})
56 |
57 |         # The submit method adds 69 bytes of overhead, so a payload of (262144 - 69) bytes exactly fills the limit
58 | test_payload = large_messages.create_dict_of_specific_byte_size({}, 262144 - 69)
59 |
60 | for _ in range(0, 2):
61 | sqs_client.submit_payload(test_payload)
62 |
63 | sqs_client.flush_payloads()
64 |
65 | self.assertEqual(2, sqs_client._batch_dispatch_method.call_count)
66 |
67 | def test_batch_of_10_failed_first_time_messages(self):
68 | sqs_client = SQSBatchDispatcher(queue_name='test_standard_queue')
69 |
70 | mock_boto3 = Mock()
71 | sqs_client._aws_service = mock_boto3
72 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
73 |
74 | failure_response = {
75 | 'Failed': [
76 | {'Id': x, 'Message': 'it failed', 'SenderFault': True} for x in range(1, 11)
77 | ]
78 | }
79 |
80 | sqs_client._batch_dispatch_method = Mock(side_effect=[failure_response, True, True])
81 | sqs_client._individual_dispatch_method = Mock()
82 |
83 | test_payloads = [
84 | {'m_id': 1, 'message': 'message contents 1'},
85 | {'m_id': 2, 'message': 'message contents 2'},
86 | {'m_id': 3, 'message': 'message contents 3'},
87 | {'m_id': 4, 'message': 'message contents 4'},
88 | {'m_id': 5, 'message': 'message contents 5'},
89 | {'m_id': 6, 'message': 'message contents 6'},
90 | {'m_id': 7, 'message': 'message contents 7'},
91 | {'m_id': 8, 'message': 'message contents 8'},
92 | {'m_id': 9, 'message': 'message contents 9'},
93 | {'m_id': 10, 'message': 'message contents 10'}
94 | ]
95 |
96 | for test_payload in test_payloads:
97 | sqs_client.submit_payload(test_payload, message_id=test_payload['m_id'])
98 |
99 | sqs_client.flush_payloads()
100 | sqs_client._batch_dispatch_method.assert_called_once()
101 | sqs_client._individual_dispatch_method.assert_has_calls([
102 | call(**{'MessageBody': '{"m_id": 1, "message": "message contents 1"}', 'QueueUrl': 'test_queue_url'}),
103 | call(**{'MessageBody': '{"m_id": 2, "message": "message contents 2"}', 'QueueUrl': 'test_queue_url'}),
104 | call(**{'MessageBody': '{"m_id": 3, "message": "message contents 3"}', 'QueueUrl': 'test_queue_url'}),
105 | call(**{'MessageBody': '{"m_id": 4, "message": "message contents 4"}', 'QueueUrl': 'test_queue_url'}),
106 | call(**{'MessageBody': '{"m_id": 5, "message": "message contents 5"}', 'QueueUrl': 'test_queue_url'}),
107 | call(**{'MessageBody': '{"m_id": 6, "message": "message contents 6"}', 'QueueUrl': 'test_queue_url'}),
108 | call(**{'MessageBody': '{"m_id": 7, "message": "message contents 7"}', 'QueueUrl': 'test_queue_url'}),
109 | call(**{'MessageBody': '{"m_id": 8, "message": "message contents 8"}', 'QueueUrl': 'test_queue_url'}),
110 | call(**{'MessageBody': '{"m_id": 9, "message": "message contents 9"}', 'QueueUrl': 'test_queue_url'}),
111 | call(**{'MessageBody': '{"m_id": 10, "message": "message contents 10"}', 'QueueUrl': 'test_queue_url'})
112 | ], any_order=True)
113 |
114 | def test_batch_write_throws_exceptions(self):
115 | sqs_client = SQSBatchDispatcher(queue_name='test_standard_queue')
116 |         mock_client_error = ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "SQS")
117 | mock_boto3 = Mock()
118 | sqs_client._aws_service = mock_boto3
119 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
120 |
121 | test_payloads = [
122 | {'m_id': 1, 'message': 'message contents 1'},
123 | {'m_id': 2, 'message': 'message contents 2'},
124 | {'m_id': 3, 'message': 'message contents 3'},
125 | {'m_id': 4, 'message': 'message contents 4'},
126 | {'m_id': 5, 'message': 'message contents 5'}
127 | ]
128 |
129 | sqs_client._batch_dispatch_method = Mock(side_effect=[mock_client_error, mock_client_error, mock_client_error,
130 | mock_client_error, mock_client_error])
131 | sqs_client._individual_dispatch_method = Mock()
132 |
133 | for test_payload in test_payloads:
134 | sqs_client.submit_payload(test_payload, message_id=test_payload['m_id'])
135 |
136 | sqs_client.flush_payloads()
137 |
138 | sqs_client._batch_dispatch_method.assert_has_calls([
139 | call(**{'Entries': [
140 | {'Id': 1, 'MessageBody': '{"m_id": 1, "message": "message contents 1"}'},
141 | {'Id': 2, 'MessageBody': '{"m_id": 2, "message": "message contents 2"}'},
142 | {'Id': 3, 'MessageBody': '{"m_id": 3, "message": "message contents 3"}'},
143 | {'Id': 4, 'MessageBody': '{"m_id": 4, "message": "message contents 4"}'},
144 | {'Id': 5, 'MessageBody': '{"m_id": 5, "message": "message contents 5"}'}
145 | ], 'QueueUrl': 'test_queue_url'})
146 |             for _ in range(0, 5)  # 5 attempts: 1 initial + 4 retries
147 | ])
148 | sqs_client._individual_dispatch_method.assert_not_called()
149 | self.assertEqual(test_payloads, sqs_client.unprocessed_items)
150 |
151 | def test_individual_write_throws_exceptions(self):
152 | mock_client_error = ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "SQS")
153 |
154 | sqs_client = SQSBatchDispatcher(queue_name='test_standard_queue')
155 |
156 | mock_boto3 = Mock()
157 | sqs_client._aws_service = mock_boto3
158 | mock_boto3.get_queue_url.return_value = {'QueueUrl': 'test_queue_url'}
159 |
160 | test_payloads = [
161 | {'m_id': 1, 'message': 'message contents 1'},
162 | {'m_id': 2, 'message': 'message contents 2'}
163 | ]
164 |
165 | # All records fail in first attempt
166 | failure_response = {
167 | 'Failed': [
168 | {'Id': 1, 'SenderFault': True, 'Code': 'green cross', 'Message': 'badness'},
169 | {'Id': 2, 'SenderFault': True, 'Code': 'green cross', 'Message': 'badness'}
170 | ],
171 | 'EncryptionType': 'NONE'
172 | }
173 |
174 | sqs_client._batch_dispatch_method = Mock(side_effect=[failure_response])
175 | sqs_client._individual_dispatch_method = Mock(side_effect=[mock_client_error, mock_client_error,
176 | mock_client_error, mock_client_error,
177 | mock_client_error, mock_client_error,
178 | mock_client_error, mock_client_error,
179 | mock_client_error, mock_client_error])
180 |
181 | for test_payload in test_payloads:
182 | sqs_client.submit_payload(test_payload, message_id=test_payload['m_id'])
183 |
184 | sqs_client.flush_payloads()
185 |
186 | sqs_client._batch_dispatch_method.assert_called_once_with(
187 | **{'Entries': [
188 | {'Id': 1, 'MessageBody': '{"m_id": 1, "message": "message contents 1"}'},
189 | {'Id': 2, 'MessageBody': '{"m_id": 2, "message": "message contents 2"}'}
190 | ], 'QueueUrl': 'test_queue_url'}
191 | )
192 | sqs_client._individual_dispatch_method.assert_has_calls([
193 | call(MessageBody='{"m_id": 1, "message": "message contents 1"}', QueueUrl='test_queue_url'),
194 | call(MessageBody='{"m_id": 1, "message": "message contents 1"}', QueueUrl='test_queue_url'),
195 | call(MessageBody='{"m_id": 1, "message": "message contents 1"}', QueueUrl='test_queue_url'),
196 | call(MessageBody='{"m_id": 1, "message": "message contents 1"}', QueueUrl='test_queue_url'),
197 | call(MessageBody='{"m_id": 1, "message": "message contents 1"}', QueueUrl='test_queue_url'),
198 | call(MessageBody='{"m_id": 2, "message": "message contents 2"}', QueueUrl='test_queue_url'),
199 | call(MessageBody='{"m_id": 2, "message": "message contents 2"}', QueueUrl='test_queue_url'),
200 | call(MessageBody='{"m_id": 2, "message": "message contents 2"}', QueueUrl='test_queue_url'),
201 | call(MessageBody='{"m_id": 2, "message": "message contents 2"}', QueueUrl='test_queue_url'),
202 | call(MessageBody='{"m_id": 2, "message": "message contents 2"}', QueueUrl='test_queue_url')
203 | ])
204 | self.assertEqual(test_payloads, sqs_client.unprocessed_items)
205 |
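Taken together, these tests pin down the standard dispatcher's contract: payloads are batched ten at a time, a failed batch falls back to individual sends with retries, and anything still undelivered is returned by flush_payloads(). A minimal usage sketch built only from that contract; the queue name and payloads are illustrative:

    from boto3_batch_utils import SQSBatchDispatcher

    dispatcher = SQSBatchDispatcher(queue_name='my-queue')
    for i in range(1, 26):  # more than one 10-message batch
        dispatcher.submit_payload({'m_id': i, 'message': f'message contents {i}'}, message_id=i)
    undelivered = dispatcher.flush_payloads()  # unprocessed items, as asserted above
    if undelivered:
        print(f'{len(undelivered)} payloads were not delivered')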
--------------------------------------------------------------------------------
/tests/unit_tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/g-farrow/boto3_batch_utils/6dba31d6e5cc135a178fd507f2fbdce5f36b804a/tests/unit_tests/__init__.py
--------------------------------------------------------------------------------
/tests/unit_tests/test_base_dispatcher.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock, call
3 |
4 | from botocore.exceptions import ClientError
5 |
6 | from boto3_batch_utils.Base import BaseDispatcher
7 |
8 |
9 | class MockClient:
10 |
11 | def __init__(self, client_name, **kwargs):
12 | self.client_name = client_name + "_client"
13 | self.kwargs = kwargs or {}
14 |
15 | def send_lots(self, batch):
16 | pass
17 |
18 | def send_one(self, payload):
19 | pass
20 |
21 |
22 | mock_boto3_interface_type_mapper = {
23 | 'test_subject': 'client'
24 | }
25 |
26 |
27 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
28 | @patch('boto3_batch_utils.Base.boto3', Mock())
29 | class InitialiseBatchUtilsClient(TestCase):
30 |
31 | def test_init(self):
32 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1)
33 | self.assertEqual('test_subject', base.aws_service_name)
34 | self.assertIsNone(base._aws_service)
35 | self.assertEqual('send_lots', base.batch_dispatch_method)
36 | self.assertIsNone(base._batch_dispatch_method)
37 | self.assertEqual('send_one', base.individual_dispatch_method)
38 | self.assertIsNone(base._individual_dispatch_method)
39 | self.assertEqual(1, base.max_batch_size)
40 | self.assertIsNone(base._aws_service_batch_max_payloads)
41 | self.assertIsNone(base._aws_service_message_max_bytes)
42 | self.assertIsNone(base._aws_service_batch_max_bytes)
43 | self.assertEqual({}, base._batch_payload_wrapper)
44 | self.assertIsNone(base._batch_payload)
45 | self.assertEqual(0, base._batch_payload_wrapper_byte_size)
46 | self.assertEqual([], base.unprocessed_items)
47 |
48 |
49 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
50 | @patch('boto3_batch_utils.Base.boto3', Mock())
51 | class TestValidateInitialisation(TestCase):
52 |
53 | def test_initialisation_valid(self):
54 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1)
55 | base._aws_service_batch_max_payloads = 1
56 |
57 | base._validate_initialisation()
58 |
59 | def test_max_batch_too_large_raises_exception(self):
60 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=2)
61 | base._aws_service_batch_max_payloads = 1
62 |
63 | with self.assertRaises(ValueError) as context:
64 | base._validate_initialisation()
65 | self.assertIn("Requested max_batch_size '2' exceeds the test_subject maximum", str(context.exception))
66 |
67 |
68 | @patch('boto3_batch_utils.Base._boto3_interface_type_mapper', mock_boto3_interface_type_mapper)
69 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
70 | @patch('boto3_batch_utils.Base.boto3', Mock())
71 | class InitialiseAwsClient(TestCase):
72 |
73 | def test_init(self):
74 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1)
75 | base._initialise_aws_client()
76 | self.assertEqual('test_subject_client', base._aws_service.client_name)
77 | self.assertEqual('send_lots', base._batch_dispatch_method.__name__)
78 | self.assertEqual('send_one', base._individual_dispatch_method.__name__)
79 |
80 | def test_boto3_overrides(self):
81 |         base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1,
82 |                               endpoint_url='https://dummy_endpoint:54321/', aws_session_token='session_token')
83 |         base._initialise_aws_client()
84 |         self.assertEqual('https://dummy_endpoint:54321/', base._aws_service.kwargs['endpoint_url'])
85 |         self.assertEqual('session_token', base._aws_service.kwargs['aws_session_token'])
86 |
87 |
88 | @patch('boto3_batch_utils.Base._boto3_interface_type_mapper', mock_boto3_interface_type_mapper)
89 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
90 | @patch('boto3_batch_utils.Base.boto3', Mock())
91 | class SubmitPayload(TestCase):
92 |
93 | def test_when_payload_list_is_empty(self):
94 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1)
95 | base._aws_service_message_max_bytes = 15
96 | base._aws_service_batch_max_bytes = 15
97 | base._append_payload_to_current_batch = Mock()
98 | base._flush_payload_selector = Mock()
99 | pl = {"a": True}
100 | base.submit_payload(pl)
101 | base._append_payload_to_current_batch.assert_called_once_with(pl)
102 | base._flush_payload_selector.assert_called_once()
103 |
104 | def test_when_payload_is_over_byte_size(self):
105 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1)
106 | base._aws_service_message_max_bytes = 10
107 | base._aws_service_batch_max_bytes = 15
108 | base._append_payload_to_current_batch = Mock()
109 | base._flush_payload_selector = Mock()
110 | pl = {"a": True}
111 | with self.assertRaises(ValueError) as context:
112 | base.submit_payload(pl)
113 | self.assertIn("exceeds the maximum payload size", str(context.exception))
114 | base._append_payload_to_current_batch.assert_not_called()
115 | base._flush_payload_selector.assert_not_called()
116 |
117 | def test_when_payload_is_equal_to_byte_size(self):
118 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1)
119 | base._aws_service_message_max_bytes = 11
120 | base._aws_service_batch_max_bytes = 15
121 | base._validate_payload_byte_size = Mock(return_value=True)
122 | base._append_payload_to_current_batch = Mock()
123 | base._flush_payload_selector = Mock()
124 | pl = {"a": True}
125 | base.submit_payload(pl)
126 | base._append_payload_to_current_batch.assert_called_once_with(pl)
127 | base._flush_payload_selector.assert_called_once()
128 |
129 |
130 | @patch('boto3_batch_utils.Base._boto3_interface_type_mapper', mock_boto3_interface_type_mapper)
131 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
132 | @patch('boto3_batch_utils.Base.boto3', Mock())
133 | class PayloadSelectorWhenFlushOnMaxIsTrue(TestCase):
134 |
135 | def test_empty_payload_list(self):
136 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
137 | base._batch_payload = []
138 | base._batch_send_payloads = Mock()
139 | base._flush_payload_selector()
140 | base._batch_send_payloads.assert_not_called()
141 |
142 | def test_payload_list_less_than_max_batch_size(self):
143 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
144 | base._batch_payload = [1, 2]
145 | base._batch_send_payloads = Mock()
146 | base._flush_payload_selector()
147 | base._batch_send_payloads.assert_not_called()
148 |
149 | def test_payload_list_equal_max_batch_size(self):
150 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
151 | base._batch_payload = [1, 2, 3]
152 | base._batch_send_payloads = Mock()
153 | base._flush_payload_selector()
154 | base._batch_send_payloads.assert_called_once_with([1, 2, 3])
155 |
156 | def test_payload_list_greater_than_max_batch_size(self):
157 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
158 | base._batch_payload = [1, 2, 3, 4]
159 | base._batch_send_payloads = Mock()
160 | base._flush_payload_selector()
161 | base._batch_send_payloads.assert_has_calls([call([1, 2, 3]), call([4])])
162 |
163 |
164 | @patch('boto3_batch_utils.Base._boto3_interface_type_mapper', mock_boto3_interface_type_mapper)
165 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
166 | @patch('boto3_batch_utils.Base.boto3', Mock())
167 | @patch('boto3_batch_utils.Base.chunks')
168 | class FlushPayloads(TestCase):
169 |
170 | def test_empty_payload_list(self, mock_chunks):
171 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
172 | base._batch_payload = []
173 | base._initialise_aws_client = Mock()
174 | base._batch_send_payloads = Mock()
175 | mock_chunks.return_value = [[]]
176 | base.flush_payloads()
177 | base._initialise_aws_client.assert_called_once()
178 | base._batch_send_payloads.assert_not_called()
179 | self.assertEqual([], base._batch_payload)
180 |
181 | def test_payload_partial_max_batch_size(self, mock_chunks):
182 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
183 | base._batch_payload = [1, 2]
184 | base._initialise_aws_client = Mock()
185 | base._batch_send_payloads = Mock()
186 | mock_chunks.return_value = [[1, 2]]
187 | base.flush_payloads()
188 | base._initialise_aws_client.assert_called_once()
189 | base._batch_send_payloads.assert_called_once_with([1, 2])
190 | self.assertEqual([], base._batch_payload)
191 |
192 | def test_payload_equal_max_batch_size(self, mock_chunks):
193 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
194 | base._batch_payload = [1, 2, 3]
195 | base._initialise_aws_client = Mock()
196 | base._batch_send_payloads = Mock()
197 | mock_chunks.return_value = [[1, 2, 3]]
198 | base.flush_payloads()
199 | base._initialise_aws_client.assert_called_once()
200 | base._batch_send_payloads.assert_called_once_with([1, 2, 3])
201 | self.assertEqual([], base._batch_payload)
202 |
203 | def test_payload_multiple_batches(self, mock_chunks):
204 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
205 | base._batch_payload = [1, 2, 3, 4]
206 | base._initialise_aws_client = Mock()
207 | base._batch_send_payloads = Mock()
208 | mock_chunks.return_value = [[1, 2, 3], [4]]
209 | base.flush_payloads()
210 | base._initialise_aws_client.assert_called_once()
211 | base._batch_send_payloads.assert_has_calls([call([1, 2, 3]), call([4])])
212 | self.assertEqual([], base._batch_payload)
213 |
214 | def test_unprocessed_items_are_returned(self, mock_chunks):
215 | test_unprocessed_items = ['abc1', 'cde2', 'efg3']
216 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
217 | base.unprocessed_items = test_unprocessed_items
218 | base._batch_payload = [1, 2, 3, 4]
219 | base._initialise_aws_client = Mock()
220 | base._batch_send_payloads = Mock()
221 | mock_chunks.return_value = [[1, 2, 3], [4]]
222 |
223 | response = base.flush_payloads()
224 |
225 | base._initialise_aws_client.assert_called_once()
226 | base._batch_send_payloads.assert_has_calls([call([1, 2, 3]), call([4])])
227 | self.assertEqual([], base._batch_payload)
228 | self.assertEqual(test_unprocessed_items, response)
229 |
230 |
231 | @patch('boto3_batch_utils.Base._boto3_interface_type_mapper', mock_boto3_interface_type_mapper)
232 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
233 | @patch('boto3_batch_utils.Base.boto3', Mock())
234 | class BatchSendPayloads(TestCase):
235 |
236 | def test_empty_list(self):
237 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
238 | test_batch = []
239 | base._batch_dispatch_method = Mock(return_value="batch_response")
240 | base._process_batch_send_response = Mock()
241 | base._batch_send_payloads(test_batch)
242 | base._batch_dispatch_method.assert_called_once_with(test_batch)
243 | base._process_batch_send_response.assert_called_once_with("batch_response")
244 |
245 | def test_empty_dict(self):
246 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
247 | test_batch = {}
248 | base._batch_dispatch_method = Mock(return_value="batch_response")
249 | base._process_batch_send_response = Mock()
250 | base._batch_send_payloads(test_batch)
251 | base._batch_dispatch_method.assert_called_once_with(**test_batch)
252 | base._process_batch_send_response.assert_called_once_with("batch_response")
253 |
254 | def test_list(self):
255 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
256 | test_batch = [1, 2, 3, 4, 5, 6, 7, 8, 9]
257 | base._batch_dispatch_method = Mock(return_value="batch_response")
258 | base._process_batch_send_response = Mock()
259 | base._batch_send_payloads(test_batch)
260 | base._batch_dispatch_method.assert_called_once_with(test_batch)
261 | base._process_batch_send_response.assert_called_once_with("batch_response")
262 |
263 | def test_dict(self):
264 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
265 | test_batch = {'something_to_process': [1, 2, 3, 4, 5, 6]}
266 | base._batch_dispatch_method = Mock(return_value="batch_response")
267 | base._process_batch_send_response = Mock()
268 | base._batch_send_payloads(test_batch)
269 | base._batch_dispatch_method.assert_called_once_with(**test_batch)
270 | base._process_batch_send_response.assert_called_once_with("batch_response")
271 |
272 | def test_list_batch_send_failures_sent_to_unprocessed_items(self):
273 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
274 | test_batch = ["abc", "cde"]
275 | base._batch_dispatch_method = Mock(side_effect=ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test"))
276 | base._process_batch_send_response = Mock()
277 | base._handle_client_error = Mock()
278 | base._unpack_failed_batch_to_unprocessed_items = Mock()
279 |
280 | base._batch_send_payloads(test_batch)
281 |
282 | base._batch_dispatch_method.assert_has_calls([call(test_batch), call(test_batch), call(test_batch),
283 | call(test_batch), call(test_batch)])
284 | base._process_batch_send_response.assert_not_called()
285 | base._unpack_failed_batch_to_unprocessed_items.assert_called_once_with(test_batch)
286 |
287 |     def test_dict_batch_send_failures_sent_to_unprocessed_items(self):
288 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
289 | test_batch = {'something': 'batchy'}
290 | base._batch_dispatch_method = Mock(
291 | side_effect=[
292 | ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test"),
293 | ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test"),
294 | ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test"),
295 | ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test"),
296 | ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test"),
297 | ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test")
298 | ]
299 | )
300 | base._process_batch_send_response = Mock()
301 | base._handle_client_error = Mock()
302 | base._unpack_failed_batch_to_unprocessed_items = Mock()
303 |
304 | base._batch_send_payloads(test_batch)
305 |
306 | base._batch_dispatch_method.assert_has_calls([call(**test_batch), call(**test_batch), call(**test_batch),
307 | call(**test_batch), call(**test_batch)])
308 | base._process_batch_send_response.assert_not_called()
309 | base._unpack_failed_batch_to_unprocessed_items.assert_called_once_with(test_batch)
310 |
311 |
312 | @patch('boto3_batch_utils.Base._boto3_interface_type_mapper', mock_boto3_interface_type_mapper)
313 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
314 | @patch('boto3_batch_utils.Base.boto3', Mock())
315 | class SendIndividualPayload(TestCase):
316 |
317 | def test_successful_send_non_dict(self):
318 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
319 | base._individual_dispatch_method = Mock()
320 | test_payload = "abc"
321 | base._send_individual_payload(test_payload)
322 | base._individual_dispatch_method.assert_called_once_with(test_payload)
323 |
324 | def test_successfully_sent_after_4_failures(self):
325 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
326 | client_error = ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test")
327 | base._individual_dispatch_method = Mock(side_effect=[client_error, client_error, client_error, client_error, ""])
328 | test_payload = "abc"
329 | base._send_individual_payload(test_payload)
330 | base._individual_dispatch_method.assert_has_calls([
331 | call(test_payload),
332 | call(test_payload),
333 | call(test_payload),
334 | call(test_payload),
335 | call(test_payload)
336 | ])
337 |
338 | def test_non_dict_added_to_unprocessed_items_after_5_failures(self):
339 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
340 | client_error = ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test")
341 | base._individual_dispatch_method = Mock(side_effect=[client_error, client_error, client_error, client_error,
342 | client_error])
343 |
344 | test_payload = "abc"
345 | base._send_individual_payload(test_payload)
346 | base._individual_dispatch_method.assert_has_calls([
347 | call(test_payload),
348 | call(test_payload),
349 | call(test_payload),
350 | call(test_payload),
351 | call(test_payload)
352 | ])
353 | self.assertEqual([test_payload], base.unprocessed_items)
354 |
355 |
356 | def test_successful_send_dict(self):
357 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
358 | base._individual_dispatch_method = Mock()
359 | test_payload = {"abc": 123}
360 | base._send_individual_payload(test_payload)
361 | base._individual_dispatch_method.assert_called_once_with(**test_payload)
362 |
363 | def test_successfully_sent_after_4_failures_dict(self):
364 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
365 | client_error = ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test")
366 | base._individual_dispatch_method = Mock(side_effect=[client_error, client_error, client_error, client_error, ""])
367 | test_payload = {"abc": 123}
368 | base._send_individual_payload(test_payload)
369 | base._individual_dispatch_method.assert_has_calls([
370 | call(**test_payload),
371 | call(**test_payload),
372 | call(**test_payload),
373 | call(**test_payload),
374 | call(**test_payload)
375 | ])
376 |
377 | def test_dict_added_to_unprocessed_items_after_5_failures(self):
378 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=3)
379 | client_error = ClientError({"Error": {"message": "Something went wrong", "code": 0}}, "A Test")
380 | base._individual_dispatch_method = Mock(side_effect=[client_error, client_error, client_error, client_error,
381 | client_error])
382 | test_payload = {"abc": 123}
383 | base._send_individual_payload(test_payload)
384 | base._individual_dispatch_method.assert_has_calls([
385 | call(**test_payload),
386 | call(**test_payload),
387 | call(**test_payload),
388 | call(**test_payload),
389 | call(**test_payload)
390 | ])
391 | self.assertEqual([test_payload], base.unprocessed_items)
392 |
393 |
394 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
395 | @patch('boto3_batch_utils.Base.boto3', Mock())
396 | @patch('boto3_batch_utils.Base.get_byte_size_of_dict_or_list')
397 | class TestValidatePayloadByteSize(TestCase):
398 |
399 | def test_less_than_max(self, mock_get_byte_size_of_dict_or_list):
400 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1)
401 | base._aws_service_message_max_bytes = 2
402 | mock_get_byte_size_of_dict_or_list.return_value = 1
403 | test_pl = {'stuff': True}
404 |
405 | base._validate_payload_byte_size(test_pl)
406 |
407 | mock_get_byte_size_of_dict_or_list.assert_has_calls([call({}), call(test_pl)], any_order=True)
408 |
409 | def test_equals_max(self, mock_get_byte_size_of_dict_or_list):
410 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1)
411 | base._aws_service_message_max_bytes = 1
412 | mock_get_byte_size_of_dict_or_list.return_value = 1
413 | test_pl = {'stuff': True}
414 |
415 | base._validate_payload_byte_size(test_pl)
416 |
417 | mock_get_byte_size_of_dict_or_list.assert_has_calls([call({}), call(test_pl)], any_order=True)
418 |
419 | def test_more_than_max(self, mock_get_byte_size_of_dict_or_list):
420 | base = BaseDispatcher('test_subject', 'send_lots', 'send_one', max_batch_size=1)
421 | base._aws_service_message_max_bytes = 1
422 | mock_get_byte_size_of_dict_or_list.return_value = 2
423 | test_pl = {'stuff': True}
424 |
425 | with self.assertRaises(ValueError) as context:
426 | base._validate_payload_byte_size(test_pl)
427 | self.assertIn("exceeds the maximum payload size", str(context.exception))
428 |
429 | mock_get_byte_size_of_dict_or_list.assert_has_calls([call({}), call(test_pl)], any_order=True)
430 |
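The FlushPayloads tests above mock out boto3_batch_utils.Base.chunks rather than exercise it. For orientation, a hypothetical sketch of what such a helper does; the library's actual implementation may differ in detail:

    def chunks(items: list, size: int):
        # Yield consecutive slices of at most `size` items
        for start in range(0, len(items), size):
            yield items[start:start + size]

    # Matches the shapes the tests feed in via mock_chunks
    assert list(chunks([1, 2, 3, 4], 3)) == [[1, 2, 3], [4]]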
--------------------------------------------------------------------------------
/tests/unit_tests/test_cloudwatch.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock
3 | from datetime import datetime
4 |
5 | from boto3_batch_utils.Cloudwatch import CloudwatchBatchDispatcher, cloudwatch_dimension
6 | from boto3_batch_utils.Base import BaseDispatcher
7 |
8 |
9 | class MockClient:
10 |
11 | def __init__(self, client_name):
12 | self.client_name = client_name + "_client"
13 |
14 | def put_metric_data(self):
15 | pass
16 |
17 |
18 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
19 | @patch('boto3_batch_utils.Base.boto3', Mock())
20 | @patch.object(BaseDispatcher, 'submit_payload')
21 | class SubmitMetric(TestCase):
22 |
23 | def test(self, mock_submit_payload):
24 | cw = CloudwatchBatchDispatcher('test_space', max_batch_size=1)
25 | mock_metric_name = 'met'
26 | mock_timestamp = datetime.now()
27 | mock_dimensions = None
28 | mock_value = 123
29 | mock_unit = 'Bytes'
30 | cw.submit_metric(metric_name=mock_metric_name, value=mock_value, timestamp=mock_timestamp,
31 | dimensions=mock_dimensions, unit=mock_unit)
32 | mock_submit_payload.assert_called_once_with({
33 | 'MetricName': mock_metric_name,
34 | 'Timestamp': mock_timestamp,
35 | 'Value': mock_value,
36 | 'Unit': mock_unit
37 | })
38 |
39 |
40 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
41 | @patch('boto3_batch_utils.Base.boto3', Mock())
42 | @patch.object(BaseDispatcher, 'flush_payloads')
43 | class FlushPayloads(TestCase):
44 |
45 | def test(self, mock_flush_payloads):
46 | cw = CloudwatchBatchDispatcher('test_space', max_batch_size=1)
47 | cw.flush_payloads()
48 | mock_flush_payloads.assert_called_once_with()
49 |
50 |
51 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
52 | @patch('boto3_batch_utils.Base.boto3', Mock())
53 | @patch.object(BaseDispatcher, '_batch_send_payloads')
54 | class BatchSendPayloads(TestCase):
55 |
56 | def test(self, mock_batch_send_payloads):
57 | cw = CloudwatchBatchDispatcher('test_space', max_batch_size=1)
58 | test_batch = {'test': True}
59 | cw._batch_send_payloads(test_batch)
60 | mock_batch_send_payloads.assert_called_once_with({'Namespace': 'test_space', 'MetricData': test_batch})
61 |
62 |
63 | class CloudwatchDimensionStructure(TestCase):
64 |
65 | def test(self):
66 | self.assertEqual(
67 | {'Name': "test_name", 'Value': '123'},
68 | cloudwatch_dimension("test_name", 123)
69 | )
70 |
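For reference, the interface these tests exercise composes into the following usage sketch. The namespace, metric, and dimension values are illustrative; passing a dimensions list built with cloudwatch_dimension and the max_batch_size value are assumptions, since the tests above always construct with max_batch_size=1 and dimensions=None:

    from datetime import datetime

    from boto3_batch_utils.Cloudwatch import CloudwatchBatchDispatcher, cloudwatch_dimension

    cw = CloudwatchBatchDispatcher('my_namespace', max_batch_size=20)
    cw.submit_metric(
        metric_name='payload_size',
        value=123,
        timestamp=datetime.now(),
        dimensions=[cloudwatch_dimension('environment', 'test')],  # assumed to accept a list
        unit='Bytes'
    )
    cw.flush_payloads()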
--------------------------------------------------------------------------------
/tests/unit_tests/test_dynamodb.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock, call
3 |
4 | from botocore.exceptions import ClientError
5 |
6 | from boto3_batch_utils.Dynamodb import DynamoBatchDispatcher
7 | from boto3_batch_utils.Base import BaseDispatcher
8 |
9 |
10 | class MockClient:
11 |
12 | def __init__(self, client_name):
13 | self.client_name = client_name + "_client"
14 |
15 | def batch_write_item(self):
16 | pass
17 |
18 | def put_item(self):
19 | pass
20 |
21 |
22 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
23 | @patch('boto3_batch_utils.Base.boto3', Mock())
24 | @patch('boto3_batch_utils.utils.convert_floats_in_dict_to_decimals')
25 | @patch.object(BaseDispatcher, 'submit_payload')
26 | class SubmitPayload(TestCase):
27 |
28 | def test_where_key_preexists(self, mock_submit_payload, mock_convert_decimals):
29 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
30 | mock_check_payload_is_unique = Mock(return_value=True)
31 | dy._check_payload_is_unique = mock_check_payload_is_unique
32 | test_payload = {'p_key': 1}
33 | mock_convert_decimals.return_value = test_payload
34 | dy.submit_payload(test_payload)
35 | mock_check_payload_is_unique.assert_called_once_with(test_payload)
36 | mock_submit_payload.assert_called_once_with({"PutRequest": {"Item": test_payload}})
37 |
38 | def test_where_key_requires_mapping(self, mock_submit_payload, mock_convert_decimals):
39 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
40 | mock_check_payload_is_unique = Mock(return_value=True)
41 | dy._check_payload_is_unique = mock_check_payload_is_unique
42 | test_payload = {'unmapped_id': 1}
43 | mock_convert_decimals.return_value = test_payload
44 | dy.submit_payload(test_payload, partition_key_location='unmapped_id')
45 | mock_check_payload_is_unique.assert_called_once_with(test_payload)
46 | mock_submit_payload.assert_called_once_with({"PutRequest": {"Item": test_payload}})
47 |
48 | def test_where_key_not_found(self, mock_submit_payload, mock_convert_decimals):
49 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
50 | mock_check_payload_is_unique = Mock(return_value=True)
51 | dy._check_payload_is_unique = mock_check_payload_is_unique
52 | test_payload = {'there_is_no_real_id_here': 1}
53 | mock_convert_decimals.return_value = test_payload
54 | with self.assertRaises(KeyError):
55 | dy.submit_payload(test_payload, partition_key_location='something_useless')
56 | mock_check_payload_is_unique.assert_not_called()
57 | mock_submit_payload.assert_not_called()
58 |
59 | def test_where_payload_is_a_duplicate(self, mock_submit_payload, mock_convert_decimals):
60 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
61 | mock_check_payload_is_unique = Mock(return_value=False)
62 | dy._check_payload_is_unique = mock_check_payload_is_unique
63 | test_payload = {'p_key': 1}
64 | mock_convert_decimals.return_value = test_payload
65 | dy.submit_payload(test_payload)
66 | mock_check_payload_is_unique.assert_called_once_with(test_payload)
67 | mock_submit_payload.assert_not_called()
68 |
69 |
70 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
71 | @patch('boto3_batch_utils.Base.boto3', Mock())
72 | class TestCheckPayloadIsUnique(TestCase):
73 |
74 | def test_sort_key_given_and_is_not_duplicate(self):
75 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1,
76 | sort_key="sorted")
77 | dy._check_payload_is_unique_by_partition_key_and_sort_key = Mock(return_value=True)
78 | dy._check_payload_is_unique_by_partition_key = Mock()
79 | test_payload = {'test': True}
80 |
81 | self.assertTrue(dy._check_payload_is_unique(test_payload))
82 |
83 | dy._check_payload_is_unique_by_partition_key_and_sort_key.assert_called_once_with(test_payload)
84 | dy._check_payload_is_unique_by_partition_key.assert_not_called()
85 |
86 | def test_sort_key_given_and_is_a_duplicate(self):
87 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1,
88 | sort_key="sorted")
89 | dy._check_payload_is_unique_by_partition_key_and_sort_key = Mock(return_value=False)
90 | dy._check_payload_is_unique_by_partition_key = Mock()
91 | test_payload = {'test': True}
92 |
93 | self.assertFalse(dy._check_payload_is_unique(test_payload))
94 |
95 | dy._check_payload_is_unique_by_partition_key_and_sort_key.assert_called_once_with(test_payload)
96 | dy._check_payload_is_unique_by_partition_key.assert_not_called()
97 |
98 | def test_sort_key_not_given_and_is_not_duplicate(self):
99 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
100 | dy._check_payload_is_unique_by_partition_key_and_sort_key = Mock()
101 | dy._check_payload_is_unique_by_partition_key = Mock(return_value=True)
102 | test_payload = {'test': True}
103 |
104 | self.assertTrue(dy._check_payload_is_unique(test_payload))
105 |
106 | dy._check_payload_is_unique_by_partition_key_and_sort_key.assert_not_called()
107 | dy._check_payload_is_unique_by_partition_key.assert_called_once_with(test_payload)
108 |
109 | def test_sort_key_not_given_and_is_a_duplicate(self):
110 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
111 | dy._check_payload_is_unique_by_partition_key_and_sort_key = Mock()
112 | dy._check_payload_is_unique_by_partition_key = Mock(return_value=False)
113 | test_payload = {'test': True}
114 |
115 | self.assertFalse(dy._check_payload_is_unique(test_payload))
116 |
117 | dy._check_payload_is_unique_by_partition_key_and_sort_key.assert_not_called()
118 | dy._check_payload_is_unique_by_partition_key.assert_called_once_with(test_payload)
119 |
120 |
121 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
122 | @patch('boto3_batch_utils.Base.boto3', Mock())
123 | class TestCheckPayloadIsUniqueByPartitionKey(TestCase):
124 |
125 | def test_empty_batch(self):
126 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
127 | dy._batch_payload = []
128 |
129 | test_payload = {'p_key': 'abc'}
130 |
131 | self.assertTrue(dy._check_payload_is_unique_by_partition_key(test_payload))
132 |
133 | def test_record_already_in_batch(self):
134 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
135 | dy._batch_payload = [{'PutRequest': {'Item': {'p_key': 'abc'}}}]
136 |
137 | test_payload = {'p_key': 'abc'}
138 |
139 | self.assertFalse(dy._check_payload_is_unique_by_partition_key(test_payload))
140 |
141 | def test_record_not_in_batch(self):
142 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
143 | dy._batch_payload = [{'PutRequest': {'Item': {'p_key': 'cde'}}}]
144 |
145 | test_payload = {'p_key': 'abc'}
146 |
147 | self.assertTrue(dy._check_payload_is_unique_by_partition_key(test_payload))
148 |
149 |
150 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
151 | @patch('boto3_batch_utils.Base.boto3', Mock())
152 | class TestCheckPayloadIsUniqueByPartitionKeyAndSortKey(TestCase):
153 |
154 | def test_empty_batch(self):
155 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1,
156 | sort_key='s_key')
157 | dy._batch_payload = []
158 |
159 | test_payload = {'p_key': 'abc', 's_key': 'def'}
160 |
161 | self.assertTrue(dy._check_payload_is_unique_by_partition_key_and_sort_key(test_payload))
162 |
163 | def test_sort_key_in_batch_partition_key_is_not(self):
164 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1,
165 | sort_key='s_key')
166 | dy._batch_payload = [{'PutRequest': {'Item': {'p_key': 'cde', 's_key': 'def'}}}]
167 |
168 | test_payload = {'p_key': 'abc', 's_key': 'def'}
169 |
170 | self.assertTrue(dy._check_payload_is_unique_by_partition_key_and_sort_key(test_payload))
171 |
172 | def test_sort_key_not_in_batch_partition_key_is(self):
173 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1,
174 | sort_key='s_key')
175 | dy._batch_payload = [{'PutRequest': {'Item': {'p_key': 'abc', 's_key': 'ghi'}}}]
176 |
177 | test_payload = {'p_key': 'abc', 's_key': 'def'}
178 |
179 | self.assertTrue(dy._check_payload_is_unique_by_partition_key_and_sort_key(test_payload))
180 |
181 | def test_sort_key_and_partition_key_in_batch(self):
182 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1,
183 | sort_key='s_key')
184 | dy._batch_payload = [{'PutRequest': {'Item': {'p_key': 'abc', 's_key': 'def'}}}]
185 |
186 | test_payload = {'p_key': 'abc', 's_key': 'def'}
187 |
188 | self.assertFalse(dy._check_payload_is_unique_by_partition_key_and_sort_key(test_payload))
189 |
190 |
191 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
192 | @patch('boto3_batch_utils.Base.boto3', Mock())
193 | @patch.object(BaseDispatcher, 'flush_payloads')
194 | class FlushPayloads(TestCase):
195 |
196 | def test(self, mock_flush_payloads):
197 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
198 | dy.flush_payloads()
199 | mock_flush_payloads.assert_called_once_with()
200 |
201 |
202 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
203 | @patch('boto3_batch_utils.Base.boto3', Mock())
204 | @patch.object(BaseDispatcher, '_batch_send_payloads')
205 | class BatchSendPayloads(TestCase):
206 |
207 | def test(self, mock_batch_send_payloads):
208 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
209 | test_batch = {'a_test': True}
210 | dy._batch_send_payloads(test_batch)
211 | mock_batch_send_payloads.assert_called_once_with({'RequestItems': {'test_table_name': test_batch}})
212 |
213 |
214 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
215 | @patch('boto3_batch_utils.Base.boto3', Mock())
216 | @patch.object(BaseDispatcher, '_process_batch_send_response')
217 | class ProcessBatchSendResponse(TestCase):
218 |
219 | def test_no_unprocessed_items(self, mock_base_process_batch_send_response):
220 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
221 | dy._send_individual_payload = Mock()
222 | test_response = {'UnprocessedItems': []}
223 | dy._process_batch_send_response(test_response)
224 | mock_base_process_batch_send_response.assert_not_called()
225 | dy._send_individual_payload.assert_not_called()
226 |
227 | def test_one_unprocessed_item(self, mock_base_process_batch_send_response):
228 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
229 | dy._send_individual_payload = Mock()
230 | test_response = {'UnprocessedItems': {'test_table_name': [{"PutRequest": {"Item": "TEST_ITEM"}}]}}
231 | dy._process_batch_send_response(test_response)
232 | mock_base_process_batch_send_response.assert_not_called()
233 | dy._send_individual_payload.assert_called_once_with("TEST_ITEM")
234 |
235 | def test_several_unprocessed_items(self, mock_base_process_batch_send_response):
236 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
237 | dy._send_individual_payload = Mock()
238 | test_response = {'UnprocessedItems': {
239 | 'test_table_name': [
240 | {"PutRequest": {"Item": "TEST_ITEM1"}},
241 | {"PutRequest": {"Item": "TEST_ITEM2"}},
242 | {"PutRequest": {"Item": "TEST_ITEM3"}}
243 | ]
244 | }}
245 | dy._process_batch_send_response(test_response)
246 | mock_base_process_batch_send_response.assert_not_called()
247 | dy._send_individual_payload.assert_has_calls([
248 | call("TEST_ITEM1"),
249 | call("TEST_ITEM2"),
250 | call("TEST_ITEM3")
251 | ])
252 |
253 |
254 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
255 | @patch('boto3_batch_utils.Base.boto3', Mock())
256 | class SendIndividualPayload(TestCase):
257 |
258 | def test_happy_path(self):
259 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
260 | dy._dynamo_table = Mock()
261 | dy._dynamo_table.put_item = Mock()
262 | test_payload = {"processed_payload": False}
263 | dy._send_individual_payload(test_payload)
264 | dy._dynamo_table.put_item.assert_called_once_with(**{'Item': test_payload})
265 |
266 | def test_client_error_retries_remaining(self):
267 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
268 | dy._dynamo_table = Mock()
269 | dy._dynamo_table.put_item.side_effect = [ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "Dynamo"),
270 | None]
271 | test_payload = {"processed_payload": False}
272 | dy._send_individual_payload(test_payload, retry=1)
273 | dy._dynamo_table.put_item.assert_has_calls([call(**{'Item': test_payload}), call(**{'Item': test_payload})])
274 |
275 | def test_client_error_no_retries_remaining(self):
276 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
277 | dy._dynamo_table = Mock()
278 | dy._dynamo_table.put_item.side_effect = [ClientError({'Error': {'Code': 500, 'Message': 'broken'}}, "Dynamo")]
279 | test_payload = {"processed_payload": False}
280 | dy._send_individual_payload(test_payload, retry=0)
281 | dy._dynamo_table.put_item.assert_called_once_with(**{'Item': test_payload})
282 | self.assertEqual([test_payload], dy.unprocessed_items)
283 |
284 |
285 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
286 | @patch('boto3_batch_utils.Base.boto3', Mock())
287 | @patch.object(BaseDispatcher, '_initialise_aws_client')
288 | class TestInitialiseAwsClient(TestCase):
289 |
290 | def test(self, mock_initialise_aws_client):
291 | dy = DynamoBatchDispatcher('test_table_name', 'p_key', max_batch_size=1)
292 | dy._aws_service = Mock()
293 | dy._aws_service.Table = Mock(return_value="test table")
294 |
295 |         dy._initialise_aws_client()
296 |
297 | mock_initialise_aws_client.assert_called_once()
298 | dy._aws_service.Table.assert_called_once_with('test_table_name')
299 | self.assertEqual('test table', dy._dynamo_table)
300 |
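For orientation, the interfaces exercised above compose into this usage sketch; the table, key names, and records are illustrative. Per the uniqueness tests, a payload repeating a partition key (and sort key, where one is configured) already in the current batch is skipped rather than written twice:

    from boto3_batch_utils.Dynamodb import DynamoBatchDispatcher

    dy = DynamoBatchDispatcher('my_table', 'p_key')
    dy.submit_payload({'p_key': 1, 'message': 'first'})
    dy.submit_payload({'record_id': 2}, partition_key_location='record_id')  # key held elsewhere
    dy.submit_payload({'p_key': 1, 'message': 'repeat'})  # skipped: p_key 1 already batched
    unprocessed = dy.flush_payloads()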
--------------------------------------------------------------------------------
/tests/unit_tests/test_kinesis.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock, call
3 |
4 | from json import dumps
5 |
6 | from boto3_batch_utils.Kinesis import KinesisBatchDispatcher
7 | from boto3_batch_utils.Base import BaseDispatcher
8 |
9 |
10 | class MockClient:
11 |
12 | def __init__(self, client_name):
13 | self.client_name = client_name + "_client"
14 |
15 | def put_records(self, Records=[], StreamName='string'):
16 | pass
17 |
18 | def put_record(self, StreamName='string', Data=b'bytes', PartitionKey='string'):
19 | pass
20 |
21 |
22 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
23 | @patch('boto3_batch_utils.Base.boto3', Mock())
24 | @patch('boto3_batch_utils.Kinesis.DecimalEncoder')
25 | @patch('boto3_batch_utils.Kinesis.dumps')
26 | @patch.object(BaseDispatcher, 'submit_payload')
27 | class SubmitPayload(TestCase):
28 |
29 | def test(self, mock_submit_payload, mock_json_dumps, mock_decimal_encoder):
30 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
31 | test_payload = {'test_part_key': 123}
32 | mock_json_dumps.return_value = "serialized_test_data"
33 | constructed_payload = {
34 | 'Data': "serialized_test_data",
35 | 'PartitionKey': '123'
36 | }
37 | kn.submit_payload(test_payload)
38 | mock_submit_payload.assert_called_once_with(constructed_payload)
39 | mock_json_dumps.assert_called_once_with(test_payload, cls=mock_decimal_encoder)
40 |
41 |
42 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
43 | @patch('boto3_batch_utils.Base.boto3', Mock())
44 | @patch.object(BaseDispatcher, 'flush_payloads')
45 | class FlushPayloads(TestCase):
46 |
47 | def test(self, mock_flush_payloads):
48 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
49 | kn.flush_payloads()
50 | mock_flush_payloads.assert_called_once_with()
51 |
52 |
53 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
54 | @patch('boto3_batch_utils.Base.boto3', Mock())
55 | @patch.object(BaseDispatcher, '_batch_send_payloads')
56 | class BatchSendPayloads(TestCase):
57 |
58 | def test_list(self, mock_base_batch_send_payloads):
59 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
60 | test_batch = [{"a_test": True}]
61 | kn._batch_send_payloads(test_batch)
62 | mock_base_batch_send_payloads.assert_called_once_with({'StreamName': 'test_stream', 'Records': test_batch})
63 |
64 | def test_dict(self, mock_base_batch_send_payloads):
65 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
66 | test_batch = {'StreamName': 'test_stream', 'Records': ['a', 'b', 'c']}
67 | kn._batch_send_payloads(test_batch)
68 | mock_base_batch_send_payloads.assert_called_once_with(test_batch)
69 |
70 | def test_with_retries(self, mock_base_batch_send_payloads):
71 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
72 | test_batch = {'StreamName': 'test_stream', 'Records': ['a', 'b', 'c']}
73 | kn._batch_send_payloads(test_batch, retry=3)
74 | mock_base_batch_send_payloads.assert_called_once_with(test_batch, 3)
75 |
76 |
77 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
78 | @patch('boto3_batch_utils.Base.boto3', Mock())
79 | class ProcessFailedPayloads(TestCase):
80 |
81 | def test_all_records_failed_in_first_batch_and_are_re_submitted(self):
82 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
83 | kn._batch_send_payloads = Mock()
84 | test_batch = [
85 | {"Id": 1}, {"Id": 2}, {"Id": 3}, {"Id": 4}, {"Id": 5},
86 | {"Id": 6}, {"Id": 7}, {"Id": 8}, {"Id": 9}, {"Id": 10}
87 | ]
88 | kn.batch_in_progress = test_batch
89 | test_response = {
90 | 'FailedRecordCount': 10,
91 | 'Records': [
92 | {'ErrorCode': 'ProvisionedThroughputExceededException',
93 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
94 | ' under aws_account_id.'},
95 | {'ErrorCode': 'ProvisionedThroughputExceededException',
96 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
97 | ' under aws_account_id.'},
98 | {'ErrorCode': 'ProvisionedThroughputExceededException',
99 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
100 | ' under aws_account_id.'},
101 | {'ErrorCode': 'ProvisionedThroughputExceededException',
102 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
103 | ' under aws_account_id.'},
104 | {'ErrorCode': 'ProvisionedThroughputExceededException',
105 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
106 | ' under aws_account_id.'},
107 | {'ErrorCode': 'ProvisionedThroughputExceededException',
108 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
109 | ' under aws_account_id.'},
110 | {'ErrorCode': 'ProvisionedThroughputExceededException',
111 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
112 | ' under aws_account_id.'},
113 | {'ErrorCode': 'ProvisionedThroughputExceededException',
114 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
115 | ' under aws_account_id.'},
116 | {'ErrorCode': 'ProvisionedThroughputExceededException',
117 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
118 | ' under aws_account_id.'},
119 | {'ErrorCode': 'ProvisionedThroughputExceededException',
120 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
121 | ' under aws_account_id.'}
122 | ]
123 | }
124 | kn._process_failed_payloads(test_response)
125 | kn._batch_send_payloads.assert_called_once_with(test_batch, retry=3)
126 |
127 | def test_some_records_are_rejected_some_are_successful(self):
128 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
129 | kn._batch_send_payloads = Mock()
130 | test_batch = [
131 | {"Id": 1}, {"Id": 2}, {"Id": 3}, {"Id": 4}, {"Id": 5},
132 | {"Id": 6}, {"Id": 7}, {"Id": 8}, {"Id": 9}, {"Id": 10}
133 | ]
134 | kn.batch_in_progress = test_batch
135 | test_response = {
136 | 'FailedRecordCount': 5,
137 | 'Records': [
138 | {'SequenceNumber': '49580022882545286363048362619667912448714664261560827906',
139 | 'ShardId': 'shardId-000000000000'},
140 | {'SequenceNumber': '49580022882545286363048362619669121374534278890735534082',
141 | 'ShardId': 'shardId-000000000000'},
142 | {'SequenceNumber': '49580022882545286363048362619670330300353893519910240258',
143 | 'ShardId': 'shardId-000000000000'},
144 | {'SequenceNumber': '49580022882545286363048362619671539226173508149084946434',
145 | 'ShardId': 'shardId-000000000000'},
146 | {'SequenceNumber': '49580022882545286363048362619673957077812737407434358786',
147 | 'ShardId': 'shardId-000000000000'},
148 | {'ErrorCode': 'ProvisionedThroughputExceededException',
149 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
150 | ' under aws_account_id.'},
151 | {'ErrorCode': 'ProvisionedThroughputExceededException',
152 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
153 | ' under aws_account_id.'},
154 | {'ErrorCode': 'ProvisionedThroughputExceededException',
155 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
156 | ' under aws_account_id.'},
157 | {'ErrorCode': 'ProvisionedThroughputExceededException',
158 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
159 | ' under aws_account_id.'},
160 | {'ErrorCode': 'ProvisionedThroughputExceededException',
161 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
162 | ' under aws_account_id.'}
163 | ]
164 | }
165 | kn._process_failed_payloads(test_response)
166 | kn._batch_send_payloads.assert_called_once_with([{"Id": 6}, {"Id": 7}, {"Id": 8}, {"Id": 9}, {"Id": 10}], retry=3)
167 |
168 | def test_two_records_are_rejected_the_rest_are_successful(self):
169 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
170 | kn._individual_dispatch_method = Mock()
171 | test_batch = [
172 | {'Data': dumps({"Id": 1}), 'PartitionKey': 'Id'},
173 | {'Data': dumps({"Id": 2}), 'PartitionKey': 'Id'},
174 | {'Data': dumps({"Id": 3}), 'PartitionKey': 'Id'},
175 | {'Data': dumps({"Id": 4}), 'PartitionKey': 'Id'},
176 | {'Data': dumps({"Id": 5}), 'PartitionKey': 'Id'},
177 | {'Data': dumps({"Id": 6}), 'PartitionKey': 'Id'},
178 | {'Data': dumps({"Id": 7}), 'PartitionKey': 'Id'}
179 | ]
180 | kn.batch_in_progress = test_batch
181 | test_response = {
182 | 'FailedRecordCount': 2,
183 | 'Records': [
184 | {'SequenceNumber': '49580022882545286363048362619667912448714664261560827906',
185 | 'ShardId': 'shardId-000000000000'},
186 | {'SequenceNumber': '49580022882545286363048362619669121374534278890735534082',
187 | 'ShardId': 'shardId-000000000000'},
188 | {'SequenceNumber': '49580022882545286363048362619670330300353893519910240258',
189 | 'ShardId': 'shardId-000000000000'},
190 | {'SequenceNumber': '49580022882545286363048362619671539226173508149084946434',
191 | 'ShardId': 'shardId-000000000000'},
192 | {'SequenceNumber': '49580022882545286363048362619673957077812737407434358786',
193 | 'ShardId': 'shardId-000000000000'},
194 | {'ErrorCode': 'ProvisionedThroughputExceededException',
195 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
196 | ' under aws_account_id.'},
197 | {'ErrorCode': 'ProvisionedThroughputExceededException',
198 | 'ErrorMessage': 'Rate exceeded for shard shardId-000000000000 in test_stream'
199 | ' under aws_account_id.'}
200 | ]
201 | }
202 | kn._process_failed_payloads(test_response)
203 | kn._individual_dispatch_method.assert_has_calls([
204 | call(**{'StreamName': 'test_stream', 'Data': dumps({"Id": 6}), 'PartitionKey': 'Id'}),
205 | call(**{'StreamName': 'test_stream', 'Data': dumps({"Id": 7}), 'PartitionKey': 'Id'})
206 | ])
207 |
208 |
209 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
210 | @patch('boto3_batch_utils.Base.boto3', Mock())
211 | class ProcessBatchSendResponse(TestCase):
212 |
213 | def test_no_failed_records_in_single_record_response(self):
214 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
215 | kn._process_failed_payloads = Mock()
216 | test_batch = [
217 | {"Id": 1}
218 | ]
219 | test_response = {
220 | 'FailedRecordCount': 0,
221 | 'Records': [1],
222 | 'EncryptionType': 'KMS'
223 | }
224 | kn.batch_in_progress = test_batch
225 | kn._process_batch_send_response(test_response)
226 | kn._process_failed_payloads.assert_not_called()
227 |
228 | def test_no_failed_records_in_response(self):
229 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
230 | kn._process_failed_payloads = Mock()
231 | test_batch = [
232 | {"Id": 1}, {"Id": 2}, {"Id": 3}, {"Id": 4}, {"Id": 5},
233 | {"Id": 6}, {"Id": 7}, {"Id": 8}, {"Id": 9}
234 | ]
235 | test_response = {
236 | 'FailedRecordCount': 0,
237 | 'Records': [1, 2, 3, 4, 5, 6, 7, 8, 9],
238 | 'EncryptionType': 'KMS'
239 | }
240 | kn.batch_in_progress = test_batch
241 | kn._process_batch_send_response(test_response)
242 | kn._process_failed_payloads.assert_not_called()
243 |
244 | def test_all_records_failed(self):
245 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
246 | kn._process_failed_payloads = Mock()
247 | test_batch = [
248 | {"Id": 1}, {"Id": 2}, {"Id": 3}, {"Id": 4}, {"Id": 5},
249 | {"Id": 6}, {"Id": 7}, {"Id": 8}, {"Id": 9}, {"Id": 10}
250 | ]
251 | test_response = {
252 | 'FailedRecordCount': 10,
253 | 'Records': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
254 | }
255 | kn.batch_in_progress = test_batch
256 | kn._process_batch_send_response(test_response)
257 | kn._process_failed_payloads.assert_called_once_with(test_response)
258 |
259 | def test_some_records_failed(self):
260 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
261 | kn._process_failed_payloads = Mock()
262 | test_batch = [
263 | {"Id": 1}, {"Id": 2}, {"Id": 3}, {"Id": 4}, {"Id": 5},
264 | {"Id": 6}, {"Id": 7}, {"Id": 8}, {"Id": 9}, {"Id": 10}
265 | ]
266 | test_response = {
267 | 'FailedRecordCount': 5,
268 | 'Records': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
269 | }
270 | kn.batch_in_progress = test_batch
271 | kn._process_batch_send_response(test_response)
272 | kn._process_failed_payloads.assert_called_once_with(test_response)
273 |
274 |
275 |
276 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
277 | @patch('boto3_batch_utils.Base.boto3', Mock())
278 | @patch.object(BaseDispatcher, '_send_individual_payload')
279 | class SendIndividualPayload(TestCase):
280 |
281 | def test(self, mock_send_individual_payload):
282 | kn = KinesisBatchDispatcher("test_stream", partition_key_identifier="test_part_key", max_batch_size=1)
283 | test_payload = {
284 | 'Data': "{'something': 'else'}",
285 | 'PartitionKey': 'Id'
286 | }
287 | kn._send_individual_payload(test_payload)
288 | expected_payload = dict(test_payload)  # copy, rather than alias, the submitted payload
289 | expected_payload['StreamName'] = 'test_stream'
290 | mock_send_individual_payload.assert_called_once_with(expected_payload, 4)
291 |
--------------------------------------------------------------------------------
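The Kinesis tests above pin down a submit-then-flush interface, with failed
records retried as a batch and small remainders re-sent individually. A
minimal usage sketch of that interface (the stream name, partition key
attribute, and payloads are illustrative assumptions, not taken from the
repository):

    from boto3_batch_utils.Kinesis import KinesisBatchDispatcher

    # 'id' is the payload attribute used as the Kinesis partition key.
    kinesis = KinesisBatchDispatcher('my_stream', partition_key_identifier='id')

    for n in range(25):
        kinesis.submit_payload({'id': n, 'value': n * n})

    # Dispatch whatever is still buffered below max_batch_size.
    kinesis.flush_payloads()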
/tests/unit_tests/test_sqs.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch, Mock, call
3 |
4 | from json import dumps
5 |
6 | from boto3_batch_utils.SQS import SQSBatchDispatcher, SQSFifoBatchDispatcher
7 | from boto3_batch_utils.Base import BaseDispatcher
8 |
9 |
10 | class MockClient:
11 |
12 | def __init__(self, client_name):
13 | self.client_name = client_name + "_client"
14 |
15 | def send_message_batch(self):
16 | pass
17 |
18 | def send_message(self):
19 | pass
20 |
21 | def get_queue_url(self, **kwargs):
22 | return {"QueueUrl": "test_url"}
23 |
24 |
25 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
26 | @patch('boto3_batch_utils.Base.boto3', Mock())
27 | class TestInit(TestCase):
28 |
29 | def test_standard_queue_type_initialisation(self):
30 | sqs = SQSBatchDispatcher('test_queue', max_batch_size=1)
31 | self.assertIsNone(sqs.queue_url)
32 | self.assertIsNone(sqs.batch_in_progress)
33 | self.assertFalse(sqs.fifo_queue)
34 |
35 | def test_fifo_queue_type_initialisation(self):
36 | sqs = SQSFifoBatchDispatcher('test_queue', max_batch_size=1)
37 | self.assertIsNone(sqs.queue_url)
38 | self.assertIsNone(sqs.batch_in_progress)
39 | self.assertTrue(sqs.fifo_queue)
40 |
41 |
42 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
43 | @patch('boto3_batch_utils.Base.boto3', Mock())
44 | @patch.object(BaseDispatcher, 'submit_payload')
45 | class SubmitPayload(TestCase):
46 |
47 | def test_standard_queue_with_delay_seconds(self, mock_submit_payload):
48 | sqs = SQSBatchDispatcher('test_queue', max_batch_size=1)
49 | test_message = {'something': 'else'}
50 | test_id = "123"
51 | test_delay = 3
52 | sqs.submit_payload(test_message, test_id, test_delay)
53 | mock_submit_payload.assert_called_once_with(
54 | {'Id': test_id, 'MessageBody': dumps(test_message), 'DelaySeconds': test_delay}
55 | )
56 |
57 | def test_standard_queue_without_delay_seconds(self, mock_submit_payload):
58 | sqs = SQSBatchDispatcher('test_queue', max_batch_size=1)
59 | test_message = {'something': 'else'}
60 | test_id = "123"
61 | sqs.submit_payload(test_message, test_id)
62 | mock_submit_payload.assert_called_once_with(
63 | {'Id': test_id, 'MessageBody': dumps(test_message)}
64 | )
65 |
66 | def test_fifo_queue_duplicate_message_id_is_ignored(self, mock_submit_payload):
67 | fifo = SQSFifoBatchDispatcher('test_queue', max_batch_size=2)
68 | test_message = {'something': 'else'}
69 | test_id = "abcdefg"
70 | fifo._batch_payload = [{
71 | 'Id': 'abcdefg',
72 | 'MessageBody': str(test_message),
73 | 'MessageGroupId': 'asdfg'
74 | }]
75 | fifo.submit_payload(test_message, test_id)
76 | mock_submit_payload.assert_not_called()
77 |
78 | def test_fifo_queue_duplicate_message_deduplication_id_is_ignored(self, mock_submit_payload):
79 | fifo = SQSFifoBatchDispatcher('test_queue', max_batch_size=2)
80 | test_message = {'something': 'else'}
81 | test_id = "123"
82 | fifo._batch_payload = [{
83 | 'Id': 'abcdefg',
84 | 'MessageBody': str(test_message),
85 | 'MessageGroupId': 'asdfg',
86 | 'MessageDeduplicationId': 'abc'
87 | }]
88 | fifo.submit_payload(test_message, test_id, message_deduplication_id="abc")
89 | mock_submit_payload.assert_not_called()
90 |
91 |
92 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
93 | @patch('boto3_batch_utils.Base.boto3', Mock())
94 | @patch.object(BaseDispatcher, 'flush_payloads')
95 | class FlushPayloads(TestCase):
96 |
97 | def test(self, mock_flush_payloads):
98 | sqs = SQSBatchDispatcher('test_queue', max_batch_size=1)
99 | sqs.flush_payloads()
100 | mock_flush_payloads.assert_called_once_with()
101 |
102 |
103 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
104 | @patch('boto3_batch_utils.Base.boto3', Mock())
105 | @patch.object(BaseDispatcher, '_batch_send_payloads')
106 | class BatchSendPayloads(TestCase):
107 |
108 | def test(self, mock_batch_send_payloads):
109 | sqs = SQSBatchDispatcher('test_queue', max_batch_size=1)
110 | sqs._aws_service = Mock()
111 | sqs._aws_service.get_queue_url = Mock(return_value={'QueueUrl': 'url:://queue'})
112 | test_batch = "a_test"
113 | sqs._batch_send_payloads(test_batch)
114 | sqs._aws_service.get_queue_url.assert_called_once_with(QueueName='test_queue')
115 | self.assertEqual('url:://queue', sqs.queue_url)
116 | mock_batch_send_payloads.assert_called_once_with({'QueueUrl': "url:://queue", 'Entries': test_batch})
117 |
118 |
119 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
120 | @patch('boto3_batch_utils.Base.boto3', Mock())
121 | class ProcessFailedPayloads(TestCase):
122 |
123 | def test_all_records_failed_in_first_batch_and_are_re_submitted(self):
124 | sqs = SQSBatchDispatcher('test_queue', max_batch_size=1)
125 | sqs._send_individual_payload = Mock()
126 | test_batch = [
127 | {'Id': '1', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
128 | {'Id': '2', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
129 | {'Id': '3', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
130 | {'Id': '4', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
131 | {'Id': '5', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
132 | {'Id': '6', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
133 | {'Id': '7', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
134 | {'Id': '8', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
135 | {'Id': '9', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
136 | {'Id': '10', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}
137 | ]
138 | sqs.batch_in_progress = test_batch
139 | test_response = {
140 | 'Successful': [],
141 | 'Failed': [
142 | {'Id': '1', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
143 | {'Id': '2', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
144 | {'Id': '3', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
145 | {'Id': '4', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
146 | {'Id': '5', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
147 | {'Id': '6', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
148 | {'Id': '7', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
149 | {'Id': '8', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
150 | {'Id': '9', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
151 | {'Id': '10', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
152 | ]
153 | }
154 | sqs._process_batch_send_response(test_response)
155 | sqs._send_individual_payload.assert_has_calls([
156 | call({'Id': '1', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
157 | call({'Id': '2', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
158 | call({'Id': '3', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
159 | call({'Id': '4', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
160 | call({'Id': '5', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
161 | call({'Id': '6', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
162 | call({'Id': '7', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
163 | call({'Id': '8', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
164 | call({'Id': '9', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
165 | call({'Id': '10', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
166 | ])
167 |
168 | def test_some_records_are_rejected_some_are_successful(self):
169 | sqs = SQSBatchDispatcher('test_queue', max_batch_size=1)
170 | sqs._send_individual_payload = Mock()
171 | test_batch = [
172 | {'Id': '1', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
173 | {'Id': '2', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
174 | {'Id': '3', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
175 | {'Id': '4', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
176 | {'Id': '5', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
177 | {'Id': '6', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
178 | {'Id': '7', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
179 | {'Id': '8', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
180 | {'Id': '9', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7},
181 | {'Id': '10', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}
182 | ]
183 | sqs.batch_in_progress = test_batch
184 | test_response = {
185 | 'Successful': [
186 | {'Id': '1', 'MessageId': '', 'MD5OfMessageBody': '', 'MD5OfMessageAttributes': '', 'SequenceNumber': ''},
187 | {'Id': '2', 'MessageId': '', 'MD5OfMessageBody': '', 'MD5OfMessageAttributes': '', 'SequenceNumber': ''},
188 | {'Id': '3', 'MessageId': '', 'MD5OfMessageBody': '', 'MD5OfMessageAttributes': '', 'SequenceNumber': ''},
189 | {'Id': '4', 'MessageId': '', 'MD5OfMessageBody': '', 'MD5OfMessageAttributes': '', 'SequenceNumber': ''},
190 | {'Id': '5', 'MessageId': '', 'MD5OfMessageBody': '', 'MD5OfMessageAttributes': '', 'SequenceNumber': ''},
191 | ],
192 | 'Failed': [
193 | {'Id': '6', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
194 | {'Id': '7', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
195 | {'Id': '8', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
196 | {'Id': '9', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
197 | {'Id': '10', 'SenderFault': True, 'Code': 'ABCD', 'Message': "Something bad happened here"},
198 | ]
199 | }
200 | sqs._process_batch_send_response(test_response)
201 | sqs._send_individual_payload.assert_has_calls([
202 | call({'Id': '6', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
203 | call({'Id': '7', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
204 | call({'Id': '8', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
205 | call({'Id': '9', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
206 | call({'Id': '10', 'MessageBody': {'something_to_send': 'etc'}, 'DelaySeconds': 7}),
207 | ])
208 |
209 |
210 | @patch('boto3_batch_utils.Base.boto3.client', MockClient)
211 | @patch('boto3_batch_utils.Base.boto3', Mock())
212 | @patch.object(BaseDispatcher, '_send_individual_payload')
213 | class SendIndividualPayload(TestCase):
214 |
215 | def test_standard_queue_with_delay_seconds(self, mock_send_individual_payload):
216 | sqs = SQSBatchDispatcher('test_queue', max_batch_size=1)
217 | sqs.queue_url = 'test_url'
218 | test_payload = {
219 | 'Id': 12345,
220 | 'MessageBody': "some_sort_of_payload",
221 | 'DelaySeconds': 99
222 | }
223 | sqs._send_individual_payload(test_payload)
224 | expected_converted_payload = {"QueueUrl": "test_url", "MessageBody": "some_sort_of_payload",
225 | "DelaySeconds": 99}
226 | mock_send_individual_payload.assert_called_once_with(expected_converted_payload, 4)
227 |
228 | def test_standard_queue_without_delay_seconds(self, mock_send_individual_payload):
229 | sqs = SQSBatchDispatcher('test_queue', max_batch_size=1)
230 | sqs.queue_url = 'test_url'
231 | test_payload = {
232 | 'Id': 12345,
233 | 'MessageBody': "some_sort_of_payload"
234 | }
235 | sqs._send_individual_payload(test_payload)
236 | expected_converted_payload = {"QueueUrl": "test_url", "MessageBody": "some_sort_of_payload"}
237 | mock_send_individual_payload.assert_called_once_with(expected_converted_payload, 4)
238 |
239 | def test_fifo_queue(self, mock_send_individual_payload):
240 | sqs = SQSFifoBatchDispatcher('test_queue', max_batch_size=1)
241 | sqs.queue_url = 'test_url'
242 | test_payload = {
243 | 'Id': 12345,
244 | 'MessageBody': 'some_sort_of_payload',
245 | 'MessageGroupId': 'unset'
246 | }
247 | sqs._send_individual_payload(test_payload)
248 | expected_converted_payload = {
249 | 'Id': 12345,
250 | 'QueueUrl': 'test_url',
251 | 'MessageBody': 'some_sort_of_payload',
252 | 'MessageGroupId': 'unset'
253 | }
254 | mock_send_individual_payload.assert_called_once_with(expected_converted_payload, 4)
255 |
--------------------------------------------------------------------------------
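The SQS tests above cover both queue flavours: the standard dispatcher accepts
an optional per-message delay in seconds, while the FIFO dispatcher drops any
message whose Id or MessageDeduplicationId already sits in the pending batch.
A minimal sketch of that interface (queue names and payloads are illustrative
assumptions):

    from boto3_batch_utils.SQS import SQSBatchDispatcher, SQSFifoBatchDispatcher

    sqs = SQSBatchDispatcher('my_queue', max_batch_size=10)
    sqs.submit_payload({'event': 'created'}, '1', 30)  # deliver after a 30s delay

    fifo = SQSFifoBatchDispatcher('my_queue.fifo', max_batch_size=10)
    fifo.submit_payload({'event': 'created'}, 'abc', message_deduplication_id='dedup-1')
    fifo.submit_payload({'event': 'created'}, 'abc', message_deduplication_id='dedup-1')  # ignored as a duplicate

    sqs.flush_payloads()
    fifo.flush_payloads()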
/tests/unit_tests/test_utils.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import patch
3 | from decimal import Decimal
4 | import json
5 | from boto3_batch_utils import utils
6 |
7 |
8 | class TestChunks(TestCase):
9 |
10 | def test_array_smaller_than_chunk_size(self):
11 | array = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
12 | batch_size = 20
13 | self.assertEqual([array], list(utils.chunks(array, batch_size)))
14 |
15 | def test_array_equal_to_chunk_size(self):
16 | array = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
17 | batch_size = 10
18 | self.assertEqual([array], list(utils.chunks(array, batch_size)))
19 |
20 | def test_array_greater_than_chunk_size_but_less_than_double(self):
21 | array = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
22 | batch_size = 8
23 | self.assertEqual([[1, 2, 3, 4, 5, 6, 7, 8], [9, 10]], list(utils.chunks(array, batch_size)))
24 |
25 | def test_array_greater_than_double_chunk_size_smaller_than_triple_chunk_size(self):
26 | array = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
27 | batch_size = 4
28 | self.assertEqual([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10]], list(utils.chunks(array, batch_size)))
29 |
30 |
31 | class TestConvertFloatsInListToDecimal(TestCase):
32 |
33 | def test_the_array_is_empty(self):
34 | array = []
35 | new_array = utils.convert_floats_in_list_to_decimals(array)
36 | self.assertEqual([], new_array)
37 |
38 | def test_single_item_in_array_is_not_a_float(self):
39 | array = [True]
40 | new_array = utils.convert_floats_in_list_to_decimals(array)
41 | self.assertEqual(array, new_array)
42 |
43 | def test_single_item_in_array_is_a_float(self):
44 | array = [float(5.0)]
45 | new_array = utils.convert_floats_in_list_to_decimals(array)
46 | self.assertEqual([Decimal(5.0)], new_array)
47 |
48 | def test_multiple_items_in_array_none_are_a_float(self):
49 | array = ["a", "b", "c", Decimal(6.7)]
50 | new_array = utils.convert_floats_in_list_to_decimals(array)
51 | self.assertEqual(array, new_array)
52 |
53 | def test_multiple_items_in_array_some_are_floats(self):
54 | array = ["a", "b", float(2.2), "c", float(5.5), "d"]
55 | new_array = utils.convert_floats_in_list_to_decimals(array)
56 | self.assertEqual(["a", "b", Decimal(str(2.2)), "c", Decimal(str(5.5)), "d"], new_array)
57 |
58 | def test_multiple_items_in_array_all_are_floats(self):
59 | array = [float(5), float(50), float(0.01), float(2.2), float(1.0), float(990)]
60 | new_array = utils.convert_floats_in_list_to_decimals(array)
61 | self.assertEqual([
62 | Decimal(str(5)), Decimal(str(50)), Decimal(str(0.01)),
63 | Decimal(str(2.2)), Decimal(str(1.0)), Decimal(str(990))], new_array)
64 |
65 | def test_some_items_are_lists_containing_floats(self):
66 | array = ["a", "b", ["rr", float(2.2)], "c", ["dd", ["gh", float(5.5)]], "d"]
67 | new_array = utils.convert_floats_in_list_to_decimals(array)
68 | self.assertEqual(["a", "b", ["rr", Decimal(str(2.2))], "c", ["dd", ["gh", Decimal(str(5.5))]], "d"], new_array)
69 |
70 | @patch('boto3_batch_utils.utils.convert_floats_in_dict_to_decimals')
71 | def test_some_items_are_dictionaries(self, mock_convert_floats_to_decimals_in_dict):
72 | mock_convert_floats_to_decimals_in_dict.side_effect = [{"sss": True}]
73 | array = ["a", "b", "c", {"sss": True}]
74 | new_array = utils.convert_floats_in_list_to_decimals(array)
75 | self.assertEqual(array, new_array)
76 | mock_convert_floats_to_decimals_in_dict.assert_called_once_with({"sss": True}, level=1)
77 |
78 |
79 | class TestConvertFloatsInDictToDecimal(TestCase):
80 |
81 | def test_empty_dict(self):
82 | d = {}
83 | new_d = utils.convert_floats_in_dict_to_decimals(d)
84 | self.assertEqual(d, new_d)
85 |
86 | def test_dict_with_no_floats(self):
87 | d = {'adsefsvs': True, 'dfgsdzfvzdsv': Decimal(4.4)}
88 | new_d = utils.convert_floats_in_dict_to_decimals(d)
89 | self.assertEqual(d, new_d)
90 |
91 | def test_dict_with_floats(self):
92 | d = {'sgervv': float(6.7), 'fsrgs': False, 'csfwcda': None}
93 | new_d = utils.convert_floats_in_dict_to_decimals(d)
94 | self.assertEqual({'sgervv': Decimal(str(6.7)),
95 | 'fsrgs': False, 'csfwcda': None}, new_d)
96 |
97 | def test_dict_with_nested_dicts_with_floats(self):
98 | d = {'adsefsvs': True, 'dfgsdzfvzdsv': {'sgervv': float(6.7), 'fsrgs': False, 'csfwcda': None}}
99 | new_d = utils.convert_floats_in_dict_to_decimals(d)
100 | self.assertEqual({'adsefsvs': True, 'dfgsdzfvzdsv': {
101 | 'sgervv': Decimal(str(6.7)), 'fsrgs': False, 'csfwcda': None}},
102 | new_d)
103 |
104 | @patch('boto3_batch_utils.utils.convert_floats_in_list_to_decimals')
105 | def test_dict_with_nested_lists(self, mock_convert_floats_in_list_to_decimals):
106 | mock_convert_floats_in_list_to_decimals.side_effect = [[Decimal(3.4), Decimal(66.9)]]
107 | d = {'ersrgsed': 'sgsdvfzdf', 'crvzvf': [Decimal(3.4), float(66.9)]}
108 | new_d = utils.convert_floats_in_dict_to_decimals(d)
109 | self.assertEqual({'ersrgsed': 'sgsdvfzdf', 'crvzvf': [
110 | Decimal(3.399999999999999911182158029987476766109466552734375),
111 | Decimal(66.900000000000005684341886080801486968994140625)
112 | ]}, new_d)
113 | mock_convert_floats_in_list_to_decimals.assert_called_once_with([Decimal(3.4), float(66.9)], level=1)
114 |
115 |
116 | class TestGetByteSizeOfString(TestCase):
117 |
118 | def test_one(self):
119 | test_string = "1234567890"
120 | response = utils.get_byte_size_of_string(test_string)
121 | self.assertEqual(10, response)
122 |
123 | def test_two(self):
124 | test_string = "n*ZSLt%HsC$tG!gd!*xL3SrF!30&PiVN3*&e%bN#2qZ317f2^nUUNpphmDBSwOl*qk*tPV#l6$k0Mzxg$*dK2G7s$J!9aNQc&vK"
125 | response = utils.get_byte_size_of_string(test_string)
126 | self.assertEqual(99, response)
127 |
128 | def test_three(self):
129 | test_string = json.dumps({'dict': True, 'complex': 0, 'stuff': 'etc'})
130 | response = utils.get_byte_size_of_string(test_string)
131 | self.assertEqual(44, response)
132 |
133 |
134 | @patch("boto3_batch_utils.utils.get_byte_size_of_string", return_value=5)
135 | class TestGetByteSizeOfDict(TestCase):
136 |
137 | def test_dict(self, mock_get_byte_size_of_string):
138 | test_dict = {'dict': True, 'complex': 0, 'stuff': 'etc'}
139 |
140 | response = utils.get_byte_size_of_dict_or_list(test_dict)
141 |
142 | mock_get_byte_size_of_string.assert_called_once_with('{"dict": true, "complex": 0, "stuff": "etc"}')
143 | self.assertEqual(5, response)
144 |
145 | def test_list(self, mock_get_byte_size_of_string):
146 | test_dict = [{'dict': True, 'complex': 0, 'stuff': 'etc'}]
147 |
148 | response = utils.get_byte_size_of_dict_or_list(test_dict)
149 |
150 | mock_get_byte_size_of_string.assert_called_once_with('[{"dict": true, "complex": 0, "stuff": "etc"}]')
151 | self.assertEqual(5, response)
152 |
--------------------------------------------------------------------------------
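The utils tests fix the conversion semantics: floats become Decimals via their
string form (so DynamoDB, which rejects Python floats, receives exact decimal
values), and nested lists and dicts are walked recursively. A small sketch of
the behaviour those tests assert (the record itself is illustrative):

    from decimal import Decimal
    from boto3_batch_utils import utils

    record = {'price': 6.7, 'tags': ['a', 2.2], 'active': True}
    converted = utils.convert_floats_in_dict_to_decimals(record)

    # Floats are replaced by Decimal(str(value)); other types pass through.
    assert converted == {'price': Decimal('6.7'),
                         'tags': ['a', Decimal('2.2')],
                         'active': True}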