├── .editorconfig
├── .envrc
├── .gitignore
├── .pre-commit-config.yaml
├── .travis.yml
├── LICENSE
├── MANIFEST.in
├── Makefile
├── Pipfile
├── Pipfile.lock
├── README.md
├── artwork
└── python-lambda.svg
├── aws_lambda
├── __init__.py
├── aws_lambda.py
├── helpers.py
└── project_templates
│ ├── config.yaml
│ ├── event.json
│ └── service.py
├── scripts
└── lambda
├── setup.cfg
├── setup.py
└── tests
├── __init__.py
├── dev_requirements.txt
├── functional
└── __init__.py
└── unit
├── __init__.py
├── test_LambdaContext.py
└── test_readHelper.py
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | # Unix-style newlines with a newline ending every file
4 | [*]
5 | end_of_line = lf
6 | insert_final_newline = true
7 |
8 | # Matches multiple files with brace expansion notation
9 | # Set default charset
10 | [*.{js,py}]
11 | charset = utf-8
12 | trim_trailing_whitespace = true
13 |
14 | # 4 space indentation
15 | [*.py]
16 | indent_style = space
17 | indent_size = 4
18 |
19 | [*.rst]
20 | trim_trailing_whitespace = true
21 |
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
1 | layout pipenv
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | requirements-dev.txt
27 | requirements.txt
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .coverage
43 | .coverage.*
44 | .cache
45 | nosetests.xml
46 | coverage.xml
47 | *,cover
48 | .hypothesis/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 |
57 | # Sphinx documentation
58 | docs/_build/
59 |
60 | # PyBuilder
61 | target/
62 |
63 | # Jetbrains/PyCharm project files
64 | .idea/
65 |
66 | # vim swap files
67 | .*.sw?
68 | aws_lambda/.DS_Store
69 | .DS_Store
70 | .vscode/
71 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: 'https://github.com/pre-commit/pre-commit-hooks'
3 | rev: v2.4.0
4 | hooks:
5 | - id: pretty-format-json
6 | name: 'Pretty format JSON'
7 | args:
8 | - '--no-sort-keys'
9 | - '--autofix'
10 | - '--indent=2'
11 | - id: trailing-whitespace
12 | name: 'Fix trailing whitespace'
13 | exclude: setup.cfg
14 | - id: end-of-file-fixer
15 | name: 'Fix missing EOF'
16 | exclude: setup.cfg
17 | - id: check-executables-have-shebangs
18 | name: 'Check exeutables for shebangs'
19 | - id: check-merge-conflict
20 | name: 'Check for merge conflict fragments'
21 | - id: check-case-conflict
22 | name: 'Check for filesystem character case conflicts'
23 | - id: detect-private-key
24 | name: 'Check for cleartext private keys stored'
25 | - id: flake8
26 | additional_dependencies:
27 | - flake8-mutable
28 | - flake8-type-annotations
29 | - flake8-eradicate
30 | - flake8-bugbear
31 | name: 'Check for Python style guideline violations'
32 | - id: check-json
33 | name: 'Validate JSON'
34 | - id: check-ast
35 | name: 'Check Python abstract syntax tree'
36 | - repo: 'https://github.com/asottile/reorder_python_imports'
37 | rev: v1.8.0
38 | hooks:
39 | - id: reorder-python-imports
40 | name: 'Reorder Python imports'
41 | - repo: 'https://github.com/pre-commit/mirrors-autopep8'
42 | rev: v1.4.4
43 | hooks:
44 | - id: autopep8
45 | name: 'Pretty format Python'
46 | args:
47 | - '--in-place'
48 | - '--aggressive'
49 | - '--aggressive'
50 | - '--experimental'
51 | - '--remove-all-unused-imports'
52 | - '--ignore-init-module-imports'
53 | - '--remove-unused-variable'
54 | - '--ignore E231'
55 | - repo: https://github.com/psf/black
56 | rev: stable
57 | hooks:
58 | - id: black
59 | name: 'Ruthlessly format Python'
60 | language_version: python3.7
61 | args:
62 | - '--line-length=79'
63 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | cache:
3 | - apt
4 | - pip
5 | python:
6 | - "3.6"
7 | - "3.7"
8 | - "3.8"
9 | before_install:
10 | - pip install --upgrade pip
11 | - pip install pipenv --upgrade
12 | script:
13 | - pipenv install --dev
14 | - pytest
15 | sudo: false
16 | notifications:
17 | slack: watchcloud:rNoT5kJJakPqwLSKuev6oa4C
18 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | ISC License
2 |
3 | Copyright (c) 2017, Nick Ficano
4 |
5 | Permission to use, copy, modify, and/or distribute this software for any
6 | purpose with or without fee is hereby granted, provided that the above
7 | copyright notice and this permission notice appear in all copies.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include requirements.txt
2 | include LICENSE
3 | include README.md
4 | include python/boto/endpoints.json
5 | include *.txt
6 | include aws_lambda/project_template/config.yaml
7 | recursive-include tests *.json *.py *.txt *.yaml
8 | recursive-include templates *
9 | recursive-exclude * __pycache__
10 |
11 | recursive-include docs *.md conf.py Makefile make.bat
12 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | dev:
2 | pipenv install --dev
3 |
4 | pipenv:
5 | pip install pipenv
6 | pipenv install --dev
7 |
8 | deploy-patch: clean requirements bumpversion-patch upload clean
9 |
10 | deploy-minor: clean requirements bumpversion-minor upload clean
11 |
12 | deploy-major: clean requirements bumpversion-major upload clean
13 |
14 | requirements:
15 | pipenv_to_requirements
16 |
17 | bumpversion-patch:
18 | bumpversion patch
19 | git push
20 | git push --tags
21 |
22 | bumpversion-minor:
23 | bumpversion minor
24 | git push
25 | git push --tags
26 |
27 | bumpversion-major:
28 | bumpversion major
29 | git push
30 | git push --tags
31 |
32 | clean: clean-build clean-pyc
33 |
34 | clean-build:
35 | rm -fr build/
36 | rm -fr dist/
37 | rm -fr .eggs/
38 | find . -name '*.egg-info' -exec rm -fr {} +
39 | find . -name '*.egg' -exec rm -f {} +
40 | find . -name '*.DS_Store' -exec rm -f {} +
41 | rm -f requirements.*
42 |
43 | clean-pyc:
44 | find . -name '*.pyc' -exec rm -f {} +
45 | find . -name '*.pyo' -exec rm -f {} +
46 | find . -name '*~' -exec rm -f {} +
47 | find . -name '__pycache__' -exec rm -fr {} +
48 | find . -name '.pytest_cache' -exec rm -fr {} +
49 | find . -name '.mypy_cache' -exec rm -fr {} +
50 |
51 | upload:
52 | python setup.py sdist bdist_wheel
53 | twine upload dist/*
54 |
--------------------------------------------------------------------------------
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | name = "pypi"
3 | url = "https://pypi.org/simple"
4 | verify_ssl = true
5 |
6 | [dev-packages]
7 | twine = "*"
8 | flake8 = "*"
9 | black = "*"
10 | bumpversion = "*"
11 | pipenv-to-requirements = "*"
12 | wheel = "*"
13 | pytest = "*"
14 | codecov = "*"
15 | coveralls = "*"
16 | pytest-cov = "*"
17 |
18 | [packages]
19 | boto3 = ">=1.4.4"
20 | click = "==7.1.2"
21 | docutils = "*"
22 | jmespath = "==0.10.0"
23 | pyaml = "==20.4.0"
24 | python-dateutil = "==2.8.1"
25 | PyYAML = "==5.3.1"
26 |
27 | [pipenv]
28 | allow_prereleases = true
29 |
--------------------------------------------------------------------------------
/Pipfile.lock:
--------------------------------------------------------------------------------
1 | {
2 | "_meta": {
3 | "hash": {
4 | "sha256": "7013bd722325666a778f221f5ee2957a554771203428b2a50c40fe07436a04dd"
5 | },
6 | "pipfile-spec": 6,
7 | "requires": {},
8 | "sources": [
9 | {
10 | "name": "pypi",
11 | "url": "https://pypi.org/simple",
12 | "verify_ssl": true
13 | }
14 | ]
15 | },
16 | "default": {
17 | "boto3": {
18 | "hashes": [
19 | "sha256:7b59ca28e17256ccd900c9a8c02061ce538a24b44b4d816e803e6431f8550ee0",
20 | "sha256:e6915ac164a05367787db2416eeaa8662ae5e0b9c53902dbf23b407686ef5028"
21 | ],
22 | "index": "pypi",
23 | "version": "==1.16.48"
24 | },
25 | "botocore": {
26 | "hashes": [
27 | "sha256:ad4adfcc195b5401d84b0c65d3a89e507c1d54c201879c8761ff10ef5c361e21",
28 | "sha256:d3694f6ef918def8082513e5ef309cd6cd83b612e9984e3a66e8adc98c650a92"
29 | ],
30 | "version": "==1.19.63"
31 | },
32 | "click": {
33 | "hashes": [
34 | "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
35 | "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
36 | ],
37 | "index": "pypi",
38 | "version": "==7.1.2"
39 | },
40 | "docutils": {
41 | "hashes": [
42 | "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
43 | "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
44 | ],
45 | "index": "pypi",
46 | "version": "==0.16"
47 | },
48 | "jmespath": {
49 | "hashes": [
50 | "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9",
51 | "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"
52 | ],
53 | "index": "pypi",
54 | "version": "==0.10.0"
55 | },
56 | "pyaml": {
57 | "hashes": [
58 | "sha256:29a5c2a68660a799103d6949167bd6c7953d031449d08802386372de1db6ad71",
59 | "sha256:67081749a82b72c45e5f7f812ee3a14a03b3f5c25ff36ec3b290514f8c4c4b99"
60 | ],
61 | "index": "pypi",
62 | "version": "==20.4.0"
63 | },
64 | "python-dateutil": {
65 | "hashes": [
66 | "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
67 | "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
68 | ],
69 | "index": "pypi",
70 | "version": "==2.8.1"
71 | },
72 | "pyyaml": {
73 | "hashes": [
74 | "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
75 | "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76",
76 | "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",
77 | "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e",
78 | "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648",
79 | "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",
80 | "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f",
81 | "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2",
82 | "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee",
83 | "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a",
84 | "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d",
85 | "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",
86 | "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"
87 | ],
88 | "index": "pypi",
89 | "version": "==5.3.1"
90 | },
91 | "s3transfer": {
92 | "hashes": [
93 | "sha256:35627b86af8ff97e7ac27975fe0a98a312814b46c6333d8a6b889627bcd80994",
94 | "sha256:efa5bd92a897b6a8d5c1383828dca3d52d0790e0756d49740563a3fb6ed03246"
95 | ],
96 | "version": "==0.3.7"
97 | },
98 | "six": {
99 | "hashes": [
100 | "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
101 | "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
102 | ],
103 | "version": "==1.16.0"
104 | },
105 | "urllib3": {
106 | "hashes": [
107 | "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c",
108 | "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"
109 | ],
110 | "markers": "python_version != '3.4'",
111 | "version": "==1.26.5"
112 | }
113 | },
114 | "develop": {
115 | "appdirs": {
116 | "hashes": [
117 | "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41",
118 | "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"
119 | ],
120 | "version": "==1.4.4"
121 | },
122 | "attrs": {
123 | "hashes": [
124 | "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1",
125 | "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"
126 | ],
127 | "version": "==21.2.0"
128 | },
129 | "black": {
130 | "hashes": [
131 | "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"
132 | ],
133 | "index": "pypi",
134 | "version": "==20.8b1"
135 | },
136 | "bleach": {
137 | "hashes": [
138 | "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125",
139 | "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433"
140 | ],
141 | "version": "==3.3.0"
142 | },
143 | "bump2version": {
144 | "hashes": [
145 | "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410",
146 | "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"
147 | ],
148 | "version": "==1.0.1"
149 | },
150 | "bumpversion": {
151 | "hashes": [
152 | "sha256:4ba55e4080d373f80177b4dabef146c07ce73c7d1377aabf9d3c3ae1f94584a6",
153 | "sha256:4eb3267a38194d09f048a2179980bb4803701969bff2c85fa8f6d1ce050be15e"
154 | ],
155 | "index": "pypi",
156 | "version": "==0.6.0"
157 | },
158 | "certifi": {
159 | "hashes": [
160 | "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee",
161 | "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"
162 | ],
163 | "version": "==2021.5.30"
164 | },
165 | "cffi": {
166 | "hashes": [
167 | "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813",
168 | "sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373",
169 | "sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69",
170 | "sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f",
171 | "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06",
172 | "sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05",
173 | "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea",
174 | "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee",
175 | "sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0",
176 | "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396",
177 | "sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7",
178 | "sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f",
179 | "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73",
180 | "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315",
181 | "sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76",
182 | "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1",
183 | "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49",
184 | "sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed",
185 | "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892",
186 | "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482",
187 | "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058",
188 | "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5",
189 | "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53",
190 | "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045",
191 | "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3",
192 | "sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55",
193 | "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5",
194 | "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e",
195 | "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c",
196 | "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369",
197 | "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827",
198 | "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053",
199 | "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa",
200 | "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4",
201 | "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322",
202 | "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132",
203 | "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62",
204 | "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa",
205 | "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0",
206 | "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396",
207 | "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e",
208 | "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991",
209 | "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6",
210 | "sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc",
211 | "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1",
212 | "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406",
213 | "sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333",
214 | "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d",
215 | "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"
216 | ],
217 | "version": "==1.14.5"
218 | },
219 | "chardet": {
220 | "hashes": [
221 | "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
222 | "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
223 | ],
224 | "version": "==4.0.0"
225 | },
226 | "click": {
227 | "hashes": [
228 | "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
229 | "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
230 | ],
231 | "index": "pypi",
232 | "version": "==7.1.2"
233 | },
234 | "codecov": {
235 | "hashes": [
236 | "sha256:6cde272454009d27355f9434f4e49f238c0273b216beda8472a65dc4957f473b",
237 | "sha256:ba8553a82942ce37d4da92b70ffd6d54cf635fc1793ab0a7dc3fecd6ebfb3df8"
238 | ],
239 | "index": "pypi",
240 | "version": "==2.1.11"
241 | },
242 | "colorama": {
243 | "hashes": [
244 | "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b",
245 | "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"
246 | ],
247 | "version": "==0.4.4"
248 | },
249 | "coverage": {
250 | "hashes": [
251 | "sha256:00368e6328ebff76197fff5f4d5704b44098f89d8d99a67a349ad6674ec0b157",
252 | "sha256:0389690e0a1c94e9a246dc3130355d70805e51ca509db1bf07fbde27efb33aa4",
253 | "sha256:065d2181f44392893d37d0a4f9ff60b485d705f733356d0a2fb292a58c6f2e0f",
254 | "sha256:082febdba717c769da92d5e19e14a659ebef6daab19b67fced304b7b8d2475e2",
255 | "sha256:0a35ae0d590effb7cc96e7d6935ae2ab8a51526a111fbe0f12d1671aa9fdc377",
256 | "sha256:142493f0400a0bd5acf03c52971229e937323c3e24c372800ae1c44a503e0921",
257 | "sha256:186f53367a08e8d24cc534c7cbfa43a82d1618a48dec2e0c56e80577ec1888fe",
258 | "sha256:2163a00bcd613e95e118c01ea2811f705fbbacf1904d657b24d306879e2303d3",
259 | "sha256:24ecf342b1e23de259d81b3adc83578935babeb54f6950c9bd9534b12443a49c",
260 | "sha256:2c24d3e09f433817ddd0cb2e8f82f8b42cd09a8ac558462fedf99be479ed4851",
261 | "sha256:2d741575de4a13869c9d4a685235bacc897c94afd3703e2ad4fdc362f37e87da",
262 | "sha256:305ca73c09dd84054a3a8f53d5b70e0325b5b303245d0b96ed505698dc7e8ea7",
263 | "sha256:4bf1d0a390de707f8bfd49efdcdac9366ce77ed64cb35b344f58b1ec62517317",
264 | "sha256:50d90d6b753debb7568621125aad4e5fb418e7bdcb0dba3fa6f4ee82994b35d4",
265 | "sha256:5a2079bca21fa959608223b90cf2f95ce686a6497fb12bfaaa7bb24c3e298199",
266 | "sha256:60c6d433f0357db7ed2a2a698fb75b8ce919ce547d6d6bc79c576e090f509768",
267 | "sha256:66cfae29bccea703f02d8997f60d71e236c5a321588f5aa5a318bd88ca23dc0a",
268 | "sha256:6d6fc990962559de1f3685eb3e365ca60f2e3257bfd145bf675c566b8ebb1944",
269 | "sha256:703b126f3ad20c463b545e199c4da460695630da5fdfd949de6a6269b45eabab",
270 | "sha256:730cee22c41852b90948343cdfd183db1e96a9de69fd4dabec3532c582afea68",
271 | "sha256:7e4a16bde8a3b7424b2955130f5a6c29e741e7138fe05c5d9d72efc356076a80",
272 | "sha256:801e8277958bc2e6cc1f2443a20a2a97f79583aa64524b130e1c0de44c287ca9",
273 | "sha256:80baa69a78d5696c60b72dee44ac3d5ccf75ee82e84d018938ddf642d036a6a8",
274 | "sha256:80c00ce9cef80afbf18d16cb3052f5601ba8d087501d829169eecb33c153346a",
275 | "sha256:89db5a374d793344087732207ee15869549486b2148e3e2e6effe22146351fcd",
276 | "sha256:917b98cc5725ea2e0b88c74d34182589a9be07092cb35b861ea9e74189174f71",
277 | "sha256:9398f8fd89f6f260e94e57559df1885b8200b18312824b617a8789e0f5e7dc74",
278 | "sha256:95b6f212bb0c7379f1f2f6e47c722fbdc7355d8b7488a68649e83dfa29522704",
279 | "sha256:9f23313f3e494475581d46de3b8b6bdcf618ee1df412490e779a9aa0a6c72162",
280 | "sha256:9f6f26e5b129bb0218aab30d368d6ead750517a457986f8854b1df4b4c318098",
281 | "sha256:a502693c83a2c6558bc45b4c2dc01a00c9b99cb3cf846913438933a44af174fc",
282 | "sha256:aa4999130a8e892fa9051edc18bf4daa0a2839d3f3de2dcfcbf0ae4619ee3b5e",
283 | "sha256:b10be0b80784c1beb8061e5ce938d8511a182125de5fc695a60f0561b984d361",
284 | "sha256:b1f7b23a606aaf2464eb81c23b5b20623e2ba44b4aaca6ea9bfe00e84a1a5264",
285 | "sha256:b78c8d232d97dbc8ad3a3d94cc15fccabe9a331685d76d2e5cb5284acc4a5feb",
286 | "sha256:b88fa862817035ad7921f2641c27a85dab12cc685ad3ef29c0caaf5b3d10a868",
287 | "sha256:b93fb9137070899b5f10d6487724f4427b5945983a785e1e2f1102c5e175c516",
288 | "sha256:b9639e16c1bc4eb8a78b3b30df4146bb78df5d52ba1b7454b634abd89aede6cc",
289 | "sha256:baa3b6be365c97f80d92a397cb8963dcd9bc22d101b39784e77a9cad093812f8",
290 | "sha256:c06c5758bae454a49dc3e7917804b46c31bb4a72cedfc8e7b4f17a318b3de9d6",
291 | "sha256:c544153709e93ea7e9edcefee72f5afcf484a9cb526067065f9419419f4a3694",
292 | "sha256:c6c74260ba130f7c20a340e8f9e544b0941621641f53edcf69e4602e12c9f29e",
293 | "sha256:d040615ff5c02ffd97ba9f0f73b9db34c09b8142fbfdd363b2a79fa6a554242c",
294 | "sha256:d85774b1ac09ec1d958e63baa436cc4c90e2e910294847ba51dcc3ca3ca04a63",
295 | "sha256:e508bb216eee8350e77b436f9f99c4f2d8335ecb51483f5ffd8bf5e84aaa56d1",
296 | "sha256:ea1cb38b1a52392ebb4e93eaf4a44b3cfdec35cca3f78a9a599f27b7f27031e2",
297 | "sha256:ec310e0029d530d1043f638b7a326b349884421572626bc2909408da7b0d03e5",
298 | "sha256:ed04b79f53fa975660f1a598120c504a0f4529170eeaf0d823fcc1f06f4d2e0f",
299 | "sha256:f4909ee1ddabed351f0fa55063a7dbe498001e2245a9602d9fb0fd74afecdca9",
300 | "sha256:f49ae9e19737493911e7f8e551310f719f463e442ea1ec92fe0804c62066a7e8",
301 | "sha256:f4c93e6102087dda4931fcd50fa4ad44e8e43e09419290c5f05cc2c690961ebf",
302 | "sha256:fa1b639d85af4794cb20d7cfd4c5ae38e94a418b17a2318a1992b470fb68260d"
303 | ],
304 | "version": "==5.6b1"
305 | },
306 | "coveralls": {
307 | "hashes": [
308 | "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc",
309 | "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617"
310 | ],
311 | "index": "pypi",
312 | "version": "==2.2.0"
313 | },
314 | "cryptography": {
315 | "hashes": [
316 | "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d",
317 | "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959",
318 | "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6",
319 | "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873",
320 | "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2",
321 | "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713",
322 | "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1",
323 | "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177",
324 | "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250",
325 | "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca",
326 | "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d",
327 | "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"
328 | ],
329 | "version": "==3.4.7"
330 | },
331 | "distlib": {
332 | "hashes": [
333 | "sha256:106fef6dc37dd8c0e2c0a60d3fca3e77460a48907f335fa28420463a6f799736",
334 | "sha256:23e223426b28491b1ced97dc3bbe183027419dfc7982b4fa2f05d5f3ff10711c"
335 | ],
336 | "version": "==0.3.2"
337 | },
338 | "docopt": {
339 | "hashes": [
340 | "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
341 | ],
342 | "version": "==0.6.2"
343 | },
344 | "docutils": {
345 | "hashes": [
346 | "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
347 | "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
348 | ],
349 | "index": "pypi",
350 | "version": "==0.16"
351 | },
352 | "filelock": {
353 | "hashes": [
354 | "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59",
355 | "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"
356 | ],
357 | "version": "==3.0.12"
358 | },
359 | "flake8": {
360 | "hashes": [
361 | "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839",
362 | "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"
363 | ],
364 | "index": "pypi",
365 | "version": "==3.8.4"
366 | },
367 | "idna": {
368 | "hashes": [
369 | "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
370 | "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
371 | ],
372 | "version": "==2.10"
373 | },
374 | "importlib-metadata": {
375 | "hashes": [
376 | "sha256:960d52ba7c21377c990412aca380bf3642d734c2eaab78a2c39319f67c6a5786",
377 | "sha256:e592faad8de1bda9fe920cf41e15261e7131bcf266c30306eec00e8e225c1dd5"
378 | ],
379 | "version": "==4.4.0"
380 | },
381 | "iniconfig": {
382 | "hashes": [
383 | "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3",
384 | "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"
385 | ],
386 | "version": "==1.1.1"
387 | },
388 | "jeepney": {
389 | "hashes": [
390 | "sha256:7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657",
391 | "sha256:aec56c0eb1691a841795111e184e13cad504f7703b9a64f63020816afa79a8ae"
392 | ],
393 | "markers": "sys_platform == 'linux'",
394 | "version": "==0.6.0"
395 | },
396 | "keyring": {
397 | "hashes": [
398 | "sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8",
399 | "sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48"
400 | ],
401 | "version": "==23.0.1"
402 | },
403 | "mccabe": {
404 | "hashes": [
405 | "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
406 | "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
407 | ],
408 | "version": "==0.6.1"
409 | },
410 | "mypy-extensions": {
411 | "hashes": [
412 | "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d",
413 | "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"
414 | ],
415 | "version": "==0.4.3"
416 | },
417 | "packaging": {
418 | "hashes": [
419 | "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5",
420 | "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"
421 | ],
422 | "version": "==20.9"
423 | },
424 | "pathspec": {
425 | "hashes": [
426 | "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd",
427 | "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"
428 | ],
429 | "version": "==0.8.1"
430 | },
431 | "pbr": {
432 | "hashes": [
433 | "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd",
434 | "sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4"
435 | ],
436 | "version": "==5.6.0"
437 | },
438 | "pipenv": {
439 | "hashes": [
440 | "sha256:05958fadcd70b2de6a27542fcd2bd72dd5c59c6d35307fdac3e06361fb06e30e",
441 | "sha256:d180f5be4775c552fd5e69ae18a9d6099d9dafb462efe54f11c72cb5f4d5e977"
442 | ],
443 | "version": "==2021.5.29"
444 | },
445 | "pipenv-to-requirements": {
446 | "hashes": [
447 | "sha256:1c18682a4ec70eb07261d2b558df3ee22ea00192663a1b98fd1e45e22946c163",
448 | "sha256:cb70471a17a7d4658caffe989539413313d51df1b3a54838bcd7e7d3ab3fcc18"
449 | ],
450 | "index": "pypi",
451 | "version": "==0.9.0"
452 | },
453 | "pkginfo": {
454 | "hashes": [
455 | "sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4",
456 | "sha256:9fdbea6495622e022cc72c2e5e1b735218e4ffb2a2a69cde2694a6c1f16afb75"
457 | ],
458 | "version": "==1.7.0"
459 | },
460 | "pluggy": {
461 | "hashes": [
462 | "sha256:265a94bf44ca13662f12fcd1b074c14d4b269a712f051b6f644ef7e705d6735f",
463 | "sha256:467f0219e89bb5061a8429c6fc5cf055fa3983a0e68e84a1d205046306b37d9e"
464 | ],
465 | "version": "==1.0.0.dev0"
466 | },
467 | "py": {
468 | "hashes": [
469 | "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3",
470 | "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"
471 | ],
472 | "version": "==1.10.0"
473 | },
474 | "pycodestyle": {
475 | "hashes": [
476 | "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367",
477 | "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"
478 | ],
479 | "version": "==2.6.0"
480 | },
481 | "pycparser": {
482 | "hashes": [
483 | "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
484 | "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
485 | ],
486 | "version": "==2.20"
487 | },
488 | "pyflakes": {
489 | "hashes": [
490 | "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92",
491 | "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"
492 | ],
493 | "version": "==2.2.0"
494 | },
495 | "pygments": {
496 | "hashes": [
497 | "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f",
498 | "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"
499 | ],
500 | "version": "==2.9.0"
501 | },
502 | "pyparsing": {
503 | "hashes": [
504 | "sha256:1c6409312ce2ce2997896af5756753778d5f1603666dba5587804f09ad82ed27",
505 | "sha256:f4896b4cc085a1f8f8ae53a1a90db5a86b3825ff73eb974dffee3d9e701007f4"
506 | ],
507 | "version": "==3.0.0b2"
508 | },
509 | "pytest": {
510 | "hashes": [
511 | "sha256:1969f797a1a0dbd8ccf0fecc80262312729afea9c17f1d70ebf85c5e76c6f7c8",
512 | "sha256:66e419b1899bc27346cb2c993e12c5e5e8daba9073c1fbce33b9807abc95c306"
513 | ],
514 | "index": "pypi",
515 | "version": "==6.2.1"
516 | },
517 | "pytest-cov": {
518 | "hashes": [
519 | "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a",
520 | "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"
521 | ],
522 | "index": "pypi",
523 | "version": "==2.12.1"
524 | },
525 | "readme-renderer": {
526 | "hashes": [
527 | "sha256:63b4075c6698fcfa78e584930f07f39e05d46f3ec97f65006e430b595ca6348c",
528 | "sha256:92fd5ac2bf8677f310f3303aa4bce5b9d5f9f2094ab98c29f13791d7b805a3db"
529 | ],
530 | "version": "==29.0"
531 | },
532 | "regex": {
533 | "hashes": [
534 | "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5",
535 | "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79",
536 | "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31",
537 | "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500",
538 | "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11",
539 | "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14",
540 | "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3",
541 | "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439",
542 | "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c",
543 | "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82",
544 | "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711",
545 | "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093",
546 | "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a",
547 | "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb",
548 | "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8",
549 | "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17",
550 | "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000",
551 | "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d",
552 | "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480",
553 | "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc",
554 | "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0",
555 | "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9",
556 | "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765",
557 | "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e",
558 | "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a",
559 | "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07",
560 | "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f",
561 | "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac",
562 | "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7",
563 | "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed",
564 | "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968",
565 | "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7",
566 | "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2",
567 | "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4",
568 | "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87",
569 | "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8",
570 | "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10",
571 | "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29",
572 | "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605",
573 | "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6",
574 | "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"
575 | ],
576 | "version": "==2021.4.4"
577 | },
578 | "requests": {
579 | "hashes": [
580 | "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804",
581 | "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
582 | ],
583 | "version": "==2.25.1"
584 | },
585 | "requests-toolbelt": {
586 | "hashes": [
587 | "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f",
588 | "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"
589 | ],
590 | "version": "==0.9.1"
591 | },
592 | "rfc3986": {
593 | "hashes": [
594 | "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835",
595 | "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"
596 | ],
597 | "version": "==1.5.0"
598 | },
599 | "secretstorage": {
600 | "hashes": [
601 | "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f",
602 | "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"
603 | ],
604 | "markers": "sys_platform == 'linux'",
605 | "version": "==3.3.1"
606 | },
607 | "six": {
608 | "hashes": [
609 | "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
610 | "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
611 | ],
612 | "version": "==1.16.0"
613 | },
614 | "toml": {
615 | "hashes": [
616 | "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
617 | "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
618 | ],
619 | "version": "==0.10.2"
620 | },
621 | "tqdm": {
622 | "hashes": [
623 | "sha256:736524215c690621b06fc89d0310a49822d75e599fcd0feb7cc742b98d692493",
624 | "sha256:cd5791b5d7c3f2f1819efc81d36eb719a38e0906a7380365c556779f585ea042"
625 | ],
626 | "version": "==4.61.0"
627 | },
628 | "twine": {
629 | "hashes": [
630 | "sha256:2f6942ec2a17417e19d2dd372fc4faa424c87ee9ce49b4e20c427eb00a0f3f41",
631 | "sha256:fcffa8fc37e8083a5be0728371f299598870ee1eccc94e9a25cef7b1dcfa8297"
632 | ],
633 | "index": "pypi",
634 | "version": "==3.3.0"
635 | },
636 | "typed-ast": {
637 | "hashes": [
638 | "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace",
639 | "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff",
640 | "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266",
641 | "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528",
642 | "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6",
643 | "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808",
644 | "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4",
645 | "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363",
646 | "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341",
647 | "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04",
648 | "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41",
649 | "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e",
650 | "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3",
651 | "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899",
652 | "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805",
653 | "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c",
654 | "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c",
655 | "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39",
656 | "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a",
657 | "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3",
658 | "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7",
659 | "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f",
660 | "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075",
661 | "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0",
662 | "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40",
663 | "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428",
664 | "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927",
665 | "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3",
666 | "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f",
667 | "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"
668 | ],
669 | "version": "==1.4.3"
670 | },
671 | "typing-extensions": {
672 | "hashes": [
673 | "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497",
674 | "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342",
675 | "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"
676 | ],
677 | "version": "==3.10.0.0"
678 | },
679 | "urllib3": {
680 | "hashes": [
681 | "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c",
682 | "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"
683 | ],
684 | "markers": "python_version != '3.4'",
685 | "version": "==1.26.5"
686 | },
687 | "virtualenv": {
688 | "hashes": [
689 | "sha256:14fdf849f80dbb29a4eb6caa9875d476ee2a5cf76a5f5415fa2f1606010ab467",
690 | "sha256:2b0126166ea7c9c3661f5b8e06773d28f83322de7a3ff7d06f0aed18c9de6a76"
691 | ],
692 | "version": "==20.4.7"
693 | },
694 | "virtualenv-clone": {
695 | "hashes": [
696 | "sha256:07e74418b7cc64f4fda987bf5bc71ebd59af27a7bc9e8a8ee9fd54b1f2390a27",
697 | "sha256:665e48dd54c84b98b71a657acb49104c54e7652bce9c1c4f6c6976ed4c827a29"
698 | ],
699 | "version": "==0.5.4"
700 | },
701 | "webencodings": {
702 | "hashes": [
703 | "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78",
704 | "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"
705 | ],
706 | "version": "==0.5.1"
707 | },
708 | "wheel": {
709 | "hashes": [
710 | "sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e",
711 | "sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e"
712 | ],
713 | "index": "pypi",
714 | "version": "==0.36.2"
715 | },
716 | "zipp": {
717 | "hashes": [
718 | "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76",
719 | "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"
720 | ],
721 | "version": "==3.4.1"
722 | }
723 | }
724 | }
725 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 | Python-lambda is a toolset for developing and deploying *serverless* Python code in AWS Lambda.
13 |
14 | # A call for contributors
15 | With python-lambda and pytube both continuing to gain momentum, I'm calling for
16 | contributors to help build out new features, review pull requests, fix bugs,
17 | and maintain overall code quality. If you're interested, please email me at
18 | nficano[at]gmail.com.
19 |
20 | # Description
21 |
22 | AWS Lambda is a service that allows you to write Python, Java, or Node.js code
23 | that gets executed in response to events like http requests or files uploaded
24 | to S3.
25 |
26 | Working with Lambda is relatively easy, but the process of bundling and
27 | deploying your code is not as simple as it could be.
28 |
29 | The *Python-Lambda* library takes away the guess work of developing your
30 | Python-Lambda services by providing you a toolset to streamline the annoying
31 | parts.
32 |
33 | # Requirements
34 |
35 | * Python 2.7, >= 3.6 (At the time of writing this, these are the Python runtimes supported by AWS Lambda).
36 | * Pip (\~8.1.1)
37 | * Virtualenv (\~15.0.0)
38 | * Virtualenvwrapper (\~4.7.1)
39 |
40 |
41 | # Getting Started
42 |
43 | First, you must create an IAM Role on your AWS account called
44 | ``lambda_basic_execution`` with the ``LambdaBasicExecution`` policy attached.
45 |
46 | On your computer, create a new virtualenv and project folder.
47 |
48 | ```bash
49 | $ mkvirtualenv pylambda
50 | (pylambda) $ mkdir pylambda
51 | ```
52 |
53 | Next, download *Python-Lambda* using pip via pypi.
54 |
55 | ```bash
56 | (pylambda) $ pip install python-lambda
57 | ```
58 |
59 | From your ``pylambda`` directory, run the following to bootstrap your project.
60 |
61 | ```bash
62 | (pylambda) $ lambda init
63 | ```
64 |
65 | This will create the following files: ``event.json``, ``__init__.py``,
66 | ``service.py``, and ``config.yaml``.
67 |
68 | Let's begin by opening ``config.yaml`` in the text editor of your choice. For
69 | the purpose of this tutorial, the only required information is
70 | ``aws_access_key_id`` and ``aws_secret_access_key``. You can find these by
71 | logging into the AWS management console.
72 |
73 | Next let's open ``service.py``, in here you'll find the following function:
74 |
75 | ```python
76 | def handler(event, context):
77 | # Your code goes here!
78 | e = event.get('e')
79 | pi = event.get('pi')
80 | return e + pi
81 | ```
82 |
83 | This is the handler function; this is the function AWS Lambda will invoke in
84 | response to an event. You will notice that in the sample code ``e`` and ``pi``
85 | are values in a ``dict``. AWS Lambda uses the ``event`` parameter to pass in
86 | event data to the handler.
87 |
88 | So if, for example, your function is responding to an http request, ``event``
89 | will be the ``POST`` JSON data and if your function returns something, the
90 | contents will be in your http response payload.
91 |
92 | Next let's open the ``event.json`` file:
93 |
94 | ```json
95 | {
96 | "pi": 3.14,
97 | "e": 2.718
98 | }
99 | ```
100 | Here you'll find the values of ``e`` and ``pi`` that are being referenced in
101 | the sample code.
102 |
103 | If you now try and run:
104 |
105 | ```bash
106 | (pylambda) $ lambda invoke -v
107 | ```
108 |
109 | You will get:
110 | ```bash
111 | # 5.858
112 | # execution time: 0.00000310s
113 | # function execution timeout: 15s
114 | ```
115 |
116 | As you probably put together, the ``lambda invoke`` command grabs the values
117 | stored in the ``event.json`` file and passes them to your function.
118 |
119 | The ``event.json`` file should help you develop your Lambda service locally.
120 | You can specify an alternate ``event.json`` file by passing the
121 | ``--event-file=.json`` argument to ``lambda invoke``.
122 |
123 | When you're ready to deploy your code to Lambda simply run:
124 |
125 | ```bash
126 | (pylambda) $ lambda deploy
127 | ```
128 |
129 | The deploy script will evaluate your virtualenv and identify your project
130 | dependencies. It will package these up along with your handler function to a
131 | zip file that it then uploads to AWS Lambda.
132 |
133 | You can now log into the
134 | [AWS Lambda management console](https://console.aws.amazon.com/lambda/) to
135 | verify the code deployed successfully.
136 |
137 | ### Wiring to an API endpoint
138 |
139 | If you're looking to develop a simple microservice you can easily wire your
140 | function up to an http endpoint.
141 |
142 | Begin by navigating to your [AWS Lambda management console](https://console.aws.amazon.com/lambda/) and
143 | clicking on your function. Click the API Endpoints tab and click "Add API endpoint".
144 |
145 | Under API endpoint type select "API Gateway".
146 |
147 | Next change Method to ``POST`` and Security to "Open" and click submit (NOTE:
148 | you should secure this for use in production, open security is used for demo
149 | purposes).
150 |
151 | At last you need to change the return value of the function to comply with the
152 | standard defined for the API Gateway endpoint, the function should now look
153 | like this:
154 |
155 | ```
156 | def handler(event, context):
157 | # Your code goes here!
158 | e = event.get('e')
159 | pi = event.get('pi')
160 | return {
161 | "statusCode": 200,
162 | "headers": { "Content-Type": "application/json"},
163 | "body": e + pi
164 | }
165 | ```
166 |
167 | Now try and run:
168 |
169 | ```bash
170 | $ curl --header "Content-Type:application/json" \
171 | --request POST \
172 | --data '{"pi": 3.14, "e": 2.718}' \
173 | https://
174 | # 5.8580000000000005
175 | ```
176 |
177 | ### Environment Variables
178 | Lambda functions support environment variables. In order to set environment
179 | variables for your deployed code to use, you can configure them in
180 | ``config.yaml``. To load the value for the environment variable at the time of
181 | deployment (instead of hard coding them in your configuration file), you can
182 | use local environment values (see 'env3' in example code below).
183 |
184 | ```yaml
185 | environment_variables:
186 | env1: foo
187 | env2: baz
188 | env3: ${LOCAL_ENVIRONMENT_VARIABLE_NAME}
189 | ```
190 |
191 | This would create environment variables in the lambda instance upon deploy. If
192 | your functions don't need environment variables, simply leave this section out
193 | of your config.
194 |
195 | ### Uploading to S3
196 | You may find that you do not need the toolkit to fully
197 | deploy your Lambda or that your code bundle is too large to upload via the API.
198 | You can use the ``upload`` command to send the bundle to an S3 bucket of your
199 | choosing. Before doing this, you will need to set the following variables in
200 | ``config.yaml``:
201 |
202 | ```yaml
203 | role: basic_s3_upload
204 | bucket_name: 'example-bucket'
205 | s3_key_prefix: 'path/to/file/'
206 | ```
207 | Your role must have ``s3:PutObject`` permission on the bucket/key that you
208 | specify for the upload to work properly. Once you have that set, you can
209 | execute ``lambda upload`` to initiate the transfer.
210 |
211 | ### Deploying via S3
212 | You can also choose to use S3 as your source for Lambda deployments. This can
213 | be done by issuing ``lambda deploy-s3`` with the same variables/AWS permissions
214 | you'd set for executing the ``upload`` command.
215 |
216 | ## Development
217 | Development of "python-lambda" is facilitated exclusively on GitHub.
218 | Contributions in the form of patches, tests and feature creation and/or
219 | requests are very welcome and highly encouraged. Please open an issue if this
220 | tool does not function as you'd expect.
221 |
222 | ### Environment Setup
223 | 1. [Install pipenv](https://github.com/pypa/pipenv)
224 | 2. [Install direnv](https://direnv.net/)
225 | 3. [Install Precommit](https://pre-commit.com/#install) (optional but preferred)
226 | 4. ``cd`` into the project and enter "direnv allow" when prompted. This will begin
227 | installing all the development dependancies.
228 | 5. If you installed pre-commit, run ``pre-commit install`` inside the project
229 | directory to setup the githooks.
230 |
231 | ### Releasing to Pypi
232 | Once you pushed your chances to master, run **one** of the following:
233 |
234 | ```sh
235 | # If you're installing a major release:
236 | make deploy-major
237 |
238 | # If you're installing a minor release:
239 | make deploy-minor
240 |
241 | # If you're installing a patch release:
242 | make deploy-patch
243 | ```
244 |
--------------------------------------------------------------------------------
/artwork/python-lambda.svg:
--------------------------------------------------------------------------------
1 |
2 |
28 |
--------------------------------------------------------------------------------
/aws_lambda/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | __author__ = "Nick Ficano"
3 | __email__ = "nficano@gmail.com"
4 | __version__ = "11.8.0"
5 |
6 | from .aws_lambda import (
7 | deploy,
8 | deploy_s3,
9 | invoke,
10 | init,
11 | build,
12 | upload,
13 | cleanup_old_versions,
14 | )
15 |
16 | # Set default logging handler to avoid "No handler found" warnings.
17 | import logging
18 |
19 | try: # Python 2.7+
20 | from logging import NullHandler
21 | except ImportError:
22 |
23 | class NullHandler(logging.Handler):
24 | def emit(self, record):
25 | pass
26 |
27 |
28 | logging.getLogger(__name__).addHandler(NullHandler())
29 |
--------------------------------------------------------------------------------
/aws_lambda/aws_lambda.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import json
3 | import logging
4 | import os
5 | import subprocess
6 | import sys
7 | import time
8 | from collections import defaultdict
9 |
10 | from shutil import copy
11 | from shutil import copyfile
12 | from shutil import copystat
13 | from shutil import copytree
14 | from tempfile import mkdtemp
15 |
16 | import boto3
17 | import botocore
18 | import yaml
19 | import sys
20 |
21 | from .helpers import archive
22 | from .helpers import get_environment_variable_value
23 | from .helpers import LambdaContext
24 | from .helpers import mkdir
25 | from .helpers import read
26 | from .helpers import timestamp
27 |
28 |
29 | ARN_PREFIXES = {
30 | "cn-north-1": "aws-cn",
31 | "cn-northwest-1": "aws-cn",
32 | "us-gov-west-1": "aws-us-gov",
33 | }
34 |
35 | log = logging.getLogger(__name__)
36 |
37 |
38 | def load_source(module_name, module_path):
39 | """Loads a python module from the path of the corresponding file."""
40 |
41 | if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
42 | import importlib.util
43 | spec = importlib.util.spec_from_file_location(module_name, module_path)
44 | module = importlib.util.module_from_spec(spec)
45 | spec.loader.exec_module(module)
46 | elif sys.version_info[0] == 3 and sys.version_info[1] < 5:
47 | import importlib.machinery
48 | loader = importlib.machinery.SourceFileLoader(module_name, module_path)
49 | module = loader.load_module()
50 | return module
51 |
52 |
53 | def cleanup_old_versions(
54 | src, keep_last_versions, config_file="config.yaml", profile_name=None,
55 | ):
56 | """Deletes old deployed versions of the function in AWS Lambda.
57 |
58 | Won't delete $Latest and any aliased version
59 |
60 | :param str src:
61 | The path to your Lambda ready project (folder must contain a valid
62 | config.yaml and handler module (e.g.: service.py).
63 | :param int keep_last_versions:
64 | The number of recent versions to keep and not delete
65 | """
66 | if keep_last_versions <= 0:
67 | print("Won't delete all versions. Please do this manually")
68 | else:
69 | path_to_config_file = os.path.join(src, config_file)
70 | cfg = read_cfg(path_to_config_file, profile_name)
71 |
72 | profile_name = cfg.get("profile")
73 | aws_access_key_id = cfg.get("aws_access_key_id")
74 | aws_secret_access_key = cfg.get("aws_secret_access_key")
75 |
76 | client = get_client(
77 | "lambda",
78 | profile_name,
79 | aws_access_key_id,
80 | aws_secret_access_key,
81 | cfg.get("region"),
82 | )
83 |
84 | response = client.list_versions_by_function(
85 | FunctionName=cfg.get("function_name"),
86 | )
87 | versions = response.get("Versions")
88 | if len(response.get("Versions")) < keep_last_versions:
89 | print("Nothing to delete. (Too few versions published)")
90 | else:
91 | version_numbers = [
92 | elem.get("Version") for elem in versions[1:-keep_last_versions]
93 | ]
94 | for version_number in version_numbers:
95 | try:
96 | client.delete_function(
97 | FunctionName=cfg.get("function_name"),
98 | Qualifier=version_number,
99 | )
100 | except botocore.exceptions.ClientError as e:
101 | print(f"Skipping Version {version_number}: {e}")
102 |
103 |
104 | def deploy(
105 | src,
106 | requirements=None,
107 | local_package=None,
108 | config_file="config.yaml",
109 | profile_name=None,
110 | preserve_vpc=False,
111 | ):
112 | """Deploys a new function to AWS Lambda.
113 |
114 | :param str src:
115 | The path to your Lambda ready project (folder must contain a valid
116 | config.yaml and handler module (e.g.: service.py).
117 | :param str local_package:
118 | The path to a local package with should be included in the deploy as
119 | well (and/or is not available on PyPi)
120 | """
121 | # Load and parse the config file.
122 | path_to_config_file = os.path.join(src, config_file)
123 | cfg = read_cfg(path_to_config_file, profile_name)
124 |
125 | # Copy all the pip dependencies required to run your code into a temporary
126 | # folder then add the handler file in the root of this directory.
127 | # Zip the contents of this folder into a single file and output to the dist
128 | # directory.
129 | path_to_zip_file = build(
130 | src,
131 | config_file=config_file,
132 | requirements=requirements,
133 | local_package=local_package,
134 | )
135 |
136 | existing_config = get_function_config(cfg)
137 | if existing_config:
138 | update_function(
139 | cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc
140 | )
141 | else:
142 | create_function(cfg, path_to_zip_file)
143 |
144 |
145 | def deploy_s3(
146 | src,
147 | requirements=None,
148 | local_package=None,
149 | config_file="config.yaml",
150 | profile_name=None,
151 | preserve_vpc=False,
152 | ):
153 | """Deploys a new function via AWS S3.
154 |
155 | :param str src:
156 | The path to your Lambda ready project (folder must contain a valid
157 | config.yaml and handler module (e.g.: service.py).
158 | :param str local_package:
159 | The path to a local package with should be included in the deploy as
160 | well (and/or is not available on PyPi)
161 | """
162 | # Load and parse the config file.
163 | path_to_config_file = os.path.join(src, config_file)
164 | cfg = read_cfg(path_to_config_file, profile_name)
165 |
166 | # Copy all the pip dependencies required to run your code into a temporary
167 | # folder then add the handler file in the root of this directory.
168 | # Zip the contents of this folder into a single file and output to the dist
169 | # directory.
170 | path_to_zip_file = build(
171 | src,
172 | config_file=config_file,
173 | requirements=requirements,
174 | local_package=local_package,
175 | )
176 |
177 | use_s3 = True
178 | s3_file = upload_s3(cfg, path_to_zip_file, use_s3)
179 | existing_config = get_function_config(cfg)
180 | if existing_config:
181 | update_function(
182 | cfg,
183 | path_to_zip_file,
184 | existing_config,
185 | use_s3=use_s3,
186 | s3_file=s3_file,
187 | preserve_vpc=preserve_vpc,
188 | )
189 | else:
190 | create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file)
191 |
192 |
193 | def upload(
194 | src,
195 | requirements=None,
196 | local_package=None,
197 | config_file="config.yaml",
198 | profile_name=None,
199 | ):
200 | """Uploads a new function to AWS S3.
201 |
202 | :param str src:
203 | The path to your Lambda ready project (folder must contain a valid
204 | config.yaml and handler module (e.g.: service.py).
205 | :param str local_package:
206 | The path to a local package with should be included in the deploy as
207 | well (and/or is not available on PyPi)
208 | """
209 | # Load and parse the config file.
210 | path_to_config_file = os.path.join(src, config_file)
211 | cfg = read_cfg(path_to_config_file, profile_name)
212 |
213 | # Copy all the pip dependencies required to run your code into a temporary
214 | # folder then add the handler file in the root of this directory.
215 | # Zip the contents of this folder into a single file and output to the dist
216 | # directory.
217 | path_to_zip_file = build(
218 | src,
219 | config_file=config_file,
220 | requirements=requirements,
221 | local_package=local_package,
222 | )
223 |
224 | upload_s3(cfg, path_to_zip_file)
225 |
226 |
227 | def invoke(
228 | src,
229 | event_file="event.json",
230 | config_file="config.yaml",
231 | profile_name=None,
232 | verbose=False,
233 | ):
234 | """Simulates a call to your function.
235 |
236 | :param str src:
237 | The path to your Lambda ready project (folder must contain a valid
238 | config.yaml and handler module (e.g.: service.py).
239 | :param str alt_event:
240 | An optional argument to override which event file to use.
241 | :param bool verbose:
242 | Whether to print out verbose details.
243 | """
244 | # Load and parse the config file.
245 | path_to_config_file = os.path.join(src, config_file)
246 | cfg = read_cfg(path_to_config_file, profile_name)
247 |
248 | # Set AWS_PROFILE environment variable based on `--profile` option.
249 | if profile_name:
250 | os.environ["AWS_PROFILE"] = profile_name
251 |
252 | # Load environment variables from the config file into the actual
253 | # environment.
254 | env_vars = cfg.get("environment_variables")
255 | if env_vars:
256 | for key, value in env_vars.items():
257 | os.environ[key] = get_environment_variable_value(value)
258 |
259 | # Load and parse event file.
260 | path_to_event_file = os.path.join(src, event_file)
261 | event = read(path_to_event_file, loader=json.loads)
262 |
263 | # Tweak to allow module to import local modules
264 | try:
265 | sys.path.index(src)
266 | except ValueError:
267 | sys.path.append(src)
268 |
269 | handler = cfg.get("handler")
270 | # Inspect the handler string (.) and translate it
271 | # into a function we can execute.
272 | fn = get_callable_handler_function(src, handler)
273 |
274 | timeout = cfg.get("timeout")
275 | if timeout:
276 | context = LambdaContext(cfg.get("function_name"), timeout)
277 | else:
278 | context = LambdaContext(cfg.get("function_name"))
279 |
280 | start = time.time()
281 | results = fn(event, context)
282 | end = time.time()
283 |
284 | print("{0}".format(results))
285 | if verbose:
286 | print(
287 | "\nexecution time: {:.8f}s\nfunction execution "
288 | "timeout: {:2}s".format(end - start, cfg.get("timeout", 15))
289 | )
290 |
291 |
292 | def init(src, minimal=False):
293 | """Copies template files to a given directory.
294 |
295 | :param str src:
296 | The path to output the template lambda project files.
297 | :param bool minimal:
298 | Minimal possible template files (excludes event.json).
299 | """
300 |
301 | templates_path = os.path.join(
302 | os.path.dirname(os.path.abspath(__file__)), "project_templates",
303 | )
304 | for filename in os.listdir(templates_path):
305 | if (minimal and filename == "event.json") or filename.endswith(".pyc"):
306 | continue
307 | dest_path = os.path.join(templates_path, filename)
308 |
309 | if not os.path.isdir(dest_path):
310 | copy(dest_path, src)
311 |
312 |
313 | def build(
314 | src,
315 | requirements=None,
316 | local_package=None,
317 | config_file="config.yaml",
318 | profile_name=None,
319 | ):
320 | """Builds the file bundle.
321 |
322 | :param str src:
323 | The path to your Lambda ready project (folder must contain a valid
324 | config.yaml and handler module (e.g.: service.py).
325 | :param str local_package:
326 | The path to a local package with should be included in the deploy as
327 | well (and/or is not available on PyPi)
328 | """
329 | # Load and parse the config file.
330 | path_to_config_file = os.path.join(src, config_file)
331 | cfg = read_cfg(path_to_config_file, profile_name)
332 |
333 | # Get the absolute path to the output directory and create it if it doesn't
334 | # already exist.
335 | dist_directory = cfg.get("dist_directory", "dist")
336 | path_to_dist = os.path.join(src, dist_directory)
337 | mkdir(path_to_dist)
338 |
339 | # Combine the name of the Lambda function with the current timestamp to use
340 | # for the output filename.
341 | function_name = cfg.get("function_name")
342 | output_filename = "{0}-{1}.zip".format(timestamp(), function_name)
343 |
344 | path_to_temp = mkdtemp(prefix="aws-lambda")
345 | pip_install_to_target(
346 | path_to_temp, requirements=requirements, local_package=local_package,
347 | )
348 |
349 | # Hack for Zope.
350 | if "zope" in os.listdir(path_to_temp):
351 | print(
352 | "Zope packages detected; fixing Zope package paths to "
353 | "make them importable.",
354 | )
355 | # Touch.
356 | with open(os.path.join(path_to_temp, "zope/__init__.py"), "wb"):
357 | pass
358 |
359 | # Gracefully handle whether ".zip" was included in the filename or not.
360 | output_filename = (
361 | "{0}.zip".format(output_filename)
362 | if not output_filename.endswith(".zip")
363 | else output_filename
364 | )
365 |
366 | # Allow definition of source code directories we want to build into our
367 | # zipped package.
368 | build_config = defaultdict(**cfg.get("build", {}))
369 | build_source_directories = build_config.get("source_directories", "")
370 | build_source_directories = (
371 | build_source_directories
372 | if build_source_directories is not None
373 | else ""
374 | )
375 | source_directories = [
376 | d.strip() for d in build_source_directories.split(",")
377 | ]
378 |
379 | files = []
380 | for filename in os.listdir(src):
381 | if os.path.isfile(filename):
382 | if filename == ".DS_Store":
383 | continue
384 | if filename == config_file:
385 | continue
386 | print("Bundling: %r" % filename)
387 | files.append(os.path.join(src, filename))
388 | elif os.path.isdir(filename) and filename in source_directories:
389 | print("Bundling directory: %r" % filename)
390 | files.append(os.path.join(src, filename))
391 |
392 | # "cd" into `temp_path` directory.
393 | os.chdir(path_to_temp)
394 | for f in files:
395 | if os.path.isfile(f):
396 | _, filename = os.path.split(f)
397 |
398 | # Copy handler file into root of the packages folder.
399 | copyfile(f, os.path.join(path_to_temp, filename))
400 | copystat(f, os.path.join(path_to_temp, filename))
401 | elif os.path.isdir(f):
402 | src_path_length = len(src) + 1
403 | destination_folder = os.path.join(
404 | path_to_temp, f[src_path_length:]
405 | )
406 | copytree(f, destination_folder)
407 |
408 | # Zip them together into a single file.
409 | # TODO: Delete temp directory created once the archive has been compiled.
410 | path_to_zip_file = archive("./", path_to_dist, output_filename)
411 | return path_to_zip_file
412 |
413 |
414 | def get_callable_handler_function(src, handler):
415 | """Translate a string of the form "module.function" into a callable
416 | function.
417 |
418 | :param str src:
419 | The path to your Lambda project containing a valid handler file.
420 | :param str handler:
421 | A dot delimited string representing the `.`.
422 | """
423 |
424 | # "cd" into `src` directory.
425 | os.chdir(src)
426 |
427 | module_name, function_name = handler.split(".")
428 | filename = get_handler_filename(handler)
429 |
430 | path_to_module_file = os.path.join(src, filename)
431 | module = load_source(module_name, path_to_module_file)
432 | return getattr(module, function_name)
433 |
434 |
435 | def get_handler_filename(handler):
436 | """Shortcut to get the filename from the handler string.
437 |
438 | :param str handler:
439 | A dot delimited string representing the `.`.
440 | """
441 | module_name, _ = handler.split(".")
442 | return "{0}.py".format(module_name)
443 |
444 |
445 | def _install_packages(path, packages):
446 | """Install all packages listed to the target directory.
447 |
448 | Ignores any package that includes Python itself and python-lambda as well
449 | since its only needed for deploying and not running the code
450 |
451 | :param str path:
452 | Path to copy installed pip packages to.
453 | :param list packages:
454 | A list of packages to be installed via pip.
455 | """
456 |
457 | def _filter_blacklist(package):
458 | blacklist = ["-i", "#", "Python==", "python-lambda=="]
459 | return all(package.startswith(entry) is False for entry in blacklist)
460 |
461 | filtered_packages = filter(_filter_blacklist, packages)
462 | for package in filtered_packages:
463 | if package.startswith("-e "):
464 | package = package.replace("-e ", "")
465 |
466 | print("Installing {package}".format(package=package))
467 | subprocess.check_call(
468 | [
469 | sys.executable,
470 | "-m",
471 | "pip",
472 | "install",
473 | package,
474 | "-t",
475 | path,
476 | "--ignore-installed",
477 | ]
478 | )
479 | print(
480 | "Install directory contents are now: {directory}".format(
481 | directory=os.listdir(path)
482 | )
483 | )
484 |
485 |
486 | def pip_install_to_target(path, requirements=None, local_package=None):
487 | """For a given active virtualenv, gather all installed pip packages then
488 | copy (re-install) them to the path provided.
489 |
490 | :param str path:
491 | Path to copy installed pip packages to.
492 | :param str requirements:
493 | If set, only the packages in the supplied requirements file are
494 | installed.
495 | If not set then installs all packages found via pip freeze.
496 | :param str local_package:
497 | The path to a local package with should be included in the deploy as
498 | well (and/or is not available on PyPi)
499 | """
500 | packages = []
501 | if not requirements:
502 | print("Gathering pip packages")
503 | pkgStr = subprocess.check_output(
504 | [sys.executable, "-m", "pip", "freeze"]
505 | )
506 | packages.extend(pkgStr.decode("utf-8").splitlines())
507 | else:
508 | if os.path.exists(requirements):
509 | print("Gathering requirement packages")
510 | data = read(requirements)
511 | packages.extend(data.splitlines())
512 |
513 | if not packages:
514 | print("No dependency packages installed!")
515 |
516 | if local_package is not None:
517 | if not isinstance(local_package, (list, tuple)):
518 | local_package = [local_package]
519 | for l_package in local_package:
520 | packages.append(l_package)
521 | _install_packages(path, packages)
522 |
523 |
524 | def get_role_name(region, account_id, role):
525 | """Shortcut to insert the `account_id` and `role` into the iam string."""
526 | prefix = ARN_PREFIXES.get(region, "aws")
527 | return "arn:{0}:iam::{1}:role/{2}".format(prefix, account_id, role)
528 |
529 |
530 | def get_account_id(
531 | profile_name, aws_access_key_id, aws_secret_access_key, region=None,
532 | ):
533 | """Query STS for a users' account_id"""
534 | client = get_client(
535 | "sts", profile_name, aws_access_key_id, aws_secret_access_key, region,
536 | )
537 | return client.get_caller_identity().get("Account")
538 |
539 |
540 | def get_client(
541 | client,
542 | profile_name,
543 | aws_access_key_id,
544 | aws_secret_access_key,
545 | region=None,
546 | ):
547 | """Shortcut for getting an initialized instance of the boto3 client."""
548 |
549 | boto3.setup_default_session(
550 | profile_name=profile_name,
551 | aws_access_key_id=aws_access_key_id,
552 | aws_secret_access_key=aws_secret_access_key,
553 | region_name=region,
554 | )
555 | return boto3.client(client)
556 |
557 |
558 | def create_function(cfg, path_to_zip_file, use_s3=False, s3_file=None):
559 | """Register and upload a function to AWS Lambda."""
560 |
561 | print("Creating your new Lambda function")
562 | byte_stream = read(path_to_zip_file, binary_file=True)
563 | profile_name = cfg.get("profile")
564 | aws_access_key_id = cfg.get("aws_access_key_id")
565 | aws_secret_access_key = cfg.get("aws_secret_access_key")
566 |
567 | account_id = get_account_id(
568 | profile_name,
569 | aws_access_key_id,
570 | aws_secret_access_key,
571 | cfg.get("region",),
572 | )
573 | role = get_role_name(
574 | cfg.get("region"),
575 | account_id,
576 | cfg.get("role", "lambda_basic_execution"),
577 | )
578 |
579 | client = get_client(
580 | "lambda",
581 | profile_name,
582 | aws_access_key_id,
583 | aws_secret_access_key,
584 | cfg.get("region"),
585 | )
586 |
587 | # Do we prefer development variable over config?
588 | buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name")
589 | func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get(
590 | "function_name"
591 | )
592 | print("Creating lambda function with name: {}".format(func_name))
593 |
594 | if use_s3:
595 | kwargs = {
596 | "FunctionName": func_name,
597 | "Runtime": cfg.get("runtime", "python2.7"),
598 | "Role": role,
599 | "Handler": cfg.get("handler"),
600 | "Code": {
601 | "S3Bucket": "{}".format(buck_name),
602 | "S3Key": "{}".format(s3_file),
603 | },
604 | "Description": cfg.get("description", ""),
605 | "Timeout": cfg.get("timeout", 15),
606 | "MemorySize": cfg.get("memory_size", 512),
607 | "VpcConfig": {
608 | "SubnetIds": cfg.get("subnet_ids", []),
609 | "SecurityGroupIds": cfg.get("security_group_ids", []),
610 | },
611 | "Publish": True,
612 | }
613 | else:
614 | kwargs = {
615 | "FunctionName": func_name,
616 | "Runtime": cfg.get("runtime", "python2.7"),
617 | "Role": role,
618 | "Handler": cfg.get("handler"),
619 | "Code": {"ZipFile": byte_stream},
620 | "Description": cfg.get("description", ""),
621 | "Timeout": cfg.get("timeout", 15),
622 | "MemorySize": cfg.get("memory_size", 512),
623 | "VpcConfig": {
624 | "SubnetIds": cfg.get("subnet_ids", []),
625 | "SecurityGroupIds": cfg.get("security_group_ids", []),
626 | },
627 | "Publish": True,
628 | }
629 |
630 | if "tags" in cfg:
631 | kwargs.update(
632 | Tags={key: str(value) for key, value in cfg.get("tags").items()}
633 | )
634 |
635 | if "environment_variables" in cfg:
636 | kwargs.update(
637 | Environment={
638 | "Variables": {
639 | key: get_environment_variable_value(value)
640 | for key, value in cfg.get("environment_variables").items()
641 | },
642 | },
643 | )
644 |
645 | client.create_function(**kwargs)
646 |
647 | concurrency = get_concurrency(cfg)
648 | if concurrency > 0:
649 | client.put_function_concurrency(
650 | FunctionName=func_name, ReservedConcurrentExecutions=concurrency
651 | )
652 |
653 |
654 | def update_function(
655 | cfg,
656 | path_to_zip_file,
657 | existing_cfg,
658 | use_s3=False,
659 | s3_file=None,
660 | preserve_vpc=False,
661 | ):
662 | """Updates the code of an existing Lambda function"""
663 |
664 | print("Updating your Lambda function")
665 | byte_stream = read(path_to_zip_file, binary_file=True)
666 | profile_name = cfg.get("profile")
667 | aws_access_key_id = cfg.get("aws_access_key_id")
668 | aws_secret_access_key = cfg.get("aws_secret_access_key")
669 |
670 | account_id = get_account_id(
671 | profile_name,
672 | aws_access_key_id,
673 | aws_secret_access_key,
674 | cfg.get("region",),
675 | )
676 | role = get_role_name(
677 | cfg.get("region"),
678 | account_id,
679 | cfg.get("role", "lambda_basic_execution"),
680 | )
681 |
682 | client = get_client(
683 | "lambda",
684 | profile_name,
685 | aws_access_key_id,
686 | aws_secret_access_key,
687 | cfg.get("region"),
688 | )
689 |
690 | # Do we prefer development variable over config?
691 | buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name")
692 |
693 | if use_s3:
694 | client.update_function_code(
695 | FunctionName=cfg.get("function_name"),
696 | S3Bucket="{}".format(buck_name),
697 | S3Key="{}".format(s3_file),
698 | Publish=True,
699 | )
700 | else:
701 | client.update_function_code(
702 | FunctionName=cfg.get("function_name"),
703 | ZipFile=byte_stream,
704 | Publish=True,
705 | )
706 |
707 | # Wait for function to be updated
708 | waiter = client.get_waiter('function_updated')
709 | waiter.wait(FunctionName=cfg.get("function_name"))
710 |
711 | kwargs = {
712 | "FunctionName": cfg.get("function_name"),
713 | "Role": role,
714 | "Runtime": cfg.get("runtime"),
715 | "Handler": cfg.get("handler"),
716 | "Description": cfg.get("description", ""),
717 | "Timeout": cfg.get("timeout", 15),
718 | "MemorySize": cfg.get("memory_size", 512),
719 | }
720 |
721 | if preserve_vpc:
722 | kwargs["VpcConfig"] = existing_cfg.get("Configuration", {}).get(
723 | "VpcConfig"
724 | )
725 | if kwargs["VpcConfig"] is None:
726 | kwargs["VpcConfig"] = {
727 | "SubnetIds": cfg.get("subnet_ids", []),
728 | "SecurityGroupIds": cfg.get("security_group_ids", []),
729 | }
730 | else:
731 | del kwargs["VpcConfig"]["VpcId"]
732 | else:
733 | kwargs["VpcConfig"] = {
734 | "SubnetIds": cfg.get("subnet_ids", []),
735 | "SecurityGroupIds": cfg.get("security_group_ids", []),
736 | }
737 |
738 | if "environment_variables" in cfg:
739 | kwargs.update(
740 | Environment={
741 | "Variables": {
742 | key: str(get_environment_variable_value(value))
743 | for key, value in cfg.get("environment_variables").items()
744 | },
745 | },
746 | )
747 |
748 | ret = client.update_function_configuration(**kwargs)
749 |
750 | concurrency = get_concurrency(cfg)
751 | if concurrency > 0:
752 | client.put_function_concurrency(
753 | FunctionName=cfg.get("function_name"),
754 | ReservedConcurrentExecutions=concurrency,
755 | )
756 | elif "Concurrency" in existing_cfg:
757 | client.delete_function_concurrency(
758 | FunctionName=cfg.get("function_name")
759 | )
760 |
761 | if "tags" in cfg:
762 | tags = {key: str(value) for key, value in cfg.get("tags").items()}
763 | if tags != existing_cfg.get("Tags"):
764 | if existing_cfg.get("Tags"):
765 | client.untag_resource(
766 | Resource=ret["FunctionArn"],
767 | TagKeys=list(existing_cfg["Tags"].keys()),
768 | )
769 | client.tag_resource(Resource=ret["FunctionArn"], Tags=tags)
770 |
771 |
772 | def upload_s3(cfg, path_to_zip_file, *use_s3):
773 | """Upload a function to AWS S3."""
774 |
775 | print("Uploading your new Lambda function")
776 | profile_name = cfg.get("profile")
777 | aws_access_key_id = cfg.get("aws_access_key_id")
778 | aws_secret_access_key = cfg.get("aws_secret_access_key")
779 | client = get_client(
780 | "s3",
781 | profile_name,
782 | aws_access_key_id,
783 | aws_secret_access_key,
784 | cfg.get("region"),
785 | )
786 | byte_stream = b""
787 | with open(path_to_zip_file, mode="rb") as fh:
788 | byte_stream = fh.read()
789 | s3_key_prefix = cfg.get("s3_key_prefix", "/dist")
790 | checksum = hashlib.new("md5", byte_stream).hexdigest()
791 | timestamp = str(time.time())
792 | filename = "{prefix}{checksum}-{ts}.zip".format(
793 | prefix=s3_key_prefix, checksum=checksum, ts=timestamp,
794 | )
795 |
796 | # Do we prefer development variable over config?
797 | buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name")
798 | func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get(
799 | "function_name"
800 | )
801 | kwargs = {
802 | "Bucket": "{}".format(buck_name),
803 | "Key": "{}".format(filename),
804 | "Body": byte_stream,
805 | }
806 |
807 | client.put_object(**kwargs)
808 | print("Finished uploading {} to S3 bucket {}".format(func_name, buck_name))
809 | if use_s3:
810 | return filename
811 |
812 |
813 | def get_function_config(cfg):
814 | """Check whether a function exists or not and return its config"""
815 |
816 | function_name = cfg.get("function_name")
817 | profile_name = cfg.get("profile")
818 | aws_access_key_id = cfg.get("aws_access_key_id")
819 | aws_secret_access_key = cfg.get("aws_secret_access_key")
820 | client = get_client(
821 | "lambda",
822 | profile_name,
823 | aws_access_key_id,
824 | aws_secret_access_key,
825 | cfg.get("region"),
826 | )
827 |
828 | try:
829 | return client.get_function(FunctionName=function_name)
830 | except client.exceptions.ResourceNotFoundException as e:
831 | if "Function not found" in str(e):
832 | return False
833 |
834 |
835 | def get_concurrency(cfg):
836 | """Return the Reserved Concurrent Executions if present in the config"""
837 | concurrency = int(cfg.get("concurrency", 0))
838 | return max(0, concurrency)
839 |
840 |
841 | def read_cfg(path_to_config_file, profile_name):
842 | cfg = read(path_to_config_file, loader=yaml.full_load)
843 | if profile_name is not None:
844 | cfg["profile"] = profile_name
845 | elif "AWS_PROFILE" in os.environ:
846 | cfg["profile"] = os.environ["AWS_PROFILE"]
847 | return cfg
848 |
--------------------------------------------------------------------------------
/aws_lambda/helpers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import datetime as dt
3 | import os
4 | import re
5 | import time
6 | import zipfile
7 |
8 |
9 | def mkdir(path):
10 | if not os.path.exists(path):
11 | os.makedirs(path)
12 |
13 |
14 | def read(path, loader=None, binary_file=False):
15 | open_mode = "rb" if binary_file else "r"
16 | with open(path, mode=open_mode) as fh:
17 | if not loader:
18 | return fh.read()
19 | return loader(fh.read())
20 |
21 |
22 | def archive(src, dest, filename):
23 | output = os.path.join(dest, filename)
24 | zfh = zipfile.ZipFile(output, "w", zipfile.ZIP_DEFLATED)
25 |
26 | for root, _, files in os.walk(src):
27 | for file in files:
28 | zfh.write(os.path.join(root, file))
29 | zfh.close()
30 | return os.path.join(dest, filename)
31 |
32 |
33 | def timestamp(fmt="%Y-%m-%d-%H%M%S"):
34 | now = dt.datetime.utcnow()
35 | return now.strftime(fmt)
36 |
37 |
38 | def get_environment_variable_value(val):
39 | env_val = val
40 | if val is not None and isinstance(val, str):
41 | match = re.search(r"^\${(?P\w+)*}$", val)
42 | if match is not None:
43 | env_val = os.environ.get(match.group("environment_key_name"))
44 | return env_val
45 |
46 |
47 | class LambdaContext:
48 | def current_milli_time(x):
49 | return int(round(time.time() * 1000))
50 |
51 | def get_remaining_time_in_millis(self):
52 | return max(
53 | 0,
54 | self.timeout_millis
55 | - (self.current_milli_time() - self.start_time_millis),
56 | )
57 |
58 | def __init__(self, function_name, timeoutSeconds=3):
59 | self.function_name = function_name
60 | self.function_version = None
61 | self.invoked_function_arn = None
62 | self.memory_limit_in_mb = None
63 | self.aws_request_id = None
64 | self.log_group_name = None
65 | self.log_stream_name = None
66 | self.identity = None
67 | self.client_context = None
68 | self.timeout_millis = timeoutSeconds * 1000
69 | self.start_time_millis = self.current_milli_time()
70 |
--------------------------------------------------------------------------------
/aws_lambda/project_templates/config.yaml:
--------------------------------------------------------------------------------
1 | region: us-east-1
2 |
3 | function_name: my_lambda_function
4 | handler: service.handler
5 | description: My first lambda function
6 | runtime: python2.7
7 | # role: lambda_basic_execution
8 |
9 | # S3 upload requires appropriate role with s3:PutObject permission
10 | # (ex. basic_s3_upload), a destination bucket, and the key prefix
11 | # bucket_name: 'example-bucket'
12 | # s3_key_prefix: 'path/to/file/'
13 |
14 | # if access key and secret are left blank, boto will use the credentials
15 | # defined in the [default] section of ~/.aws/credentials.
16 | aws_access_key_id:
17 | aws_secret_access_key:
18 |
19 | # dist_directory: dist
20 | # timeout: 15
21 | # memory_size: 512
22 | # concurrency: 500
23 | #
24 |
25 | # Experimental Environment variables
26 | environment_variables:
27 | env_1: foo
28 | env_2: baz
29 |
30 | # If `tags` is uncommented then tags will be set at creation or update
31 | # time. During an update all other tags will be removed except the tags
32 | # listed here.
33 | #tags:
34 | # tag_1: foo
35 | # tag_2: bar
36 |
37 | # Build options
38 | build:
39 | source_directories: lib # a comma delimited list of directories in your project root that contains source to package.
40 |
--------------------------------------------------------------------------------
/aws_lambda/project_templates/event.json:
--------------------------------------------------------------------------------
1 | {
2 | "pi": 3.14,
3 | "e": 2.718
4 | }
5 |
--------------------------------------------------------------------------------
/aws_lambda/project_templates/service.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 |
4 | def handler(event, context):
5 | # Your code goes here!
6 | e = event.get("e")
7 | pi = event.get("pi")
8 | return e + pi
9 |
--------------------------------------------------------------------------------
/scripts/lambda:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | import logging
4 | import os
5 |
6 | import click
7 |
8 | import aws_lambda
9 |
10 | CURRENT_DIR = os.getcwd()
11 |
12 | logging.getLogger("pip").setLevel(logging.CRITICAL)
13 |
14 |
15 | @click.group()
16 | def cli():
17 | pass
18 |
19 |
20 | @click.command(help="Create a new function for Lambda.")
21 | @click.option(
22 | "--minimal",
23 | default=False,
24 | is_flag=True,
25 | help="Exclude any unnecessary template files",
26 | )
27 | @click.argument(
28 | "folder", nargs=-1, type=click.Path(file_okay=False, writable=True),
29 | )
30 | def init(folder, minimal):
31 | path = CURRENT_DIR
32 | if len(folder) > 0:
33 | path = os.path.join(CURRENT_DIR, *folder)
34 | if not os.path.exists(path):
35 | os.makedirs(path)
36 | aws_lambda.init(path, minimal=minimal)
37 |
38 |
39 | @click.command(help="Bundles package for deployment.")
40 | @click.option(
41 | "--config-file", default="config.yaml", help="Alternate config file.",
42 | )
43 | @click.option(
44 | "--profile", help="AWS profile to use.",
45 | )
46 | @click.option(
47 | "--requirements",
48 | default=None,
49 | type=click.Path(),
50 | help="Install packages from supplied requirements file.",
51 | )
52 | @click.option(
53 | "--local-package",
54 | default=None,
55 | type=click.Path(),
56 | help="Install local package as well.",
57 | multiple=True,
58 | )
59 | def build(requirements, local_package, config_file, profile):
60 | aws_lambda.build(
61 | CURRENT_DIR,
62 | requirements=requirements,
63 | local_package=local_package,
64 | config_file=config_file,
65 | profile_name=profile,
66 | )
67 |
68 |
69 | @click.command(help="Run a local test of your function.")
70 | @click.option(
71 | "--event-file", default="event.json", help="Alternate event file.",
72 | )
73 | @click.option(
74 | "--config-file", default="config.yaml", help="Alternate config file.",
75 | )
76 | @click.option(
77 | "--profile", help="AWS profile to use.",
78 | )
79 | @click.option("--verbose", "-v", is_flag=True)
80 | def invoke(event_file, config_file, profile, verbose):
81 | aws_lambda.invoke(
82 | CURRENT_DIR,
83 | event_file=event_file,
84 | config_file=config_file,
85 | profile_name=profile,
86 | verbose=verbose,
87 | )
88 |
89 |
90 | @click.command(help="Register and deploy your code to lambda.")
91 | @click.option(
92 | "--config-file", default="config.yaml", help="Alternate config file.",
93 | )
94 | @click.option(
95 | "--profile", help="AWS profile to use.",
96 | )
97 | @click.option(
98 | "--requirements",
99 | default=None,
100 | type=click.Path(),
101 | help="Install all packages defined in supplied requirements file",
102 | )
103 | @click.option(
104 | "--local-package",
105 | default=None,
106 | type=click.Path(),
107 | help="Install local package as well.",
108 | multiple=True,
109 | )
110 | @click.option(
111 | "--preserve-vpc",
112 | default=False,
113 | is_flag=True,
114 | help="Preserve VPC configuration on existing functions",
115 | )
116 | def deploy(requirements, local_package, config_file, profile, preserve_vpc):
117 | aws_lambda.deploy(
118 | CURRENT_DIR,
119 | requirements=requirements,
120 | local_package=local_package,
121 | config_file=config_file,
122 | profile_name=profile,
123 | preserve_vpc=preserve_vpc,
124 | )
125 |
126 |
127 | @click.command(help="Upload your lambda to S3.")
128 | @click.option(
129 | "--config-file", default="config.yaml", help="Alternate config file.",
130 | )
131 | @click.option(
132 | "--profile", help="AWS profile to use.",
133 | )
134 | @click.option(
135 | "--requirements",
136 | default=None,
137 | type=click.Path(),
138 | help="Install all packages defined in supplied requirements file",
139 | )
140 | @click.option(
141 | "--local-package",
142 | default=None,
143 | type=click.Path(),
144 | help="Install local package as well.",
145 | multiple=True,
146 | )
147 | def upload(requirements, local_package, config_file, profile):
148 | aws_lambda.upload(
149 | CURRENT_DIR,
150 | requirements=requirements,
151 | local_package=local_package,
152 | config_file=config_file,
153 | profile_name=profile,
154 | )
155 |
156 |
157 | @click.command(help="Deploy your lambda via S3.")
158 | @click.option(
159 | "--config-file", default="config.yaml", help="Alternate config file.",
160 | )
161 | @click.option(
162 | "--profile", help="AWS profile to use.",
163 | )
164 | @click.option(
165 | "--requirements",
166 | default=None,
167 | type=click.Path(),
168 | help="Install all packages defined in supplied requirements file",
169 | )
170 | @click.option(
171 | "--local-package",
172 | default=None,
173 | type=click.Path(),
174 | multiple=True,
175 | help="Install local package as well.",
176 | )
177 | def deploy_s3(requirements, local_package, config_file, profile):
178 | aws_lambda.deploy_s3(
179 | CURRENT_DIR,
180 | requirements=requirements,
181 | local_package=local_package,
182 | config_file=config_file,
183 | profile_name=profile,
184 | )
185 |
186 |
187 | @click.command(help="Delete old versions of your functions")
188 | @click.option(
189 | "--config-file", default="config.yaml", help="Alternate config file.",
190 | )
191 | @click.option(
192 | "--profile", help="AWS profile to use.",
193 | )
194 | @click.option(
195 | "--keep-last",
196 | type=int,
197 | prompt="Please enter the number of recent versions to keep",
198 | )
199 | def cleanup(keep_last, config_file, profile):
200 | aws_lambda.cleanup_old_versions(
201 | CURRENT_DIR, keep_last, config_file=config_file, profile_name=profile,
202 | )
203 |
204 |
205 | if __name__ == "__main__":
206 | cli.add_command(init)
207 | cli.add_command(invoke)
208 | cli.add_command(deploy)
209 | cli.add_command(upload)
210 | cli.add_command(deploy_s3)
211 | cli.add_command(build)
212 | cli.add_command(cleanup)
213 | cli()
214 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | commit = True
3 | tag = True
4 | current_version = 11.8.0
5 | parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-(?P[a-z]+))?
6 | serialize =
7 | {major}.{minor}.{patch}
8 |
9 | [metadata]
10 | description-file = README.md
11 |
12 | [bumpversion:file:setup.py]
13 |
14 | [bumpversion:file:aws_lambda/__init__.py]
15 |
16 | [coverage:run]
17 | source = aws_lambda
18 |
19 | [flake8]
20 | exclude = docs
21 |
22 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """This module contains setup instructions for python-lambda."""
2 | import codecs
3 | import os
4 | import sys
5 | from shutil import rmtree
6 |
7 | from setuptools import Command
8 | from setuptools import find_packages
9 | from setuptools import setup
10 |
11 | REQUIREMENTS = [
12 | "boto3>=1.4.4",
13 | "click>=6.6",
14 | "PyYAML==5.1",
15 | ]
16 | PACKAGE_DATA = {
17 | "aws_lambda": ["project_templates/*"],
18 | "": ["*.json"],
19 | }
20 | THIS_DIR = os.path.abspath(os.path.dirname(__file__))
21 | README = os.path.join(THIS_DIR, "README.md")
22 |
23 | with codecs.open(README, encoding="utf-8") as fh:
24 | long_description = "\n" + fh.read()
25 |
26 |
27 | class UploadCommand(Command):
28 | """Support setup.py publish."""
29 |
30 | description = "Build and publish the package."
31 | user_options = []
32 |
33 | @staticmethod
34 | def status(s):
35 | """Print in bold."""
36 | print(f"\033[1m{s}\033[0m")
37 |
38 | def initialize_options(self):
39 | """Initialize options."""
40 | pass
41 |
42 | def finalize_options(self):
43 | """Finialize options."""
44 | pass
45 |
46 | def run(self):
47 | """Upload release to Pypi."""
48 | try:
49 | self.status("Removing previous builds ...")
50 | rmtree(os.path.join(THIS_DIR, "dist"))
51 | except Exception:
52 | pass
53 | self.status("Building Source distribution ...")
54 | os.system(f"{sys.executable} setup.py sdist")
55 | self.status("Uploading the package to PyPI via Twine ...")
56 | os.system("twine upload dist/*")
57 | sys.exit()
58 |
59 |
60 | setup(
61 | name="python-lambda",
62 | version="11.8.0",
63 | author="Nick Ficano",
64 | author_email="nficano@gmail.com",
65 | packages=find_packages(),
66 | url="https://github.com/nficano/python-lambda",
67 | license="ISCL",
68 | install_requires=REQUIREMENTS,
69 | package_data=PACKAGE_DATA,
70 | test_suite="tests",
71 | tests_require=[],
72 | classifiers=[
73 | "Development Status :: 2 - Pre-Alpha",
74 | "Intended Audience :: Developers",
75 | "License :: OSI Approved :: ISC License (ISCL)",
76 | "Natural Language :: English",
77 | "Programming Language :: Python :: 3.5",
78 | "Programming Language :: Python :: 3.6",
79 | "Programming Language :: Python :: 3.7",
80 | "Programming Language :: Python :: 3.8",
81 | ],
82 | description="The bare minimum for a Python app running on Amazon Lambda.",
83 | include_package_data=True,
84 | long_description_content_type="text/markdown",
85 | long_description=long_description,
86 | zip_safe=True,
87 | cmdclass={"upload": UploadCommand},
88 | scripts=["scripts/lambda"],
89 | )
90 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nficano/python-lambda/2f9f17a5c5993e65ee2b61d06f29ed5a6689d337/tests/__init__.py
--------------------------------------------------------------------------------
/tests/dev_requirements.txt:
--------------------------------------------------------------------------------
1 | bumpversion==0.5.3
2 | pre-commit==2.6.0
3 | pytest>=3.6
4 | pytest-cov
5 | flake8
6 |
--------------------------------------------------------------------------------
/tests/functional/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nficano/python-lambda/2f9f17a5c5993e65ee2b61d06f29ed5a6689d337/tests/functional/__init__.py
--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nficano/python-lambda/2f9f17a5c5993e65ee2b61d06f29ed5a6689d337/tests/unit/__init__.py
--------------------------------------------------------------------------------
/tests/unit/test_LambdaContext.py:
--------------------------------------------------------------------------------
1 | import time
2 | import unittest
3 |
4 | from aws_lambda.helpers import LambdaContext
5 |
6 |
7 | class TestLambdaContext(unittest.TestCase):
8 | def test_get_remaining_time_in_millis(self):
9 | context = LambdaContext("function_name", 2000)
10 | time.sleep(0.5)
11 | self.assertTrue(context.get_remaining_time_in_millis() < 2000000)
12 |
13 |
14 | if __name__ == "__main__":
15 | unittest.main()
16 |
--------------------------------------------------------------------------------
/tests/unit/test_readHelper.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 |
4 | import yaml
5 |
6 | from aws_lambda.helpers import read
7 |
8 |
9 | class TestReadHelper(unittest.TestCase):
10 |
11 | TEST_FILE = "readTmp.txt"
12 |
13 | def setUp(self):
14 | with open(TestReadHelper.TEST_FILE, "w") as tmp_file:
15 | tmp_file.write("testYaml: testing")
16 |
17 | def tearDown(self):
18 | os.remove(TestReadHelper.TEST_FILE)
19 |
20 | def test_read_no_loader_non_binary(self):
21 | fileContents = read(TestReadHelper.TEST_FILE)
22 | self.assertEqual(fileContents, "testYaml: testing")
23 |
24 | def test_read_yaml_loader_non_binary(self):
25 | testYaml = read(TestReadHelper.TEST_FILE, loader=yaml.full_load)
26 | self.assertEqual(testYaml["testYaml"], "testing")
27 |
28 | def test_read_no_loader_binary_mode(self):
29 | fileContents = read(TestReadHelper.TEST_FILE, binary_file=True)
30 | self.assertEqual(fileContents, b"testYaml: testing")
31 |
32 | def test_read_yaml_loader_binary_mode(self):
33 | testYaml = read(
34 | TestReadHelper.TEST_FILE, loader=yaml.full_load, binary_file=True
35 | )
36 | self.assertEqual(testYaml["testYaml"], "testing")
37 |
--------------------------------------------------------------------------------