├── tests ├── __init__.py ├── unit │ ├── __init__.py │ ├── test_bundler.py │ ├── test_util.py │ └── test_dependencies.py └── integration │ ├── __init__.py │ └── test_bundling.py ├── requirements.txt ├── .coveragerc ├── pytest.ini ├── LICENSE ├── setup.py ├── .github └── workflows │ └── build.yml ├── .gitignore ├── README.md └── .pylintrc /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | -e . -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | # omit = 3 | 4 | 5 | [report] 6 | 7 | exclude_lines = 8 | if __name__ == .__main__.: -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | # Ignore Deprecation warnings (these stem from imported packages) 3 | filterwarnings = 4 | ignore::DeprecationWarning 5 | # We want beautiful colors in the output 6 | addopts = --color=yes --cov=lambda_bundler --cov-report=html --cov-branch 7 | junit_family=xunit1 -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Maurice Borgmeier 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
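--------------------------------------------------------------------------------

The `pytest.ini` above bakes coverage collection into every test run via `addopts`. A minimal sketch of driving the same configuration programmatically — hypothetical, assuming the dev extras from `setup.py` are installed:

```python
"""Hypothetical runner mirroring the flags from pytest.ini."""
import sys

import pytest

if __name__ == "__main__":
    # pytest.main accepts CLI-style arguments and returns an exit code.
    sys.exit(pytest.main([
        "--color=yes",           # colored output
        "--cov=lambda_bundler",  # measure coverage for the package
        "--cov-report=html",     # write an HTML report to htmlcov/
        "--cov-branch",          # include branch coverage
        "tests/unit",            # restrict to the unit tests
    ]))
```
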
/setup.py:
--------------------------------------------------------------------------------
1 | """Packaging configuration"""
2 | import json
3 | import os
4 | import setuptools
5 | 
6 | with open("README.md", "r") as fh:
7 |     long_description = fh.read()
8 | 
9 | VERSION = "0.1.0"
10 | 
11 | ENV_GITHUB_EVENT_PATH = "GITHUB_EVENT_PATH"
12 | 
13 | def _load_event() -> dict:
14 |     with open(os.environ[ENV_GITHUB_EVENT_PATH]) as event_handle:
15 |         return json.load(event_handle)
16 | 
17 | def get_release_from_pipeline():
18 |     """
19 |     If the build has been triggered by a release event in a GitHub pipeline,
20 |     we take the version number from the release event. If not, we return the
21 |     version constant.
22 |     """
23 | 
24 |     try:
25 | 
26 |         event = _load_event()
27 | 
28 |         release_name: str = event["release"]["tag_name"]
29 |         # Strip the leading v if it exists
30 |         version_number = release_name[1:] if release_name.startswith("v") else release_name
31 |         print(f"Setting the version number to {version_number} from the GitHub pipeline")
32 | 
33 |         return version_number
34 |     except KeyError:
35 |         # We'll get a KeyError if the environment variable
36 |         # isn't set or the release key is missing from the event.
37 |         # In both cases, we want to stay with the regular
38 |         # version number from this file.
39 |         return VERSION
40 | 
41 | VERSION = get_release_from_pipeline()
42 | 
43 | setuptools.setup(
44 |     name="lambda-bundler",
45 |     version=VERSION,
46 |     author="Maurice Borgmeier",
47 |     description="A utility to bundle python code and/or dependencies for deployment to AWS Lambda",
48 |     long_description=long_description,
49 |     long_description_content_type="text/markdown",
50 |     url="https://github.com/MauriceBrg/lambda_bundler",
51 |     packages=setuptools.find_packages(),
52 |     classifiers=[
53 |         "Intended Audience :: Developers",
54 |         "Programming Language :: Python :: 3",
55 |         "Programming Language :: Python :: 3.6",
56 |         "Programming Language :: Python :: 3.7",
57 |         "Programming Language :: Python :: 3.8",
58 |         "License :: OSI Approved :: MIT License",
59 |         "Operating System :: OS Independent",
60 |     ],
61 |     python_requires='>=3.6',
62 |     extras_require={
63 |         "dev": [
64 |             "pylint==2.5.3",
65 |             "pytest==5.4.3",
66 |             "pytest-cov==2.10.0",
67 |             "setuptools==40.8",
68 |             "twine==3.2.0",
69 |             "wheel==0.34.2"
70 |         ]
71 |     }
72 | )
73 | 
--------------------------------------------------------------------------------
/tests/unit/test_bundler.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for the lambda_bundler.bundler module.
3 | """
4 | import unittest
5 | from unittest.mock import patch, ANY
6 | 
7 | import lambda_bundler.bundler as target_module
8 | 
9 | class TestBundler(unittest.TestCase):
10 |     """
11 |     Test cases for the lambda_bundler.bundler module.
12 |     """
13 | 
14 |     def setUp(self):
15 |         self.module = "lambda_bundler.bundler."
16 | 17 | def test_build_layer_package(self): 18 | """Asserts build_layer_package orchestrates the functions as expected.""" 19 | 20 | with patch(self.module + "dependencies.collect_and_merge_requirements") as collect_mock, \ 21 | patch(self.module + "dependencies.create_or_return_zipped_dependencies") as zip_mock: 22 | 23 | zip_mock.return_value = "some/path.zip" 24 | 25 | result = target_module.build_layer_package( 26 | ["abc"] 27 | ) 28 | 29 | collect_mock.assert_called_with("abc") 30 | 31 | zip_mock.assert_called_with( 32 | requirements_information=ANY, 33 | output_directory_path=ANY, 34 | prefix_in_zip="python" 35 | ) 36 | 37 | self.assertEqual("some/path.zip", result) 38 | 39 | def test_build_lambda_package(self): 40 | """Assert this function calls the right subroutines""" 41 | 42 | with patch(self.module + "dependencies.build_lambda_package_without_dependencies") as wo_mock: 43 | 44 | wo_mock.return_value = "without_dependencies.zip" 45 | 46 | return_value = target_module.build_lambda_package( 47 | code_directories=["abc"], 48 | exclude_patterns=["def"] 49 | ) 50 | 51 | wo_mock.assert_called_once_with( 52 | code_directories=["abc"], 53 | exclude_patterns=["def"] 54 | ) 55 | 56 | self.assertEqual("without_dependencies.zip", return_value) 57 | 58 | with patch(self.module + "dependencies.build_lambda_package_with_dependencies") as w_mock: 59 | 60 | w_mock.return_value = "with_dependencies.zip" 61 | 62 | result = target_module.build_lambda_package( 63 | code_directories=["abc"], 64 | requirement_files=["ghi"], 65 | exclude_patterns=["def"] 66 | ) 67 | 68 | w_mock.assert_called_once_with( 69 | code_directories=["abc"], 70 | requirement_files=["ghi"], 71 | exclude_patterns=["def"] 72 | ) 73 | 74 | self.assertEqual("with_dependencies.zip", result) 75 | 76 | if __name__ == "__main__": 77 | unittest.main() 78 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Lambda-Bundler-Build 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | # Trigger on release, this will cause the upload to Pypi 10 | release: 11 | types: 12 | - created 13 | 14 | jobs: 15 | 16 | cross_platform_tests: 17 | runs-on: ${{ matrix.os }} 18 | strategy: 19 | matrix: 20 | os: [macos-latest, windows-latest, ubuntu-latest] 21 | pythonVersion: ["3.6", "3.7", "3.8"] 22 | 23 | fail-fast: true 24 | 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v2 28 | 29 | - name: Install Python 30 | uses: actions/setup-python@v2 31 | with: 32 | python-version: ${{ matrix.pythonVersion }} 33 | 34 | - name: Install dev dependencies 35 | run: pip install -e ".[dev]" 36 | 37 | - name: Run Tests 38 | run: pytest 39 | 40 | build_python_package: 41 | runs-on: ubuntu-latest 42 | needs: cross_platform_tests 43 | 44 | steps: 45 | 46 | # - name: Show Event 47 | # run: | 48 | # echo $GITHUB_EVENT_PATH 49 | # cat $GITHUB_EVENT_PATH 50 | 51 | - name: Checkout 52 | uses: actions/checkout@v2 53 | 54 | - name: Install Python 3.6 55 | uses: actions/setup-python@v2 56 | with: 57 | python-version: 3.6 58 | 59 | - name: Install dev dependencies 60 | run: pip install -e ".[dev]" 61 | 62 | # - name: Run Pytest 63 | # run: pytest --cov-report=xml 64 | 65 | - name: Upload coverage metrics 66 | uses: paambaati/codeclimate-action@v2.6.0 67 | env: 68 | CC_TEST_REPORTER_ID: ${{ secrets.CODECLIMATE_COVERAGE_REPORT}} 69 | with: 70 | coverageCommand: pytest 
--cov-report=xml 71 | coverageLocations: | 72 | ${{github.workspace}}/coverage.xml:coverage.py 73 | 74 | # - name: Upload coverage metrics 75 | # uses: codecov/codecov-action@v1 76 | # with: 77 | # fail_ci_if_error: true 78 | 79 | - name: Run Pylint 80 | run: pylint lambda_bundler 81 | 82 | - name: Create distribution 📦 83 | run: python3 setup.py sdist bdist_wheel 84 | 85 | - name: Publish artifact 86 | uses: actions/upload-artifact@v2 87 | with: 88 | name: python-packages 89 | path: dist/ 90 | 91 | - name: Publish distribution 📦 to PyPI 92 | if: startsWith(github.ref, 'refs/tags') 93 | uses: pypa/gh-action-pypi-publish@master 94 | with: 95 | password: ${{ secrets.PYPI_LAMBDA_BUNDLER_TOKEN }} 96 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.toptal.com/developers/gitignore/api/python 3 | # Edit at https://www.toptal.com/developers/gitignore?templates=python 4 | 5 | .vscode/ 6 | 7 | ### Python ### 8 | # Byte-compiled / optimized / DLL files 9 | __pycache__/ 10 | *.py[cod] 11 | *$py.class 12 | 13 | # C extensions 14 | *.so 15 | 16 | # Distribution / packaging 17 | .Python 18 | build/ 19 | develop-eggs/ 20 | dist/ 21 | downloads/ 22 | eggs/ 23 | .eggs/ 24 | lib/ 25 | lib64/ 26 | parts/ 27 | sdist/ 28 | var/ 29 | wheels/ 30 | pip-wheel-metadata/ 31 | share/python-wheels/ 32 | *.egg-info/ 33 | .installed.cfg 34 | *.egg 35 | MANIFEST 36 | 37 | # PyInstaller 38 | # Usually these files are written by a python script from a template 39 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 40 | *.manifest 41 | *.spec 42 | 43 | # Installer logs 44 | pip-log.txt 45 | pip-delete-this-directory.txt 46 | 47 | # Unit test / coverage reports 48 | htmlcov/ 49 | .tox/ 50 | .nox/ 51 | .coverage 52 | .coverage.* 53 | .cache 54 | nosetests.xml 55 | coverage.xml 56 | *.cover 57 | *.py,cover 58 | .hypothesis/ 59 | .pytest_cache/ 60 | 61 | # Translations 62 | *.mo 63 | *.pot 64 | 65 | # Django stuff: 66 | *.log 67 | local_settings.py 68 | db.sqlite3 69 | db.sqlite3-journal 70 | 71 | # Flask stuff: 72 | instance/ 73 | .webassets-cache 74 | 75 | # Scrapy stuff: 76 | .scrapy 77 | 78 | # Sphinx documentation 79 | docs/_build/ 80 | 81 | # PyBuilder 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | .python-version 93 | 94 | # pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 98 | # install all needed dependencies. 99 | #Pipfile.lock 100 | 101 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow
102 | __pypackages__/
103 | 
104 | # Celery stuff
105 | celerybeat-schedule
106 | celerybeat.pid
107 | 
108 | # SageMath parsed files
109 | *.sage.py
110 | 
111 | # Environments
112 | .env
113 | .venv
114 | env/
115 | venv/
116 | ENV/
117 | env.bak/
118 | venv.bak/
119 | 
120 | # Spyder project settings
121 | .spyderproject
122 | .spyproject
123 | 
124 | # Rope project settings
125 | .ropeproject
126 | 
127 | # mkdocs documentation
128 | /site
129 | 
130 | # mypy
131 | .mypy_cache/
132 | .dmypy.json
133 | dmypy.json
134 | 
135 | # Pyre type checker
136 | .pyre/
137 | 
138 | # pytype static type analyzer
139 | .pytype/
140 | 
141 | # End of https://www.toptal.com/developers/gitignore/api/python
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Lambda Bundler
2 | 
3 | ![Lambda-Bundler-Build](https://github.com/MauriceBrg/lambda_bundler/workflows/Lambda-Bundler-Build/badge.svg?branch=master)
4 | [![Maintainability](https://api.codeclimate.com/v1/badges/d8e6323930db603aad30/maintainability)](https://codeclimate.com/github/MauriceBrg/lambda_bundler/maintainability)
5 | [![Test Coverage](https://api.codeclimate.com/v1/badges/d8e6323930db603aad30/test_coverage)](https://codeclimate.com/github/MauriceBrg/lambda_bundler/test_coverage)
6 | [![PyPI version](https://badge.fury.io/py/lambda-bundler.svg)](https://badge.fury.io/py/lambda-bundler)
7 | ![License](https://img.shields.io/pypi/l/lambda-bundler)
8 | ![PythonVersions](https://img.shields.io/pypi/pyversions/lambda-bundler)
9 | 
10 | Lambda Bundler helps you package your Python Lambda functions and their dependencies for deployment to AWS.
11 | 
12 | It supports three different modes:
13 | 
14 | - Package dependencies for a Lambda layer
15 | - Package only your own code from multiple directories for deployment to Lambda
16 | - Package your own code and external dependencies into a single zip for deployment to Lambda
17 | 
18 | Dependencies will be cached if possible in order to provide a fast build experience.
19 | 
20 | ## Installation
21 | 
22 | The installation is very simple using pip:
23 | 
24 | ```text
25 | pip install lambda-bundler
26 | ```
27 | 
28 | ## How to use
29 | 
30 | ### Package a Lambda layer
31 | 
32 | ```python
33 | from lambda_bundler import build_layer_package
34 | 
35 | path_to_deployment_artifact = build_layer_package(
36 |     # You can install the dependencies from multiple
37 |     # requirement files into a single layer
38 |     requirement_files=[
39 |         "path/to/requirements.txt"
40 |     ]
41 | )
42 | 
43 | # path_to_deployment_artifact now points to a zip archive with the dependencies.
44 | ```
45 | 
46 | ### Package code directories
47 | 
48 | ```python
49 | from lambda_bundler import build_lambda_package
50 | 
51 | path_to_deployment_artifact = build_lambda_package(
52 |     code_directories=[
53 |         "path/to/package",
54 |         "path/to/other/package"
55 |     ],
56 |     exclude_patterns=[
57 |         "*.pyc"
58 |     ]
59 | )
60 | 
61 | # path_to_deployment_artifact now contains the path to the zip archive
62 | ```
63 | 
64 | ### Package code directories and dependencies
65 | 
66 | If you'd like to package your dependencies directly into the deployment artifact, you can do that very easily.
Please keep in mind that the size limit for a zipped deployment package is 50 MB according to the [documentation](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-limits.html), and the content of packages larger than 3 MB won't be visible in the code editor in the console.
67 | 
68 | ```python
69 | from lambda_bundler import build_lambda_package
70 | 
71 | path_to_deployment_artifact = build_lambda_package(
72 |     code_directories=[
73 |         "path/to/package",
74 |         "path/to/other/package"
75 |     ],
76 |     requirement_files=[
77 |         "path/to/requirements.txt"
78 |     ],
79 |     exclude_patterns=[
80 |         "*.pyc"
81 |     ]
82 | )
83 | 
84 | # path_to_deployment_artifact now contains the path to the zip archive
85 | ```
86 | 
87 | ## Configuration
88 | 
89 | The library uses a working directory to build and cache packages.
90 | By default this is located in the `lambda_bundler_builds` folder in your temporary directory as determined by [Python](https://docs.python.org/3/library/tempfile.html#tempfile.gettempdir).
91 | 
92 | If you'd like to change that, you can set the `LAMBDA_BUNDLER_BUILD_DIR` environment variable and point it to another directory.
93 | 
94 | If you're using the Cloud Development Kit and just want to do a `cdk synth` to check your infrastructure code without actually deploying it, you can set the environment variable `LAMBDA_BUNDLER_SKIP_INSTALL` to `true`. This will skip installing dependencies and bundling the code, which makes the process a lot faster - although a deployment will not work while the variable is still set to `true`.
95 | 
96 | ## Demo / Example
97 | 
98 | For an example of how to use this, I suggest you check out the [demo repository](https://github.com/MauriceBrg/lambda-bundler-demo), which includes a CDK app that deploys three Lambda functions with dependencies of different sizes.
99 | If you take a closer look at the [build pipeline](https://github.com/MauriceBrg/lambda-bundler-demo/actions?query=workflow%3ALambda-Bundler-Demo-Build) you'll see how effective the caching is.
100 | 
101 | ## Known Limitations
102 | 
103 | - Packages are downloaded and built on your local machine, which means you might experience problems with libraries that use C extensions if your platform is not Linux. Building packages with Docker is something I'd like to look into if there's demand for it.
104 | - Currently there are no warnings or errors if your deployment package exceeds the Lambda limits - if there's a need for that, I'll consider adding them.
105 | - This is built towards integration with the AWS CDK in Python and doesn't work well standalone. I'm considering adding a CLI interface for use in deployment pipelines. Let me know if this is something you could use.
106 | 
--------------------------------------------------------------------------------
/tests/unit/test_util.py:
--------------------------------------------------------------------------------
1 | """Tests for the lambda_bundler.util module."""
2 | import os
3 | import pathlib
4 | import shutil
5 | import tempfile
6 | import unittest
7 | 
8 | from unittest.mock import patch
9 | 
10 | import lambda_bundler.util as target_module
11 | 
12 | class UtilTestCases(unittest.TestCase):
13 |     """Test cases for the util module"""
14 | 
15 |     def setUp(self):
16 |         self.module = "lambda_bundler.util."
17 | 
18 |     def test_hash_string(self):
19 |         """Asserts hash_string returns the correct SHA-256 hex digest"""
20 | 
21 |         test_string = "test"
22 |         expected_hash = "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"
23 | 
24 |         self.assertEqual(expected_hash, target_module.hash_string(test_string))
25 | 
26 |     def test_get_content_of_files(self):
27 |         """Asserts get_content_of_files reads the correct files"""
28 | 
29 |         content_1 = "a"
30 |         content_2 = "b"
31 | 
32 |         with tempfile.TemporaryDirectory() as input_directory:
33 | 
34 |             # Set up test files inside the temporary directory
35 |             with open(os.path.join(input_directory, "file1"), "w") as handle:
36 |                 handle.write(content_1)
37 | 
38 |             with open(os.path.join(input_directory, "file2"), "w") as handle:
39 |                 handle.write(content_2)
40 | 
41 |             expected_result = [content_1, content_2]
42 | 
43 |             actual_result = target_module.get_content_of_files(
44 |                 os.path.join(input_directory, "file1"),
45 |                 os.path.join(input_directory, "file2")
46 |             )
47 | 
48 |             self.assertEqual(expected_result, actual_result)
49 | 
50 |     def test_extend_zip(self):
51 |         """Asserts that extend_zip works as intended"""
52 | 
53 |         with tempfile.TemporaryDirectory() as source_directory, \
54 |             tempfile.TemporaryDirectory() as target_directory, \
55 |             tempfile.TemporaryDirectory() as assertion_directory:
56 | 
57 |             directories_in_source = ["src/lambda", "tests"]
58 | 
59 |             for directory in directories_in_source:
60 |                 pathlib.Path(os.path.join(source_directory, directory)).mkdir(parents=True, exist_ok=True)
61 | 
62 |             pathlib.Path(os.path.join(source_directory, "initial")).mkdir(parents=True, exist_ok=True)
63 |             with open(os.path.join(source_directory, "initial", "test.txt"), "w") as handle:
64 |                 handle.write("test-content")
65 | 
66 |             with open(os.path.join(source_directory, "src", "lambda", "handler.py"), "w") as handle:
67 |                 handle.write("test-content")
68 | 
69 |             zip_path = os.path.join(target_directory, "target")
70 |             shutil.make_archive(zip_path, "zip", os.path.join(source_directory, "initial"))
71 | 
72 |             # Verify the test setup was ok
73 |             self.assertTrue(os.path.exists(zip_path + ".zip"))
74 | 
75 |             target_module.extend_zip(
76 |                 path_to_zip=zip_path + ".zip",
77 |                 code_directories=[
78 |                     os.path.join(source_directory, "src"),
79 |                     os.path.join(source_directory, "tests")
80 |                 ]
81 |             )
82 | 
83 |             # Verify the zip still exists
84 |             self.assertTrue(os.path.exists(zip_path + ".zip"))
85 | 
86 |             # Extract the zip
87 |             shutil.unpack_archive(zip_path + ".zip", assertion_directory)
88 | 
89 |             # Assert that our code directories exist
90 |             self.assertTrue(os.path.exists(os.path.join(assertion_directory, "src", "lambda", "handler.py")))
91 |             self.assertTrue(os.path.exists(os.path.join(assertion_directory, "tests")))
92 | 
93 |             # Assert that the content of our initial zip is there as well
94 |             self.assertTrue(os.path.exists(os.path.join(assertion_directory, "test.txt")))
95 | 
96 |     def test_get_build_dir(self):
97 |         """Assert that get_build_dir works with the environment variable"""
98 | 
99 |         backup_env = os.environ.get(target_module.BUILD_DIR_ENV)
100 | 
101 |         os.environ[target_module.BUILD_DIR_ENV] = "test"
102 | 
103 |         self.assertEqual("test", target_module.get_build_dir())
104 | 
105 |         del os.environ[target_module.BUILD_DIR_ENV]
106 | 
107 |         with patch(self.module + "tempfile.gettempdir") as get_temp_mock:
108 |             get_temp_mock.return_value = "test"
109 | 
110 |             self.assertEqual(os.path.join("test", "lambda_bundler_builds"), target_module.get_build_dir())
111 | 
112 |         if backup_env is not None:
113 |             os.environ[target_module.BUILD_DIR_ENV] = backup_env
114 | 
115 |     def
test_return_empty_if_skip_install(self): 116 | """Assert the decorator works as expected.""" 117 | 118 | prev = os.environ.get("LAMBDA_BUNDLER_SKIP_INSTALL") 119 | 120 | os.environ["LAMBDA_BUNDLER_SKIP_INSTALL"] = "true" 121 | 122 | @target_module.return_empty_if_skip_install 123 | def inner(): 124 | return "installed" 125 | 126 | with tempfile.TemporaryDirectory() as temp: 127 | 128 | os.environ[target_module.BUILD_DIR_ENV] = temp 129 | 130 | result = inner() 131 | self.assertTrue(result.endswith("empty.zip")) 132 | 133 | second_result = inner() 134 | self.assertTrue(second_result.endswith("empty.zip")) 135 | 136 | del os.environ["LAMBDA_BUNDLER_SKIP_INSTALL"] 137 | self.assertEqual("installed", inner()) 138 | 139 | if prev is not None: 140 | os.environ["LAMBDA_BUNDLER_SKIP_INSTALL"] = prev 141 | 142 | if __name__ == "__main__": 143 | unittest.main() 144 | -------------------------------------------------------------------------------- /tests/integration/test_bundling.py: -------------------------------------------------------------------------------- 1 | """ 2 | Integration tests for the lambda bundler 3 | 4 | NOTE: These download packages and thus require internet access! 5 | """ 6 | import os 7 | import pathlib 8 | import shutil 9 | import tempfile 10 | import unittest 11 | 12 | from lambda_bundler import build_lambda_package, build_layer_package 13 | 14 | BUILD_DIR_ENV = "LAMBDA_BUNDLER_BUILD_DIR" 15 | 16 | class LayerTestCases(unittest.TestCase): 17 | """Test cases for build_layer_package""" 18 | 19 | def test_simple_requirement_file(self): 20 | """A Test case for a single requirement file""" 21 | 22 | with tempfile.TemporaryDirectory() as assertion_directory, \ 23 | tempfile.TemporaryDirectory() as build_directory, \ 24 | tempfile.TemporaryDirectory() as source_directory: 25 | 26 | os.environ[BUILD_DIR_ENV] = build_directory 27 | 28 | path_to_requirement = os.path.join(source_directory, "requirements.txt") 29 | 30 | with open(path_to_requirement, "w") as handle: 31 | 32 | handle.write("pytz==2020.01") 33 | 34 | path_to_output = build_layer_package( 35 | requirement_files=[path_to_requirement] 36 | ) 37 | 38 | # Assert a zip archive is returned 39 | self.assertTrue(path_to_output.endswith(".zip")) 40 | 41 | shutil.unpack_archive(path_to_output, assertion_directory) 42 | 43 | # Assert a pytz directory exists 44 | self.assertTrue(os.path.exists( 45 | os.path.join(assertion_directory, "python", "pytz") 46 | )) 47 | 48 | def test_multiple_requirement_files(self): 49 | """A Test case for multiple requirement files""" 50 | 51 | with tempfile.TemporaryDirectory() as assertion_directory, \ 52 | tempfile.TemporaryDirectory() as build_directory, \ 53 | tempfile.TemporaryDirectory() as source_directory: 54 | 55 | os.environ[BUILD_DIR_ENV] = build_directory 56 | 57 | path_to_requirement_1 = os.path.join(source_directory, "requirements_1.txt") 58 | path_to_requirement_2 = os.path.join(source_directory, "requirements_2.txt") 59 | 60 | with open(path_to_requirement_1, "w") as handle_1, \ 61 | open(path_to_requirement_2, "w") as handle_2: 62 | 63 | handle_1.write("pytz==2020.01") 64 | handle_2.write("certifi==2020.6.20") 65 | 66 | path_to_output = build_layer_package( 67 | requirement_files=[path_to_requirement_1, path_to_requirement_2] 68 | ) 69 | 70 | # Assert a zip archive is returned 71 | self.assertTrue(path_to_output.endswith(".zip")) 72 | 73 | shutil.unpack_archive(path_to_output, assertion_directory) 74 | 75 | # Assert a pytz directory exists 76 | self.assertTrue(os.path.exists( 77 | 
os.path.join(assertion_directory, "python", "pytz")
78 |             ))
79 |             self.assertTrue(os.path.exists(
80 |                 os.path.join(assertion_directory, "python", "certifi")
81 |             ))
82 | 
83 | class PackageTestCase(unittest.TestCase):
84 |     """Test cases for build_lambda_package"""
85 | 
86 |     def test_build_lambda_package_without_dependencies(self):
87 |         """Assert a lambda package without dependencies gets put together as expected"""
88 | 
89 |         with tempfile.TemporaryDirectory() as assertion_directory, \
90 |             tempfile.TemporaryDirectory() as build_directory, \
91 |             tempfile.TemporaryDirectory() as source_directory:
92 | 
93 |             os.environ[BUILD_DIR_ENV] = build_directory
94 | 
95 |             directories_in_source = ["src/lambda", "tests", "initial"]
96 | 
97 |             for directory in directories_in_source:
98 |                 pathlib.Path(os.path.join(source_directory, directory)).mkdir(parents=True, exist_ok=True)
99 | 
100 |             pathlib.Path(os.path.join(source_directory, "initial")).mkdir(parents=True, exist_ok=True)
101 |             with open(os.path.join(source_directory, "initial", "test.txt"), "w") as handle:
102 |                 handle.write("test-content")
103 | 
104 |             with open(os.path.join(source_directory, "src", "lambda", "handler.py"), "w") as handle:
105 |                 handle.write("test-content")
106 | 
107 |             path = build_lambda_package(
108 |                 code_directories=[
109 |                     os.path.join(source_directory, directory) for directory in directories_in_source
110 |                 ],
111 |                 exclude_patterns=["test.txt"]
112 |             )
113 | 
114 |             self.assertTrue(path.endswith(".zip"))
115 | 
116 |             shutil.unpack_archive(path, assertion_directory)
117 | 
118 |             # These should be in the zip
119 |             self.assertTrue(os.path.exists(os.path.join(assertion_directory, "initial")))
120 |             self.assertTrue(os.path.exists(os.path.join(assertion_directory, "lambda", "handler.py")))
121 |             # This shouldn't be in the zip (exclude list)
122 |             self.assertFalse(os.path.exists(os.path.join(assertion_directory, "initial", "test.txt")))
123 | 
124 |     def test_build_lambda_package_with_dependencies(self):
125 |         """Assert a lambda package with dependencies gets put together as expected"""
126 | 
127 |         with tempfile.TemporaryDirectory() as assertion_directory, \
128 |             tempfile.TemporaryDirectory() as build_directory, \
129 |             tempfile.TemporaryDirectory() as source_directory:
130 | 
131 |             os.environ[BUILD_DIR_ENV] = build_directory
132 | 
133 |             directories_in_source = ["src/lambda", "tests", "initial"]
134 | 
135 |             for directory in directories_in_source:
136 |                 pathlib.Path(os.path.join(source_directory, directory)).mkdir(parents=True, exist_ok=True)
137 | 
138 |             pathlib.Path(os.path.join(source_directory, "initial")).mkdir(parents=True, exist_ok=True)
139 |             with open(os.path.join(source_directory, "initial", "test.txt"), "w") as handle:
140 |                 handle.write("test-content")
141 | 
142 |             with open(os.path.join(source_directory, "src", "lambda", "handler.py"), "w") as handle:
143 |                 handle.write("test-content")
144 | 
145 |             path_to_requirement_1 = os.path.join(source_directory, "requirements_1.txt")
146 |             path_to_requirement_2 = os.path.join(source_directory, "requirements_2.txt")
147 | 
148 |             with open(path_to_requirement_1, "w") as handle_1, \
149 |                 open(path_to_requirement_2, "w") as handle_2:
150 | 
151 |                 handle_1.write("pytz==2020.01")
152 |                 handle_2.write("certifi==2020.6.20")
153 | 
154 |             path = build_lambda_package(
155 |                 code_directories=[
156 |                     os.path.join(source_directory, directory) for directory in directories_in_source
157 |                 ],
158 |                 requirement_files=[path_to_requirement_1, path_to_requirement_2],
159 |                 exclude_patterns=["test.txt"]
160 |             )
161 | 162 | self.assertTrue(path.endswith(".zip")) 163 | 164 | shutil.unpack_archive(path, assertion_directory) 165 | 166 | # These should be in the zip 167 | self.assertTrue(os.path.exists(os.path.join(assertion_directory, "initial"))) 168 | self.assertTrue(os.path.exists(os.path.join(assertion_directory, "lambda", "handler.py"))) 169 | self.assertTrue(os.path.exists(os.path.join(assertion_directory, "certifi"))) 170 | self.assertTrue(os.path.exists(os.path.join(assertion_directory, "pytz"))) 171 | # This shouldn't be in the zip (exclude list) 172 | self.assertFalse(os.path.exists(os.path.join(assertion_directory, "initial", "test.txt"))) 173 | -------------------------------------------------------------------------------- /tests/unit/test_dependencies.py: -------------------------------------------------------------------------------- 1 | """Test cases for lambda_bundler.dependencies""" 2 | import os 3 | import pathlib 4 | import shutil 5 | import tempfile 6 | import unittest 7 | 8 | from unittest.mock import patch, ANY 9 | 10 | import lambda_bundler.dependencies as target_module 11 | 12 | class DependenciesTestCases(unittest.TestCase): 13 | """Tests for the lambda_bundler.dependencies module""" 14 | 15 | def setUp(self): 16 | self.module = "lambda_bundler.dependencies." 17 | 18 | def test_merge_requirement_files(self): 19 | """Assert that merge_requirement_files handles whitespaces and ordering.""" 20 | file_1 = """ 21 | abc 22 | ghj 23 | """ 24 | 25 | file_2 = """ 26 | def 27 | 28 | """ 29 | 30 | expected_output = "\n".join(["abc", "def", "ghj"]) 31 | 32 | actual_output = target_module.merge_requirement_files(file_1, file_2) 33 | 34 | self.assertEqual(expected_output, actual_output) 35 | 36 | def test_collect_and_merge_requirements(self): 37 | """Assert collect_and_merge_requirements calls the correct functions""" 38 | 39 | with patch(self.module + "util.get_content_of_files") as get_mock, \ 40 | patch(self.module + "merge_requirement_files") as merge_mock: 41 | 42 | get_mock.return_value = ["a"] 43 | merge_mock.return_value = "merged" 44 | 45 | list_of_files = ["file1", "file2"] 46 | self.assertEqual("merged", target_module.collect_and_merge_requirements(*list_of_files)) 47 | 48 | get_mock.assert_called_with(*list_of_files) 49 | merge_mock.assert_called_with("a") 50 | 51 | def test_create_zipped_dependencies(self): 52 | """Asserts that create_zipped_dependencies works as expected""" 53 | 54 | with tempfile.TemporaryDirectory() as working_directory, \ 55 | tempfile.TemporaryDirectory() as assertion_directory, \ 56 | patch(self.module + "util.hash_string") as hash_mock, \ 57 | patch(self.module + "install_dependencies") as install_mock: 58 | 59 | hash_mock.return_value = "bla" 60 | requirements = "pytz" 61 | 62 | output_path = target_module.create_zipped_dependencies( 63 | requirements_information=requirements, 64 | output_directory_path=working_directory 65 | ) 66 | 67 | hash_mock.assert_called_with(requirements) 68 | install_mock.assert_called_with( 69 | path_to_requirements=os.path.join(working_directory, "bla", "requirements.txt"), 70 | path_to_target_directory=os.path.join(working_directory, "bla") 71 | ) 72 | 73 | self.assertTrue(output_path.endswith("bla.zip")) 74 | 75 | # If we extract it, there should be a requirements txt at the root 76 | shutil.unpack_archive(output_path, assertion_directory) 77 | self.assertTrue( 78 | os.path.exists( 79 | os.path.join(assertion_directory, "requirements.txt") 80 | ) 81 | ) 82 | 83 | def test_create_zipped_dependencies_with_prefix(self): 84 | 
"""Asserts that create_zipped_dependencies works as expected and honors the prefix""" 85 | 86 | with tempfile.TemporaryDirectory() as working_directory, \ 87 | tempfile.TemporaryDirectory() as assertion_directory, \ 88 | patch(self.module + "util.hash_string") as hash_mock, \ 89 | patch(self.module + "install_dependencies") as install_mock: 90 | 91 | hash_mock.return_value = "bla" 92 | requirements = "pytz" 93 | 94 | output_path = target_module.create_zipped_dependencies( 95 | requirements_information=requirements, 96 | output_directory_path=working_directory, 97 | prefix_in_zip="python" 98 | ) 99 | 100 | hash_mock.assert_called_with(requirements + "python") 101 | install_mock.assert_called_with( 102 | path_to_requirements=os.path.join(working_directory, "bla", "python", "requirements.txt"), 103 | path_to_target_directory=os.path.join(working_directory, "bla", "python") 104 | ) 105 | 106 | self.assertTrue(output_path.endswith("bla.zip")) 107 | 108 | # If we extract it, there should be a requirements txt at the root 109 | shutil.unpack_archive(output_path, assertion_directory) 110 | self.assertTrue( 111 | os.path.exists( 112 | os.path.join(assertion_directory, "python", "requirements.txt") 113 | ) 114 | ) 115 | 116 | def test_create_zipped_dependencies_with_pre_existing_build_dir(self): 117 | """Asserts that create_zipped_dependencies works as expected""" 118 | 119 | with tempfile.TemporaryDirectory() as working_directory, \ 120 | tempfile.TemporaryDirectory() as assertion_directory, \ 121 | patch(self.module + "util.hash_string") as hash_mock, \ 122 | patch(self.module + "install_dependencies") as install_mock, \ 123 | patch(self.module + "LOGGER.warning") as warning_logger: 124 | 125 | # Create the build dir beforehand, the code should still work and log a warning 126 | pathlib.Path(os.path.join(working_directory, "bla")).mkdir() 127 | 128 | hash_mock.return_value = "bla" 129 | requirements = "pytz" 130 | 131 | output_path = target_module.create_zipped_dependencies( 132 | requirements_information=requirements, 133 | output_directory_path=working_directory 134 | ) 135 | 136 | hash_mock.assert_called_with(requirements) 137 | install_mock.assert_called_with( 138 | path_to_requirements=os.path.join(working_directory, "bla", "requirements.txt"), 139 | path_to_target_directory=os.path.join(working_directory, "bla") 140 | ) 141 | warning_logger.assert_called_once() 142 | 143 | self.assertTrue(output_path.endswith("bla.zip")) 144 | 145 | # If we extract it, there should be a requirements txt at the root 146 | shutil.unpack_archive(output_path, assertion_directory) 147 | self.assertTrue( 148 | os.path.exists( 149 | os.path.join(assertion_directory, "requirements.txt") 150 | ) 151 | ) 152 | 153 | def test_create_or_return_zipped_dependencies(self): 154 | """Assert that create_or_return_zipped_dependencies works as intended""" 155 | 156 | with patch(self.module + "util.hash_string") as hash_mock, \ 157 | patch(self.module + "os.path.exists") as exists_mock, \ 158 | patch(self.module + "create_zipped_dependencies") as zip_mock: 159 | 160 | hash_mock.return_value = "a" 161 | exists_mock.return_value = True 162 | zip_mock.return_value = "zipped" 163 | 164 | # Already existing 165 | 166 | result = target_module.create_or_return_zipped_dependencies( 167 | requirements_information="bla", 168 | output_directory_path="/some_path/" 169 | ) 170 | 171 | self.assertEqual("/some_path/a.zip", result) 172 | 173 | # Create new 174 | exists_mock.return_value = False 175 | 176 | result = 
target_module.create_or_return_zipped_dependencies(
177 |                 requirements_information="bla",
178 |                 output_directory_path="/some_path/"
179 |             )
180 | 
181 |             self.assertEqual("zipped", result)
182 | 
183 |     def test_build_lambda_package_without_dependencies(self):
184 |         """Assert build_lambda_package_without_dependencies packages code correctly"""
185 | 
186 |         with tempfile.TemporaryDirectory() as source_directory, \
187 |             tempfile.TemporaryDirectory() as build_directory, \
188 |             tempfile.TemporaryDirectory() as assertion_directory, \
189 |             patch(self.module + "util.get_build_dir") as gbd_mock:
190 | 
191 |             gbd_mock.return_value = build_directory
192 | 
193 |             directories_in_source = ["src/lambda", "tests"]
194 | 
195 |             for directory in directories_in_source:
196 |                 pathlib.Path(os.path.join(source_directory, directory)).mkdir(parents=True, exist_ok=True)
197 | 
198 |             pathlib.Path(os.path.join(source_directory, "initial")).mkdir(parents=True, exist_ok=True)
199 |             with open(os.path.join(source_directory, "initial", "test.txt"), "w") as handle:
200 |                 handle.write("test-content")
201 | 
202 |             with open(os.path.join(source_directory, "src", "lambda", "handler.py"), "w") as handle:
203 |                 handle.write("test-content")
204 | 
205 |             zip_archive = target_module.build_lambda_package_without_dependencies(
206 |                 code_directories=[
207 |                     os.path.join(source_directory, directories_in_source[0]),
208 |                     os.path.join(source_directory, directories_in_source[1])
209 |                 ]
210 |             )
211 | 
212 |             self.assertTrue(zip_archive.endswith(".zip"))
213 |             self.assertTrue(os.path.exists(zip_archive))
214 | 
215 |             shutil.unpack_archive(zip_archive, assertion_directory)
216 | 
217 |             # The initial directory is not part of the code_directories
218 |             self.assertFalse(os.path.exists(os.path.join(assertion_directory, "initial", "test.txt")))
219 | 
220 |             self.assertTrue(os.path.exists(os.path.join(assertion_directory, "lambda", "handler.py")))
221 | 
222 |     def test_build_lambda_package_with_dependencies(self):
223 |         """Assert that build_lambda_package_with_dependencies orchestrates the correct subroutines"""
224 | 
225 |         with patch(self.module + "collect_and_merge_requirements") as cam_mock, \
226 |             patch(self.module + "create_or_return_zipped_dependencies") as create_dep_mock, \
227 |             patch(self.module + "util.hash_string") as hash_mock, \
228 |             patch(self.module + "shutil.copyfile") as copy_mock, \
229 |             patch(self.module + "util.extend_zip") as extend_mock:
230 | 
231 |             cam_mock.return_value = "collected_requirements"
232 |             create_dep_mock.return_value = "dependencies.zip"
233 |             hash_mock.return_value = "hashed"
234 | 
235 |             result = target_module.build_lambda_package_with_dependencies(
236 |                 code_directories=["a", "b", "c"],
237 |                 requirement_files=["d", "e"]
238 |             )
239 | 
240 |             cam_mock.assert_called_with("d", "e")
241 |             create_dep_mock.assert_called_with(
242 |                 requirements_information="collected_requirements",
243 |                 output_directory_path=ANY
244 |             )
245 |             hash_mock.assert_called_with("abcde")
246 |             copy_mock.assert_called_once()
247 |             extend_mock.assert_called_once()
248 | 
249 |             self.assertTrue(result.endswith("hashed.zip"))
250 | 
251 | 
252 |     def test_install_dependencies(self):
253 |         """Assert install_dependencies uses subprocess.check_output to install dependencies"""
254 | 
255 |         # NOTE: This is not a complete test of the install - that's what the integration tests are for.
256 | 257 | with patch(self.module + "subprocess.check_output") as subprocess_mock: 258 | 259 | target_module.install_dependencies( 260 | path_to_requirements="abc", 261 | path_to_target_directory="def" 262 | ) 263 | 264 | subprocess_mock.assert_called_once() 265 | 266 | if __name__ == "__main__": 267 | unittest.main() 268 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # A comma-separated list of package or module names from where C extensions may 4 | # be loaded. Extensions are loading into the active Python interpreter and may 5 | # run arbitrary code. 6 | extension-pkg-whitelist= 7 | 8 | # Add files or directories to the blacklist. They should be base names, not 9 | # paths. 10 | ignore=CVS 11 | 12 | # Add files or directories matching the regex patterns to the blacklist. The 13 | # regex matches against base names, not paths. 14 | ignore-patterns= 15 | 16 | # Python code to execute, usually for sys.path manipulation such as 17 | # pygtk.require(). 18 | #init-hook= 19 | 20 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the 21 | # number of processors available to use. 22 | jobs=1 23 | 24 | # Control the amount of potential inferred values when inferring a single 25 | # object. This can help the performance when dealing with large functions or 26 | # complex, nested conditions. 27 | limit-inference-results=100 28 | 29 | # List of plugins (as comma separated values of python module names) to load, 30 | # usually to register additional checkers. 31 | load-plugins= 32 | 33 | # Pickle collected data for later comparisons. 34 | persistent=yes 35 | 36 | # Specify a configuration file. 37 | #rcfile= 38 | 39 | # When enabled, pylint would attempt to guess common misconfiguration and emit 40 | # user-friendly hints instead of false-positive error messages. 41 | suggestion-mode=yes 42 | 43 | # Allow loading of arbitrary C extensions. Extensions are imported into the 44 | # active Python interpreter and may run arbitrary code. 45 | unsafe-load-any-extension=no 46 | 47 | 48 | [MESSAGES CONTROL] 49 | 50 | # Only show warnings with the listed confidence levels. Leave empty to show 51 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. 52 | confidence= 53 | 54 | # Disable the message, report, category or checker with the given id(s). You 55 | # can either give multiple identifiers separated by comma (,) or put this 56 | # option multiple times (only on the command line, not in the configuration 57 | # file where it should appear only once). You can also use "--disable=all" to 58 | # disable everything first and then reenable specific checks. For example, if 59 | # you want to run only the similarities checker, you can use "--disable=all 60 | # --enable=similarities". If you want to run only the classes checker, but have 61 | # no Warning level messages displayed, use "--disable=all --enable=classes 62 | # --disable=W". 
63 | # redefined-builtin has been disabled, because the CDK commonly uses the id variable name 64 | disable=logging, 65 | invalid-name, 66 | print-statement, 67 | parameter-unpacking, 68 | unpacking-in-except, 69 | old-raise-syntax, 70 | backtick, 71 | long-suffix, 72 | old-ne-operator, 73 | old-octal-literal, 74 | import-star-module-level, 75 | non-ascii-bytes-literal, 76 | raw-checker-failed, 77 | bad-inline-option, 78 | locally-disabled, 79 | file-ignored, 80 | suppressed-message, 81 | useless-suppression, 82 | deprecated-pragma, 83 | use-symbolic-message-instead, 84 | apply-builtin, 85 | basestring-builtin, 86 | buffer-builtin, 87 | cmp-builtin, 88 | coerce-builtin, 89 | execfile-builtin, 90 | file-builtin, 91 | long-builtin, 92 | raw_input-builtin, 93 | reduce-builtin, 94 | standarderror-builtin, 95 | unicode-builtin, 96 | xrange-builtin, 97 | coerce-method, 98 | delslice-method, 99 | getslice-method, 100 | setslice-method, 101 | no-absolute-import, 102 | old-division, 103 | dict-iter-method, 104 | dict-view-method, 105 | next-method-called, 106 | metaclass-assignment, 107 | indexing-exception, 108 | raising-string, 109 | reload-builtin, 110 | oct-method, 111 | hex-method, 112 | nonzero-method, 113 | cmp-method, 114 | input-builtin, 115 | round-builtin, 116 | intern-builtin, 117 | unichr-builtin, 118 | map-builtin-not-iterating, 119 | zip-builtin-not-iterating, 120 | range-builtin-not-iterating, 121 | filter-builtin-not-iterating, 122 | using-cmp-argument, 123 | eq-without-hash, 124 | div-method, 125 | idiv-method, 126 | rdiv-method, 127 | exception-message-attribute, 128 | invalid-str-codec, 129 | sys-max-int, 130 | bad-python3-import, 131 | deprecated-string-function, 132 | deprecated-str-translate-call, 133 | deprecated-itertools-function, 134 | deprecated-types-field, 135 | next-method-defined, 136 | dict-items-not-iterating, 137 | dict-keys-not-iterating, 138 | dict-values-not-iterating, 139 | deprecated-operator-function, 140 | deprecated-urllib-function, 141 | xreadlines-attribute, 142 | deprecated-sys-function, 143 | exception-escape, 144 | comprehension-escape, 145 | fixme 146 | 147 | # Enable the message, report, category or checker with the given id(s). You can 148 | # either give multiple identifier separated by comma (,) or put this option 149 | # multiple time (only on the command line, not in the configuration file where 150 | # it should appear only once). See also the "--disable" option for examples. 151 | enable=c-extension-no-member 152 | 153 | 154 | [REPORTS] 155 | 156 | # Python expression which should return a score less than or equal to 10. You 157 | # have access to the variables 'error', 'warning', 'refactor', and 'convention' 158 | # which contain the number of messages in each category, as well as 'statement' 159 | # which is the total number of statements analyzed. This score is used by the 160 | # global evaluation report (RP0004). 161 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 162 | 163 | # Template used to display messages. This is a python new-style format string 164 | # used to format the message information. See doc for all details. 165 | #msg-template= 166 | 167 | # Set the output format. Available formats are text, parseable, colorized, json 168 | # and msvs (visual studio). You can also give a reporter class, e.g. 169 | # mypackage.mymodule.MyReporterClass. 170 | output-format=text 171 | 172 | # Tells whether to display a full report or only the messages. 
173 | reports=no 174 | 175 | # Activate the evaluation score. 176 | score=yes 177 | 178 | 179 | [REFACTORING] 180 | 181 | # Maximum number of nested blocks for function / method body 182 | max-nested-blocks=5 183 | 184 | # Complete name of functions that never returns. When checking for 185 | # inconsistent-return-statements if a never returning function is called then 186 | # it will be considered as an explicit return statement and no message will be 187 | # printed. 188 | never-returning-functions=sys.exit 189 | 190 | 191 | [BASIC] 192 | 193 | # Naming style matching correct argument names. 194 | argument-naming-style=snake_case 195 | 196 | # Regular expression matching correct argument names. Overrides argument- 197 | # naming-style. 198 | #argument-rgx= 199 | 200 | # Naming style matching correct attribute names. 201 | attr-naming-style=snake_case 202 | 203 | # Regular expression matching correct attribute names. Overrides attr-naming- 204 | # style. 205 | #attr-rgx= 206 | 207 | # Bad variable names which should always be refused, separated by a comma. 208 | bad-names=foo, 209 | bar, 210 | baz, 211 | toto, 212 | tutu, 213 | tata 214 | 215 | # Naming style matching correct class attribute names. 216 | class-attribute-naming-style=any 217 | 218 | # Regular expression matching correct class attribute names. Overrides class- 219 | # attribute-naming-style. 220 | #class-attribute-rgx= 221 | 222 | # Naming style matching correct class names. 223 | class-naming-style=PascalCase 224 | 225 | # Regular expression matching correct class names. Overrides class-naming- 226 | # style. 227 | #class-rgx= 228 | 229 | # Naming style matching correct constant names. 230 | const-naming-style=UPPER_CASE 231 | 232 | # Regular expression matching correct constant names. Overrides const-naming- 233 | # style. 234 | #const-rgx= 235 | 236 | # Minimum line length for functions/classes that require docstrings, shorter 237 | # ones are exempt. 238 | docstring-min-length=-1 239 | 240 | # Naming style matching correct function names. 241 | function-naming-style=snake_case 242 | 243 | # Regular expression matching correct function names. Overrides function- 244 | # naming-style. 245 | #function-rgx= 246 | 247 | # Good variable names which should always be accepted, separated by a comma. 248 | good-names=i, 249 | j, 250 | k, 251 | ex, 252 | Run, 253 | df, 254 | _ 255 | 256 | # Include a hint for the correct naming format with invalid-name. 257 | include-naming-hint=no 258 | 259 | # Naming style matching correct inline iteration names. 260 | inlinevar-naming-style=any 261 | 262 | # Regular expression matching correct inline iteration names. Overrides 263 | # inlinevar-naming-style. 264 | #inlinevar-rgx= 265 | 266 | # Naming style matching correct method names. 267 | method-naming-style=snake_case 268 | 269 | # Regular expression matching correct method names. Overrides method-naming- 270 | # style. 271 | #method-rgx= 272 | 273 | # Naming style matching correct module names. 274 | module-naming-style=snake_case 275 | 276 | # Regular expression matching correct module names. Overrides module-naming- 277 | # style. 278 | #module-rgx= 279 | 280 | # Colon-delimited sets of names that determine each other's naming style when 281 | # the name regexes allow several styles. 282 | name-group= 283 | 284 | # Regular expression which should only match function or class names that do 285 | # not require a docstring. 286 | no-docstring-rgx=^_ 287 | 288 | # List of decorators that produce properties, such as abc.abstractproperty. 
Add 289 | # to this list to register other decorators that produce valid properties. 290 | # These decorators are taken in consideration only for invalid-name. 291 | property-classes=abc.abstractproperty 292 | 293 | # Naming style matching correct variable names. 294 | variable-naming-style=snake_case 295 | 296 | # Regular expression matching correct variable names. Overrides variable- 297 | # naming-style. 298 | #variable-rgx= 299 | 300 | 301 | [FORMAT] 302 | 303 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 304 | expected-line-ending-format= 305 | 306 | # Regexp for a line that is allowed to be longer than the limit. 307 | ignore-long-lines=^\s*(# )??$ 308 | 309 | # Number of spaces of indent required inside a hanging or continued line. 310 | indent-after-paren=4 311 | 312 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 313 | # tab). 314 | indent-string=' ' 315 | 316 | # Maximum number of characters on a single line. 317 | max-line-length=130 318 | 319 | # Maximum number of lines in a module. 320 | max-module-lines=1000 321 | 322 | # List of optional constructs for which whitespace checking is disabled. `dict- 323 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. 324 | # `trailing-comma` allows a space between comma and closing bracket: (a, ). 325 | # `empty-line` allows space-only lines. 326 | no-space-check=trailing-comma, 327 | dict-separator 328 | 329 | # Allow the body of a class to be on the same line as the declaration if body 330 | # contains single statement. 331 | single-line-class-stmt=no 332 | 333 | # Allow the body of an if to be on the same line as the test if there is no 334 | # else. 335 | single-line-if-stmt=no 336 | 337 | 338 | [LOGGING] 339 | 340 | # Format style used to check logging format string. `old` means using % 341 | # formatting, `new` is for `{}` formatting,and `fstr` is for f-strings. 342 | logging-format-style=new 343 | 344 | # Logging modules to check that the string format arguments are in logging 345 | # function parameter format. 346 | logging-modules=logging 347 | 348 | 349 | [MISCELLANEOUS] 350 | 351 | # List of note tags to take in consideration, separated by a comma. 352 | notes=FIXME, 353 | XXX, 354 | TODO 355 | 356 | 357 | [SIMILARITIES] 358 | 359 | # Ignore comments when computing similarities. 360 | ignore-comments=yes 361 | 362 | # Ignore docstrings when computing similarities. 363 | ignore-docstrings=yes 364 | 365 | # Ignore imports when computing similarities. 366 | ignore-imports=no 367 | 368 | # Minimum lines number of a similarity. 369 | min-similarity-lines=4 370 | 371 | 372 | [SPELLING] 373 | 374 | # Limits count of emitted suggestions for spelling mistakes. 375 | max-spelling-suggestions=4 376 | 377 | # Spelling dictionary name. Available dictionaries: none. To make it work, 378 | # install the python-enchant package. 379 | spelling-dict= 380 | 381 | # List of comma separated words that should not be checked. 382 | spelling-ignore-words= 383 | 384 | # A path to a file that contains the private dictionary; one word per line. 385 | spelling-private-dict-file= 386 | 387 | # Tells whether to store unknown words to the private dictionary (see the 388 | # --spelling-private-dict-file option) instead of raising a message. 
389 | spelling-store-unknown-words=no 390 | 391 | 392 | [STRING] 393 | 394 | # This flag controls whether the implicit-str-concat-in-sequence should 395 | # generate a warning on implicit string concatenation in sequences defined over 396 | # several lines. 397 | check-str-concat-over-line-jumps=no 398 | 399 | 400 | [TYPECHECK] 401 | 402 | # List of decorators that produce context managers, such as 403 | # contextlib.contextmanager. Add to this list to register other decorators that 404 | # produce valid context managers. 405 | contextmanager-decorators=contextlib.contextmanager 406 | 407 | # List of members which are set dynamically and missed by pylint inference 408 | # system, and so shouldn't trigger E1101 when accessed. Python regular 409 | # expressions are accepted. 410 | generated-members=.*(dynamodb|Table|Queue|Topic|s3).* 411 | 412 | # Tells whether missing members accessed in mixin class should be ignored. A 413 | # mixin class is detected if its name ends with "mixin" (case insensitive). 414 | ignore-mixin-members=yes 415 | 416 | # Tells whether to warn about missing members when the owner of the attribute 417 | # is inferred to be None. 418 | ignore-none=yes 419 | 420 | # This flag controls whether pylint should warn about no-member and similar 421 | # checks whenever an opaque object is returned when inferring. The inference 422 | # can return multiple potential results while evaluating a Python object, but 423 | # some branches might not be evaluated, which results in partial inference. In 424 | # that case, it might be useful to still emit no-member and other checks for 425 | # the rest of the inferred objects. 426 | ignore-on-opaque-inference=yes 427 | 428 | # List of class names for which member attributes should not be checked (useful 429 | # for classes with dynamically set attributes). This supports the use of 430 | # qualified names. 431 | ignored-classes=optparse.Values,thread._local,_thread._local 432 | 433 | # List of module names for which member attributes should not be checked 434 | # (useful for modules/projects where namespaces are manipulated during runtime 435 | # and thus existing member attributes cannot be deduced by static analysis). It 436 | # supports qualified module names, as well as Unix pattern matching. 437 | ignored-modules= 438 | 439 | # Show a hint with possible names when a member name was not found. The aspect 440 | # of finding the hint is based on edit distance. 441 | missing-member-hint=yes 442 | 443 | # The minimum edit distance a name should have in order to be considered a 444 | # similar match for a missing member name. 445 | missing-member-hint-distance=1 446 | 447 | # The total number of similar names that should be taken in consideration when 448 | # showing a hint for a missing member. 449 | missing-member-max-choices=1 450 | 451 | # List of decorators that change the signature of a decorated function. 452 | signature-mutators= 453 | 454 | 455 | [VARIABLES] 456 | 457 | # List of additional names supposed to be defined in builtins. Remember that 458 | # you should avoid defining new builtins when possible. 459 | additional-builtins= 460 | 461 | # Tells whether unused global variables should be treated as a violation. 462 | allow-global-unused-variables=yes 463 | 464 | # List of strings which can identify a callback function by name. A callback 465 | # name must start or end with one of those strings. 466 | callbacks=cb_, 467 | _cb 468 | 469 | # A regular expression matching the name of dummy variables (i.e. 
expected to 470 | # not be used). 471 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 472 | 473 | # Argument names that match this expression will be ignored. Default to name 474 | # with leading underscore. 475 | ignored-argument-names=_.*|^ignored_|^unused_ 476 | 477 | # Tells whether we should check for unused import in __init__ files. 478 | init-import=no 479 | 480 | # List of qualified module names which can have objects that can redefine 481 | # builtins. 482 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io 483 | 484 | 485 | [CLASSES] 486 | 487 | # List of method names used to declare (i.e. assign) instance attributes. 488 | defining-attr-methods=__init__, 489 | __new__, 490 | setUp, 491 | __post_init__ 492 | 493 | # List of member names, which should be excluded from the protected access 494 | # warning. 495 | exclude-protected=_asdict, 496 | _fields, 497 | _replace, 498 | _source, 499 | _make 500 | 501 | # List of valid names for the first argument in a class method. 502 | valid-classmethod-first-arg=cls 503 | 504 | # List of valid names for the first argument in a metaclass class method. 505 | valid-metaclass-classmethod-first-arg=cls 506 | 507 | 508 | [DESIGN] 509 | 510 | # Maximum number of arguments for function / method. 511 | max-args=7 512 | 513 | # Maximum number of attributes for a class (see R0902). 514 | max-attributes=10 515 | 516 | # Maximum number of boolean expressions in an if statement (see R0916). 517 | max-bool-expr=5 518 | 519 | # Maximum number of branch for function / method body. 520 | max-branches=12 521 | 522 | # Maximum number of locals for function / method body. 523 | max-locals=15 524 | 525 | # Maximum number of parents for a class (see R0901). 526 | max-parents=7 527 | 528 | # Maximum number of public methods for a class (see R0904). 529 | max-public-methods=20 530 | 531 | # Maximum number of return / yield for function / method body. 532 | max-returns=6 533 | 534 | # Maximum number of statements in function / method body. 535 | max-statements=50 536 | 537 | # Minimum number of public methods for a class (see R0903). 538 | min-public-methods=2 539 | 540 | 541 | [IMPORTS] 542 | 543 | # List of modules that can be imported at any level, not just the top level 544 | # one. 545 | allow-any-import-level= 546 | 547 | # Allow wildcard imports from modules that define __all__. 548 | allow-wildcard-with-all=no 549 | 550 | # Analyse import fallback blocks. This can be used to support both Python 2 and 551 | # 3 compatible code, which means that the block might have code that exists 552 | # only in one or another interpreter, leading to false positives when analysed. 553 | analyse-fallback-blocks=no 554 | 555 | # Deprecated modules which should not be used, separated by a comma. 556 | deprecated-modules=optparse,tkinter.tix 557 | 558 | # Create a graph of external dependencies in the given file (report RP0402 must 559 | # not be disabled). 560 | ext-import-graph= 561 | 562 | # Create a graph of every (i.e. internal and external) dependencies in the 563 | # given file (report RP0402 must not be disabled). 564 | import-graph= 565 | 566 | # Create a graph of internal dependencies in the given file (report RP0402 must 567 | # not be disabled). 568 | int-import-graph= 569 | 570 | # Force import order to recognize a module as part of the standard 571 | # compatibility libraries. 572 | known-standard-library= 573 | 574 | # Force import order to recognize a module as part of a third party library. 
575 | known-third-party=enchant 576 | 577 | # Couples of modules and preferred modules, separated by a comma. 578 | preferred-modules= 579 | 580 | 581 | [EXCEPTIONS] 582 | 583 | # Exceptions that will emit a warning when being caught. Defaults to 584 | # "BaseException, Exception". 585 | overgeneral-exceptions=BaseException, 586 | Exception 587 | --------------------------------------------------------------------------------
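
For completeness, here is a hypothetical sketch of how the pieces above fit together in a CDK app. The paths, stack name, and handler are illustrative, and it assumes the `aws_cdk` v1 Python bindings; the demo repository linked in the README shows a real, working setup.

```python
"""Hypothetical CDK v1 stack deploying a bundled Lambda package (illustrative paths)."""
import os

from aws_cdk import core
from aws_cdk import aws_lambda as lambda_

from lambda_bundler import build_lambda_package

# Optional: redirect the build cache, as described in the README's Configuration section.
os.environ.setdefault("LAMBDA_BUNDLER_BUILD_DIR", os.path.join("build", "bundler_cache"))

class DemoStack(core.Stack):
    """Deploys one function whose code and dependencies are bundled into a single zip."""

    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # Bundling happens at synth time; the returned path points to a cached zip archive.
        artifact = build_lambda_package(
            code_directories=["src/handlers"],       # illustrative path
            requirement_files=["requirements.txt"],  # illustrative path
            exclude_patterns=["*.pyc"],
        )

        lambda_.Function(
            self, "DemoFunction",
            runtime=lambda_.Runtime.PYTHON_3_8,
            handler="handler.handler",               # illustrative handler
            code=lambda_.Code.from_asset(artifact),
        )

app = core.App()
DemoStack(app, "lambda-bundler-demo")
app.synth()
```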