├── .circleci └── config.yml ├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── app ├── __init__.py └── app.py ├── events └── event.json ├── poetry.lock ├── provisioning ├── .gitignore ├── .npmignore ├── README.md ├── bin │ └── provisioning.ts ├── cdk.context.json ├── cdk.json ├── jest.config.js ├── lib │ ├── api-stack.ts │ └── database-stack.ts ├── package-lock.json ├── package.json ├── test │ └── provisioning.test.ts └── tsconfig.json ├── pyproject.toml ├── scripts ├── create_pandas_package.sh ├── create_psycopg2_package.sh ├── install_pyenv.sh ├── install_python.sh ├── samlocal.sh └── unittest.sh ├── setup.cfg ├── template.yaml └── tests ├── conftest.py └── unit ├── __init__.py └── test_handler.py /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | executors: 4 | default: 5 | parameters: 6 | python_version: 7 | type: string 8 | default: latest 9 | docker: 10 | - image: circleci/python:<< parameters.python_version >> 11 | - image: amazon/dynamodb-local 12 | 13 | orbs: 14 | python: circleci/python@0.2.1 15 | 16 | jobs: 17 | build-and-test: 18 | executor: 19 | name: default 20 | steps: 21 | - checkout 22 | - run: 23 | name: Setup Poetry 24 | command: | 25 | pip install poetry 26 | poetry export --dev -f requirements.txt > requirements.txt 27 | - python/load-cache 28 | - python/install-deps 29 | - python/save-cache 30 | - run: 31 | name: Wait for DynamoDB launching 32 | command: | 33 | for i in `seq 1 20`; 34 | do 35 | nc -z localhost 8000 && echo "Launched" && exit 0 36 | echo -n . 
37 | sleep 1 38 | done 39 | echo "Failed to launch" && exit 1 40 | - run: 41 | name: Test 42 | command: python -m pytest ./tests/ -vv 43 | 44 | workflows: 45 | main: 46 | jobs: 47 | - build-and-test 48 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode 3 | 4 | ### Linux ### 5 | *~ 6 | 7 | # temporary files which can be created if a process still has a handle open of a deleted file 8 | .fuse_hidden* 9 | 10 | # KDE directory preferences 11 | .directory 12 | 13 | # Linux trash folder which might appear on any partition or disk 14 | .Trash-* 15 | 16 | # .nfs files are created when an open file is removed but is still being accessed 17 | .nfs* 18 | 19 | ### OSX ### 20 | *.DS_Store 21 | .AppleDouble 22 | .LSOverride 23 | 24 | # Icon must end with two \r 25 | Icon 26 | 27 | # Thumbnails 28 | ._* 29 | 30 | # Files that might appear in the root of a volume 31 | .DocumentRevisions-V100 32 | .fseventsd 33 | .Spotlight-V100 34 | .TemporaryItems 35 | .Trashes 36 | .VolumeIcon.icns 37 | .com.apple.timemachine.donotpresent 38 | 39 | # Directories potentially created on remote AFP share 40 | .AppleDB 41 | .AppleDesktop 42 | Network Trash Folder 43 | Temporary Items 44 | .apdisk 45 | 46 | ### PyCharm ### 47 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 48 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 49 | 50 | # User-specific stuff: 51 | .idea/**/workspace.xml 52 | .idea/**/tasks.xml 53 | .idea/dictionaries 54 | 55 | # Sensitive or high-churn files: 56 | .idea/**/dataSources/ 57 | .idea/**/dataSources.ids 58 | .idea/**/dataSources.xml 59 | .idea/**/dataSources.local.xml 60 | .idea/**/sqlDataSources.xml 61 | .idea/**/dynamic.xml 62 | .idea/**/uiDesigner.xml 63 | 
64 | # Gradle: 65 | .idea/**/gradle.xml 66 | .idea/**/libraries 67 | 68 | # CMake 69 | cmake-build-debug/ 70 | 71 | # Mongo Explorer plugin: 72 | .idea/**/mongoSettings.xml 73 | 74 | ## File-based project format: 75 | *.iws 76 | 77 | ## Plugin-specific files: 78 | 79 | # IntelliJ 80 | /out/ 81 | 82 | # mpeltonen/sbt-idea plugin 83 | .idea_modules/ 84 | 85 | # JIRA plugin 86 | atlassian-ide-plugin.xml 87 | 88 | # Cursive Clojure plugin 89 | .idea/replstate.xml 90 | 91 | # Ruby plugin and RubyMine 92 | /.rakeTasks 93 | 94 | # Crashlytics plugin (for Android Studio and IntelliJ) 95 | com_crashlytics_export_strings.xml 96 | crashlytics.properties 97 | crashlytics-build.properties 98 | fabric.properties 99 | 100 | ### PyCharm Patch ### 101 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 102 | 103 | # *.iml 104 | # modules.xml 105 | # .idea/misc.xml 106 | # *.ipr 107 | 108 | # Sonarlint plugin 109 | .idea/sonarlint 110 | 111 | ### Python ### 112 | # Byte-compiled / optimized / DLL files 113 | __pycache__/ 114 | *.py[cod] 115 | *$py.class 116 | 117 | # C extensions 118 | *.so 119 | 120 | # Distribution / packaging 121 | .Python 122 | build/ 123 | develop-eggs/ 124 | dist/ 125 | downloads/ 126 | eggs/ 127 | .eggs/ 128 | # lib/ 129 | lib64/ 130 | parts/ 131 | sdist/ 132 | var/ 133 | wheels/ 134 | *.egg-info/ 135 | .installed.cfg 136 | *.egg 137 | 138 | # PyInstaller 139 | # Usually these files are written by a python script from a template 140 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
141 | *.manifest 142 | *.spec 143 | 144 | # Installer logs 145 | pip-log.txt 146 | pip-delete-this-directory.txt 147 | 148 | # Unit test / coverage reports 149 | htmlcov/ 150 | .tox/ 151 | .coverage 152 | .coverage.* 153 | .cache 154 | .pytest_cache/ 155 | nosetests.xml 156 | coverage.xml 157 | *.cover 158 | .hypothesis/ 159 | 160 | # Translations 161 | *.mo 162 | *.pot 163 | 164 | # Flask stuff: 165 | instance/ 166 | .webassets-cache 167 | 168 | # Scrapy stuff: 169 | .scrapy 170 | 171 | # Sphinx documentation 172 | docs/_build/ 173 | 174 | # PyBuilder 175 | target/ 176 | 177 | # Jupyter Notebook 178 | .ipynb_checkpoints 179 | 180 | # pyenv 181 | .python-version 182 | 183 | # celery beat schedule file 184 | celerybeat-schedule.* 185 | 186 | # SageMath parsed files 187 | *.sage.py 188 | 189 | # Environments 190 | .env 191 | .venv 192 | env/ 193 | venv/ 194 | ENV/ 195 | env.bak/ 196 | venv.bak/ 197 | 198 | # Spyder project settings 199 | .spyderproject 200 | .spyproject 201 | 202 | # Rope project settings 203 | .ropeproject 204 | 205 | # mkdocs documentation 206 | /site 207 | 208 | # mypy 209 | .mypy_cache/ 210 | 211 | ### VisualStudioCode ### 212 | .vscode/* 213 | !.vscode/settings.json 214 | !.vscode/tasks.json 215 | !.vscode/launch.json 216 | !.vscode/extensions.json 217 | .history 218 | 219 | ### Windows ### 220 | # Windows thumbnail cache files 221 | Thumbs.db 222 | ehthumbs.db 223 | ehthumbs_vista.db 224 | 225 | # Folder config file 226 | Desktop.ini 227 | 228 | # Recycle Bin used on file shares 229 | $RECYCLE.BIN/ 230 | 231 | # Windows Installer files 232 | *.cab 233 | *.msi 234 | *.msm 235 | *.msp 236 | 237 | # Windows shortcuts 238 | *.lnk 239 | 240 | # Build folder 241 | 242 | */build/* 243 | 244 | # End of https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode 245 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: 
-------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *master* branch. 27 | 2. 
You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 
55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | 61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. 62 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 15 | 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## AWS Rapid Prototyping with Python [![CircleCI](https://circleci.com/gh/aws-samples/aws-rapid-prototyping-with-python.svg?style=svg)](https://circleci.com/gh/aws-samples/aws-rapid-prototyping-with-python) 2 | 3 | This is a project to experience application development on AWS with the actual minimal implementations! 
4 | It contains: 5 | - Basic 3-tiers serverless WEB application which depends on: 6 | - Amazon API Gateway 7 | - AWS Lambda 8 | - Amazon DynamoDB 9 | - Unit tests 10 | 11 | ## Try it out on your local machine 12 | 13 | ### Prerequisite 14 | 15 | - [SAM (Serverless Application Model) CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) 16 | - Docker 17 | - for SAM Local 18 | - for [DynamoDB Local](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.html) 19 | - Both SAM Local and unit tests depend on it 20 | - Python 3.6 or later 21 | - Specified 3.8 as default 22 | - You can easily change the version by modifying `pyproject.toml` 23 | 24 | ### Create virtual environment and install dependencies 25 | 26 | This project depends on [`Poetry`](https://python-poetry.org/) to manage the environment and dependencies. 27 | So first of all, install it using `pip` command. 28 | 29 | ```sh 30 | $ pip install poetry 31 | ``` 32 | 33 | And then, create its virtual environment and install dependencies. 
from typing import Any, Dict, Tuple

import boto3
from botocore.exceptions import ClientError

import json
import os
import uuid


# DYNAMODB_ENDPOINT_URL is set only for local development (DynamoDB Local);
# when it is unset, boto3 resolves the regular AWS endpoint for the region.
dynamodb = boto3.resource(
    'dynamodb',
    endpoint_url=os.getenv('DYNAMODB_ENDPOINT_URL'),
)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_NAME'])

EventType = Dict[str, Any]
ContextType = Dict[str, Any]
ResponseType = Dict[str, object]


class UserHandler:
    """CRUD operations for the /user resource, backed by a DynamoDB table."""

    # Whitelist of dispatchable verb handlers. Restricting dispatch to this
    # set (instead of a bare getattr lookup on any lowercased httpMethod)
    # prevents a crafted method value such as "DISPATCH" from resolving to
    # unrelated class attributes -- e.g. dispatch() itself, which would
    # recurse indefinitely.
    _HTTP_METHODS = frozenset({'get', 'put', 'delete', 'patch'})

    @classmethod
    def dispatch(cls, event: EventType) -> ResponseType:
        """Route an API Gateway proxy event to the matching verb handler.

        Returns an API Gateway proxy response dict with a JSON-encoded body.
        Any verb without a dedicated handler yields 405, exactly as the
        previous AttributeError-based fallback did for real HTTP methods.
        """
        method = event['httpMethod'].lower()
        if method in cls._HTTP_METHODS:
            status_code, body = getattr(cls, method)(event)
        else:
            status_code, body = 405, {'message': 'METHOD NOT ALLOWED'}

        return {'statusCode': status_code, 'body': json.dumps(body)}

    @classmethod
    def get(cls, event: EventType) -> Tuple[int, Dict[str, str]]:
        """Fetch a single user by the user_id path parameter.

        Returns (200, item) when found, (404, message) otherwise.
        """
        user_id = event['pathParameters']['user_id']

        try:
            response = table.get_item(Key={'user_id': user_id})
        except ClientError:
            raise  # TODO: Implement appropriate error handling

        # get_item omits the 'Item' key entirely when nothing matched.
        if 'Item' not in response:
            return 404, {'message': 'No such user found'}

        return 200, response['Item']

    @classmethod
    def put(cls, event: EventType) -> Tuple[int, Dict[str, str]]:
        """Create a new user with a server-generated id.

        Expects a JSON body containing a 'name' field; returns (201, item).
        """
        parameters = json.loads(event['body'])

        # TODO: Add a condition to prohibit overriding existing user just in case
        item = {'user_id': uuid.uuid4().hex, 'name': parameters['name']}

        try:
            table.put_item(Item=item)
        except ClientError:
            raise  # TODO: Implement appropriate error handling

        return 201, item

    @classmethod
    def delete(cls, event: EventType) -> Tuple[int, Dict[str, str]]:
        """Delete a user by id.

        Idempotent: DynamoDB delete_item succeeds even when the key was
        never stored, so this always returns (204, {}).
        """
        user_id = event['pathParameters']['user_id']

        try:
            table.delete_item(Key={'user_id': user_id})
        except ClientError:
            raise  # TODO: Implement appropriate error handling

        return 204, {}

    @classmethod
    def patch(cls, event: EventType) -> Tuple[int, Dict[str, str]]:
        """Rename an existing user.

        Returns (200, updated item) on success or (404, message) when the
        conditional check rejects the update because the id is not stored.
        """
        user_id = event['pathParameters']['user_id']
        parameters = json.loads(event['body'])

        try:
            response = table.update_item(
                Key={'user_id': user_id},

                # Assign an alias to a reserved word.
                UpdateExpression='SET #nm = :newname',
                ExpressionAttributeNames={'#nm': 'name'},

                # Condition to avoid creating new item if a datum that has specified id is already stored
                ConditionExpression='user_id = :user_id',

                ExpressionAttributeValues={':newname': parameters['name'], ':user_id': user_id},
                ReturnValues='UPDATED_NEW',
            )
        except ClientError as e:
            # ConditionalCheckFailedException means no item with this id
            # exists; surface that as a 404 instead of a server error.
            if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
                return 404, {'message': 'No such user found'}
            raise

        return 200, {'user_id': user_id, 'name': response['Attributes']['name']}


# Resource-path -> handler routing table. Both paths share UserHandler;
# verb-level routing happens inside UserHandler.dispatch.
PATHS = {
    '/user': UserHandler,
    '/user/{user_id}': UserHandler,
}


def dispatch_request(event: EventType, context: ContextType) -> ResponseType:
    """Lambda entry point: route by resource path, then by HTTP method.

    Unknown resource paths yield a 404 proxy response; known paths are
    delegated to the registered handler's dispatch().
    """
    request_path = event['requestContext']['resourcePath']
    if request_path not in PATHS:
        return {'statusCode': 404, 'body': json.dumps({'message': 'NOT FOUND'})}

    return PATHS[request_path].dispatch(event)
"CloudFront-Is-Tablet-Viewer": "false", 26 | "CloudFront-Viewer-Country": "US", 27 | "Host": "1234567890.execute-api.us-east-1.amazonaws.com", 28 | "Upgrade-Insecure-Requests": "1", 29 | "User-Agent": "Custom User Agent String", 30 | "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", 31 | "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", 32 | "X-Forwarded-For": "127.0.0.1, 127.0.0.2", 33 | "X-Forwarded-Port": "443", 34 | "X-Forwarded-Proto": "https" 35 | }, 36 | "requestContext": { 37 | "accountId": "123456789012", 38 | "resourceId": "123456", 39 | "stage": "prod", 40 | "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", 41 | "requestTime": "09/Apr/2015:12:34:56 +0000", 42 | "requestTimeEpoch": 1428582896000, 43 | "identity": { 44 | "cognitoIdentityPoolId": null, 45 | "accountId": null, 46 | "cognitoIdentityId": null, 47 | "caller": null, 48 | "accessKey": null, 49 | "sourceIp": "127.0.0.1", 50 | "cognitoAuthenticationType": null, 51 | "cognitoAuthenticationProvider": null, 52 | "userArn": null, 53 | "userAgent": "Custom User Agent String", 54 | "user": null 55 | }, 56 | "path": "/prod/path/to/resource", 57 | "resourcePath": "/{proxy+}", 58 | "httpMethod": "POST", 59 | "apiId": "1234567890", 60 | "protocol": "HTTP/1.1" 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | category = "dev" 3 | description = "Atomic file writes." 
4 | marker = "sys_platform == \"win32\"" 5 | name = "atomicwrites" 6 | optional = false 7 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 8 | version = "1.3.0" 9 | 10 | [[package]] 11 | category = "dev" 12 | description = "Classes Without Boilerplate" 13 | name = "attrs" 14 | optional = false 15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 16 | version = "19.3.0" 17 | 18 | [package.extras] 19 | azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"] 20 | dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"] 21 | docs = ["sphinx", "zope.interface"] 22 | tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] 23 | 24 | [[package]] 25 | category = "main" 26 | description = "The AWS SDK for Python" 27 | name = "boto3" 28 | optional = false 29 | python-versions = "*" 30 | version = "1.11.13" 31 | 32 | [package.dependencies] 33 | botocore = ">=1.14.13,<1.15.0" 34 | jmespath = ">=0.7.1,<1.0.0" 35 | s3transfer = ">=0.3.0,<0.4.0" 36 | 37 | [[package]] 38 | category = "main" 39 | description = "Low-level, data-driven core of boto 3." 40 | name = "botocore" 41 | optional = false 42 | python-versions = "*" 43 | version = "1.14.13" 44 | 45 | [package.dependencies] 46 | docutils = ">=0.10,<0.16" 47 | jmespath = ">=0.7.1,<1.0.0" 48 | python-dateutil = ">=2.1,<3.0.0" 49 | 50 | [package.dependencies.urllib3] 51 | python = "<3.4.0 || >=3.5.0" 52 | version = ">=1.20,<1.26" 53 | 54 | [[package]] 55 | category = "dev" 56 | description = "Cross-platform colored terminal text." 
57 | marker = "sys_platform == \"win32\"" 58 | name = "colorama" 59 | optional = false 60 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 61 | version = "0.4.3" 62 | 63 | [[package]] 64 | category = "main" 65 | description = "Docutils -- Python Documentation Utilities" 66 | name = "docutils" 67 | optional = false 68 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 69 | version = "0.15.2" 70 | 71 | [[package]] 72 | category = "dev" 73 | description = "Discover and load entry points from installed packages." 74 | name = "entrypoints" 75 | optional = false 76 | python-versions = ">=2.7" 77 | version = "0.3" 78 | 79 | [[package]] 80 | category = "dev" 81 | description = "the modular source code checker: pep8, pyflakes and co" 82 | name = "flake8" 83 | optional = false 84 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 85 | version = "3.7.9" 86 | 87 | [package.dependencies] 88 | entrypoints = ">=0.3.0,<0.4.0" 89 | mccabe = ">=0.6.0,<0.7.0" 90 | pycodestyle = ">=2.5.0,<2.6.0" 91 | pyflakes = ">=2.1.0,<2.2.0" 92 | 93 | [[package]] 94 | category = "dev" 95 | description = "An autocompletion tool for Python that can be used for text editors." 
96 | name = "jedi" 97 | optional = false 98 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 99 | version = "0.15.2" 100 | 101 | [package.dependencies] 102 | parso = ">=0.5.2" 103 | 104 | [package.extras] 105 | testing = ["colorama (0.4.1)", "docopt", "pytest (>=3.9.0,<5.0.0)"] 106 | 107 | [[package]] 108 | category = "main" 109 | description = "JSON Matching Expressions" 110 | name = "jmespath" 111 | optional = false 112 | python-versions = "*" 113 | version = "0.9.4" 114 | 115 | [[package]] 116 | category = "dev" 117 | description = "McCabe checker, plugin for flake8" 118 | name = "mccabe" 119 | optional = false 120 | python-versions = "*" 121 | version = "0.6.1" 122 | 123 | [[package]] 124 | category = "dev" 125 | description = "More routines for operating on iterables, beyond itertools" 126 | name = "more-itertools" 127 | optional = false 128 | python-versions = ">=3.5" 129 | version = "8.2.0" 130 | 131 | [[package]] 132 | category = "dev" 133 | description = "Optional static typing for Python" 134 | name = "mypy" 135 | optional = false 136 | python-versions = ">=3.5" 137 | version = "0.761" 138 | 139 | [package.dependencies] 140 | mypy-extensions = ">=0.4.3,<0.5.0" 141 | typed-ast = ">=1.4.0,<1.5.0" 142 | typing-extensions = ">=3.7.4" 143 | 144 | [package.extras] 145 | dmypy = ["psutil (>=4.0)"] 146 | 147 | [[package]] 148 | category = "dev" 149 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 
150 | name = "mypy-extensions" 151 | optional = false 152 | python-versions = "*" 153 | version = "0.4.3" 154 | 155 | [[package]] 156 | category = "dev" 157 | description = "Core utilities for Python packages" 158 | name = "packaging" 159 | optional = false 160 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 161 | version = "20.1" 162 | 163 | [package.dependencies] 164 | pyparsing = ">=2.0.2" 165 | six = "*" 166 | 167 | [[package]] 168 | category = "dev" 169 | description = "A Python Parser" 170 | name = "parso" 171 | optional = false 172 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 173 | version = "0.6.1" 174 | 175 | [package.extras] 176 | testing = ["docopt", "pytest (>=3.0.7)"] 177 | 178 | [[package]] 179 | category = "dev" 180 | description = "plugin and hook calling mechanisms for python" 181 | name = "pluggy" 182 | optional = false 183 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 184 | version = "0.13.1" 185 | 186 | [package.extras] 187 | dev = ["pre-commit", "tox"] 188 | 189 | [[package]] 190 | category = "dev" 191 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 192 | name = "py" 193 | optional = false 194 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 195 | version = "1.8.1" 196 | 197 | [[package]] 198 | category = "dev" 199 | description = "Python style guide checker" 200 | name = "pycodestyle" 201 | optional = false 202 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 203 | version = "2.5.0" 204 | 205 | [[package]] 206 | category = "dev" 207 | description = "passive checker of Python programs" 208 | name = "pyflakes" 209 | optional = false 210 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 211 | version = "2.1.1" 212 | 213 | [[package]] 214 | category = "dev" 215 | description = "Python parsing module" 216 | name = "pyparsing" 217 | optional = false 218 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 219 | 
version = "2.4.6" 220 | 221 | [[package]] 222 | category = "dev" 223 | description = "pytest: simple powerful testing with Python" 224 | name = "pytest" 225 | optional = false 226 | python-versions = ">=3.5" 227 | version = "5.3.5" 228 | 229 | [package.dependencies] 230 | atomicwrites = ">=1.0" 231 | attrs = ">=17.4.0" 232 | colorama = "*" 233 | more-itertools = ">=4.0.0" 234 | packaging = "*" 235 | pluggy = ">=0.12,<1.0" 236 | py = ">=1.5.0" 237 | wcwidth = "*" 238 | 239 | [package.extras] 240 | checkqa-mypy = ["mypy (v0.761)"] 241 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 242 | 243 | [[package]] 244 | category = "dev" 245 | description = "py.test plugin that allows you to add environment variables." 246 | name = "pytest-env" 247 | optional = false 248 | python-versions = "*" 249 | version = "0.6.2" 250 | 251 | [package.dependencies] 252 | pytest = ">=2.6.0" 253 | 254 | [[package]] 255 | category = "dev" 256 | description = "pytest plugin to check FLAKE8 requirements" 257 | name = "pytest-flake8" 258 | optional = false 259 | python-versions = "*" 260 | version = "1.0.4" 261 | 262 | [package.dependencies] 263 | flake8 = ">=3.5" 264 | pytest = ">=3.5" 265 | 266 | [[package]] 267 | category = "dev" 268 | description = "Thin-wrapper around the mock package for easier use with py.test" 269 | name = "pytest-mock" 270 | optional = false 271 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 272 | version = "2.0.0" 273 | 274 | [package.dependencies] 275 | pytest = ">=2.7" 276 | 277 | [package.extras] 278 | dev = ["pre-commit", "tox"] 279 | 280 | [[package]] 281 | category = "main" 282 | description = "Extensions to the standard Python datetime module" 283 | name = "python-dateutil" 284 | optional = false 285 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 286 | version = "2.8.1" 287 | 288 | [package.dependencies] 289 | six = ">=1.5" 290 | 291 | [[package]] 292 | category = "dev" 293 | description = "JSON 
RPC 2.0 server library" 294 | name = "python-jsonrpc-server" 295 | optional = false 296 | python-versions = "*" 297 | version = "0.3.4" 298 | 299 | [package.dependencies] 300 | ujson = "<=1.35" 301 | 302 | [package.extras] 303 | test = ["versioneer", "pylint", "pycodestyle", "pyflakes", "pytest", "mock", "pytest-cov", "coverage"] 304 | 305 | [[package]] 306 | category = "dev" 307 | description = "Python Language Server for the Language Server Protocol" 308 | name = "python-language-server" 309 | optional = false 310 | python-versions = "*" 311 | version = "0.31.8" 312 | 313 | [package.dependencies] 314 | jedi = ">=0.14.1,<0.16" 315 | pluggy = "*" 316 | python-jsonrpc-server = ">=0.3.2" 317 | ujson = "<=1.35" 318 | 319 | [package.extras] 320 | all = ["autopep8", "flake8", "mccabe", "pycodestyle", "pydocstyle (>=2.0.0)", "pyflakes (>=1.6.0)", "pylint", "rope (>=0.10.5)", "yapf"] 321 | autopep8 = ["autopep8"] 322 | flake8 = ["flake8"] 323 | mccabe = ["mccabe"] 324 | pycodestyle = ["pycodestyle"] 325 | pydocstyle = ["pydocstyle (>=2.0.0)"] 326 | pyflakes = ["pyflakes (>=1.6.0)"] 327 | pylint = ["pylint"] 328 | rope = ["rope (>0.10.5)"] 329 | test = ["versioneer", "pylint", "pytest", "mock", "pytest-cov", "coverage", "numpy", "pandas", "matplotlib", "pyqt5"] 330 | yapf = ["yapf"] 331 | 332 | [[package]] 333 | category = "main" 334 | description = "An Amazon S3 Transfer Manager" 335 | name = "s3transfer" 336 | optional = false 337 | python-versions = "*" 338 | version = "0.3.3" 339 | 340 | [package.dependencies] 341 | botocore = ">=1.12.36,<2.0a.0" 342 | 343 | [[package]] 344 | category = "main" 345 | description = "Python 2 and 3 compatibility utilities" 346 | name = "six" 347 | optional = false 348 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 349 | version = "1.14.0" 350 | 351 | [[package]] 352 | category = "dev" 353 | description = "a fork of Python 2 and 3 ast modules with type comment support" 354 | name = "typed-ast" 355 | optional = false 356 | 
python-versions = "*" 357 | version = "1.4.1" 358 | 359 | [[package]] 360 | category = "dev" 361 | description = "Backported and Experimental Type Hints for Python 3.5+" 362 | name = "typing-extensions" 363 | optional = false 364 | python-versions = "*" 365 | version = "3.7.4.1" 366 | 367 | [[package]] 368 | category = "dev" 369 | description = "Ultra fast JSON encoder and decoder for Python" 370 | marker = "platform_system != \"Windows\"" 371 | name = "ujson" 372 | optional = false 373 | python-versions = "*" 374 | version = "1.35" 375 | 376 | [[package]] 377 | category = "main" 378 | description = "HTTP library with thread-safe connection pooling, file post, and more." 379 | marker = "python_version != \"3.4\"" 380 | name = "urllib3" 381 | optional = false 382 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 383 | version = "1.25.8" 384 | 385 | [package.extras] 386 | brotli = ["brotlipy (>=0.6.0)"] 387 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 388 | socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] 389 | 390 | [[package]] 391 | category = "dev" 392 | description = "Measures number of Terminal column cells of wide-character codes" 393 | name = "wcwidth" 394 | optional = false 395 | python-versions = "*" 396 | version = "0.1.8" 397 | 398 | [metadata] 399 | content-hash = "1371f31b44fc9ffa786a1208b2b8c66f20aa1ef754ca5de85dd819bbdde90140" 400 | python-versions = "3.8.*" 401 | 402 | [metadata.files] 403 | atomicwrites = [ 404 | {file = "atomicwrites-1.3.0-py2.py3-none-any.whl", hash = "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4"}, 405 | {file = "atomicwrites-1.3.0.tar.gz", hash = "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"}, 406 | ] 407 | attrs = [ 408 | {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, 409 | {file = "attrs-19.3.0.tar.gz", hash 
= "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, 410 | ] 411 | boto3 = [ 412 | {file = "boto3-1.11.13-py2.py3-none-any.whl", hash = "sha256:664be6e0e20cb064dda4ac3397082e3dcc453abb8b2bd2cf64066677e0fb2266"}, 413 | {file = "boto3-1.11.13.tar.gz", hash = "sha256:09eccb6cd41381c4ff1d626c3a19884b5b1f1424d15a96004d077b532ef393d1"}, 414 | ] 415 | botocore = [ 416 | {file = "botocore-1.14.13-py2.py3-none-any.whl", hash = "sha256:6ffb78b331b0954cfe5c51958cb51522ab0e2999442422949b080a3e1bc76ee1"}, 417 | {file = "botocore-1.14.13.tar.gz", hash = "sha256:6478d9207db6dbcb5106fd4db2cdd5194d0b2dc0b73776019d56877ab802fe87"}, 418 | ] 419 | colorama = [ 420 | {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, 421 | {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, 422 | ] 423 | docutils = [ 424 | {file = "docutils-0.15.2-py2-none-any.whl", hash = "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827"}, 425 | {file = "docutils-0.15.2-py3-none-any.whl", hash = "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0"}, 426 | {file = "docutils-0.15.2.tar.gz", hash = "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99"}, 427 | ] 428 | entrypoints = [ 429 | {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, 430 | {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, 431 | ] 432 | flake8 = [ 433 | {file = "flake8-3.7.9-py2.py3-none-any.whl", hash = "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca"}, 434 | {file = "flake8-3.7.9.tar.gz", hash = "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb"}, 435 | ] 436 | jedi = [ 437 | {file = "jedi-0.15.2-py2.py3-none-any.whl", hash = 
"sha256:1349c1e8c107095a55386628bb3b2a79422f3a2cab8381e34ce19909e0cf5064"}, 438 | {file = "jedi-0.15.2.tar.gz", hash = "sha256:e909527104a903606dd63bea6e8e888833f0ef087057829b89a18364a856f807"}, 439 | ] 440 | jmespath = [ 441 | {file = "jmespath-0.9.4-py2.py3-none-any.whl", hash = "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6"}, 442 | {file = "jmespath-0.9.4.tar.gz", hash = "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"}, 443 | ] 444 | mccabe = [ 445 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 446 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 447 | ] 448 | more-itertools = [ 449 | {file = "more-itertools-8.2.0.tar.gz", hash = "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507"}, 450 | {file = "more_itertools-8.2.0-py3-none-any.whl", hash = "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c"}, 451 | ] 452 | mypy = [ 453 | {file = "mypy-0.761-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:7f672d02fffcbace4db2b05369142e0506cdcde20cea0e07c7c2171c4fd11dd6"}, 454 | {file = "mypy-0.761-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:87c556fb85d709dacd4b4cb6167eecc5bbb4f0a9864b69136a0d4640fdc76a36"}, 455 | {file = "mypy-0.761-cp35-cp35m-win_amd64.whl", hash = "sha256:c6d27bd20c3ba60d5b02f20bd28e20091d6286a699174dfad515636cb09b5a72"}, 456 | {file = "mypy-0.761-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:4b9365ade157794cef9685791032521233729cb00ce76b0ddc78749abea463d2"}, 457 | {file = "mypy-0.761-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:634aef60b4ff0f650d3e59d4374626ca6153fcaff96ec075b215b568e6ee3cb0"}, 458 | {file = "mypy-0.761-cp36-cp36m-win_amd64.whl", hash = "sha256:53ea810ae3f83f9c9b452582261ea859828a9ed666f2e1ca840300b69322c474"}, 459 | {file = "mypy-0.761-cp37-cp37m-macosx_10_6_x86_64.whl", hash = 
"sha256:0a9a45157e532da06fe56adcfef8a74629566b607fa2c1ac0122d1ff995c748a"}, 460 | {file = "mypy-0.761-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7eadc91af8270455e0d73565b8964da1642fe226665dd5c9560067cd64d56749"}, 461 | {file = "mypy-0.761-cp37-cp37m-win_amd64.whl", hash = "sha256:e2bb577d10d09a2d8822a042a23b8d62bc3b269667c9eb8e60a6edfa000211b1"}, 462 | {file = "mypy-0.761-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c35cae79ceb20d47facfad51f952df16c2ae9f45db6cb38405a3da1cf8fc0a7"}, 463 | {file = "mypy-0.761-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f97a605d7c8bc2c6d1172c2f0d5a65b24142e11a58de689046e62c2d632ca8c1"}, 464 | {file = "mypy-0.761-cp38-cp38-win_amd64.whl", hash = "sha256:a6bd44efee4dc8c3324c13785a9dc3519b3ee3a92cada42d2b57762b7053b49b"}, 465 | {file = "mypy-0.761-py3-none-any.whl", hash = "sha256:7e396ce53cacd5596ff6d191b47ab0ea18f8e0ec04e15d69728d530e86d4c217"}, 466 | {file = "mypy-0.761.tar.gz", hash = "sha256:85baab8d74ec601e86134afe2bcccd87820f79d2f8d5798c889507d1088287bf"}, 467 | ] 468 | mypy-extensions = [ 469 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 470 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 471 | ] 472 | packaging = [ 473 | {file = "packaging-20.1-py2.py3-none-any.whl", hash = "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73"}, 474 | {file = "packaging-20.1.tar.gz", hash = "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334"}, 475 | ] 476 | parso = [ 477 | {file = "parso-0.6.1-py2.py3-none-any.whl", hash = "sha256:951af01f61e6dccd04159042a0706a31ad437864ec6e25d0d7a96a9fbb9b0095"}, 478 | {file = "parso-0.6.1.tar.gz", hash = "sha256:56b2105a80e9c4df49de85e125feb6be69f49920e121406f15e7acde6c9dfc57"}, 479 | ] 480 | pluggy = [ 481 | {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = 
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, 482 | {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, 483 | ] 484 | py = [ 485 | {file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"}, 486 | {file = "py-1.8.1.tar.gz", hash = "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa"}, 487 | ] 488 | pycodestyle = [ 489 | {file = "pycodestyle-2.5.0-py2.py3-none-any.whl", hash = "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56"}, 490 | {file = "pycodestyle-2.5.0.tar.gz", hash = "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"}, 491 | ] 492 | pyflakes = [ 493 | {file = "pyflakes-2.1.1-py2.py3-none-any.whl", hash = "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0"}, 494 | {file = "pyflakes-2.1.1.tar.gz", hash = "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"}, 495 | ] 496 | pyparsing = [ 497 | {file = "pyparsing-2.4.6-py2.py3-none-any.whl", hash = "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"}, 498 | {file = "pyparsing-2.4.6.tar.gz", hash = "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f"}, 499 | ] 500 | pytest = [ 501 | {file = "pytest-5.3.5-py3-none-any.whl", hash = "sha256:ff615c761e25eb25df19edddc0b970302d2a9091fbce0e7213298d85fb61fef6"}, 502 | {file = "pytest-5.3.5.tar.gz", hash = "sha256:0d5fe9189a148acc3c3eb2ac8e1ac0742cb7618c084f3d228baaec0c254b318d"}, 503 | ] 504 | pytest-env = [ 505 | {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, 506 | ] 507 | pytest-flake8 = [ 508 | {file = "pytest-flake8-1.0.4.tar.gz", hash = "sha256:4d225c13e787471502ff94409dcf6f7927049b2ec251c63b764a4b17447b60c0"}, 509 | {file = "pytest_flake8-1.0.4-py2.py3-none-any.whl", hash = 
"sha256:d7e2b6b274a255b7ae35e9224c85294b471a83b76ecb6bd53c337ae977a499af"}, 510 | ] 511 | pytest-mock = [ 512 | {file = "pytest-mock-2.0.0.tar.gz", hash = "sha256:b35eb281e93aafed138db25c8772b95d3756108b601947f89af503f8c629413f"}, 513 | {file = "pytest_mock-2.0.0-py2.py3-none-any.whl", hash = "sha256:cb67402d87d5f53c579263d37971a164743dc33c159dfb4fb4a86f37c5552307"}, 514 | ] 515 | python-dateutil = [ 516 | {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, 517 | {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, 518 | ] 519 | python-jsonrpc-server = [ 520 | {file = "python-jsonrpc-server-0.3.4.tar.gz", hash = "sha256:c73bf5495c9dd4d2f902755bedeb6da5afe778e0beee82f0e195c4655352fe37"}, 521 | {file = "python_jsonrpc_server-0.3.4-py3-none-any.whl", hash = "sha256:1f85f75f37f923149cc0aa078474b6df55b708e82ed819ca8846a65d7d0ada7f"}, 522 | ] 523 | python-language-server = [ 524 | {file = "python-language-server-0.31.8.tar.gz", hash = "sha256:f5685e1a6a3f6a2529ff75ea0676c59e769024302b2434564a5e7005d056eb82"}, 525 | {file = "python_language_server-0.31.8-py3-none-any.whl", hash = "sha256:c95470de6da223cdad7e60121bf5d220c292146caf2712eaef47a515c879e29d"}, 526 | ] 527 | s3transfer = [ 528 | {file = "s3transfer-0.3.3-py2.py3-none-any.whl", hash = "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13"}, 529 | {file = "s3transfer-0.3.3.tar.gz", hash = "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db"}, 530 | ] 531 | six = [ 532 | {file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"}, 533 | {file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"}, 534 | ] 535 | typed-ast = [ 536 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, 537 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, 538 | {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, 539 | {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, 540 | {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, 541 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, 542 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, 543 | {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, 544 | {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, 545 | {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, 546 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, 547 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, 548 | {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, 549 | {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, 550 | {file = 
"typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, 551 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, 552 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, 553 | {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, 554 | {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, 555 | {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, 556 | {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, 557 | ] 558 | typing-extensions = [ 559 | {file = "typing_extensions-3.7.4.1-py2-none-any.whl", hash = "sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d"}, 560 | {file = "typing_extensions-3.7.4.1-py3-none-any.whl", hash = "sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575"}, 561 | {file = "typing_extensions-3.7.4.1.tar.gz", hash = "sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2"}, 562 | ] 563 | ujson = [ 564 | {file = "ujson-1.35.tar.gz", hash = "sha256:f66073e5506e91d204ab0c614a148d5aa938bdbf104751be66f8ad7a222f5f86"}, 565 | ] 566 | urllib3 = [ 567 | {file = "urllib3-1.25.8-py2.py3-none-any.whl", hash = "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc"}, 568 | {file = "urllib3-1.25.8.tar.gz", hash = "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"}, 569 | ] 570 | wcwidth = [ 571 | {file = "wcwidth-0.1.8-py2.py3-none-any.whl", hash = 
"sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603"}, 572 | {file = "wcwidth-0.1.8.tar.gz", hash = "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8"}, 573 | ] 574 | -------------------------------------------------------------------------------- /provisioning/.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | !jest.config.js 3 | *.d.ts 4 | node_modules 5 | 6 | # CDK asset staging directory 7 | .cdk.staging 8 | cdk.out 9 | -------------------------------------------------------------------------------- /provisioning/.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | -------------------------------------------------------------------------------- /provisioning/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to your CDK TypeScript project! 2 | 3 | This is a blank project for TypeScript development with CDK. 4 | 5 | The `cdk.json` file tells the CDK Toolkit how to execute your app. 
6 | 7 | ## Useful commands 8 | 9 | * `npm run build` compile typescript to js 10 | * `npm run watch` watch for changes and compile 11 | * `npm run test` perform the jest unit tests 12 | * `cdk deploy` deploy this stack to your default AWS account/region 13 | * `cdk diff` compare deployed stack with current state 14 | * `cdk synth` emits the synthesized CloudFormation template 15 | -------------------------------------------------------------------------------- /provisioning/bin/provisioning.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import 'source-map-support/register'; 3 | import * as cdk from '@aws-cdk/core'; 4 | import { DatabaseStack } from '../lib/database-stack'; 5 | import { ApiStack } from '../lib/api-stack'; 6 | 7 | const app = new cdk.App(); 8 | const db = new DatabaseStack(app, 'DatabaseStack'); 9 | const api = new ApiStack(app, 'ApiStack', { database: db }); 10 | -------------------------------------------------------------------------------- /provisioning/cdk.context.json: -------------------------------------------------------------------------------- 1 | { 2 | "@aws-cdk/core:enableStackNameDuplicates": "true", 3 | "aws-cdk:enableDiffNoFail": "true" 4 | } 5 | -------------------------------------------------------------------------------- /provisioning/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node bin/provisioning.ts" 3 | } 4 | -------------------------------------------------------------------------------- /provisioning/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "roots": [ 3 | "/test" 4 | ], 5 | testMatch: [ '**/*.test.ts'], 6 | "transform": { 7 | "^.+\\.tsx?$": "ts-jest" 8 | }, 9 | } 10 | -------------------------------------------------------------------------------- /provisioning/lib/api-stack.ts: 
-------------------------------------------------------------------------------- 1 | import * as cdk from '@aws-cdk/core'; 2 | import * as lambda from '@aws-cdk/aws-lambda'; 3 | import * as apigateway from '@aws-cdk/aws-apigateway'; 4 | import { DatabaseStack } from './database-stack'; 5 | 6 | export interface ApiStackProps { 7 | database: DatabaseStack, 8 | } 9 | 10 | export class ApiStack extends cdk.Stack { 11 | constructor(scope: cdk.Construct, id: string, apiProps: ApiStackProps, props?: cdk.StackProps) { 12 | super(scope, id, props); 13 | 14 | const userFunction = new lambda.Function(this, 'UserFunction', { 15 | runtime: lambda.Runtime.PYTHON_3_8, 16 | code: lambda.Code.asset('../app'), 17 | environment: { 18 | DYNAMODB_TABLE_NAME: apiProps.database.table.tableName, 19 | }, 20 | functionName: 'UserFunction', 21 | handler: 'app.dispatch_request', 22 | }); 23 | 24 | apiProps.database.table.grantReadWriteData(userFunction); 25 | 26 | const userFunctionIntegration = new apigateway.LambdaIntegration(userFunction, {}); 27 | 28 | const api = new apigateway.RestApi(this, 'UserRestApi', { restApiName: 'userRestApi' }); 29 | const apiUser = api.root.addResource('user'); 30 | const apiUserId = apiUser.addResource('{user_id}'); 31 | 32 | apiUser.addMethod('PUT', userFunctionIntegration); 33 | apiUserId.addMethod('GET', userFunctionIntegration); 34 | apiUserId.addMethod('DELETE', userFunctionIntegration); 35 | apiUserId.addMethod('PATCH', userFunctionIntegration); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /provisioning/lib/database-stack.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from '@aws-cdk/core'; 2 | import * as dynamodb from '@aws-cdk/aws-dynamodb'; 3 | 4 | export class DatabaseStack extends cdk.Stack { 5 | public readonly table: dynamodb.Table; 6 | 7 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 8 | super(scope, id, 
props); 9 | 10 | this.table = new dynamodb.Table(this, 'UserTable', { 11 | tableName: 'UserTable', 12 | partitionKey: { 13 | name: 'user_id', 14 | type: dynamodb.AttributeType.STRING, 15 | }, 16 | }); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /provisioning/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "provisioning", 3 | "version": "0.1.0", 4 | "bin": { 5 | "provisioning": "bin/provisioning.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@aws-cdk/assert": "1.23.0", 15 | "@types/jest": "^24.0.22", 16 | "@types/node": "10.17.5", 17 | "aws-cdk": "^1.96.0", 18 | "jest": "^26.6.3", 19 | "ts-jest": "^26.5.4", 20 | "ts-node": "^8.8.1", 21 | "typescript": "~3.7.2" 22 | }, 23 | "dependencies": { 24 | "@aws-cdk/aws-apigateway": "^1.30.0", 25 | "@aws-cdk/aws-dynamodb": "^1.30.0", 26 | "@aws-cdk/aws-lambda": "^1.30.0", 27 | "@aws-cdk/core": "1.28.0", 28 | "source-map-support": "^0.5.16" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /provisioning/test/provisioning.test.ts: -------------------------------------------------------------------------------- 1 | import { expect as expectCDK, matchTemplate, MatchStyle } from '@aws-cdk/assert'; 2 | import * as cdk from '@aws-cdk/core'; 3 | -------------------------------------------------------------------------------- /provisioning/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target":"ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2018"], 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "strictNullChecks": true, 10 | "noImplicitThis": true, 11 | "alwaysStrict": true, 12 | "noUnusedLocals": false, 13 | "noUnusedParameters": false, 14 | 
"noImplicitReturns": true, 15 | "noFallthroughCasesInSwitch": false, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "experimentalDecorators": true, 19 | "strictPropertyInitialization":false, 20 | "typeRoots": ["./node_modules/@types"] 21 | }, 22 | "exclude": ["cdk.out"] 23 | } 24 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "python-rapid-prototyping" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["studio3104 "] 6 | license = "MIT" 7 | 8 | [tool.poetry.dependencies] 9 | python = "3.8.*" 10 | boto3 = "^1.11.13" 11 | 12 | [tool.poetry.dev-dependencies] 13 | pytest = "^5.3.5" 14 | pytest-mock = "^2.0.0" 15 | pytest-flake8 = "^1.0.4" 16 | mypy = "^0.761" 17 | python-language-server = "^0.31.8" 18 | pytest-env = "^0.6.2" 19 | 20 | [build-system] 21 | requires = ["poetry>=0.12"] 22 | build-backend = "poetry.masonry.api" 23 | -------------------------------------------------------------------------------- /scripts/create_pandas_package.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | PYTHON_VERSION=3.7 3 | LIB_DIR=python/lib/python$PYTHON_VERSION/site-packages 4 | INSTALLER=pip$PYTHON_VERSION 5 | 6 | [ -e $LIB_DIR ] && rm -r $LIB_DIR 7 | [ -e pandas.zip ] && rm pandas.zip 8 | 9 | which $INSTALLER >& /dev/null 10 | [ $? 
-ne 0 ] && INSTALLER=pip3 11 | echo "pip => $INSTALLER" 12 | 13 | mkdir -p $LIB_DIR 14 | $INSTALLER install pandas xlrd xlwt xlsxwriter -t $LIB_DIR 15 | zip pandas.zip python/ -r 16 | aws lambda publish-layer-version --layer-name pandas --zip-file fileb://./pandas.zip --compatible-runtimes python$PYTHON_VERSION --region ap-northeast-1 17 | -------------------------------------------------------------------------------- /scripts/create_psycopg2_package.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | PYTHON_VERSION=3.7 3 | POSTGRES_VERSION=10.0 4 | PSYCOPG2_VERSION=2.8.4 5 | 6 | LIB_DIR=python/lib/python$PYTHON_VERSION/site-packages 7 | 8 | POSTGRES_PATH=postgresql-$POSTGRES_VERSION 9 | POSTGRES_URL=https://ftp.postgresql.org/pub/source/v$POSTGRES_VERSION/$POSTGRES_PATH.tar.gz 10 | POSTGRES_INSTALL_PATH=/tmp/pg 11 | 12 | PSYCOPG2_PATH=psycopg2-$PSYCOPG2_VERSION 13 | PSYCOPG2_URL=https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/$PSYCOPG2_PATH.tar.gz 14 | 15 | function download_postgres { 16 | if [ -e $POSTGRES_PATH.tar.gz ]; then 17 | echo "skip download postgresql." 18 | else 19 | sudo yum install -y postgresql-devel 20 | wget $POSTGRES_URL 21 | tar -zxvf $POSTGRES_PATH.tar.gz 22 | fi 23 | } 24 | 25 | function install_postgres { 26 | if [ -e $POSTGRES_INSTALL_PATH ]; then 27 | echo "skip install postgresql." 28 | else 29 | cd $POSTGRES_PATH 30 | ./configure --prefix $POSTGRES_INSTALL_PATH --without-readline --without-zlib 31 | make 32 | make install 33 | cd .. 34 | fi 35 | } 36 | 37 | function install_psycopg2 { 38 | if [ -e $PSYCOPG2_PATH.tar.gz ]; then 39 | echo "skip download psycopg2." 
40 | else 41 | wget $PSYCOPG2_URL 42 | tar -zxvf $PSYCOPG2_PATH.tar.gz 43 | fi 44 | 45 | cd $PSYCOPG2_PATH 46 | sed -i -e "s#pg_config =.*#pg_config = $POSTGRES_INSTALL_PATH/bin/pg_config#" setup.cfg 47 | sed -i -e "s/static_libpq = 0/static_libpq = 1/" setup.cfg 48 | LD_LIBRARY_PATH=$PG_DIR/lib:$LD_LIBRARY_PATH python$PYTHON_VERSION setup.py build 49 | cd .. 50 | } 51 | 52 | [ -e $LIB_DIR ] && rm -r $LIB_DIR 53 | [ -e psycopg2.zip ] && rm -r psycopg2.zip 54 | 55 | set -e 56 | download_postgres 57 | install_postgres 58 | install_psycopg2 59 | 60 | mkdir -p $LIB_DIR 61 | cp -r $PSYCOPG2_PATH/build/lib.linux-x86_64-$PYTHON_VERSION/psycopg2/ $LIB_DIR 62 | zip psycopg2.zip python/ -r 63 | aws lambda publish-layer-version --layer-name psycopg2 --zip-file fileb://./psycopg2.zip --compatible-runtimes python$PYTHON_VERSION --region ap-northeast-1 64 | set +e 65 | 66 | rm $POSTGRES_PATH.tar.gz $PSYCOPG2_PATH.tar.gz 67 | rm -r $POSTGRES_PATH $PSYCOPG2_PATH $POSTGRES_INSTALL_PATH 68 | echo 'done!' 69 | -------------------------------------------------------------------------------- /scripts/install_pyenv.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | curl https://pyenv.run | bash 3 | 4 | echo 'export PATH="$HOME/.pyenv/bin:$PATH"' >> ~/.bash_profile 5 | echo 'eval "$(pyenv init -)"' >> ~/.bash_profile 6 | echo 'eval "$(pyenv virtualenv-init -)"' >> ~/.bash_profile 7 | 8 | source ~/.bash_profile 9 | pyenv -v 10 | -------------------------------------------------------------------------------- /scripts/install_python.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | VERSION=3.7.3 3 | 4 | if [ ! -e Python-$VERSION.tgz ]; then 5 | wget https://www.python.org/ftp/python/$VERSION/Python-$VERSION.tgz 6 | tar -zxvf Python-$VERSION.tgz 7 | fi 8 | 9 | cd Python-$VERSION 10 | ./configure 11 | make 12 | make test 13 | sudo make altinstall 14 | 15 | cd .. 
16 | rm -r Python-$VERSION.tgz Python-$VERSION 17 | -------------------------------------------------------------------------------- /scripts/samlocal.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | DOCKER_NETWORK_NAME=lambda-local 5 | DDB_CONTAINER_PREFIX=dynamodblocalpythonrapid 6 | DDB_CONTAINER_NAME=${DDB_CONTAINER_PREFIX}$(date "+%Y%m%d%H%M%S") 7 | DDB_LOCAL_PORT=8001 8 | EXISTING_CONTAINERS=$(docker ps -aq --filter name=${DDB_CONTAINER_PREFIX}) 9 | PATH_TO_ENVVAR=/tmp/pythonrapidenv.json 10 | 11 | docker pull amazon/dynamodb-local 12 | 13 | # Create a docker network common between DDB Local and SAM Local 14 | if docker network ls | grep ${DOCKER_NETWORK_NAME}; then 15 | echo 16 | else 17 | docker network create ${DOCKER_NETWORK_NAME} 18 | fi 19 | 20 | # Launch DDB Local 21 | if [ ! "x${EXISTING_CONTAINERS}" = "x" ]; then 22 | docker stop ${EXISTING_CONTAINERS} 23 | docker rm ${EXISTING_CONTAINERS} 24 | fi 25 | docker run -d --name ${DDB_CONTAINER_NAME} --net ${DOCKER_NETWORK_NAME} -p ${DDB_LOCAL_PORT}:${DDB_LOCAL_PORT} amazon/dynamodb-local 26 | 27 | # Create DDB schema 28 | /usr/local/bin/aws dynamodb create-table --endpoint-url http://localhost:${DDB_LOCAL_PORT} \ 29 | --table-name testUserTable \ 30 | --attribute-definitions AttributeName=user_id,AttributeType=S \ 31 | --key-schema AttributeName=user_id,KeyType=HASH \ 32 | --billing-mode PAY_PER_REQUEST 33 | 34 | # Launch SAM Local 35 | echo "{\"Parameters\": {\"DYNAMODB_ENDPOINT_URL\": \"http://${DDB_CONTAINER_NAME}:${DDB_LOCAL_PORT}\"}}" | jq . 
> ${PATH_TO_ENVVAR} 36 | sam local start-api --docker-network ${DOCKER_NETWORK_NAME} --env-vars ${PATH_TO_ENVVAR} 37 | -------------------------------------------------------------------------------- /scripts/unittest.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | CWD=$(dirname "${0}") 5 | 6 | DDB_CONTAINER_PREFIX=unittestddblocal 7 | DDB_CONTAINER_NAME=${DDB_CONTAINER_PREFIX}$(date "+%Y%m%d%H%M%S") 8 | DDB_LOCAL_PORT=8000 9 | EXISTING_CONTAINERS=$(docker ps -aq --filter name=${DDB_CONTAINER_PREFIX}) 10 | PATH_TO_ENVVAR=/tmp/pythonrapidenv.json 11 | 12 | docker pull amazon/dynamodb-local 13 | 14 | # Launch DDB Local 15 | if [ ! "x${EXISTING_CONTAINERS}" = "x" ]; then 16 | docker stop ${EXISTING_CONTAINERS} 17 | docker rm ${EXISTING_CONTAINERS} 18 | fi 19 | docker run -d --name ${DDB_CONTAINER_NAME} -p ${DDB_LOCAL_PORT}:8000 amazon/dynamodb-local 20 | 21 | # Run py.test 22 | python -m pytest ${CWD}/../tests/ -vv 23 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 120 3 | 4 | [tool:pytest] 5 | env = 6 | AWS_ACCESS_KEY_ID=x 7 | AWS_SECRET_ACCESS_KEY=x 8 | AWS_DEFAULT_REGION=ap-northeast-1 9 | DYNAMODB_ENDPOINT_URL=http://localhost:8000 10 | DYNAMODB_TABLE_NAME=testUserTable 11 | -------------------------------------------------------------------------------- /template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Outputs: 3 | APIHandlerArn: 4 | Value: 5 | Fn::GetAtt: 6 | - APIHandler 7 | - Arn 8 | APIHandlerName: 9 | Value: 10 | Ref: APIHandler 11 | EndpointURL: 12 | Value: 13 | Fn::Sub: https://${RestAPI}.execute-api.${AWS::Region}.amazonaws.com/api/ 14 | RestAPIId: 15 | Value: 16 | Ref: RestAPI 17 | 18 | Globals: 19 | Function: 20 | Environment: 21 
| Variables: 22 | DYNAMODB_ENDPOINT_URL: null 23 | DYNAMODB_TABLE_NAME: testUserTable 24 | 25 | Resources: 26 | APIHandler: 27 | Properties: 28 | CodeUri: ./app 29 | Handler: app.dispatch_request 30 | MemorySize: 128 31 | Role: 32 | Fn::GetAtt: 33 | - DefaultRole 34 | - Arn 35 | Runtime: python3.8 36 | Timeout: 60 37 | Type: AWS::Serverless::Function 38 | APIHandlerInvokePermission: 39 | Properties: 40 | Action: lambda:InvokeFunction 41 | FunctionName: 42 | Ref: APIHandler 43 | Principal: apigateway.amazonaws.com 44 | SourceArn: 45 | Fn::Sub: 46 | - arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${RestAPIId}/* 47 | - RestAPIId: 48 | Ref: RestAPI 49 | Type: AWS::Lambda::Permission 50 | DefaultRole: 51 | Properties: 52 | AssumeRolePolicyDocument: 53 | Statement: 54 | - Action: sts:AssumeRole 55 | Effect: Allow 56 | Principal: 57 | Service: lambda.amazonaws.com 58 | Sid: '' 59 | Version: '2012-10-17' 60 | Policies: 61 | - PolicyDocument: 62 | Statement: 63 | - Action: 64 | - logs:CreateLogGroup 65 | - logs:CreateLogStream 66 | - logs:PutLogEvents 67 | Effect: Allow 68 | Resource: arn:aws:logs:*:*:* 69 | Version: '2012-10-17' 70 | PolicyName: DefaultRolePolicy 71 | Type: AWS::IAM::Role 72 | RestAPI: 73 | Properties: 74 | DefinitionBody: 75 | definitions: 76 | Empty: 77 | title: Empty Schema 78 | type: object 79 | info: 80 | title: hoge 81 | version: '1.0' 82 | paths: 83 | /user: 84 | put: 85 | consumes: 86 | - application/json 87 | produces: 88 | - application/json 89 | responses: 90 | '200': 91 | description: 200 response 92 | schema: 93 | $ref: '#/definitions/Empty' 94 | x-amazon-apigateway-integration: 95 | contentHandling: CONVERT_TO_TEXT 96 | httpMethod: POST 97 | passthroughBehavior: when_no_match 98 | responses: 99 | default: 100 | statusCode: '200' 101 | type: aws_proxy 102 | uri: 103 | Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${APIHandler.Arn}/invocations 104 | /user/{user_id}: 105 | delete: 106 | consumes: 107 | - 
application/json 108 | parameters: 109 | - in: path 110 | name: user_id 111 | required: true 112 | type: string 113 | produces: 114 | - application/json 115 | responses: 116 | '200': 117 | description: 200 response 118 | schema: 119 | $ref: '#/definitions/Empty' 120 | x-amazon-apigateway-integration: 121 | contentHandling: CONVERT_TO_TEXT 122 | httpMethod: POST 123 | passthroughBehavior: when_no_match 124 | responses: 125 | default: 126 | statusCode: '200' 127 | type: aws_proxy 128 | uri: 129 | Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${APIHandler.Arn}/invocations 130 | get: 131 | consumes: 132 | - application/json 133 | parameters: 134 | - in: path 135 | name: user_id 136 | required: true 137 | type: string 138 | produces: 139 | - application/json 140 | responses: 141 | '200': 142 | description: 200 response 143 | schema: 144 | $ref: '#/definitions/Empty' 145 | x-amazon-apigateway-integration: 146 | contentHandling: CONVERT_TO_TEXT 147 | httpMethod: POST 148 | passthroughBehavior: when_no_match 149 | responses: 150 | default: 151 | statusCode: '200' 152 | type: aws_proxy 153 | uri: 154 | Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${APIHandler.Arn}/invocations 155 | patch: 156 | consumes: 157 | - application/json 158 | parameters: 159 | - in: path 160 | name: user_id 161 | required: true 162 | type: string 163 | produces: 164 | - application/json 165 | responses: 166 | '200': 167 | description: 200 response 168 | schema: 169 | $ref: '#/definitions/Empty' 170 | x-amazon-apigateway-integration: 171 | contentHandling: CONVERT_TO_TEXT 172 | httpMethod: POST 173 | passthroughBehavior: when_no_match 174 | responses: 175 | default: 176 | statusCode: '200' 177 | type: aws_proxy 178 | uri: 179 | Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${APIHandler.Arn}/invocations 180 | schemes: 181 | - https 182 | swagger: '2.0' 183 | x-amazon-apigateway-binary-media-types: 184 | - 
application/octet-stream 185 | - application/x-tar 186 | - application/zip 187 | - audio/basic 188 | - audio/ogg 189 | - audio/mp4 190 | - audio/mpeg 191 | - audio/wav 192 | - audio/webm 193 | - image/png 194 | - image/jpg 195 | - image/jpeg 196 | - image/gif 197 | - video/ogg 198 | - video/mpeg 199 | - video/webm 200 | EndpointConfiguration: EDGE 201 | StageName: api 202 | Type: AWS::Serverless::Api 203 | Transform: AWS::Serverless-2016-10-31 204 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from typing import Collection, Dict, Union 2 | 3 | import pytest 4 | 5 | import os 6 | import uuid 7 | 8 | from app.app import dynamodb 9 | 10 | 11 | IdentifyType = Collection[str] 12 | # IdentifyType = Dict[str, str] 13 | RequestContextType = Dict[str, Union[str, IdentifyType]] 14 | 15 | QueryStringParametersType = Dict[str, str] 16 | HeadersType = Dict[str, str] 17 | PathParametersType = Dict[str, str] 18 | StageVariablesType = Dict[str, str] 19 | 20 | ApiGatewayEventType = Dict[ 21 | str, Union[ 22 | str, 23 | RequestContextType, 24 | QueryStringParametersType, 25 | HeadersType, 26 | PathParametersType, 27 | StageVariablesType, 28 | ] 29 | ] 30 | 31 | 32 | @pytest.fixture() 33 | def apigw_event() -> ApiGatewayEventType: 34 | """ Generates API GW Event""" 35 | 36 | return { 37 | "body": '{ "test": "body"}', 38 | "resource": "/{proxy+}", 39 | "requestContext": { 40 | "resourceId": "123456", 41 | "apiId": "1234567890", 42 | "resourcePath": "/{proxy+}", 43 | "httpMethod": "POST", 44 | "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", 45 | "accountId": "123456789012", 46 | "identity": { 47 | "apiKey": "", 48 | "userArn": "", 49 | "cognitoAuthenticationType": "", 50 | "caller": "", 51 | "userAgent": "Custom User Agent String", 52 | "user": "", 53 | "cognitoIdentityPoolId": "", 54 | "cognitoIdentityId": "", 55 | 
"cognitoAuthenticationProvider": "", 56 | "sourceIp": "127.0.0.1", 57 | "accountId": "", 58 | }, 59 | "stage": "prod", 60 | }, 61 | "queryStringParameters": {"foo": "bar"}, 62 | "headers": { 63 | "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", 64 | "Accept-Language": "en-US,en;q=0.8", 65 | "CloudFront-Is-Desktop-Viewer": "true", 66 | "CloudFront-Is-SmartTV-Viewer": "false", 67 | "CloudFront-Is-Mobile-Viewer": "false", 68 | "X-Forwarded-For": "127.0.0.1, 127.0.0.2", 69 | "CloudFront-Viewer-Country": "US", 70 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", 71 | "Upgrade-Insecure-Requests": "1", 72 | "X-Forwarded-Port": "443", 73 | "Host": "1234567890.execute-api.us-east-1.amazonaws.com", 74 | "X-Forwarded-Proto": "https", 75 | "X-Amz-Cf-Id": "aaaaaaaaaae3VYQb9jd-nvCd-de396Uhbp027Y2JvkCPNLmGJHqlaA==", 76 | "CloudFront-Is-Tablet-Viewer": "false", 77 | "Cache-Control": "max-age=0", 78 | "User-Agent": "Custom User Agent String", 79 | "CloudFront-Forwarded-Proto": "https", 80 | "X-Amz-Cf-Id": "aaaaaaaaaae3VYQb9jd-nvCd-de396Uhbp027Y2JvkCPNLmGJHqlaA==", 81 | "CloudFront-Is-Tablet-Viewer": "false", 82 | "Cache-Control": "max-age=0", 83 | "User-Agent": "Custom User Agent String", 84 | "CloudFront-Forwarded-Proto": "https", 85 | "Accept-Encoding": "gzip, deflate, sdch", 86 | }, 87 | "pathParameters": {"proxy": "/examplepath"}, 88 | "httpMethod": "POST", 89 | "stageVariables": {"baz": "qux"}, 90 | "path": "/examplepath", 91 | } 92 | 93 | 94 | @pytest.fixture(autouse=True) 95 | # TODO: Specify appropriate return type 96 | def fx_dynamodb_table(): # type: ignore 97 | table = dynamodb.create_table( 98 | TableName=os.environ['DYNAMODB_TABLE_NAME'], 99 | KeySchema=[ 100 | { 101 | 'AttributeName': 'user_id', 102 | 'KeyType': 'HASH', 103 | }, 104 | ], 105 | AttributeDefinitions=[ 106 | { 107 | 'AttributeName': 'user_id', 108 | 'AttributeType': 'S', 109 | }, 110 | ], 111 | BillingMode='PAY_PER_REQUEST', 112 | ) 113 | 
yield table 114 | table.delete() 115 | 116 | 117 | @pytest.fixture 118 | # TODO: Specify appropriate return type 119 | def fx_dummy_user(fx_dynamodb_table): 120 | user = {'user_id': uuid.uuid4().hex, 'name': 'fatsushi'} 121 | fx_dynamodb_table.put_item(Item=user) 122 | yield user 123 | fx_dynamodb_table.delete_item(Key={'user_id': user['user_id']}) 124 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-rapid-prototyping-with-python/b3a3632792a26edbedd090aaedf36fbd5b56cb09/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/test_handler.py: -------------------------------------------------------------------------------- 1 | import json 2 | from http import HTTPStatus 3 | 4 | import pytest 5 | 6 | from app import app 7 | 8 | 9 | class TestDispatchRequestGeneral: 10 | def test_404(self, apigw_event) -> None: 11 | apigw_event['requestContext']['resourcePath'] = '/NOT_IMPLEMENTED_PATH' 12 | response = app.dispatch_request(apigw_event, {}) 13 | assert response['statusCode'] == HTTPStatus.NOT_FOUND 14 | 15 | @pytest.mark.parametrize('not_allowed_method', ( 16 | 'POST', 'OPTIONS', 'CONNECT', 'TRACE', 17 | )) 18 | def test_405(self, apigw_event, not_allowed_method: str) -> None: 19 | apigw_event['requestContext']['resourcePath'] = '/user' 20 | apigw_event['httpMethod'] = not_allowed_method 21 | response = app.dispatch_request(apigw_event, {}) 22 | assert response['statusCode'] == HTTPStatus.METHOD_NOT_ALLOWED 23 | 24 | 25 | class TestDispatchRequestGet: 26 | @pytest.fixture 27 | def event(self, apigw_event, fx_dummy_user): # TODO: Specify type 28 | apigw_event['requestContext']['resourcePath'] = '/user/{user_id}' 29 | apigw_event['httpMethod'] = 'GET' 30 | apigw_event['pathParameters']['user_id'] = fx_dummy_user['user_id'] 31 
| return apigw_event 32 | 33 | def test_200(self, event, fx_dummy_user) -> None: 34 | response = app.dispatch_request(event, {}) 35 | assert response['statusCode'] == HTTPStatus.OK 36 | assert json.loads(response['body']) == fx_dummy_user 37 | 38 | def test_404(self, event) -> None: 39 | event['pathParameters']['user_id'] = 'DUMMYID' 40 | 41 | response = app.dispatch_request(event, {}) 42 | assert response['statusCode'] == HTTPStatus.NOT_FOUND 43 | 44 | 45 | class TestDispatchRequestPut: 46 | @pytest.fixture 47 | def event(self, apigw_event): # TODO: Specify type 48 | apigw_event['requestContext']['resourcePath'] = '/user' 49 | apigw_event['httpMethod'] = 'PUT' 50 | apigw_event['body'] = json.dumps({'name': 'fatsushi'}) 51 | return apigw_event 52 | 53 | def test_200(self, event, fx_dynamodb_table) -> None: 54 | response = app.dispatch_request(event, {}) 55 | response_json = json.loads(response['body']) 56 | assert response['statusCode'] == HTTPStatus.CREATED 57 | assert fx_dynamodb_table.scan()['Items'][0] == response_json 58 | 59 | 60 | class TestDispatchRequestDelete: 61 | @pytest.fixture 62 | def event(self, apigw_event, fx_dummy_user): # TODO: Specify type 63 | apigw_event['requestContext']['resourcePath'] = '/user/{user_id}' 64 | apigw_event['httpMethod'] = 'DELETE' 65 | apigw_event['pathParameters']['user_id'] = fx_dummy_user['user_id'] 66 | return apigw_event 67 | 68 | def test_200(self, event, fx_dummy_user, fx_dynamodb_table) -> None: 69 | # Ensure that the dummy user exists before calling the method 70 | assert fx_dynamodb_table.scan()['Items'][0] == fx_dummy_user 71 | 72 | response = app.dispatch_request(event, {}) 73 | assert response['statusCode'] == HTTPStatus.NO_CONTENT 74 | assert json.loads(response['body']) == {} 75 | assert fx_dynamodb_table.scan()['Items'] == [] 76 | 77 | 78 | class TestDispatchRequestPatch: 79 | @pytest.fixture 80 | def event(self, apigw_event, fx_dummy_user): # TODO: Specify type 81 | 
apigw_event['requestContext']['resourcePath'] = '/user/{user_id}' 82 | apigw_event['httpMethod'] = 'PATCH' 83 | apigw_event['pathParameters']['user_id'] = fx_dummy_user['user_id'] 84 | return apigw_event 85 | 86 | def test_200(self, event, fx_dynamodb_table) -> None: 87 | new_name = 'fatsushi2' 88 | event['body'] = json.dumps({'name': new_name}) 89 | 90 | response = app.dispatch_request(event, {}) 91 | response_json = json.loads(response['body']) 92 | 93 | assert response['statusCode'] == HTTPStatus.OK 94 | assert fx_dynamodb_table.scan()['Items'][0] == response_json 95 | assert response_json['name'] == new_name 96 | 97 | def test_404(self, event) -> None: 98 | event['pathParameters']['user_id'] = 'DUMMYID' 99 | event['body'] = json.dumps({'name': 'DUMMYNAME'}) 100 | 101 | response = app.dispatch_request(event, {}) 102 | assert response['statusCode'] == HTTPStatus.NOT_FOUND 103 | --------------------------------------------------------------------------------