├── .gitignore ├── LICENSE ├── README.md ├── requirements.txt ├── sammy ├── __init__.py ├── custom_properties.py ├── examples │ ├── __init__.py │ ├── alexa_skill.py │ ├── api_backend.py │ ├── hello_world.py │ └── yaml │ │ ├── alexa_skill.yaml │ │ ├── api_backend.yaml │ │ └── hello_world.yaml ├── exceptions.py └── tests.py └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | *.ipynb 9 | *.idea/ 10 | # Distribution / packaging 11 | .Python 12 | env/ 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # Jupyter Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # SageMath parsed files 81 | *.sage.py 82 | 83 | # dotenv 84 | .env 85 | 86 | # virtualenv 87 | .venv 88 | venv/ 89 | ENV/ 90 | 91 | # Spyder project settings 92 | .spyderproject 93 | .spyproject 94 | 95 | # Rope project settings 96 | .ropeproject 97 | 98 | # mkdocs documentation 99 | /site 100 | 101 | # mypy 102 | .mypy_cache/ 103 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # sammy 2 | Python library for generating AWS SAM (Serverless Application Model) templates with validation. 3 | 4 | 5 | ## Features 6 | 7 | - Render templates as YAML or JSON 8 | - Validation is handled by [Valley](https://github.com/capless/valley) 9 | 10 | ## Documentation 11 | 12 | ### Python Versions Supported 13 | 14 | - Python 3.6+ 15 | 16 | ### Quick Start 17 | 18 | #### Install 19 | 20 | ```bash 21 | pip install sammy 22 | ``` 23 | 24 | #### Simple Example 25 | 26 | ```python 27 | from sammy import SAM, Function, SimpleTable, S3URI 28 | 29 | f = Function(name='testpublish', Handler='s3imageresize.handler', 30 | Runtime='python3.6', 31 | CodeUri=S3URI(Bucket='your-bucket', Key='photoresizer.zip')) 32 | 33 | ddb = SimpleTable(name='maintable', PrimaryKey={'Name': '_id', 'Type': 'String'}) 34 | 35 | s = SAM(render_type='yaml') 36 | 37 | s.add_resource(f) 38 | s.add_resource(ddb) 39 | 40 | print(s.to_yaml()) 41 | ``` 42 | 43 | 44 | ### SAM 45 | 46 | SAM is the class that generates the SAM template. 47 | 48 | ```python 49 | from sammy import SAM, SimpleTable 50 | 51 | s = SAM(resources=[SimpleTable(name='maintable', PrimaryKey={'Name': '_id', 'Type': 'String'})], 52 | render_type='json') 53 | ``` 54 | 55 | #### Keyword Arguments 56 | 57 | - **resources** - A list of resource instances (for example `API`, `SimpleTable`, or `Function`) 58 | - **render_type** - A string with only two valid options: `'json'` or `'yaml'`. Defaults to `'yaml'`. 59 | 60 | #### Methods 61 | 62 | ##### add_resource(resource) 63 | 64 | Adds a resource instance to the template 65 | 66 | ###### Example 67 | 68 | ```python 69 | from sammy import Function, SAM, S3URI 70 | 71 | s = SAM(render_type='json') 72 | 73 | f = Function(name='testpublish', Handler='s3imageresize.handler', 74 | Runtime='python3.6', 75 | CodeUri=S3URI(Bucket='your-bucket', Key='photoresizer.zip')) 76 | 77 | s.add_resource(f) 78 | ``` 79 | 80 | ##### add_parameter(parameter) 81 | 82 | Adds a parameter instance to the template 83 | 84 | ###### Example 85 | 86 | ```python 87 | import sammy as sm 88 | 89 | s = sm.SAM(render_type='json') 90 | 91 | s.add_parameter(sm.Parameter(name='Bucket',Type='String')) 92 | ``` 93 | 94 | ##### get_template_dict() 95 | 96 | Returns a Python *dict* representation of the template. 97 | 98 | ##### to_json() 99 | 100 | Returns a JSON representation of the template. 101 | 102 | ##### to_yaml() 103 | 104 | Returns a YAML representation of the template. 105 | 106 | ##### get_template() 107 | 108 | Returns a YAML or JSON representation of the template, depending on the `render_type` set at initialization.
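###### Example

A minimal sketch of the rendering methods above; the bucket and key values are placeholders:

```python
import sammy as sm

s = sm.SAM(render_type='yaml')

# Add one function resource so there is something to render
s.add_resource(sm.Function(name='HelloWorldFunction',
                           Handler='sample.handler',
                           Runtime='python3.6',
                           CodeUri=sm.S3URI(Bucket='your-bucket', Key='code.zip')))

template_dict = s.get_template_dict()  # plain Python dict
json_doc = s.to_json()                  # JSON string
yaml_doc = s.to_yaml()                  # YAML string
rendered = s.get_template()             # YAML here, because render_type='yaml'
```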
109 | 110 | ##### publish_template(bucket, name) 111 | 112 | Publishes the rendered template to the given S3 bucket as `<name>.<render_type>` 113 | 114 | ##### publish(stack_name, **kwargs) 115 | 116 | Deploys the template to CloudFormation by creating and executing a change set. Keyword arguments are passed through as stack parameters 117 | 118 | 119 | ### Function 120 | 121 | This class represents an AWS Lambda function. 122 | 123 | ```python 124 | from sammy import Function, S3URI 125 | 126 | f = Function(name='testpublish', Handler='s3imageresize.handler', 127 | Runtime='python3.6', 128 | CodeUri=S3URI(Bucket='your-bucket', Key='photoresizer.zip')) 129 | ``` 130 | 131 | ### API 132 | 133 | This class represents an Amazon API Gateway API. 134 | 135 | ```python 136 | from sammy import API 137 | 138 | a = API(name='api', StageName='dev', DefinitionUri='s3://your-bucket/your-swagger.yml', 139 | CacheClusterEnabled=False, CacheClusterSize=None, Variables={'SOME_VAR': 'test'}) 140 | ``` 141 | 142 | ### SimpleTable 143 | 144 | This class represents a simple DynamoDB table. 145 | 146 | ```python 147 | from sammy import SimpleTable 148 | 149 | ddb = SimpleTable(name='maintable', PrimaryKey={'Name': '_id', 'Type': 'String'}) 150 | ``` 151 | 152 | ### Ref 153 | 154 | This class represents a CloudFormation `Ref` intrinsic function, used to reference template parameters and resources. 155 | 156 | ```python 157 | import sammy as sm 158 | 159 | 160 | sam = sm.SAM(Description='A hello world application.',render_type='yaml') 161 | 162 | sam.add_parameter(sm.Parameter(name='Bucket',Type='String')) 163 | 164 | sam.add_parameter(sm.Parameter(name='CodeZipKey',Type='String')) 165 | 166 | sam.add_resource( 167 | sm.Function(name='HelloWorldFunction', 168 | Handler='sample.handler', Runtime='python3.6', CodeUri=sm.S3URI( 169 | Bucket=sm.Ref(Ref='Bucket'),Key=sm.Ref(Ref='CodeZipKey')))) 170 | 171 | ``` -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | boto3>=1.9.93 2 | PyYAML>=3.12 3 | valley>=1.5.2 -------------------------------------------------------------------------------- /sammy/__init__.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import logging 3 | import boto3 4 | import json 5 | import time 6 | 7 | import botocore 8 | import sys 9 | import yaml 10 | from botocore.exceptions import ProfileNotFound 11 | 12 | from valley.properties import * 13 | from valley.contrib import Schema 14 | from valley.utils.json_utils import ValleyEncoderNoType 15 | 16 | from sammy.custom_properties import ForeignInstanceListProperty, \ 17 | CharForeignProperty, IntForeignProperty 18 | from sammy.exceptions import DeployFailedError 19 | 20 | API_METHODS = { 21 | 'post': 'post', 22 | 'get': 'get', 23 | 'head': 'head', 24 | 'delete': 'delete', 25 | 'put': 'put', 26 | 'options': 'options', 27 | 'connect': 'connect', 28 | 'any': 'any' 29 | } 30 | 31 | RENDER_FORMATS = {'json': 'json', 'yaml': 'yaml'} 32 | 33 | LOG = logging.getLogger(__name__) 34 | 35 | ChangeSetResult = collections.namedtuple( 36 | "ChangeSetResult", ["changeset_id", "changeset_type"]) 37 | 38 | 39 | def remove_nulls(obj_dict): 40 | null_keys = [] 41 | for k, v in obj_dict.items(): 42 | if not v: 43 | null_keys.insert(0, k) 44 | for k in null_keys: 45 | obj_dict.pop(k) 46 | return obj_dict 47 | 48 | 49 | class SAMSchema(Schema): 50 | 51 | def __init__(self, **kwargs): 52 | super(SAMSchema, self).__init__(**kwargs) 53 | self.validate() 54 | 55 | 56 | class Ref(SAMSchema): 57 | Ref = CharProperty(required=True) 58 | 59 | 60 | class Sub(SAMSchema): 61 | Sub = CharForeignProperty(Ref, required=True) 62 | Map = DictProperty() 63 | 64 | def to_dict(self): 65 | obj =
remove_nulls(self._data.copy()) 66 | Map = obj.get('Map', None) 67 | if not Map: 68 | return { 69 | "Fn::Sub": obj.get('Sub'), 70 | } 71 | else: 72 | return { 73 | "Fn::Sub": [obj.get('Sub'), obj.get('Map')] 74 | } 75 | 76 | 77 | class S3URI(SAMSchema): 78 | Bucket = CharForeignProperty(Ref, required=True) 79 | Key = CharForeignProperty(Ref, required=True) 80 | 81 | def to_dict(self): 82 | obj = remove_nulls(self._data.copy()) 83 | return obj 84 | 85 | 86 | class LambdaCode(SAMSchema): 87 | S3Bucket = CharForeignProperty(Ref, required=True) 88 | S3Key = CharForeignProperty(Ref, required=True) 89 | S3ObjectVersion = CharForeignProperty(Ref, required=True) 90 | ZipFile = CharForeignProperty(Ref, required=True) 91 | 92 | def to_dict(self): 93 | obj = remove_nulls(self._data.copy()) 94 | return obj 95 | 96 | 97 | class S3KeyFilter(SAMSchema): 98 | S3Key = CharForeignProperty(Ref) 99 | 100 | 101 | class Environment(SAMSchema): 102 | Variables = DictProperty(required=True) 103 | 104 | 105 | class Parameter(SAMSchema): 106 | name = CharForeignProperty(Ref, required=True) 107 | Type = CharForeignProperty(Ref, required=True) 108 | 109 | def to_dict(self): 110 | obj = remove_nulls(self._data.copy()) 111 | name = obj.pop('name') 112 | return { 113 | 'name': name, 114 | 'r': obj 115 | } 116 | 117 | 118 | class Output(SAMSchema): 119 | name = CharForeignProperty(Ref, required=True) 120 | Description = CharForeignProperty(Ref) 121 | Value = CharForeignProperty(Ref) 122 | Export = DictProperty() 123 | 124 | def to_dict(self): 125 | obj = remove_nulls(self._data.copy()) 126 | name = obj.pop('name') 127 | return { 128 | 'name': name, 129 | 'r': obj 130 | } 131 | 132 | class Resource(SAMSchema): 133 | _resource_type = None 134 | 135 | name = CharForeignProperty(Ref, required=True) 136 | 137 | def to_dict(self): 138 | obj = remove_nulls(self._data.copy()) 139 | name = obj.pop('name') 140 | 141 | r_attrs = { 142 | 'Type': self._resource_type 143 | } 144 | if len(obj.keys()) > 0: 145 | r_attrs['Properties'] = {k: v for k, v in obj.items() if v} 146 | return { 147 | 'name': name, 148 | 'r': r_attrs 149 | } 150 | 151 | def add_attr(self, k, v): 152 | self.r_attrs['Properties'][k] = v 153 | 154 | 155 | class EventSchema(SAMSchema): 156 | _event_type = None 157 | 158 | name = CharForeignProperty(Ref, required=True) 159 | 160 | def to_dict(self): 161 | obj = remove_nulls(self._data.copy()) 162 | event = {'name': obj.pop('name'), 163 | 'r': { 164 | 'Type': self._event_type 165 | } 166 | } 167 | 168 | if len(obj.keys()) > 0: 169 | event['r']['Properties'] = obj 170 | return event 171 | 172 | 173 | class APIEvent(EventSchema): 174 | _event_type = 'Api' 175 | 176 | Path = CharForeignProperty(Ref, required=True) 177 | Method = CharForeignProperty(Ref, required=True, choices=API_METHODS) 178 | RestApiId = CharForeignProperty(Ref) 179 | 180 | 181 | class S3Event(EventSchema): 182 | _event_type = 'S3' 183 | 184 | Bucket = CharForeignProperty(Ref, required=True) 185 | Events = ListProperty(required=True) 186 | Filter = ForeignProperty(S3KeyFilter) 187 | 188 | 189 | class SNSEvent(EventSchema): 190 | _event_type = 'SNS' 191 | 192 | Topic = CharForeignProperty(Ref, required=True) 193 | 194 | 195 | class SQSEvent(EventSchema): 196 | _event_type = 'SQS' 197 | 198 | Queue = CharForeignProperty(Ref, required=True) 199 | BatchSize = IntForeignProperty(Ref) 200 | 201 | 202 | class KinesisEvent(EventSchema): 203 | _event_type = 'Kinesis' 204 | 205 | Stream = CharForeignProperty(Ref, required=True) 206 | StartingPosition = 
CharForeignProperty(Ref, required=True) 207 | BatchSize = IntegerProperty() 208 | 209 | 210 | class DynamoDBEvent(EventSchema): 211 | _event_type = 'DynamoDB' 212 | 213 | Stream = CharForeignProperty(Ref, required=True) 214 | StartingPosition = CharForeignProperty(Ref, required=True) 215 | BatchSize = IntegerProperty() 216 | 217 | 218 | class ScheduleEvent(EventSchema): 219 | Schedule = CharForeignProperty(Ref, required=True) 220 | Input = CharForeignProperty(Ref) 221 | 222 | 223 | class CloudWatchEvent(EventSchema): 224 | Pattern = DictProperty(required=True) 225 | Input = CharForeignProperty(Ref) 226 | InputPath = CharForeignProperty(Ref) 227 | 228 | 229 | class IoTRuleEvent(EventSchema): 230 | Sql = CharForeignProperty(Ref, required=True) 231 | AwsIotSqlVersion = CharForeignProperty(Ref) 232 | 233 | 234 | class AlexaSkillEvent(EventSchema): 235 | _event_type = 'AlexaSkill' 236 | 237 | 238 | class DeadLetterQueueSchema(SAMSchema): 239 | _dlq_type = None 240 | 241 | name = CharForeignProperty(Ref, required=True) 242 | TargetArn = CharForeignProperty(Ref, required=True) 243 | 244 | def to_dict(self): 245 | obj = remove_nulls(self._data.copy()) 246 | event = {'name': obj.pop('name'), 247 | 'r': { 248 | 'Type': self._dlq_type, 249 | 'Properties': obj 250 | }} 251 | 252 | return event 253 | 254 | 255 | class SNSLetterQueue(DeadLetterQueueSchema): 256 | _dlq_type = 'SNS' 257 | 258 | 259 | class SQSLetterQueue(DeadLetterQueueSchema): 260 | _dlq_type = 'SQS' 261 | 262 | 263 | class Role(Resource): 264 | _resource_type = 'AWS::IAM::Role' 265 | _serverless_type = False 266 | 267 | AssumeRolePolicyDocument = DictProperty() 268 | ManagedPolicyArns = ListProperty() 269 | MaxSessionDuration = IntegerProperty() 270 | Path = CharProperty() 271 | Policies = ListProperty() 272 | RoleName = CharProperty() 273 | 274 | 275 | class S3(Resource): 276 | _resource_type = 'AWS::S3::Bucket' 277 | _serverless_type = False 278 | 279 | BucketName = CharForeignProperty(Ref) 280 | 281 | 282 | class SNS(Resource): 283 | _resource_type = 'AWS::SNS::Topic' 284 | _serverless_type = False 285 | 286 | 287 | class SQS(Resource): 288 | _resource_type = 'AWS::SQS::Queue' 289 | _serverless_type = False 290 | 291 | ContentBasedDeduplication = BooleanProperty() 292 | DelaySeconds = IntForeignProperty(SAMSchema) 293 | FifoQueue = BooleanProperty() 294 | KmsMasterKeyId = CharForeignProperty(SAMSchema) 295 | KmsDataKeyReusePeriodSeconds = IntForeignProperty(SAMSchema) 296 | MaximumMessageSize = IntForeignProperty(SAMSchema) 297 | MessageRetentionPeriod = IntForeignProperty(SAMSchema) 298 | ReceiveMessageWaitTimeSeconds = IntForeignProperty(SAMSchema) 299 | VisibilityTimeout = IntForeignProperty(SAMSchema) 300 | QueueName = CharForeignProperty(Ref) 301 | 302 | 303 | class AbstractFunction(Resource): 304 | Handler = CharForeignProperty(Ref, required=True) 305 | Runtime = CharForeignProperty(Ref, required=True, max_length=15) 306 | FunctionName = CharForeignProperty(Ref) 307 | Description = CharForeignProperty(Ref) 308 | MemorySize = IntegerProperty() 309 | Timeout = IntegerProperty() 310 | Role = CharForeignProperty(SAMSchema) 311 | Environment = ForeignProperty(Environment) 312 | VpcConfig = DictProperty() 313 | KmsKeyArn = CharForeignProperty(Ref) 314 | Tags = DictProperty() 315 | 316 | def to_dict(self): 317 | 318 | obj = super(AbstractFunction, self).to_dict() 319 | try: 320 | events = [i.to_dict() for i in obj['r']['Properties'].pop('Events')] 321 | 322 | obj['r']['Properties']['Events'] = {i.get('name'): i.get('r') for i in 
events} 323 | except KeyError: 324 | pass 325 | 326 | try: 327 | dlq = [i.to_dict() for i in obj['r']['Properties'].pop('DeadLetterQueue')] 328 | obj['r']['Properties']['DeadLetterQueue'] = {i.get('name'): i.get('r') for i in dlq} 329 | except KeyError: 330 | pass 331 | 332 | return obj 333 | 334 | 335 | class Function(AbstractFunction): 336 | _resource_type = 'AWS::Serverless::Function' 337 | _serverless_type = True 338 | 339 | CodeUri = ForeignProperty(S3URI) 340 | Policies = CharForeignProperty(Ref) 341 | Events = ForeignInstanceListProperty(EventSchema) 342 | Tracing = CharForeignProperty(Ref) 343 | DeadLetterQueue = ForeignInstanceListProperty(DeadLetterQueueSchema) 344 | ReservedConcurrentExecutions = IntegerProperty() 345 | 346 | 347 | class CFunction(AbstractFunction): 348 | _resource_type = 'AWS::Lambda::Function' 349 | _serverless_type = False 350 | 351 | Code = ForeignProperty(LambdaCode) 352 | Layers = ListProperty() 353 | TracingConfig = DictProperty() 354 | 355 | 356 | 357 | class API(Resource): 358 | _resource_type = "AWS::Serverless::Api" 359 | _serverless_type = True 360 | 361 | StageName = CharForeignProperty(Ref, required=True) 362 | DefinitionUri = CharForeignProperty(Ref) 363 | DefinitionBody = DictProperty() 364 | CacheClusterEnabled = BooleanProperty() 365 | CacheClusterSize = CharForeignProperty(Ref) 366 | Variables = DictProperty() 367 | 368 | 369 | class SimpleTable(Resource): 370 | _resource_type = "AWS::Serverless::SimpleTable" 371 | _serverless_type = True 372 | 373 | PrimaryKey = DictProperty() 374 | ProvisionedThroughput = DictProperty() 375 | 376 | 377 | class DynamoDBTable(Resource): 378 | _resource_type = "AWS::DynamoDB::Table" 379 | _serverless_type = False 380 | 381 | AttributeDefinitions = ListProperty(required=True) 382 | TableName = CharForeignProperty(SAMSchema, required=True) 383 | GlobalSecondaryIndexes = ListProperty() 384 | KeySchema = ListProperty(required=True) 385 | BillingMode = CharForeignProperty(Ref) 386 | LocalSecondaryIndexes = ListProperty() 387 | PointInTimeRecoverySpecification = DictProperty() 388 | ProvisionedThroughput = DictProperty() 389 | SSESpecification = DictProperty() 390 | StreamSpecification = DictProperty() 391 | Tags = DictProperty() 392 | TimeToLiveSpecification = DictProperty() 393 | 394 | 395 | class SAM(SAMSchema): 396 | aws_template_format_version = '2010-09-09' 397 | transform = 'AWS::Serverless-2016-10-31' 398 | Description = CharProperty() 399 | resources = ForeignInstanceListProperty(Resource) 400 | parameters = ForeignInstanceListProperty(Parameter) 401 | render_type = CharProperty(choices=RENDER_FORMATS, default_value='yaml') 402 | 403 | def __init__(self, region_name='us-east-1', profile_name='default', **kwargs): 404 | super(SAM, self).__init__(**kwargs) 405 | self.region_name = region_name 406 | self.profile_name = profile_name 407 | self.changeset_prefix = 'sammy-deploy-' 408 | self.build_clients_resources() 409 | 410 | def build_clients_resources(self, region_name=None, profile_name=None): 411 | region_name = region_name or self.region_name 412 | profile_name = profile_name or self.profile_name 413 | 414 | self.cf_client = self.get_client('cloudformation', region_name=region_name, profile_name=profile_name) 415 | self.cf_resource = self.get_resource('cloudformation', region_name=region_name, profile_name=profile_name) 416 | self.s3 = self.get_resource('s3', region_name=region_name, profile_name=profile_name) 417 | self.sts = self.get_client('sts', region_name=region_name,profile_name=profile_name) 418 | 
419 | def add_parameter(self, parameter): 420 | self._base_properties.get('parameters').validate([parameter], 'parameters') 421 | parameters = self._data.get('parameters') or [] 422 | parameters.append(parameter) 423 | parameters = set(parameters) 424 | self._data['parameters'] = list(parameters) 425 | 426 | def add_resource(self, resource): 427 | self._base_properties.get('resources').validate([resource], 'resources') 428 | resources = self._data.get('resources') or [] 429 | resources.append(resource) 430 | resources = set(resources) 431 | self._data['resources'] = list(resources) 432 | 433 | def check_global_valid(self): 434 | """ 435 | Makes sure there aren't any SAM resources in a template that will be used in a CloudFormation StackSet 436 | :return: bool 437 | """ 438 | serverless_cnt = len(list(filter(lambda x: x._serverless_type, self._data['resources']))) 439 | if serverless_cnt > 0: 440 | return False 441 | return True 442 | 443 | def to_dict(self): 444 | obj = remove_nulls(self._data.copy()) 445 | rl = [i.to_dict() for i in obj.get('resources')] 446 | 447 | resources = {i.get('name'): i.get('r') for i in rl} 448 | 449 | template = { 450 | 'AWSTemplateFormatVersion': self.aws_template_format_version, 451 | 'Resources': resources 452 | } 453 | if self.transform: 454 | template['Transform'] = self.transform 455 | if obj.get('Description'): 456 | template['Description'] = obj.get('Description') 457 | if obj.get('parameters'): 458 | pl = [i.to_dict() for i in obj.get('parameters')] 459 | parameters = {i.get('name'): i.get('r') for i in pl} 460 | if len(parameters.keys()) > 0: 461 | template['Parameters'] = parameters 462 | return template 463 | 464 | def get_template_dict(self): 465 | return self.to_dict() 466 | 467 | def publish_template(self, bucket, name): 468 | 469 | filename = '{}.{}'.format(name, self.render_type) 470 | 471 | self.s3.Object(bucket, filename).put( 472 | Body=self.get_template()) 473 | 474 | def get_template(self): 475 | if self.render_type == 'json': 476 | return self.to_json() 477 | else: 478 | return self.to_yaml() 479 | 480 | def has_stack(self, stack_name): 481 | """ 482 | Checks if a CloudFormation stack with the given name exists 483 | :param stack_name: Name or ID of the stack 484 | :return: True if stack exists. False otherwise 485 | """ 486 | cf = self.cf_client 487 | try: 488 | resp = cf.describe_stacks(StackName=stack_name) 489 | if len(resp["Stacks"]) != 1: 490 | return False 491 | 492 | # When you run CreateChangeSet on a stack that does not exist, 493 | # CloudFormation will create a stack and set its status to 494 | # REVIEW_IN_PROGRESS. However, this stack cannot be manipulated 495 | # by "update" commands. Under these circumstances, we treat the 496 | # stack as if it does not exist and call CreateChangeSet with 497 | # ChangeSetType set to CREATE and not UPDATE. 498 | stack = resp["Stacks"][0] 499 | return stack["StackStatus"] != "REVIEW_IN_PROGRESS" 500 | 501 | except botocore.exceptions.ClientError as e: 502 | # If a stack does not exist, describe_stacks will throw an 503 | # exception. Unfortunately we don't have a better way than parsing 504 | # the exception msg to understand the nature of this exception. 505 | msg = str(e) 506 | 507 | if "Stack with id {0} does not exist".format(stack_name) in msg: 508 | LOG.debug("Stack with id {0} does not exist".format( 509 | stack_name)) 510 | return False 511 | else: 512 | # We don't know anything about this exception.
Don't handle 513 | LOG.debug("Unable to get stack details.", exc_info=e) 514 | raise e 515 | 516 | def get_changeset_status(self, change_set_name): 517 | print(change_set_name) 518 | response = self.cf_client.describe_change_set( 519 | ChangeSetName=change_set_name, 520 | ) 521 | return response['Status'] 522 | 523 | def is_stack_instances_current(self, stackset_name, op_id, no_replication_groups): 524 | obj_list = self.cf_client.list_stack_instances(StackSetName=stackset_name)['Summaries'] 525 | current_list = len(list(filter(lambda x: x.get('Status') == 'CURRENT', obj_list))) 526 | if current_list != no_replication_groups: 527 | print( 528 | 'Only {} of the {} replication groups (stack instances) are ready yet. '.format( 529 | current_list, no_replication_groups), 530 | 'Checking again in 30 seconds. Stack Name: {}, Operation ID: {}'.format(stackset_name, op_id) 531 | ) 532 | return False 533 | return True 534 | 535 | def get_session(self, profile_name='default'): 536 | try: 537 | return boto3.Session(profile_name=profile_name) 538 | except ProfileNotFound: 539 | return boto3.Session() 540 | 541 | def get_client(self, service_name, region_name='us-east-1', profile_name='default'): 542 | s = self.get_session(profile_name=profile_name) 543 | return s.client(service_name, region_name=region_name) 544 | 545 | def get_resource(self, service_name, region_name='us-east-1', profile_name='default'): 546 | s = self.get_session(profile_name=profile_name) 547 | return s.resource(service_name, region_name=region_name) 548 | 549 | def publish_global(self, stackset_name, replication_groups): 550 | if not self.check_global_valid(): 551 | raise DeployFailedError('The publish_global method cannot publish SAM templates.') 552 | # Create Stack Set 553 | print('Creating {} Stack Set'.format(stackset_name)) 554 | 555 | self.cf_client.create_stack_set( 556 | StackSetName=stackset_name, 557 | TemplateBody=self.get_template(), 558 | ) 559 | # Create Stack Instances 560 | print('Creating {} Stack Instances'.format(stackset_name)) 561 | op_id = self.cf_client.create_stack_instances( 562 | StackSetName=stackset_name, 563 | Accounts=[ 564 | self.sts.get_caller_identity().get('Account') 565 | ], 566 | Regions=replication_groups 567 | )['OperationId'] 568 | # Number of Replication Groups 569 | no_replication_groups = len(replication_groups) 570 | # Wait until all stack instances are created. 
571 | while not self.is_stack_instances_current(stackset_name, op_id, no_replication_groups): 572 | time.sleep(30) 573 | print('Stack Set Creation Completed') 574 | 575 | def publish(self, stack_name, **kwargs): 576 | param_list = [{'ParameterKey': k, 'ParameterValue': v} for k, v in kwargs.items()] 577 | changeset_name = self.changeset_prefix + str(int(time.time())) 578 | if self.has_stack(stack_name): 579 | changeset_type = "UPDATE" 580 | else: 581 | changeset_type = "CREATE" 582 | 583 | cf = self.cf_client 584 | 585 | resp = cf.create_change_set(StackName=stack_name, TemplateBody=self.get_template(), 586 | Parameters=param_list, ChangeSetName=changeset_name, 587 | Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'], 588 | ChangeSetType=changeset_type) 589 | change_set_name = resp['Id'] 590 | 591 | result = ChangeSetResult(resp["Id"], changeset_type) 592 | 593 | sys.stdout.write("Waiting for {} changeset {} to complete\n".format( 594 | stack_name, changeset_type.lower())) 595 | 596 | sys.stdout.flush() 597 | 598 | while True: 599 | response = self.get_changeset_status(change_set_name) 600 | print(str(response)) 601 | time.sleep(10) 602 | if response in ['CREATE_COMPLETE', 'FAILED']: 603 | print('Changeset {}'.format(response)) 604 | break 605 | 606 | if response == 'CREATE_COMPLETE': 607 | cf.execute_change_set( 608 | ChangeSetName=result.changeset_id, 609 | StackName=stack_name) 610 | 611 | sys.stdout.write("Waiting for {} stack {} to complete\n".format( 612 | stack_name, changeset_type.lower())) 613 | sys.stdout.flush() 614 | # Pick the right waiter 615 | if changeset_type == "CREATE": 616 | waiter = cf.get_waiter("stack_create_complete") 617 | elif changeset_type == "UPDATE": 618 | waiter = cf.get_waiter("stack_update_complete") 619 | else: 620 | raise RuntimeError("Invalid changeset type {0}" 621 | .format(changeset_type)) 622 | try: 623 | waiter.wait(StackName=stack_name, 624 | WaiterConfig={'Delay': 5, 'MaxAttempts': 720}) 625 | except botocore.exceptions.WaiterError as ex: 626 | LOG.debug("Execute changeset waiter exception", exc_info=ex) 627 | raise DeployFailedError 628 | 629 | return self.cf_resource.Stack(stack_name) 630 | else: 631 | # Print the reason for failure 632 | print(cf.describe_change_set( 633 | ChangeSetName=change_set_name, 634 | )['StatusReason']) 635 | 636 | def unpublish(self, stack_name): 637 | print('Deleting {} stack'.format(stack_name)) 638 | self.cf_client.delete_stack(StackName=stack_name) 639 | 640 | def to_yaml(self): 641 | jd = json.dumps(self.get_template_dict(), cls=ValleyEncoderNoType) 642 | # TODO: Write this without converting to JSON first 643 | jl = json.loads(jd) 644 | return yaml.safe_dump(jl, 645 | default_flow_style=False) 646 | 647 | def to_json(self): 648 | return json.dumps(self.get_template_dict(), cls=ValleyEncoderNoType) 649 | 650 | 651 | class CFT(SAM): 652 | transform = None 653 | 654 | outputs = ForeignInstanceListProperty(Output) 655 | 656 | def add_output(self, output): 657 | self._base_properties.get('outputs').validate([output], 'outputs') 658 | outputs = self._data.get('outputs') or [] 659 | outputs.append(output) 660 | outputs = set(outputs) 661 | self._data['outputs'] = list(outputs) 662 | 663 | def to_dict(self): 664 | template = super(CFT, self).to_dict() 665 | obj = remove_nulls(self._data.copy()) 666 | if obj.get('outputs'): 667 | pl = [i.to_dict() for i in obj.get('outputs')] 668 | outputs = {i.get('name'): i.get('r') for i in pl} 669 | if len(outputs.keys()) > 0: 670 | template['Outputs'] = outputs 671 | return 
template 672 | -------------------------------------------------------------------------------- /sammy/custom_properties.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from valley.exceptions import ValidationException 4 | from valley.mixins import ListMixin 5 | from valley.properties import BaseProperty, MultiProperty 6 | from valley.utils.json_utils import ValleyEncoder 7 | from valley.validators import ForeignListValidator, StringValidator, ForeignValidator, IntegerValidator 8 | 9 | 10 | class ForeignSubclassListValidator(ForeignListValidator): 11 | 12 | def validate(self, value, key): 13 | if value: 14 | for obj in value: 15 | if not issubclass(obj.__class__,self.foreign_class): 16 | raise ValidationException( 17 | '{0}: This value ({1}) should be an instance of {2}.'.format( 18 | key, obj, self.foreign_class.__name__) 19 | ) 20 | 21 | 22 | 23 | class ForeignSubclassListMixin(ListMixin): 24 | 25 | def get_validators(self): 26 | super(ForeignSubclassListMixin, self).get_validators() 27 | self.validators.insert(len(self.validators),ForeignSubclassListValidator(self.foreign_class)) 28 | 29 | def get_db_value(self, value): 30 | if self.return_type == 'single': 31 | if not self.return_prop: 32 | raise ValueError('ForeignProperty classes requires the ' 33 | 'return_prop argument if return_type equals "single"') 34 | return value._data[self.return_prop] 35 | if self.return_type == 'list': 36 | return [obj._data for obj in value] 37 | if self.return_type == 'json': 38 | return json.dumps(value, cls=ValleyEncoder) 39 | else: 40 | return value 41 | 42 | 43 | class ForeignInstanceListProperty(ForeignSubclassListMixin,BaseProperty): 44 | 45 | def __init__(self,foreign_class,return_type=None,return_prop=None,**kwargs): 46 | self.foreign_class = foreign_class 47 | super(ForeignInstanceListProperty, self).__init__(**kwargs) 48 | self.return_type = return_type 49 | self.return_prop = return_prop 50 | 51 | 52 | class CharForeignProperty(MultiProperty): 53 | 54 | def __init__(self,foreign_class,**kwargs): 55 | 56 | super(CharForeignProperty, self).__init__( 57 | validators=[ForeignValidator(foreign_class), 58 | StringValidator()],**kwargs) 59 | 60 | 61 | class IntForeignProperty(MultiProperty): 62 | 63 | def __init__(self,foreign_class,**kwargs): 64 | 65 | super(IntForeignProperty, self).__init__( 66 | validators=[ForeignValidator(foreign_class), 67 | IntegerValidator()],**kwargs) -------------------------------------------------------------------------------- /sammy/examples/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/capless/sammy/4fa9680ccfad108537de7de08e70644ac13bc8e8/sammy/examples/__init__.py -------------------------------------------------------------------------------- /sammy/examples/alexa_skill.py: -------------------------------------------------------------------------------- 1 | import sammy as sm 2 | 3 | 4 | sam = sm.SAM(Description='Alexa Skill https://developer.amazon.com/alexa-skills-kit') 5 | 6 | sam.add_resource(sm.Function( 7 | name='AlexaSkillFunction', 8 | CodeUri=sm.S3URI(Bucket='',Key='sammytest.zip'), 9 | Handler='sample.handler', 10 | Runtime='python3.6', 11 | Events=[sm.AlexaSkillEvent(name='AlexaSkillEvent')] 12 | )) 13 | -------------------------------------------------------------------------------- /sammy/examples/api_backend.py: -------------------------------------------------------------------------------- 1 | import sammy as 
sm 2 | 3 | 4 | sam = sm.SAM(Description='Simple CRUD webservice. State is stored in a SimpleTable (DynamoDB) resource.') 5 | 6 | sam.add_resource(sm.Function( 7 | name='GetFunction', 8 | CodeUri=sm.S3URI(Bucket='',Key='sammytest.zip'), 9 | Handler='index.get', 10 | Runtime='python3.6', 11 | Policies='AmazonDynamoDBReadOnlyAccess', 12 | Environment=sm.Environment(Variables={'TABLE_NAME':sm.Ref(Ref='Table')}), 13 | Events=[sm.APIEvent(name='GetResource',Path='/resource/{resourceId}',Method='get')] 14 | )) 15 | 16 | sam.add_resource(sm.Function( 17 | name='PutFunction', 18 | Handler='index.put', 19 | Runtime='python3.6', 20 | CodeUri=sm.S3URI(Bucket='',Key='sammytest.zip'), 21 | Policies='AmazonDynamoDBFullAccess', 22 | Environment=sm.Environment(Variables={'TABLE_NAME':sm.Ref(Ref='Table')}), 23 | Events=[sm.APIEvent(name='PutResource',Path='/resource/{resourceId}',Method='put')] 24 | )) 25 | 26 | sam.add_resource(sm.Function( 27 | name='DeleteFunction', 28 | Handler='index.delete', 29 | Runtime='python3.6', 30 | CodeUri=sm.S3URI(Bucket='',Key='sammytest.zip'), 31 | Policies='AmazonDynamoDBFullAccess', 32 | Environment=sm.Environment(Variables={'TABLE_NAME':sm.Ref(Ref='Table')}), 33 | Events=[sm.APIEvent(name='DeleteResource',Path='/resource/{resourceId}',Method='delete')] 34 | )) 35 | 36 | sam.add_resource(sm.SimpleTable(name='Table')) -------------------------------------------------------------------------------- /sammy/examples/hello_world.py: -------------------------------------------------------------------------------- 1 | import sammy as sm 2 | 3 | 4 | sam = sm.SAM(Description='A hello world application.',render_type='yaml') 5 | 6 | sam.add_parameter(sm.Parameter(name='Bucket',Type='String')) 7 | 8 | sam.add_parameter(sm.Parameter(name='CodeZipKey',Type='String')) 9 | 10 | sam.add_resource( 11 | sm.Function(name='HelloWorldFunction', 12 | Handler='sample.handler', Runtime='python3.6', CodeUri=sm.S3URI( 13 | Bucket=sm.Ref(Ref='Bucket'),Key=sm.Ref(Ref='CodeZipKey')))) 14 | -------------------------------------------------------------------------------- /sammy/examples/yaml/alexa_skill.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Alexa Skill https://developer.amazon.com/alexa-skills-kit 3 | Resources: 4 | AlexaSkillFunction: 5 | Properties: 6 | CodeUri: 7 | Bucket: 8 | Key: sammytest.zip 9 | Events: 10 | AlexaSkillEvent: 11 | Type: AlexaSkill 12 | Handler: sample.handler 13 | Runtime: python3.6 14 | Type: AWS::Serverless::Function 15 | Transform: AWS::Serverless-2016-10-31 -------------------------------------------------------------------------------- /sammy/examples/yaml/api_backend.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Simple CRUD webservice. State is stored in a SimpleTable (DynamoDB) resource. 
3 | Resources: 4 | DeleteFunction: 5 | Properties: 6 | CodeUri: 7 | Bucket: 8 | Key: sammytest.zip 9 | Environment: 10 | Variables: 11 | TABLE_NAME: 12 | Ref: Table 13 | Events: 14 | DeleteResource: 15 | Properties: 16 | Method: delete 17 | Path: /resource/{resourceId} 18 | Type: Api 19 | Handler: index.delete 20 | Policies: AmazonDynamoDBFullAccess 21 | Runtime: python3.6 22 | Type: AWS::Serverless::Function 23 | GetFunction: 24 | Properties: 25 | CodeUri: 26 | Bucket: 27 | Key: sammytest.zip 28 | Environment: 29 | Variables: 30 | TABLE_NAME: 31 | Ref: Table 32 | Events: 33 | GetResource: 34 | Properties: 35 | Method: get 36 | Path: /resource/{resourceId} 37 | Type: Api 38 | Handler: index.get 39 | Policies: AmazonDynamoDBReadOnlyAccess 40 | Runtime: python3.6 41 | Type: AWS::Serverless::Function 42 | PutFunction: 43 | Properties: 44 | CodeUri: 45 | Bucket: 46 | Key: sammytest.zip 47 | Environment: 48 | Variables: 49 | TABLE_NAME: 50 | Ref: Table 51 | Events: 52 | PutResource: 53 | Properties: 54 | Method: put 55 | Path: /resource/{resourceId} 56 | Type: Api 57 | Handler: index.put 58 | Policies: AmazonDynamoDBFullAccess 59 | Runtime: python3.6 60 | Type: AWS::Serverless::Function 61 | Table: 62 | Type: AWS::Serverless::SimpleTable 63 | Transform: AWS::Serverless-2016-10-31 -------------------------------------------------------------------------------- /sammy/examples/yaml/hello_world.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: A hello world application. 3 | Parameters: 4 | Bucket: 5 | Type: String 6 | CodeZipKey: 7 | Type: String 8 | 9 | Resources: 10 | HelloWorldFunction: 11 | Properties: 12 | CodeUri: 13 | Bucket: 14 | Ref: Bucket 15 | Key: 16 | Ref: CodeZipKey 17 | Handler: sample.handler 18 | Runtime: python3.6 19 | Type: AWS::Serverless::Function 20 | Transform: AWS::Serverless-2016-10-31 -------------------------------------------------------------------------------- /sammy/exceptions.py: -------------------------------------------------------------------------------- 1 | class DeployFailedError(Exception): 2 | pass -------------------------------------------------------------------------------- /sammy/tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import pathlib as pl 3 | import os 4 | import yaml 5 | 6 | from sammy.examples.alexa_skill import sam as al 7 | from sammy.examples.api_backend import sam as ab 8 | from sammy.examples.hello_world import sam as hw 9 | 10 | 11 | class AlexaTestCase(unittest.TestCase): 12 | template_name = 'alexa_skill.yaml' 13 | template = al 14 | 15 | def setUp(self): 16 | template_path = '{}/{}'.format( 17 | pl.PurePath( 18 | os.path.abspath(__file__)).parent / 'examples/yaml', self.template_name) 19 | with open(template_path, 'r') as f: 20 | self.template_dict = yaml.load(f) 21 | 22 | def test_template(self): 23 | s = yaml.safe_load(self.template.get_template()) 24 | self.assertEqual(self.template_dict,s) 25 | 26 | 27 | class APIBackendTestCase(AlexaTestCase): 28 | template_name = 'api_backend.yaml' 29 | template = ab 30 | 31 | 32 | class HelloWorldTestCase(AlexaTestCase): 33 | template_name = 'hello_world.yaml' 34 | template = hw -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from pip._internal.req import parse_requirements 2 | from setuptools import 
setup, find_packages 3 | 4 | 5 | install_reqs = parse_requirements('requirements.txt', session=False) 6 | 7 | version = '0.4.2' 8 | 9 | setup( 10 | name='sammy', 11 | version=version, 12 | description="Python library for generating AWS SAM " 13 | "(Serverless Application Model) templates with validation.", 14 | classifiers=[ 15 | "Programming Language :: Python", 16 | "Topic :: Software Development :: Libraries :: Python Modules", 17 | "Environment :: Web Environment", 18 | "Programming Language :: Python :: 3.6", 19 | "Programming Language :: Python :: 3 :: Only" 20 | ], 21 | keywords='serverless, cloudformation, sam', 22 | author='Brian Jinwright', 23 | author_email='opensource@ipoots.com', 24 | maintainer='Brian Jinwright', 25 | packages=find_packages(), 26 | url='https://github.com/capless/sammy', 27 | license='Apache License 2.0', 28 | install_requires=[str(ir.req) for ir in install_reqs], 29 | include_package_data=True, 30 | zip_safe=False, 31 | ) 32 | --------------------------------------------------------------------------------