├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE.md
├── README.md
├── ROADMAP.md
├── SETUP.md
├── TODO.md
├── assets
│   ├── TailorArchDesign.jpg
│   ├── nipap-main-screen.png
│   └── tailor-logo.png
├── buildspec.yml
├── cfn
│   ├── cfn-core-2az.json
│   ├── cfn-core-3az.json
│   ├── cfn-nipap-backend.json
│   └── cfn-nipap-daemon.json
├── docs
│   ├── images
│   │   ├── how-to-manually-enable-vpcflowlogs.md
│   │   ├── postman-accountupdate-headers.png
│   │   ├── postman-aws-sigv4.png
│   │   ├── postman-vpcflowlogs-body.png
│   │   └── vpc-info.png
│   ├── requeststatus.md
│   └── vpcflowlogs.md
├── pipeline
│   ├── cfn-deploypipeline-github.yaml
│   ├── cfn-deploypipeline-s3.yaml
│   └── functions
│       └── slack-notify
│           ├── handler.py
│           └── requirements.txt
├── sam
│   ├── api
│   │   └── swagger.yaml
│   ├── cfn
│   │   ├── cfn-cloudwatch-alarms.yaml
│   │   ├── cfn-customresource-regionsns.yaml
│   │   ├── cfn-dynamodb-tables.yaml
│   │   ├── cfn-elasticsearch-domain.json
│   │   ├── cfn-iam-apigateway-lambda-policies.yaml
│   │   ├── cfn-iam-core-functions-roles.yaml
│   │   ├── cfn-lambda-core-functions.yaml
│   │   └── cfn-sns-topics.yaml
│   └── functions
│       ├── talr-accountupdate-cloudability
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-accountupdate-cloudtrail
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-accountupdate-config
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-accountupdate-configrules
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-accountupdate-metadata
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-accountupdate-vpcdns
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-accountupdate-vpcflowlogs
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-acmwhitelist
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-cla
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-cloudability
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-cloudtrail
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-config-complianceaggregator
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-config-deployrulefunctions
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-config
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-configrule-ec2notinpublicsubnet
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-cresource-sns
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-directconnect
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-director
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-entsupport
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-iam
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-inquirer
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-lex-accountrequest
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-nipap
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-notify
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-ops-ddb-backups
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-ops-slack-notifications
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-poll-accountcompliance
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-poll-accountreconcile
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-poll-cla
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-poll-configcompliance
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-receptionist
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-requeststatus
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-validator
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-vpc
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-vpcdns
│       │   ├── handler.py
│       │   └── requirements.txt
│       ├── talr-vpcflowlogs
│       │   ├── handler.py
│       │   └── requirements.txt
│       └── talr-vpciam
│           ├── handler.py
│           └── requirements.txt
└── tailor.yaml
/.gitignore:
--------------------------------------------------------------------------------
1 | ### JetBrains template
2 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
3 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
4 |
5 | # User-specific stuff:
6 | .idea/
7 | .vscode/
8 | vendored/
9 | sam-output.yaml
10 | cfn-config.json
11 | *.graffle
12 |
13 | ## File-based project format:
14 | *.iws
15 |
16 | ## Plugin-specific files:
17 |
18 | # IntelliJ
19 | /out/
20 |
21 | # mpeltonen/sbt-idea plugin
22 | .idea_modules/
23 |
24 | # JIRA plugin
25 | atlassian-ide-plugin.xml
26 |
27 | # Crashlytics plugin (for Android Studio and IntelliJ)
28 | com_crashlytics_export_strings.xml
29 | crashlytics.properties
30 | crashlytics-build.properties
31 | fabric.properties
32 | ### Python template
33 | # Byte-compiled / optimized / DLL files
34 | __pycache__/
35 | *.py[cod]
36 | *$py.class
37 |
38 | # C extensions
39 | *.so
40 |
41 | # Distribution / packaging
42 | .Python
43 | env/
44 | build/
45 | develop-eggs/
46 | dist/
47 | downloads/
48 | eggs/
49 | .eggs/
50 | lib/
51 | lib64/
52 | parts/
53 | sdist/
54 | var/
55 | *.egg-info/
56 | .installed.cfg
57 | *.egg
58 |
59 | # PyInstaller
60 | # Usually these files are written by a python script from a template
61 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
62 | *.manifest
63 | *.spec
64 |
65 | # Installer logs
66 | pip-log.txt
67 | pip-delete-this-directory.txt
68 |
69 | # Unit test / coverage reports
70 | htmlcov/
71 | .tox/
72 | .coverage
73 | .coverage.*
74 | .cache
75 | nosetests.xml
76 | coverage.xml
77 | *,cover
78 | .hypothesis/
79 |
80 | # Translations
81 | *.mo
82 | *.pot
83 |
84 | # Django stuff:
85 | *.log
86 | local_settings.py
87 |
88 | # Flask instance folder
89 | instance/
90 |
91 | # Scrapy stuff:
92 | .scrapy
93 |
94 | # Sphinx documentation
95 | docs/_build/
96 |
97 | # PyBuilder
98 | target/
99 |
100 | # IPython Notebook
101 | .ipynb_checkpoints
102 |
103 | # pyenv
104 | .python-version
105 |
106 | # celery beat schedule file
107 | celerybeat-schedule
108 |
109 | # dotenv
110 | .env
111 |
112 | # virtualenv
113 | venv/
114 | ENV/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 |
119 | # Rope project settings
120 | .ropeproject
121 | # Created by .ignore support plugin (hsz.mobi)
122 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
6 |
7 | ## Our Standards
8 |
9 | Examples of behavior that contributes to creating a positive environment include:
10 |
11 | * Using welcoming and inclusive language
12 | * Being respectful of differing viewpoints and experiences
13 | * Gracefully accepting constructive criticism
14 | * Focusing on what is best for the community
15 | * Showing empathy towards other community members
16 |
17 | Examples of unacceptable behavior by participants include:
18 |
19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances
20 | * Trolling, insulting/derogatory comments, and personal or political attacks
21 | * Public or private harassment
22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission
23 | * Other conduct which could reasonably be considered inappropriate in a professional setting
24 |
25 | ## Our Responsibilities
26 |
27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
28 |
29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
30 |
31 | ## Scope
32 |
33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
34 |
35 | ## Enforcement
36 |
37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at aws-tailor@alanwill.io. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
38 |
39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
40 |
41 | ## Attribution
42 |
43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
44 |
45 | [homepage]: http://contributor-covenant.org
46 | [version]: http://contributor-covenant.org/version/1/4/
47 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | Thanks for your interest in Tailor and in making it better. I've released Tailor as an MVP (Minimum Viable Product), meaning it's still very rough around the edges. Tests, for example, aren't fully implemented yet.
2 |
3 | ## Reporting Bugs
4 |
5 | * Search all existing [issues](https://github.com/alanwill/aws-tailor/issues) to see if someone has already reported the problem.
6 | * Open an issue and provide as much information as possible.
7 |
8 | ## Fixing Bugs
9 |
10 | If you're willing to tackle fixing a bug, please fork this repo, then create and submit a Pull Request.
11 |
--------------------------------------------------------------------------------
/ROADMAP.md:
--------------------------------------------------------------------------------
1 | # Tailor Roadmap
2 |
3 | The following features are coming to Tailor over the next few months:
4 |
5 |
6 | ## Account update APIs for Config, IAM and VPCs
7 |
8 | Extending the existing account update APIs to provision Config, IAM, or VPC on an existing account.
9 |
10 |
11 | ## Tailor Bot
12 |
13 | A Slack bot for engaging with Tailor, for provisioning as well as querying.
14 |
15 |
16 | ## Config Rules Triggers
17 |
18 | Defining a set of rules and deploying a mechanism that allows Tailor to correct deviations from those rules.
19 |
20 |
21 |
--------------------------------------------------------------------------------
/SETUP.md:
--------------------------------------------------------------------------------
1 | ## Tailor initial setup instructions
2 |
3 | 1. Run pipeline/cfn-deploypipeline-github.yaml or pipeline/cfn-deploypipeline-s3.yaml as a SAM CFN template
4 | 2. Run cfn/cfn-nipap-backend.json (a boto3 sketch for this step is shown below)
5 | 3. Configure NIPAP AMI
6 | 4. Run tailor.yaml as a SAM CFN template
7 | 5. Configure DynamoDB config tables
8 |
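9 | As a rough illustration of step 2, the NIPAP backend template can be launched with boto3 along the lines below. The stack name and parameter values are placeholders; substitute values for your own environment.
10 |
11 | ```
12 | import boto3
13 |
14 | cfn = boto3.client("cloudformation", region_name="us-east-1")
15 |
16 | with open("cfn/cfn-nipap-backend.json") as f:
17 |     template_body = f.read()
18 |
19 | # Parameter values below are illustrative only
20 | cfn.create_stack(
21 |     StackName="tailor-nipap-backend",
22 |     TemplateBody=template_body,
23 |     Parameters=[
24 |         {"ParameterKey": "EnvironmentName", "ParameterValue": "dev"},
25 |         {"ParameterKey": "TailorNipapDbSubnetGroup", "ParameterValue": "my-db-subnet-group"},
26 |         {"ParameterKey": "TailorNipapDbMasterPassword", "ParameterValue": "ChangeMe123@"},
27 |         {"ParameterKey": "VPCID", "ParameterValue": "vpc-abcdefgh"},
28 |     ],
29 | )
30 | ```
31 |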
--------------------------------------------------------------------------------
/TODO.md:
--------------------------------------------------------------------------------
1 | # Tailor To Do List
2 |
3 | The following items are tasks on my radar to get completed. They are not features per se, just things that need to get done but aren't finished yet.
4 |
5 | - [ ] Documentation. Completely rewrite the documentation to be not only more complete but more useful
6 | - [ ] Test coverage. Tests. Period.
7 |
--------------------------------------------------------------------------------
/assets/TailorArchDesign.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/assets/TailorArchDesign.jpg
--------------------------------------------------------------------------------
/assets/nipap-main-screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/assets/nipap-main-screen.png
--------------------------------------------------------------------------------
/assets/tailor-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/assets/tailor-logo.png
--------------------------------------------------------------------------------
/buildspec.yml:
--------------------------------------------------------------------------------
1 | version: 0.2
2 |
3 | phases:
4 | install:
5 | commands:
6 | # Purely informational
7 | - aws --version
8 | # Upgrade pip
9 | - pip install --upgrade pip
10 | # Upgrade to the latest version of awscli because the one bundled with
11 | # the CodeBuild image has a bug that prevents SAM templates from
12 | # packaging correctly.
13 | - pip install --upgrade awscli
14 | pre_build:
15 | commands:
16 | # Loop through directory structure and pip install all function libraries
17 | # as specified in their respective requirements.txt
18 | - for dir in sam/functions/*; do echo "$dir"; if [ -f "$dir"/requirements.txt ]; then (pip install -t "$dir"/vendored/ -r "$dir"/requirements.txt); fi; done
19 | # Inject the account number and region into the Swagger file
20 | - sed -i -e 's//'"$AWS_ACCOUNT_NUMBER"'/g' sam/api/swagger.yaml
21 | - sed -i -e 's//'"$AWS_DEFAULT_REGION"'/g' sam/api/swagger.yaml
22 | # Package SAM template
23 | - aws cloudformation package --template-file $SAM_INPUT_FILE --s3-bucket $S3_BUCKET --s3-prefix $STAGE_NAME/deploy --output-template-file sam-output.yaml
24 | # Upload output template to S3. This is being done to overcome the 51kb limit of
25 | # CFN for create stack operations on templates not in S3
26 | - aws s3 cp sam-output.yaml s3://$S3_BUCKET/$STAGE_NAME/config/sam-output.yaml
27 | # Download CFN parameter file
28 | - aws s3 cp s3://$S3_BUCKET/$STAGE_NAME/config/cfn-config.json .
29 | build:
30 | commands:
31 | # Create CFN change set
32 | - aws cloudformation create-change-set --template-url https://$S3_BUCKET.s3.amazonaws.com/$STAGE_NAME/config/sam-output.yaml --parameters file://cfn-config.json --role-arn arn:aws:iam::$AWS_ACCOUNT_NUMBER:role/$CFN_ROLE --change-set-name tailor-$STAGE_NAME-ChangeSet --stack-name tailor-$STAGE_NAME --capabilities CAPABILITY_IAM
33 |
--------------------------------------------------------------------------------
/cfn/cfn-nipap-backend.json:
--------------------------------------------------------------------------------
1 | {
2 | "AWSTemplateFormatVersion": "2010-09-09",
3 |
4 | "Description": "Builds Postgres database for Tailor's NIPAP backend",
5 |
6 | "Parameters": {
7 | "AppName": {
8 | "Description": "What is the name of the application? This is used to tag the resources",
9 | "Type": "String",
10 | "Default": "tailor",
11 | "MinLength": "3",
12 | "MaxLength": "25"
13 | },
14 | "EnvironmentName": {
15 | "Description": "What is the environment type, e.g. prd/stg/dev/tst.",
16 | "Type": "String",
17 | "MinLength": "3",
18 | "MaxLength": "5",
19 | "AllowedValues": ["prd", "stg", "dev", "tst"]
20 | },
21 | "TailorNipapDbInstanceClass": {
22 | "Description": "Database Instance Class",
23 | "Type": "String",
24 | "Default": "db.t2.micro",
25 | "AllowedValues": ["db.t2.micro", "db.t2.small", "db.t2.medium", "db.t2.large"]
26 | },
27 | "TailorNipapDbSubnetGroup": {
28 | "Description": "Database Subnet Group Name",
29 | "Type": "String",
30 | "MinLength": "3",
31 | "MaxLength": "50"
32 | },
33 | "TailorNipapDbMasterPassword": {
34 | "NoEcho": "true",
35 | "Description": "Database Master Password",
36 | "Type": "String",
37 | "MinLength": "1",
38 | "MaxLength": "41",
39 | "AllowedPattern": "^(?=.*[0-9])(?=.*[a-z])(?=.*[A-Z])(?=.*[@#$%^&+=])(?=\\S+$).{8,}$"
40 | },
41 | "VPCID": {
42 | "Description": "VPC ID",
43 | "Type": "AWS::EC2::VPC::Id",
44 | "ConstraintDescription": "Must be the VPC ID of the VPC where you're building this application in the form of vpc-abcdefgh"
45 | }
46 |
47 | },
48 |
49 | "Mappings": {
50 | "AWSInstanceType2Arch": {
51 | "t2.micro": {
52 | "Arch": "HVM64"
53 | },
54 | "t2.small": {
55 | "Arch": "HVM64"
56 | },
57 | "t2.medium": {
58 | "Arch": "HVM64"
59 | },
60 | "t2.large": {
61 | "Arch": "HVM64"
62 | },
63 | "m3.medium": {
64 | "Arch": "HVM64"
65 | },
66 | "m3.large": {
67 | "Arch": "HVM64"
68 | },
69 | "m3.xlarge": {
70 | "Arch": "HVM64"
71 | },
72 | "m3.2xlarge": {
73 | "Arch": "HVM64"
74 | },
75 | "m4.large": {
76 | "Arch": "HVM64"
77 | },
78 | "m4.xlarge": {
79 | "Arch": "HVM64"
80 | },
81 | "m4.2xlarge": {
82 | "Arch": "HVM64"
83 | },
84 | "m4.4xlarge": {
85 | "Arch": "HVM64"
86 | },
87 | "c3.large": {
88 | "Arch": "HVM64"
89 | },
90 | "c3.xlarge": {
91 | "Arch": "HVM64"
92 | },
93 | "c3.2xlarge": {
94 | "Arch": "HVM64"
95 | },
96 | "c3.4xlarge": {
97 | "Arch": "HVM64"
98 | },
99 | "c3.8xlarge": {
100 | "Arch": "HVM64"
101 | },
102 | "r3.large": {
103 | "Arch": "HVM64"
104 | },
105 | "r3.xlarge": {
106 | "Arch": "HVM64"
107 | },
108 | "r3.2xlarge": {
109 | "Arch": "HVM64"
110 | },
111 | "r3.4xlarge": {
112 | "Arch": "HVM64"
113 | },
114 | "r3.8xlarge": {
115 | "Arch": "HVM64"
116 | },
117 | "i2.xlarge": {
118 | "Arch": "HVM64"
119 | },
120 | "i2.2xlarge": {
121 | "Arch": "HVM64"
122 | },
123 | "i2.4xlarge": {
124 | "Arch": "HVM64"
125 | },
126 | "i2.8xlarge": {
127 | "Arch": "HVM64"
128 | },
129 | "hs1.8xlarge": {
130 | "Arch": "HVM64"
131 | }
132 | },
133 | "AWSRegionArch2AMI": {
134 | "us-west-2": {
135 | "HVM64": "ami-9abea4fb"
136 | },
137 | "us-west-1": {
138 | "HVM64": "ami-06116566"
139 | },
140 | "us-east-1": {
141 | "HVM64": "ami-fce3c696"
142 | }
143 | }
144 | },
145 |
146 | "Resources": {
147 | "RdsNipapDb": {
148 | "Type": "AWS::RDS::DBInstance",
149 | "Properties": {
150 | "AllocatedStorage": "5",
151 | "AllowMajorVersionUpgrade": true,
152 | "AutoMinorVersionUpgrade": true,
153 | "BackupRetentionPeriod": "30",
154 | "DBInstanceClass": "db.t2.micro",
155 | "DBInstanceIdentifier": "talr-nipap",
156 | "DBName": "nipap",
157 | "DBSecurityGroups": [{
158 | "Ref": "SgNipapPostgres"
159 | }],
160 | "DBSubnetGroupName": {
161 | "Ref": "TailorNipapDbSubnetGroup"
162 | },
163 | "Engine": "postgres",
164 | "MasterUsername": "root",
165 | "MasterUserPassword": {
166 | "Ref": "TailorNipapDbMasterPassword"
167 | },
168 | "MultiAZ": true,
169 | "StorageType": "gp2",
170 | "Tags": [{
171 | "Key": "Name",
172 | "Value": "Tailor NIPAP"
173 | }, {
174 | "Key": "stack",
175 | "Value": {
176 | "Ref": "EnvironmentName"
177 | }
178 | }, {
179 | "Key": "app",
180 | "Value": {
181 | "Ref": "AppName"
182 | }
183 | }, {
184 | "Key": "purpose",
185 | "Value": "DB backend for NIPAP IPAM tool"
186 | }]
187 | },
188 | "DeletionPolicy": "Snapshot"
189 | },
190 | "SgNipapPostgres": {
191 | "Type": "AWS::RDS::DBSecurityGroup",
192 | "Properties": {
193 | "GroupDescription": "Postgres",
194 | "EC2VpcId": {
195 | "Ref": "VPCID"
196 | },
197 | "DBSecurityGroupIngress": [{
198 | "EC2SecurityGroupId": {
199 | "Ref": "SgTailorComponents"
200 | }
201 | }],
202 | "Tags": [{
203 | "Key": "purpose",
204 | "Value": "NIPAP Postgres"
205 | }, {
206 | "Key": "stack",
207 | "Value": {
208 | "Ref": "EnvironmentName"
209 | }
210 | }, {
211 | "Key": "app",
212 | "Value": {
213 | "Ref": "AppName"
214 | }
215 | }]
216 | }
217 | },
218 | "SgTailorComponents": {
219 | "Type": "AWS::EC2::SecurityGroup",
220 | "Properties": {
221 | "GroupDescription": "Tailor infrastructure resources",
222 | "VpcId": {
223 | "Ref": "VPCID"
224 | },
225 | "SecurityGroupIngress": [],
226 | "Tags": [{
227 | "Key": "purpose",
228 | "Value": "Tailor infrastructure resources"
229 | }, {
230 | "Key": "stack",
231 | "Value": {
232 | "Ref": "EnvironmentName"
233 | }
234 | }, {
235 | "Key": "app",
236 | "Value": {
237 | "Ref": "AppName"
238 | }
239 | }]
240 | }
241 | }
242 | },
243 |
244 | "Outputs": {
245 | "PostgressDbEndpoint": {
246 | "Description": "NIPAP Postgress DB Endpoint",
247 | "Value": {
248 | "Fn::GetAtt" : [ "RdsNipapDb", "Endpoint.Address" ]
249 | }
250 | },
251 | "PostgressDbPort": {
252 | "Description": "NIPAP Postgress DB Port",
253 | "Value": {
254 | "Fn::GetAtt" : [ "RdsNipapDb", "Endpoint.Port" ]
255 | }
256 | },
257 | "TailorComponentsSg": {
258 | "Description": "Shared Security Group for Tailor Components",
259 | "Value": {
260 | "Fn::GetAtt" : [ "SgTailorComponents", "GroupId" ]
261 | }
262 | }
263 |
264 | }
265 |
266 |
267 | }
268 |
--------------------------------------------------------------------------------
/cfn/cfn-nipap-daemon.json:
--------------------------------------------------------------------------------
1 | {
2 | "AWSTemplateFormatVersion": "2010-09-09",
3 |
4 | "Description": "Builds Daemon instance for connecting to NIPAP backend",
5 |
6 | "Parameters": {
7 | "AppName": {
8 | "Description": "What is the name of the application? This is used to tag the resources",
9 | "Type": "String",
10 | "Default": "tailor",
11 | "MinLength": "3",
12 | "MaxLength": "25"
13 | },
14 | "EnvironmentName": {
15 | "Description": "What is the environment type, e.g. prd/stg/dev/tst.",
16 | "Type": "String",
17 | "MinLength": "3",
18 | "MaxLength": "5",
19 | "AllowedValues": ["prd", "stg", "dev", "tst"]
20 | },
21 | "ApplicationSubnetAZ1": {
22 | "Description": "Subnet ID for the Application subnet in AZ1",
23 | "Type": "AWS::EC2::Subnet::Id"
24 | },
25 | "TailorNipapDaemonInstanceType": {
26 | "Description": "Daemon Instance Class",
27 | "Type": "String",
28 | "Default": "t2.micro",
29 | "AllowedValues": ["t2.nano", "t2.micro", "t2.small", "t2.medium", "t2.large"]
30 | },
31 | "TailorNipapDaemonAmi": {
32 | "Description": "NIPAP Daemon AMI",
33 | "Type": "AWS::EC2::Image::Id"
34 | },
35 | "TailorComponentsSecurityGroup": {
36 | "Description": "TailorComponentsSg Id from NIPAP Backend CFN Stack",
37 | "Type": "AWS::EC2::SecurityGroup::Id"
38 | },
39 | "TailorRequestId": {
40 | "Description": "Tailor requestId",
41 | "Type": "String"
42 | },
43 | "VPCID": {
44 | "Description": "VPC ID",
45 | "Type": "AWS::EC2::VPC::Id"
46 | }
47 | },
48 |
49 | "Mappings": {
50 | "AWSInstanceType2Arch": {
51 | "t2.nano": {
52 | "Arch": "HVM64"
53 | },
54 | "t2.micro": {
55 | "Arch": "HVM64"
56 | },
57 | "t2.small": {
58 | "Arch": "HVM64"
59 | },
60 | "t2.medium": {
61 | "Arch": "HVM64"
62 | },
63 | "t2.large": {
64 | "Arch": "HVM64"
65 | },
66 | "m3.medium": {
67 | "Arch": "HVM64"
68 | },
69 | "m3.large": {
70 | "Arch": "HVM64"
71 | },
72 | "m3.xlarge": {
73 | "Arch": "HVM64"
74 | },
75 | "m3.2xlarge": {
76 | "Arch": "HVM64"
77 | },
78 | "m4.large": {
79 | "Arch": "HVM64"
80 | },
81 | "m4.xlarge": {
82 | "Arch": "HVM64"
83 | },
84 | "m4.2xlarge": {
85 | "Arch": "HVM64"
86 | },
87 | "m4.4xlarge": {
88 | "Arch": "HVM64"
89 | },
90 | "c3.large": {
91 | "Arch": "HVM64"
92 | },
93 | "c3.xlarge": {
94 | "Arch": "HVM64"
95 | },
96 | "c3.2xlarge": {
97 | "Arch": "HVM64"
98 | },
99 | "c3.4xlarge": {
100 | "Arch": "HVM64"
101 | },
102 | "c3.8xlarge": {
103 | "Arch": "HVM64"
104 | },
105 | "r3.large": {
106 | "Arch": "HVM64"
107 | },
108 | "r3.xlarge": {
109 | "Arch": "HVM64"
110 | },
111 | "r3.2xlarge": {
112 | "Arch": "HVM64"
113 | },
114 | "r3.4xlarge": {
115 | "Arch": "HVM64"
116 | },
117 | "r3.8xlarge": {
118 | "Arch": "HVM64"
119 | },
120 | "i2.xlarge": {
121 | "Arch": "HVM64"
122 | },
123 | "i2.2xlarge": {
124 | "Arch": "HVM64"
125 | },
126 | "i2.4xlarge": {
127 | "Arch": "HVM64"
128 | },
129 | "i2.8xlarge": {
130 | "Arch": "HVM64"
131 | },
132 | "hs1.8xlarge": {
133 | "Arch": "HVM64"
134 | }
135 | },
136 | "AWSRegionArch2AMI": {
137 | "us-east-1": {
138 | "HVM64": ""
139 | }
140 | }
141 | },
142 |
143 | "Resources": {
144 | "NipapDaemonInstance": {
145 | "Type": "AWS::EC2::Instance",
146 | "Properties": {
147 | "InstanceType": {
148 | "Ref": "TailorNipapDaemonInstanceType"
149 | },
150 | "SubnetId": {
151 | "Ref": "ApplicationSubnetAZ1"
152 | },
153 | "ImageId": {
154 | "Ref": "TailorNipapDaemonAmi"
155 | },
156 | "SecurityGroupIds": [{
157 | "Ref": "NipapDaemonSg"
158 | }, {
159 | "Ref": "TailorComponentsSecurityGroup"
160 | }],
161 | "BlockDeviceMappings": [{
162 | "DeviceName": "/dev/sda1",
163 | "Ebs": {
164 | "VolumeSize": "8",
165 | "VolumeType": "gp2"
166 | }
167 | }],
168 | "UserData": {
169 | "Fn::Base64": {
170 | "Fn::Join": ["", [
171 | "#!/bin/bash \n",
172 | "apt-get update -y\n",
173 | "apt-get upgrade -y\n"
174 | ]]
175 | }
176 | },
177 | "Tags": [{
178 | "Key": "purpose",
179 | "Value": "NIPAP Deamon Instance"
180 | }, {
181 | "Key": "stack",
182 | "Value": {
183 | "Ref": "EnvironmentName"
184 | }
185 | }, {
186 | "Key": "app",
187 | "Value": {
188 | "Ref": "AppName"
189 | }
190 | }, {
191 | "Key": "Name",
192 | "Value": "NIPAP Daemon Instance"
193 | }]
194 | }
195 | },
196 |
197 | "NipapDaemonSg": {
198 | "Type": "AWS::EC2::SecurityGroup",
199 | "Properties": {
200 | "GroupDescription": "NIPAP Deamon",
201 | "VpcId": {
202 | "Ref": "VPCID"
203 | },
204 | "SecurityGroupIngress": [{
205 | "IpProtocol": "tcp",
206 | "FromPort": "1337",
207 | "ToPort": "1337",
208 | "SourceSecurityGroupId": {
209 | "Ref": "TailorComponentsSecurityGroup"
210 | }
211 | }],
212 | "Tags": [{
213 | "Key": "purpose",
214 | "Value": "NIPAP Deamon"
215 | }, {
216 | "Key": "stack",
217 | "Value": {
218 | "Ref": "EnvironmentName"
219 | }
220 | }, {
221 | "Key": "app",
222 | "Value": {
223 | "Ref": "AppName"
224 | }
225 | }]
226 | }
227 | }
228 | },
229 |
230 | "Outputs": {
231 | "NipapDaemonIp": {
232 | "Description": "NIPAP Daemon IP address",
233 | "Value": {
234 | "Fn::GetAtt" : [ "NipapDaemonInstance", "PrivateIp" ]
235 | }
236 | },
237 | "TailorRequestId": {
238 | "Description": "Tailor requestId",
239 | "Value": {
240 | "Ref": "TailorRequestId"
241 | }
242 | }
243 | }
244 |
245 |
246 | }
247 |
--------------------------------------------------------------------------------
/docs/images/how-to-manually-enable-vpcflowlogs.md:
--------------------------------------------------------------------------------
1 | # How to manually enable VPC Flow Logs
2 |
3 | ## Prerequisites
4 |
5 | * IAM keys with full access to VPC and Cloudwatch Logs
6 | * AWS CLI already installed and configured
7 |
8 | ## Step 1
9 |
10 | ### Set required environment variables
11 | ```
12 | export vpcRegion="us-east-1"
13 | export vpcId="vpc-3c779159"
14 | ```
15 |
16 | ### Create Cloudwatch Logs Group
17 |
18 | Note: Replace `myprofilename` with the name of the profile you created as part of the AWS CLI configuration
19 |
20 | ```
21 | aws logs create-log-group --log-group-name /`aws iam list-account-aliases --profile myprofilename --output text --query 'AccountAliases[0]'`/vpcflowlogs/`echo $vpcRegion`/`echo $vpcId` --region $vpcRegion --profile myprofilename
22 | ```
23 |
24 | ## Step 2
25 |
26 | ### Set Log Group Retention
27 |
28 | First lookup the full log group name:
29 | ```
30 | aws logs describe-log-groups --profile myprofilename --region $vpcRegion
31 | ```
32 |
33 | Replace `my-logs` with the log group name, then run the command below to set the retention:
34 | ```
35 | aws logs put-retention-policy --log-group-name my-logs --retention-in-days 7 --region `echo $vpcRegion` --profile myprofilename
36 | ```
37 |
38 | ## Step 3
39 |
40 | ### Create Flow Logs
41 |
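42 | The steps above use the AWS CLI; as a rough boto3 equivalent for this step, the snippet below creates the flow log against the log group from Step 1. The profile name, log group name and delivery role ARN are placeholders, and the role must allow the VPC Flow Logs service to publish to CloudWatch Logs.
43 |
44 | ```
45 | import boto3
46 |
47 | # Placeholders: reuse the values exported in Step 1 plus your own
48 | # account alias and flow logs delivery role.
49 | vpc_region = "us-east-1"
50 | vpc_id = "vpc-3c779159"
51 | log_group_name = "/my-account-alias/vpcflowlogs/us-east-1/vpc-3c779159"
52 | delivery_role_arn = "arn:aws:iam::123456789012:role/my-flowlogs-delivery-role"
53 |
54 | session = boto3.Session(profile_name="myprofilename", region_name=vpc_region)
55 | ec2 = session.client("ec2")
56 |
57 | response = ec2.create_flow_logs(
58 |     ResourceIds=[vpc_id],
59 |     ResourceType="VPC",
60 |     TrafficType="ALL",
61 |     LogGroupName=log_group_name,
62 |     DeliverLogsPermissionArn=delivery_role_arn,
63 | )
64 | print(response["FlowLogIds"])
65 | ```
66 |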
--------------------------------------------------------------------------------
/docs/images/postman-accountupdate-headers.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/docs/images/postman-accountupdate-headers.png
--------------------------------------------------------------------------------
/docs/images/postman-aws-sigv4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/docs/images/postman-aws-sigv4.png
--------------------------------------------------------------------------------
/docs/images/postman-vpcflowlogs-body.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/docs/images/postman-vpcflowlogs-body.png
--------------------------------------------------------------------------------
/docs/images/vpc-info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/docs/images/vpc-info.png
--------------------------------------------------------------------------------
/docs/requeststatus.md:
--------------------------------------------------------------------------------
1 | # Using the /requeststatus endpoint
2 |
3 | The `/requeststatus` endpoint is a GET enabled resource which allows querying Tailor for the status of requests and child tasks.
4 |
5 | ## Authorization
6 |
7 | Like all Tailor APIs, this endpoint is authorized via AWS IAM credentials and all requests must be signed with [AWS's Signature Version 4](https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html) signing process.
8 |
9 | In order to use this endpoint, IAM access and secret keys will need to be provisioned for you ahead of time. Contact the internal AWS Support Team for access.
10 |
11 | ## API Request
12 |
13 | The request is a GET method comprised of a query parameter called `requestId`.
14 |
15 | Sample call:
16 |
17 | ```
18 | GET https:///requeststatus?requestId=bcc394a5-87c1-49bd-beac-g4308a67d227
19 | ```
20 |
21 | ## API Response
22 |
23 | A successful response will return a 200 HTTP status code and a payload similar to the following:
24 |
25 | ```
26 | {
27 | "status": "complete",
28 | "taskStatus": {
29 | "vpcFlowLogs": "complete"
30 | },
31 | "accountId": "123456789012",
32 | "accountName": "ACME STG"
33 | }
34 | ```
35 |
36 | Other possible responses are:
37 |
38 | * 400 - Typically if the `requestId` is missing or incorrectly spelled
39 | * 404 - Typically if the `requestId` provided is incorrect or no longer exists in Tailor.
40 |
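41 | ## Calling the API programmatically
42 |
43 | As a minimal sketch, the request can be signed with SigV4 using `botocore` and sent with `requests`. The endpoint hostname and region below are placeholders; substitute your deployed API Gateway endpoint.
44 |
45 | ```
46 | import requests
47 | from botocore.auth import SigV4Auth
48 | from botocore.awsrequest import AWSRequest
49 | from botocore.session import Session
50 |
51 | # Placeholder endpoint and region; substitute your own deployment
52 | region = "us-east-1"
53 | url = ("https://abcde12345.execute-api.us-east-1.amazonaws.com/prod"
54 |        "/requeststatus?requestId=bcc394a5-87c1-49bd-beac-g4308a67d227")
55 |
56 | # Sign the request with credentials from the default credential chain
57 | credentials = Session().get_credentials()
58 | request = AWSRequest(method="GET", url=url)
59 | SigV4Auth(credentials, "execute-api", region).add_auth(request)
60 |
61 | # Send the signed request
62 | response = requests.get(url, headers=dict(request.headers))
63 | print(response.status_code, response.json())
64 | ```
65 |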
--------------------------------------------------------------------------------
/docs/vpcflowlogs.md:
--------------------------------------------------------------------------------
1 | # Using the /vpcflowlogs endpoint
2 |
3 | The `/vpcflowlogs` endpoint is a PUT enabled resource which allows for VPC Flow Logs to be enabled on an existing account.
4 |
5 | Once enabled, logs are dispatched to a Kinesis Stream in the Core Services account where the logs are persisted in S3 for consumption.
6 |
7 | ## Assumptions
8 |
9 | * The VPC was created with [cfn-core](https://github.com/alanwill/cfn-core)
10 | * VPC exists in us-east-1, us-west-1 or us-west-2
11 | * Account is known to Tailor, meaning the account either was created by Tailor or is part of a supported AWS Billing/Payer Account
12 |
13 | ## Authorization
14 |
15 | Like all Tailor APIs, this endpoint is authorized via AWS IAM credentials and all requests must be signed with [AWS's Signature Version 4](https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html) signing process.
16 |
17 | In order to use this endpoint, IAM access and secret keys will need to be provisioned for you ahead of time. Contact the internal AWS Support Team for access.
18 |
19 | ## API Request
20 |
21 | The request is a PUT method comprised of 2 unique parameters:
22 |
23 | * Header: `accountCbAlias`
24 | * Request body (application/json content type):
25 | ```
26 | {
27 | "region": "{region}",
28 | "stackName": "{stackName}",
29 | "accountId": "{accountId}"
30 | }
31 | ```
32 |
33 | where `stackName` refers to the name of the CloudFormation stack used to create the VPC (this should be a cfn-core template). Looking at an existing VPC and reviewing its tags will give you all the information needed to populate the request body:
34 |
35 |
36 |
37 | ## API Response
38 |
39 | A successful response will return a 202 HTTP status code and a payload similar to the following:
40 |
41 | ```
42 | {
43 | "message": "Request Accepted",
44 | "code": "2020",
45 | "requestId": "bcc394a5-87c1-49bd-beac-g4308a67d227"
46 | }
47 | ```
48 |
49 | Other possible responses are:
50 | * 400 - Typically if there's a missing header or body, or incorrect values for either.
51 | * 404 - Typically if the account is unknown, the `stackName` is incorrect, or the region is incorrect
52 |
53 | The resulting `requestId` in the 202 response can subsequently be used in the [/requeststatus](./requeststatus.md) endpoint to query the status of the request.
54 |
55 | ## Calling the API via Postman
56 |
57 | The following steps walk through how to use [Postman](https://www.getpostman.com/) to invoke the /vpcflowlogs API:
58 |
59 | 1. Configure AWS SigV4 authorization:
60 |
61 |
62 | Click on the orange `Update Request` button so that the AWS headers are populated.
63 |
64 | 2. Set the `accountCbAlias` request header:
65 |
66 |
67 | 3. Specify the request body:
68 |
69 |
70 | Click Send.
71 |
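72 | ## Calling the API programmatically
73 |
74 | The request can also be signed and sent from code. The sketch below follows the same SigV4 signing pattern shown in [/requeststatus](./requeststatus.md); the endpoint, region, `accountCbAlias` value and body values are placeholders.
75 |
76 | ```
77 | import json
78 | import requests
79 | from botocore.auth import SigV4Auth
80 | from botocore.awsrequest import AWSRequest
81 | from botocore.session import Session
82 |
83 | # Placeholder endpoint, header and body values; substitute your own
84 | region = "us-east-1"
85 | url = "https://abcde12345.execute-api.us-east-1.amazonaws.com/prod/vpcflowlogs"
86 | headers = {"accountCbAlias": "my-payer-alias", "Content-Type": "application/json"}
87 | body = json.dumps({
88 |     "region": "us-east-1",
89 |     "stackName": "my-cfn-core-vpc-stack",
90 |     "accountId": "123456789012"
91 | })
92 |
93 | # Sign the PUT request, including the JSON body, then send it
94 | credentials = Session().get_credentials()
95 | request = AWSRequest(method="PUT", url=url, data=body, headers=headers)
96 | SigV4Auth(credentials, "execute-api", region).add_auth(request)
97 |
98 | response = requests.put(url, data=body, headers=dict(request.headers))
99 | print(response.status_code, response.json())
100 | ```
101 |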
--------------------------------------------------------------------------------
/pipeline/functions/slack-notify/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | from base64 import b64decode
10 |
11 | # Path to modules needed to package local lambda function for upload
12 | currentdir = os.path.dirname(os.path.realpath(__file__))
13 | sys.path.append(os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 | import requests
17 |
18 | # Logging for Serverless
19 | log = logging.getLogger()
20 | log.setLevel(logging.DEBUG)
21 |
22 | # Initializing AWS services
23 | cp = boto3.client('codepipeline')
24 |
25 |
26 | def handler(event, context):
27 | log.debug("Received event {}".format(json.dumps(event)))
28 |
29 | slackChannelName = os.environ['SLACK_CHANNEL_NAME']
30 | slackWebhookUrl = os.environ['SLACK_WEBHOOK_URL']
31 |
32 | try:
33 | if "CodePipeline.job" in event:
34 | jobId = event['CodePipeline.job']['id']
35 | callSlack(slackChannelName, slackWebhookUrl, jobId)
36 | cp.put_job_success_result(
37 | jobId=jobId,
38 | executionDetails={
39 | 'summary': 'Notification posted to Slack',
40 | 'percentComplete': 100
41 | }
42 | )
43 | return
44 |
45 | except Exception as e:
46 | print(e)
47 | return "Cannot recognize input"
48 |
49 |
50 | def callSlack(slack_channel_name, slack_webhook_url, job_id):
51 | getJobDetails = cp.get_job_details(
52 | jobId=job_id
53 | )
54 |
55 | pipelineName = getJobDetails['jobDetails']['data']['pipelineContext']['pipelineName']
56 | stageName = getJobDetails['jobDetails']['data']['pipelineContext']['stage']['name']
57 |
58 | getPipelineState = cp.get_pipeline_state(
59 | name=pipelineName
60 | )
61 | pipelineExecutionId = getPipelineState['stageStates'][0]['latestExecution']['pipelineExecutionId']
62 |
63 | slackMessage = {
64 | 'channel': slack_channel_name,
65 | 'username': "tailorbot",
66 | 'icon_emoji': ":robot_face:",
67 | "attachments": [
68 | {
69 | "color": "good",
70 | "title": 'Pipeline %s (%s)' % (pipelineName, pipelineExecutionId),
71 | "text": 'The %s stage is executing' % (stageName),
72 | "mrkdwn_in": ["text"]
73 | }
74 | ]
75 | }
76 |
77 | # Send notification
78 | slackWebhookResponse = requests.post(slack_webhook_url, data=json.dumps(slackMessage))
79 | print(slackWebhookResponse)
80 |
81 | return
82 |
--------------------------------------------------------------------------------
/pipeline/functions/slack-notify/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/sam/cfn/cfn-customresource-regionsns.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | AWSTemplateFormatVersion: '2010-09-09'
3 | Description: Provisions Tailor's cfn-core CFN response SNS topic in all regions
4 | Parameters:
5 | stage:
6 | Type: String
7 | lambdaArnTalrCresourceSns:
8 | Type: String
9 | Resources:
10 | snsTalrVpcCfnResponse:
11 | Type: Custom::RegionSns
12 | Properties:
13 | ServiceToken: !Ref lambdaArnTalrCresourceSns
14 | Stage: !Ref stage
15 | TopicNamePrefix: talr-vpccfn-response
16 | Outputs:
17 | snsTopicNameTalrVpcCfnResponse:
18 | Description: SNS topic name for talr-vpccfn-response in all regions
19 | Value:
20 | Fn::GetAtt:
21 | - snsTalrVpcCfnResponse
22 | - TopicName
23 |
--------------------------------------------------------------------------------
/sam/cfn/cfn-elasticsearch-domain.json:
--------------------------------------------------------------------------------
1 | {
2 | "AWSTemplateFormatVersion": "2010-09-09",
3 | "Description": "Provisions Tailor ElasticSearch cluster",
4 | "Parameters": {},
5 | "Resources": {
6 | "EsDomain": {
7 | "Type": "AWS::Elasticsearch::Domain",
8 | "Properties": {
9 | "AccessPolicies": {
10 | "Version": "2012-10-17",
11 | "Statement": [{
12 | "Effect": "Allow",
13 | "Principal": {
14 | "AWS": [{
15 | "Ref": "AWS::AccountId"
16 | }]
17 | },
18 | "Action": [
19 | "es:*"
20 | ],
21 | "Resource": {
22 | "Fn::Join": ["", ["arn:aws:es:us-east-1:", {
23 | "Ref": "AWS::AccountId"
24 | }, ":domain/tailor/*"]]
25 | }
26 | }, {
27 | "Effect": "Allow",
28 | "Principal": {
29 | "AWS": "*"
30 | },
31 | "Action": [
32 | "es:*"
33 | ],
34 | "Resource": {
35 | "Fn::Join": ["", ["arn:aws:es:us-east-1:", {
36 | "Ref": "AWS::AccountId"
37 | }, ":domain/tailor/*"]]
38 | },
39 | "Condition": {
40 | "IpAddress": {
41 | "aws:SourceIp": "132.188.0.0/16"
42 | }
43 | }
44 | }]
45 | },
46 | "AdvancedOptions": {
47 | "rest.action.multi.allow_explicit_index": "true"
48 | },
49 | "EBSOptions": {
50 | "EBSEnabled": true,
51 | "VolumeSize": "20",
52 | "VolumeType": "gp2"
53 | },
54 | "DomainName": "tailor",
55 | "ElasticsearchClusterConfig": {
56 | "InstanceCount": 2,
57 | "InstanceType": "t2.small.elasticsearch",
58 | "ZoneAwarenessEnabled": true
59 | },
60 | "ElasticsearchVersion": "5.1",
61 | "SnapshotOptions": {
62 | "AutomatedSnapshotStartHour": 0
63 | },
64 | "Tags": [{
65 | "Key": "Name",
66 | "Value": "tailor-es"
67 | }, {
68 | "Key": "app",
69 | "Value": "tailor"
70 | }, {
71 | "Key": "stack",
72 | "Value": "prd"
73 | }]
74 | }
75 | }
76 |
77 | },
78 | "Outputs": {
79 | "ElasticsearchEndpoint": {
80 | "Description": "Elasticsearch domain endpoint",
81 | "Value": {
82 | "Fn::GetAtt": [
83 | "EsDomain", "DomainEndpoint"
84 | ]
85 | }
86 | }
87 | }
88 | }
89 |
--------------------------------------------------------------------------------
/sam/cfn/cfn-iam-apigateway-lambda-policies.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Description: Provisions Tailor API Gateway Lambda policies
3 | Parameters:
4 | apiGateway:
5 | Type: String
6 | talrInquirerFunction:
7 | Type: String
8 | talrReceptionistFunction:
9 | Type: String
10 | talrAccountupdateConfigrulesFunction:
11 | Type: String
12 | talrRequeststatusFunction:
13 | Type: String
14 | talrAccountupdateVpcflowlogsFunction:
15 | Type: String
16 | talrAccountupdateVpcdnsFunction:
17 | Type: String
18 | talrAccountupdateMetadataFunction:
19 | Type: String
20 | talrAccountupdateCloudabilityFunction:
21 | Type: String
22 | talrAccountupdateConfigFunction:
23 | Type: String
24 | talrAccountupdateCloudtrailFunction:
25 | Type: String
26 | Resources:
27 | talrInquirerFunctionPermission1:
28 | Type: AWS::Lambda::Permission
29 | Properties:
30 | FunctionName: !Ref talrInquirerFunction
31 | Action: lambda:InvokeFunction
32 | Principal: apigateway.amazonaws.com
33 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/GET/accounts"
34 | talrInquirerFunctionPermission2:
35 | Type: AWS::Lambda::Permission
36 | Properties:
37 | FunctionName: !Ref talrInquirerFunction
38 | Action: lambda:InvokeFunction
39 | Principal: apigateway.amazonaws.com
40 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/GET/accounts/ids"
41 | talrReceptionistFunctionPermission1:
42 | Type: AWS::Lambda::Permission
43 | Properties:
44 | FunctionName: !Ref talrReceptionistFunction
45 | Action: lambda:InvokeFunction
46 | Principal: apigateway.amazonaws.com
47 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/POST/accounts"
48 | talrAccountupdateConfigrulesFunctionPermission1:
49 | Type: AWS::Lambda::Permission
50 | Properties:
51 | FunctionName: !Ref talrAccountupdateConfigrulesFunction
52 | Action: lambda:InvokeFunction
53 | Principal: apigateway.amazonaws.com
54 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/PUT/configrules"
55 | talrRequeststatusFunctionPermission1:
56 | Type: AWS::Lambda::Permission
57 | Properties:
58 | FunctionName: !Ref talrRequeststatusFunction
59 | Action: lambda:InvokeFunction
60 | Principal: apigateway.amazonaws.com
61 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/GET/requeststatus/*"
62 | talrAccountupdateVpcflowlogsFunctionPermission1:
63 | Type: AWS::Lambda::Permission
64 | Properties:
65 | FunctionName: !Ref talrAccountupdateVpcflowlogsFunction
66 | Action: lambda:InvokeFunction
67 | Principal: apigateway.amazonaws.com
68 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/PUT/vpcflowlogs"
69 | talrAccountupdateVpcdnsFunctionPermission1:
70 | Type: AWS::Lambda::Permission
71 | Properties:
72 | FunctionName: !Ref talrAccountupdateVpcdnsFunction
73 | Action: lambda:InvokeFunction
74 | Principal: apigateway.amazonaws.com
75 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/PUT/vpcdns"
76 | talrAccountupdateCloudabilityFunctionPermission1:
77 | Type: AWS::Lambda::Permission
78 | Properties:
79 | FunctionName: !Ref talrAccountupdateCloudabilityFunction
80 | Action: lambda:InvokeFunction
81 | Principal: apigateway.amazonaws.com
82 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/PUT/cloudability/*"
83 | talrAccountupdateMetadataFunctionPermission1:
84 | Type: AWS::Lambda::Permission
85 | Properties:
86 | FunctionName: !Ref talrAccountupdateMetadataFunction
87 | Action: lambda:InvokeFunction
88 | Principal: apigateway.amazonaws.com
89 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/PUT/accounts/*"
90 | talrAccountupdateConfigFunctionPermission1:
91 | Type: AWS::Lambda::Permission
92 | Properties:
93 | FunctionName: !Ref talrAccountupdateConfigFunction
94 | Action: lambda:InvokeFunction
95 | Principal: apigateway.amazonaws.com
96 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/PUT/config/*"
97 | talrAccountupdateCloudtrailFunctionPermission1:
98 | Type: AWS::Lambda::Permission
99 | Properties:
100 | FunctionName: !Ref talrAccountupdateCloudtrailFunction
101 | Action: lambda:InvokeFunction
102 | Principal: apigateway.amazonaws.com
103 | SourceArn: !Sub "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${apiGateway}/*/PUT/cloudtrail/*"
104 |
--------------------------------------------------------------------------------
/sam/cfn/cfn-lambda-core-functions.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Transform: 'AWS::Serverless-2016-10-31'
3 | Description: Provisions Lambda core functions
4 | Parameters:
5 | stage:
6 | Type: String
7 | Resources:
8 | talrReceptionistFunction:
9 | Type: AWS::Serverless::Function
10 | Properties:
11 | FunctionName: talr-receptionist1
12 | Description: Validates account request and starts workflow
13 | Runtime: python2.7
14 | CodeUri: ../functions/talr-receptionist
15 | Handler: handler.handler
16 | MemorySize: 128
17 | Timeout: 30
18 | Role: !GetAtt iamCoreFunctionsRolesStack.Outputs.iamRoleArnLambdaTalrReceptionist
19 | Variables:
20 | TALR_TABLENAME_CBINFO: !GetAtt dynamodbTablesStack.Outputs.dynamodbTableNameTalrCbInfo
21 | TALR_TABLENAME_TASKSTATUS: !GetAtt dynamodbTablesStack.Outputs.dynamodbTableNameTalrTaskStatus
22 | TALR_TABLENAME_ACCOUNTINFO: !GetAtt dynamodbTablesStack.Outputs.dynamodbTableNameTalrAccountInfo
23 | STAGE: !Ref stage
24 |
--------------------------------------------------------------------------------
/sam/cfn/cfn-sns-topics.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Description: Provisions Tailor SNS topics
3 | Parameters:
4 | stage:
5 | Type: String
6 | Resources:
7 | snsTalrOpsNotifications:
8 | Type: AWS::SNS::Topic
9 | Properties:
10 | DisplayName: !Join [ "-", [ "talr-ops-notifications", !Ref stage ] ]
11 | TopicName: !Join [ "-", [ "talr-ops-notifications", !Ref stage ] ]
12 | snsTalrClaRequest:
13 | Type: AWS::SNS::Topic
14 | Properties:
15 | DisplayName: !Join [ "-", [ "talr-cla-request", !Ref stage ] ]
16 | TopicName: !Join [ "-", [ "talr-cla-request", !Ref stage ] ]
17 | snsTalrClaResponse:
18 | Type: AWS::SNS::Topic
19 | Properties:
20 | DisplayName: !Join [ "-", [ "talr-cla-response", !Ref stage ] ]
21 | TopicName: !Join [ "-", [ "talr-cla-response", !Ref stage ] ]
22 | snsTalrVpcCfnResponse:
23 | Type: AWS::SNS::Topic
24 | Properties:
25 | DisplayName: !Join [ "-", [ "talr-vpccfn-response", !Ref stage ] ]
26 | TopicName: !Join [ "-", [ "talr-vpccfn-response", !Ref stage ] ]
27 | snsPolicyTalrVpcCfnResponse:
28 | Type: AWS::SNS::TopicPolicy
29 | Properties:
30 | PolicyDocument:
31 | Id: SnsTopicPolicyTalrVpcCfnResponse
32 | Version: '2012-10-17'
33 | Statement:
34 | - Sid: All_Linked_Accounts_Publish_Access
35 | Effect: Allow
36 | Principal:
37 | AWS: '*'
38 | Action: sns:Publish
39 | Resource: !Ref 'snsTalrVpcCfnResponse'
40 | Topics:
41 | - !Ref 'snsTalrVpcCfnResponse'
42 | snsTalrNipapCfnResponse:
43 | Type: AWS::SNS::Topic
44 | Properties:
45 | DisplayName: !Join [ "-", [ "talr-nipapcfn-response", !Ref stage ] ]
46 | TopicName: !Join [ "-", [ "talr-nipapcfn-response", !Ref stage ] ]
47 | snsTalrDispatchRequest:
48 | Type: AWS::SNS::Topic
49 | Properties:
50 | DisplayName: !Join [ "-", [ "talr-dispatch-request", !Ref stage ] ]
51 | TopicName: !Join [ "-", [ "talr-dispatch-request", !Ref stage ] ]
52 | snsTalrNotifyRequest:
53 | Type: AWS::SNS::Topic
54 | Properties:
55 | DisplayName: !Join [ "-", [ "talr-notify-request", !Ref stage ] ]
56 | TopicName: !Join [ "-", [ "talr-notify-request", !Ref stage ] ]
57 | snsTalrEventsPush:
58 | Type: AWS::SNS::Topic
59 | Properties:
60 | DisplayName: !Join [ "-", [ "talr-events-push", !Ref stage ] ]
61 | TopicName: !Join [ "-", [ "talr-events-push", !Ref stage ] ]
62 | Outputs:
63 | snsTopicNameTalrOpsNotifications:
64 | Description: SNS topic name for talr-ops-notifications
65 | Value: !GetAtt [snsTalrOpsNotifications, TopicName]
66 | snsArnTalrOpsNotifications:
67 | Description: SNS Arn for talr-ops-notifications
68 | Value: !Ref 'snsTalrOpsNotifications'
69 | snsTopicNameTalrClaRequest:
70 | Description: SNS topic name for talr-cla-request
71 | Value: !GetAtt [snsTalrClaRequest, TopicName]
72 | snsArnTalrClaRequest:
73 | Description: SNS Arn for talr-cla-request
74 | Value: !Ref 'snsTalrClaRequest'
75 | snsTopicNameTalrClaResponse:
76 | Description: SNS topic name for talr-cla-response
77 | Value: !GetAtt [snsTalrClaResponse, TopicName]
78 | snsArnTalrClaResponse:
79 | Description: SNS Arn for talr-cla-response
80 | Value: !Ref 'snsTalrClaResponse'
81 | snsTopicNameTalrVpcCfnResponse:
82 | Description: SNS topic name for talr-vpccfn-response
83 | Value: !GetAtt [snsTalrVpcCfnResponse, TopicName]
84 | snsArnTalrVpcCfnResponse:
85 | Description: SNS Arn for talr-vpccfn-response
86 | Value: !Ref 'snsTalrVpcCfnResponse'
87 | snsTopicNameTalrNipapCfnResponse:
88 | Description: SNS topic name for talr-nipapcfn-response
89 | Value: !GetAtt [snsTalrNipapCfnResponse, TopicName]
90 | snsArnTalrNipapCfnResponse:
91 | Description: SNS Arn for talr-nipapcfn-response
92 | Value: !Ref 'snsTalrNipapCfnResponse'
93 | snsTopicNameTalrDispatchRequest:
94 | Description: SNS topic name for talr-dispatch-request
95 | Value: !GetAtt [snsTalrDispatchRequest, TopicName]
96 | snsArnTalrDispatchRequest:
97 | Description: SNS Arn for talr-dispatch-request
98 | Value: !Ref 'snsTalrDispatchRequest'
99 | snsTopicNameTalrNotifyRequest:
100 | Description: SNS topic name for talr-notify-request
101 | Value: !GetAtt [snsTalrNotifyRequest, TopicName]
102 | snsArnTalrNotifyRequest:
103 | Description: SNS Arn for talr-notify-request
104 | Value: !Ref 'snsTalrNotifyRequest'
105 | snsTopicNameTalrEventsPush:
106 | Description: SNS topic name for talr-events-push
107 | Value: !GetAtt [snsTalrEventsPush, TopicName]
108 | snsArnTalrEventsPush:
109 | Description: SNS Arn for talr-events-push
110 | Value: !Ref 'snsTalrEventsPush'
111 |
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-cloudability/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Attr
8 | import os
9 | import sys
10 | import uuid
11 | import re
12 |
13 | # Path to modules needed to package local lambda function for upload
14 | currentdir = os.path.dirname(os.path.realpath(__file__))
15 | sys.path.append(os.path.join(currentdir, "./vendored"))
16 |
17 | # Modules downloaded into the vendored directory
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | dynamodb = boto3.resource('dynamodb')
25 | awslambda = boto3.client('lambda')
26 | sts = boto3.client('sts')
27 |
28 |
29 | def handler(event, context):
30 | log.debug("Received event {}".format(json.dumps(event)))
31 |
32 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
33 |
34 | try:
35 | print('context:resource-path', event['context']['resource-path'] == '/cloudability/{accountId}')
36 | print('path:accountId', re.match("^[0-9]{12}$", event['params']['path']['accountId']))
37 | except Exception as e:
38 | print(e)
39 | print("regex not matching any values passed in request")
40 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
41 |
42 | # Payload processing logic
43 | if event['context']['resource-path'] == '/cloudability/{accountId}' and \
44 | re.match("^[0-9]{12}$", event['params']['path']['accountId']):
45 |
46 | requestId = str(uuid.uuid4())
47 | accountId = event['params']['path']['accountId']
48 | stage = event['stage-variables']['stage']
49 |
50 | # Check if account already exists
51 | getAccountId = accountInfo.scan(
52 | FilterExpression=Attr('accountId').eq(accountId)
53 | )
54 |
55 | if getAccountId['Count'] == 0:
56 | print("Account not found")
57 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
58 |
59 | elif int(getAccountId['Count']) > 0:
60 |
61 | try:
62 | if getAccountId['Items'][0]['requestorDepartment']:
63 | pass
64 | except KeyError as e:
65 | print(e)
66 | raise Exception({"code": "5000", "message": "ERROR: Internal error. Metadata not complete."})
67 |
68 | try:
69 | if getAccountId['Items'][0]['accountTechnicalContactFullName']:
70 | pass
71 | except KeyError as e:
72 | print(e)
73 | raise Exception({"code": "5000", "message": "ERROR: Internal error. Metadata not complete."})
74 |
75 | try:
76 | if getAccountId['Items'][0]['accountTagCostCenter']:
77 | pass
78 | except KeyError as e:
79 | print(e)
80 | raise Exception({"code": "5000", "message": "ERROR: Internal error. Metadata not complete."})
81 |
82 | try:
83 | if getAccountId['Items'][0]['accountTagEnvironment']:
84 | pass
85 | except KeyError as e:
86 | print(e)
87 | raise Exception({"code": "5000", "message": "ERROR: Internal error. Metadata not complete."})
88 |
89 | # Update accountInfo with new requestId
90 | accountInfo.update_item(
91 | Key={
92 | 'accountEmailAddress': getAccountId['Items'][0]['accountEmailAddress']
93 | },
94 | UpdateExpression='SET #requestId = :val1',
95 | ExpressionAttributeNames={'#requestId': "requestId"},
96 | ExpressionAttributeValues={':val1': requestId}
97 | )
98 |
99 | # Build Lambda invoke payload
100 | message = {"requestId": requestId,
101 | "accountId": accountId,
102 | "accountEmailAddress": getAccountId['Items'][0]['accountEmailAddress']}
103 | payload = {"message": message}
104 |
105 | # Call Lambda
106 | awslambda.invoke(
107 | FunctionName='talr-cloudability-' + stage,
108 | InvocationType='Event',
109 | Payload=json.dumps(payload),
110 | )
111 |
112 | return {"code": "2020", "message": "Request Accepted", "requestId": requestId}
113 |
114 | else:
115 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
116 |
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-cloudability/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-cloudtrail/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Attr
8 | import os
9 | import sys
10 | import uuid
11 | import re
12 |
13 | # Path to modules needed to package local lambda function for upload
14 | currentdir = os.path.dirname(os.path.realpath(__file__))
15 | sys.path.append(os.path.join(currentdir, "./vendored"))
16 |
17 | # Modules downloaded into the vendored directory
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | dynamodb = boto3.resource('dynamodb')
25 | awslambda = boto3.client('lambda')
26 | sts = boto3.client('sts')
27 |
28 |
29 | def handler(event, context):
30 | log.debug("Received event {}".format(json.dumps(event)))
31 |
32 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
33 |
34 | try:
35 | print('context:resource-path', event['context']['resource-path'] == '/cloudtrail/{accountId}')
36 | print('path:accountId', re.match("^[0-9]{12}$", event['params']['path']['accountId']))
37 | except Exception as e:
38 | print(e)
39 | print("regex not matching any values passed in request")
40 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
41 |
42 | # Payload processing logic
43 | if event['context']['resource-path'] == '/cloudtrail/{accountId}' and \
44 | re.match("^[0-9]{12}$", event['params']['path']['accountId']):
45 |
46 | requestId = str(uuid.uuid4())
47 | accountId = event['params']['path']['accountId']
48 | stage = event['stage-variables']['stage']
49 |
50 | # Check if account is known to Tailor
51 | getAccountId = accountInfo.scan(
52 | ProjectionExpression='accountId, accountEmailAddress',
53 | FilterExpression=Attr('accountId').eq(accountId)
54 | )
55 |
56 | if getAccountId['Count'] == 0:
57 | print("Account not found")
58 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
59 |
60 | elif int(getAccountId['Count']) > 0:
61 |
62 | # Update accountInfo with new requestId
63 | accountInfo.update_item(
64 | Key={
65 | 'accountEmailAddress': getAccountId['Items'][0]['accountEmailAddress']
66 | },
67 | UpdateExpression='SET #requestId = :val1',
68 | ExpressionAttributeNames={'#requestId': "requestId"},
69 | ExpressionAttributeValues={':val1': requestId}
70 | )
71 |
72 | # Build Lambda invoke payload
73 | message = {"requestId": requestId,
74 | "accountId": accountId,
75 | "accountEmailAddress": getAccountId['Items'][0]['accountEmailAddress']}
76 | payload = {"message": message}
77 |
78 | # Call Lambda
79 | awslambda.invoke(
80 | FunctionName='talr-cloudtrail-' + stage,
81 | InvocationType='Event',
82 | Payload=json.dumps(payload),
83 | )
84 |
85 | return {"code": "2020", "message": "Request Accepted", "requestId": requestId}
86 |
87 | else:
88 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
89 |
--------------------------------------------------------------------------------
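Note: the handler above expects the flattened event an API Gateway mapping template produces, not the raw proxy event. A minimal sketch of a direct invocation follows; the account ID and stage name are illustrative placeholders, not values from this repository.

    # Hypothetical test event for talr-accountupdate-cloudtrail; all values are made up.
    sample_event = {
        "context": {"resource-path": "/cloudtrail/{accountId}"},
        "params": {"path": {"accountId": "123456789012"}},
        "stage-variables": {"stage": "dev"}
    }
    # handler(sample_event, None)  # would record a new requestId and async-invoke talr-cloudtrail-dev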
/sam/functions/talr-accountupdate-cloudtrail/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-accountupdate-cloudtrail/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-config/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Attr
8 | import os
9 | import sys
10 | import uuid
11 | import re
12 |
13 | # Path to modules needed to package local lambda function for upload
14 | currentdir = os.path.dirname(os.path.realpath(__file__))
15 | sys.path.append(os.path.join(currentdir, "./vendored"))
16 |
17 | # Modules downloaded into the vendored directory
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | dynamodb = boto3.resource('dynamodb')
25 | awslambda = boto3.client('lambda')
26 | sts = boto3.client('sts')
27 |
28 |
29 | def handler(event, context):
30 | log.debug("Received event {}".format(json.dumps(event)))
31 |
32 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
33 |
34 | try:
35 |         print('context:resource-path', event['context']['resource-path'] == '/cloudability')
36 | print('body-json:accountId', re.match("^[0-9]{12}$", event['body-json']['accountId']))
37 | except Exception as e:
38 | print(e)
39 | print("regex not matching any values passed in request")
40 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
41 |
42 |     # Payload processing logic
43 | if event['context']['resource-path'] == '/cloudability' and \
44 | re.match("^[0-9]{12}$", event['body-json']['accountId']):
45 |
46 | requestId = str(uuid.uuid4())
47 | accountId = event['body-json']['accountId']
48 | stage = event['stage-variables']['stage']
49 |
50 | # Check if account already exists
51 | getAccountId = accountInfo.scan(
52 | ProjectionExpression='accountId, accountEmailAddress',
53 | FilterExpression=Attr('accountId').eq(accountId)
54 | )
55 |
56 | if getAccountId['Count'] == 0:
57 | print("Account not found")
58 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
59 |
60 | elif int(getAccountId['Count']) > 0:
61 |
62 | # Update accountInfo with new requestId
63 | accountInfo.update_item(
64 | Key={
65 | 'accountEmailAddress': getAccountId['Items'][0]['accountEmailAddress']
66 | },
67 | UpdateExpression='SET #requestId = :val1',
68 | ExpressionAttributeNames={'#requestId': "requestId"},
69 | ExpressionAttributeValues={':val1': requestId}
70 | )
71 |
72 | # Build Lambda invoke payload
73 | message = {"requestId": requestId, "accountId": accountId, "accountEmailAddress": getAccountId['Items'][0]['accountEmailAddress'] }
74 | payload = {"Records": [{"Sns": {"Message": message}}]}
75 |
76 | # Call Lambda
77 | awslambda.invoke(
78 | FunctionName='talr-cloudability-' + stage,
79 | InvocationType='Event',
80 | Payload=json.dumps(payload),
81 | )
82 |
83 | return {"code": "2020", "message": "Request Accepted", "requestId": requestId}
84 |
85 | else:
86 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
87 |
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-config/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-accountupdate-config/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-configrules/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Key, Attr
8 | import os
9 | import sys
10 | import uuid
11 | import re
12 |
13 | # Path to modules needed to package local lambda function for upload
14 | currentdir = os.path.dirname(os.path.realpath(__file__))
15 | sys.path.append(os.path.join(currentdir, "./vendored"))
16 |
17 | # Modules downloaded into the vendored directory
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | sns = boto3.client('sns')
25 | dynamodb = boto3.resource('dynamodb')
26 | awslambda = boto3.client('lambda')
27 | sts = boto3.client('sts')
28 |
29 |
30 | def handler(event, context):
31 | log.debug("Received event {}".format(json.dumps(event)))
32 |
33 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
34 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
35 |
36 | try:
37 | print('context:resource-path', event['context']['resource-path'] == '/configrules')
38 | print('body-json:accountId', re.match("^[0-9]{12}$", event['body-json']['accountId']))
39 | print('header:accountCbAlias', event['params']['header']['accountCbAlias'])
40 | except Exception as e:
41 | print(e)
42 | print("regex not matching any values passed in request")
43 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
44 |
45 |     # Config Rules logic
46 | if event['context']['resource-path'] == '/configrules' and \
47 | re.match("^[0-9]{12}$", event['body-json']['accountId']) and \
48 | re.match("^[a-z-]{4,15}$", event['params']['header']['accountCbAlias']):
49 |
50 | requestId = str(uuid.uuid4())
51 | accountId = event['body-json']['accountId']
52 | accountCbAlias = event['params']['header']['accountCbAlias']
53 | functionAlias = event['stage-variables']['functionAlias']
54 |
55 | # Check if account already exists
56 | getAccountId = accountInfo.scan(
57 | ProjectionExpression='accountId, accountEmailAddress',
58 | FilterExpression=Attr('accountId').eq(accountId)
59 | )
60 |
61 |         if getAccountId['Count'] == 0:
62 |             print("Account not found")
63 |             raise Exception({"code": "4040", "message": "ERROR: Not found"})
64 |
65 |         elif int(getAccountId['Count']) > 0:
66 |             accountEmailAddress = getAccountId['Items'][0]['accountEmailAddress']
67 |
68 | # Update accountInfo with new requestId
69 | updateAccountInfo = accountInfo.update_item(
70 | Key={
71 | 'accountEmailAddress': accountEmailAddress
72 | },
73 | UpdateExpression='SET #requestId = :val1',
74 | ExpressionAttributeNames={'#requestId': "requestId"},
75 | ExpressionAttributeValues={':val1': requestId}
76 | )
77 |
78 | # Lookup payer account number
79 | getCbInfo = cbInfo.get_item(
80 | Key={
81 | 'accountCbAlias': accountCbAlias
82 | }
83 | )
84 | accountCbId = getCbInfo['Item']['accountCbId']
85 |
86 | # Initialize credentials for linked account
87 | la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token = \
88 | initialize_la_services(account_cb_id=accountCbId, la_account_id=accountId)
89 |
90 | # Build Lambda invoke payload
91 | message = "lambda={'requestId':'" + requestId + "', 'accountEmailAddress': '" + accountEmailAddress + "'}"
92 | payload = {"Records": [{"Sns": {"Message": message}}]}
93 |
94 | # Call Lambda
95 |             invokeConfig = awslambda.invoke(
96 | FunctionName='talr-config',
97 | InvocationType='Event',
98 | Payload=json.dumps(payload),
99 | Qualifier=functionAlias
100 | )
101 |
102 | return {"code": "2020", "message": "Request Accepted", "requestId": requestId}
103 |
104 | else:
105 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
106 |
107 |
108 | def initialize_la_services(account_cb_id, la_account_id):
109 | # Payer account credentials
110 | payerAssumeRole = sts.assume_role(
111 | RoleArn="arn:aws:iam::" + account_cb_id + ":role/tailor",
112 | RoleSessionName="talrIamPayerAssumeRole"
113 | )
114 | payerCredentials = payerAssumeRole['Credentials']
115 | payer_aws_access_key_id = payerCredentials['AccessKeyId']
116 | payer_aws_secret_access_key = payerCredentials['SecretAccessKey']
117 | payer_aws_session_token = payerCredentials['SessionToken']
118 |
119 | # Linked account credentials
120 | laSts = boto3.client(
121 | 'sts',
122 | aws_access_key_id=payer_aws_access_key_id,
123 | aws_secret_access_key=payer_aws_secret_access_key,
124 | aws_session_token=payer_aws_session_token,
125 | )
126 |
127 | laAssumeRole = laSts.assume_role(
128 | RoleArn="arn:aws:iam::" + la_account_id + ":role/PayerAccountAccessRole",
129 | RoleSessionName="talrIamLaAssumeRole"
130 | )
131 | laCredentials = laAssumeRole['Credentials']
132 | la_aws_access_key_id = laCredentials['AccessKeyId']
133 | la_aws_secret_access_key = laCredentials['SecretAccessKey']
134 | la_aws_session_token = laCredentials['SessionToken']
135 |
136 | # Initialize IAM client with Linked Account credentials
137 | laIam = boto3.client(
138 | 'iam',
139 | aws_access_key_id=la_aws_access_key_id,
140 | aws_secret_access_key=la_aws_secret_access_key,
141 | aws_session_token=la_aws_session_token,
142 | )
143 |
144 | return (la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token)
--------------------------------------------------------------------------------
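One design point worth noting: the handler above wraps its outgoing message in an SNS-shaped envelope, presumably so that talr-config, which normally consumes real SNS notifications, can be invoked directly without a second parsing path. A sketch of that envelope with placeholder values (not a payload taken from the repository):

    # Illustrative only: the synthetic SNS-style event handed to talr-config via direct invoke.
    payload = {
        "Records": [{
            "Sns": {
                "Message": "lambda={'requestId':'<request-id>', 'accountEmailAddress': '<email>'}"
            }
        }]
    }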
/sam/functions/talr-accountupdate-configrules/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-accountupdate-configrules/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-metadata/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Attr
8 | import os
9 | import sys
10 | import uuid
11 | import re
12 |
13 | # Path to modules needed to package local lambda function for upload
14 | currentdir = os.path.dirname(os.path.realpath(__file__))
15 | sys.path.append(os.path.join(currentdir, "./vendored"))
16 |
17 | # Modules downloaded into the vendored directory
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | dynamodb = boto3.resource('dynamodb')
25 | awslambda = boto3.client('lambda')
26 | sts = boto3.client('sts')
27 |
28 |
29 | def handler(event, context):
30 | log.debug("Received event {}".format(json.dumps(event)))
31 |
32 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
33 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
34 |
35 | try:
36 | print('context:resource-path', event['context']['resource-path'] == '/accounts')
37 | print('body-json:accountId', re.match("^[0-9]{12}$", event['body-json']['accountId']))
38 | except Exception as e:
39 | print(e)
40 | print("regex not matching any values passed in request")
41 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
42 |
43 | # Header validation
44 | try:
45 | print('header:accountCbAlias', re.match("^[a-z]{3,4}-[a-z]{3,5}$", event['params']['header']['accountCbAlias']))
46 |
47 | # Test if the accountCbAlias key exists
48 | getCbInfo = cbInfo.get_item(
49 | Key={
50 | 'accountCbAlias': event['params']['header']['accountCbAlias']
51 | }
52 | )
53 |
54 |         # Test whether the value of accountCbAlias is valid; it is if cbInfo returns an entry.
55 | accountCbAlias = getCbInfo['Item']['accountCbAlias']
56 |
57 | except Exception as e:
58 | print(e)
59 | print("regex not matching any values passed in request")
60 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
61 |
62 | # accountId validation
63 | accountId = None
64 | try:
65 | if event['context']['resource-path'] == '/accounts' and event['params']['querystring']['accountid']:
66 | if re.match("^[0-9]{12}$", event['params']['querystring']['accountid']) or \
67 | re.match("^[0-9]{4}-[0-9]{4}-[0-9]{4}$", event['params']['querystring']['accountid']):
68 |
69 | accountId = re.sub('-', '', event['params']['querystring']['accountid'])
70 | stage = event['stage-variables']['stage']
71 | requestId = str(uuid.uuid4())
72 | accountIdFound = True
73 |                 print('accountIdFound', accountIdFound)
74 |
75 | # Check if account exists
76 | getAccountId = accountInfo.scan(
77 | ProjectionExpression='accountId, accountEmailAddress',
78 | FilterExpression=Attr('accountId').eq(accountId)
79 | )
80 |
81 | if getAccountId['Count'] == 0:
82 | print("Account not found")
83 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
84 |
85 | elif int(getAccountId['Count']) > 0:
86 |
87 | # Update accountInfo with new requestId
88 | accountInfo.update_item(
89 | Key={
90 | 'accountEmailAddress': getAccountId['Items'][0]['accountEmailAddress']
91 | },
92 | UpdateExpression='SET #requestId = :val1',
93 | ExpressionAttributeNames={'#requestId': "requestId"},
94 | ExpressionAttributeValues={':val1': requestId}
95 | )
96 |
97 | else:
98 | accountIdFound = False
99 |             print('accountIdFound', accountIdFound)
100 | except KeyError as e:
101 | print(e)
102 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
103 |
104 |
105 | # accountUpdate body request validation
106 |
107 |
108 |
109 |
110 | # if event['context']['resource-path'] == '/accounts' and \
111 | # re.match("^[0-9]{12}$", event['body-json']['accountId']):
112 | #
113 | # requestId = str(uuid.uuid4())
114 | # accountId = event['body-json']['accountId']
115 | # stage = event['stage-variables']['stage']
116 | #
117 | # # Check if account already exists
118 | # getAccountId = accountInfo.scan(
119 | # ProjectionExpression='accountId, accountEmailAddress',
120 | # FilterExpression=Attr('accountId').eq(accountId)
121 | # )
122 | #
123 | # if getAccountId['Count'] == 0:
124 | # print("Account not found")
125 | # raise Exception({"code": "4040", "message": "ERROR: Not found"})
126 | #
127 | # elif int(getAccountId['Count']) > 0:
128 | #
129 | # # Update accountInfo with new requestId
130 | # accountInfo.update_item(
131 | # Key={
132 | # 'accountEmailAddress': getAccountId['Items'][0]['accountEmailAddress']
133 | # },
134 | # UpdateExpression='SET #requestId = :val1',
135 | # ExpressionAttributeNames={'#requestId': "requestId"},
136 | # ExpressionAttributeValues={':val1': requestId}
137 | # )
138 | #
139 | # # Build Lambda invoke payload
140 | # message = {"requestId": requestId, "accountId": accountId, "accountEmailAddress": getAccountId['Items'][0]['accountEmailAddress'] }
141 | # payload = {"Records": [{"Sns": {"Message": message}}]}
142 | #
143 | # # Call Lambda
144 | # awslambda.invoke(
145 | # FunctionName='talr-cloudability-' + stage,
146 | # InvocationType='Event',
147 | # Payload=json.dumps(payload),
148 | # )
149 | #
150 | # return {"code": "2020", "message": "Request Accepted", "requestId": requestId}
151 | #
152 | # else:
153 | # raise Exception({"code": "4000", "message": "ERROR: Bad request"})
154 |
--------------------------------------------------------------------------------
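The accountId handling above accepts either a plain 12-digit account number or the dash-separated form and strips the dashes before the DynamoDB scan. A small standalone sketch of the same normalization, using a made-up value:

    import re

    # Hypothetical input; mirrors the querystring normalization in the handler above.
    raw = "1234-5678-9012"
    if re.match("^[0-9]{12}$", raw) or re.match("^[0-9]{4}-[0-9]{4}-[0-9]{4}$", raw):
        account_id = re.sub('-', '', raw)  # -> "123456789012"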
/sam/functions/talr-accountupdate-metadata/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-accountupdate-metadata/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-vpcdns/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Attr
8 | import os
9 | import sys
10 | import uuid
11 | import re
12 |
13 | # Path to modules needed to package local lambda function for upload
14 | currentdir = os.path.dirname(os.path.realpath(__file__))
15 | sys.path.append(os.path.join(currentdir, "./vendored"))
16 |
17 | # Modules downloaded into the vendored directory
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | dynamodb = boto3.resource('dynamodb')
25 | awslambda = boto3.client('lambda')
26 | sts = boto3.client('sts')
27 |
28 |
29 | def handler(event, context):
30 | log.debug("Received event {}".format(json.dumps(event)))
31 |
32 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
33 |
34 | try:
35 | print('context:resource-path', event['context']['resource-path'] == '/vpcdns')
36 |         print('body-json:region', re.match("^us-[a-z]{4}-[12]$", event['body-json']['region']))
37 | print('body-json:vpcId', re.match("^vpc-[a-z0-9]{8}$", event['body-json']['vpcId']))
38 | print('body-json:accountId', re.match("^[0-9]{12}$", event['body-json']['accountId']))
39 | except Exception as e:
40 | print(e)
41 | print("regex not matching any values passed in request")
42 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
43 |
44 | # VPC DNS logic
45 | if event['context']['resource-path'] == '/vpcdns' and \
46 | re.match("^vpc-[a-z0-9]{8}$", event['body-json']['vpcId']) and \
47 |             re.match("^us-[a-z]{4}-[12]$", event['body-json']['region']) and \
48 | re.match("^[0-9]{12}$", event['body-json']['accountId']):
49 |
50 | requestId = str(uuid.uuid4())
51 | region = event['body-json']['region']
52 | accountId = event['body-json']['accountId']
53 | vpcId = event['body-json']['vpcId']
54 | stage = event['stage-variables']['stage']
55 |
56 | # Check if account already exists
57 | getAccountId = accountInfo.scan(
58 | ProjectionExpression='accountId, accountEmailAddress',
59 | FilterExpression=Attr('accountId').eq(accountId)
60 | )
61 |
62 | if getAccountId['Count'] == 0:
63 | print("Account not found")
64 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
65 |
66 | elif int(getAccountId['Count']) > 0:
67 |
68 | # Update accountInfo with new requestId
69 | accountInfo.update_item(
70 | Key={
71 | 'accountEmailAddress': getAccountId['Items'][0]['accountEmailAddress']
72 | },
73 | UpdateExpression='SET #requestId = :val1',
74 | ExpressionAttributeNames={'#requestId': "requestId"},
75 | ExpressionAttributeValues={':val1': requestId}
76 | )
77 |
78 | # Build Lambda invoke payload
79 | message = "requestId='" + requestId + "'\naccountId='" + accountId + "'\nregion='" + region + "'\nvpcId='" + vpcId + "'\n"
80 | payload = {"Records": [{"Sns": {"Message": message}}]}
81 |
82 | # Call Lambda
83 | awslambda.invoke(
84 | FunctionName='talr-vpcdns-' + stage,
85 | InvocationType='Event',
86 | Payload=json.dumps(payload),
87 | )
88 |
89 | return {"code": "2020", "message": "Request Accepted", "requestId": requestId}
90 |
91 | else:
92 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
93 |
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-vpcdns/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-accountupdate-vpcdns/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-accountupdate-vpcflowlogs/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Attr
8 | import os
9 | import sys
10 | import uuid
11 | import re
12 |
13 | # Path to modules needed to package local lambda function for upload
14 | currentdir = os.path.dirname(os.path.realpath(__file__))
15 | sys.path.append(os.path.join(currentdir, "./vendored"))
16 |
17 | # Modules downloaded into the vendored directory
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | sns = boto3.client('sns')
25 | dynamodb = boto3.resource('dynamodb')
26 | awslambda = boto3.client('lambda')
27 | sts = boto3.client('sts')
28 |
29 |
30 | def handler(event, context):
31 | log.debug("Received event {}".format(json.dumps(event)))
32 |
33 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
34 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
35 |
36 | try:
37 | print('context:resource-path', event['context']['resource-path'] == '/vpcflowlogs')
38 |         print('body-json:region', re.match("^us-[a-z]{4}-[12]$", event['body-json']['region']))
39 | print('body-json:stackName', re.match("^[0-9a-z|_-]{4,35}$", event['body-json']['stackName']))
40 | print('body-json:accountId', re.match("^[0-9]{12}$", event['body-json']['accountId']))
41 | print('header:accountCbAlias', event['params']['header']['accountCbAlias'])
42 | except Exception as e:
43 | print(e)
44 | print("regex not matching any values passed in request")
45 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
46 |
47 | # VPC Flow Logs logic
48 | if event['context']['resource-path'] == '/vpcflowlogs' and \
49 | re.match("^[0-9a-z|_-]{4,35}$", event['body-json']['stackName']) and \
50 |             re.match("^us-[a-z]{4}-[12]$", event['body-json']['region']) and \
51 | re.match("^[0-9]{12}$", event['body-json']['accountId']) and \
52 | re.match("^[a-z-]{4,15}$", event['params']['header']['accountCbAlias']):
53 |
54 | requestId = str(uuid.uuid4())
55 | region = event['body-json']['region']
56 | accountId = event['body-json']['accountId']
57 | stackName = event['body-json']['stackName']
58 | accountCbAlias = event['params']['header']['accountCbAlias']
59 | stage = event['stage-variables']['stage']
60 |
61 | # Check if account already exists
62 | getAccountId = accountInfo.scan(
63 | ProjectionExpression='accountId, accountEmailAddress',
64 | FilterExpression=Attr('accountId').eq(accountId)
65 | )
66 |
67 | if getAccountId['Count'] == 0:
68 | print("Account not found")
69 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
70 |
71 | elif int(getAccountId['Count']) > 0:
72 |
73 | # Update accountInfo with new requestId
74 | updateAccountInfo = accountInfo.update_item(
75 | Key={
76 | 'accountEmailAddress': getAccountId['Items'][0]['accountEmailAddress']
77 | },
78 | UpdateExpression='SET #requestId = :val1',
79 | ExpressionAttributeNames={'#requestId': "requestId"},
80 | ExpressionAttributeValues={':val1': requestId}
81 | )
82 |
83 | # Lookup payer account number
84 | getCbInfo = cbInfo.get_item(
85 | Key={
86 | 'accountCbAlias': accountCbAlias
87 | }
88 | )
89 | accountCbId = getCbInfo['Item']['accountCbId']
90 |
91 | # Initialize credentials for linked account
92 | la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token = \
93 | initialize_la_services(account_cb_id=accountCbId, la_account_id=accountId)
94 |
95 | # Lookup stackId
96 | laCfn = boto3.client(
97 | 'cloudformation',
98 | region_name=region,
99 | aws_access_key_id=la_aws_access_key_id,
100 | aws_secret_access_key=la_aws_secret_access_key,
101 | aws_session_token=la_aws_session_token,
102 | )
103 | try:
104 | describeStack = laCfn.describe_stacks(
105 | StackName=stackName
106 | )
107 | stackId = describeStack['Stacks'][0]['StackId']
108 | except Exception as e:
109 | print(e)
110 | print("Stack not found")
111 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
112 |
113 | # Build Lambda invoke payload
114 | message = "StackId='" + stackId + "'\nLogicalResourceId='core'\nNamespace='" + accountId + "'\nPhysicalResourceId='" + stackId + "'\nResourceStatus='CREATE_COMPLETE'\nStackName='" + stackName + "'\n"
115 | payload = {"Records": [{"Sns": {"Message": message}}]}
116 |
117 | # Call Lambda
118 | awslambda.invoke(
119 | FunctionName='talr-vpcflowlogs-' + stage,
120 | InvocationType='Event',
121 | Payload=json.dumps(payload)
122 | )
123 |
124 | return {"code": "2020", "message": "Request Accepted", "requestId": requestId}
125 |
126 | else:
127 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
128 |
129 |
130 | def initialize_la_services(account_cb_id, la_account_id):
131 | # Payer account credentials
132 | payerAssumeRole = sts.assume_role(
133 | RoleArn="arn:aws:iam::" + account_cb_id + ":role/tailor",
134 | RoleSessionName="talrIamPayerAssumeRole"
135 | )
136 | payerCredentials = payerAssumeRole['Credentials']
137 | payer_aws_access_key_id = payerCredentials['AccessKeyId']
138 | payer_aws_secret_access_key = payerCredentials['SecretAccessKey']
139 | payer_aws_session_token = payerCredentials['SessionToken']
140 |
141 | # Linked account credentials
142 | laSts = boto3.client(
143 | 'sts',
144 | aws_access_key_id=payer_aws_access_key_id,
145 | aws_secret_access_key=payer_aws_secret_access_key,
146 | aws_session_token=payer_aws_session_token,
147 | )
148 |
149 | laAssumeRole = laSts.assume_role(
150 | RoleArn="arn:aws:iam::" + la_account_id + ":role/PayerAccountAccessRole",
151 | RoleSessionName="talrIamLaAssumeRole"
152 | )
153 | laCredentials = laAssumeRole['Credentials']
154 | la_aws_access_key_id = laCredentials['AccessKeyId']
155 | la_aws_secret_access_key = laCredentials['SecretAccessKey']
156 | la_aws_session_token = laCredentials['SessionToken']
157 |
158 | # Initialize IAM client with Linked Account credentials
159 | laIam = boto3.client(
160 | 'iam',
161 | aws_access_key_id=la_aws_access_key_id,
162 | aws_secret_access_key=la_aws_secret_access_key,
163 | aws_session_token=la_aws_session_token,
164 | )
165 |
166 | return (la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token)
--------------------------------------------------------------------------------
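As with the other update handlers, the interesting part is the synthetic event: the handler fabricates a CloudFormation-style notification (StackId, ResourceStatus='CREATE_COMPLETE', and so on), presumably so talr-vpcflowlogs can reuse the parsing it applies to genuine stack-completion notifications. A sketch of that message with placeholder values:

    # Illustrative only: the CloudFormation-notification-shaped message sent to talr-vpcflowlogs.
    message = (
        "StackId='<stack-arn>'\n"
        "LogicalResourceId='core'\n"
        "Namespace='<account-id>'\n"
        "PhysicalResourceId='<stack-arn>'\n"
        "ResourceStatus='CREATE_COMPLETE'\n"
        "StackName='<stack-name>'\n"
    )
    payload = {"Records": [{"Sns": {"Message": message}}]}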
/sam/functions/talr-accountupdate-vpcflowlogs/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-accountupdate-vpcflowlogs/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-acmwhitelist/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import os
7 | import sys
8 | import time
9 | import boto3
10 |
11 | # Path to modules needed to package local lambda function for upload
12 | currentdir = os.path.dirname(os.path.realpath(__file__))
13 | sys.path.append(os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 |
17 | # Logging for Serverless
18 | log = logging.getLogger()
19 | log.setLevel(logging.DEBUG)
20 |
21 | # Initializing AWS services
22 | dynamodb = boto3.resource('dynamodb')
23 | sts = boto3.client('sts')
24 |
25 |
26 | def handler(event, context):
27 | log.debug("Received event {}".format(json.dumps(event)))
28 |
29 | taskStatus = dynamodb.Table(os.environ['TAILOR_TABLENAME_TASKSTATUS'])
30 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
31 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
32 | incomingMessage = json.loads(event['Records'][0]['Sns']['Message'])
33 | accountEmailAddress = incomingMessage['lambda']['accountEmailAddress']
34 |
35 | getAccountInfo = accountInfo.get_item(
36 | Key={
37 | 'accountEmailAddress': accountEmailAddress
38 | }
39 | )
40 | laAccountId = getAccountInfo['Item']['accountId']
41 | requestId = getAccountInfo['Item']['requestId']
42 | accountCbAlias = getAccountInfo['Item']['accountCbAlias']
43 | accountTagEnvironment = getAccountInfo['Item']['accountTagEnvironment']
44 |
45 | # Update task start status
46 | taskStatus.put_item(
47 | Item={
48 | "requestId": requestId,
49 | "eventTimestamp": str(time.time()),
50 | "period": "start",
51 | "taskName": "ACMWHITELIST",
52 | "function": "talr-acmwhitelist",
53 | "message": incomingMessage
54 | }
55 | )
56 |
57 | getCbInfo = cbInfo.get_item(
58 | Key={
59 | 'accountCbAlias': accountCbAlias
60 | }
61 | )
62 | accountDomainName = getCbInfo['Item']['accountDomainName']
63 | accountCbId = getCbInfo['Item']['accountCbId']
64 | accountSupportTeamEmail = getCbInfo['Item']['accountSupportTeamEmail']
65 |
66 | if accountTagEnvironment != 'tst':
67 |
68 | # Initialize credentials for linked account
69 | laCredentials = initialize_la_services(account_cb_id=accountCbId, la_account_id=laAccountId)
70 |
71 | # Initialize Support client with Linked Account credentials
72 | laSupport = boto3.client(
73 | 'support',
74 | aws_access_key_id=laCredentials[0],
75 | aws_secret_access_key=laCredentials[1],
76 | aws_session_token=laCredentials[2],
77 | region_name='us-east-1'
78 | )
79 |
80 |         # Create a support case from the Linked Account requesting ACM domain whitelisting
81 | createCase = laSupport.create_case(
82 | subject='Whitelist request',
83 | serviceCode='amazon-acm-service',
84 | severityCode='normal',
85 | categoryCode='domain-whitelisting',
86 | communicationBody='Please whitelist this account for cert requests to *.' + accountDomainName + '.',
87 | ccEmailAddresses=[
88 | accountSupportTeamEmail,
89 | ],
90 | language='en',
91 | issueType='technical'
92 | )
93 | print(createCase)
94 |
95 | # Update task end status
96 | taskStatus.put_item(
97 | Item={
98 | "requestId": requestId,
99 | "eventTimestamp": str(time.time()),
100 | "period": "end",
101 | "taskName": "ACMWHITELIST",
102 | "function": "talr-acmwhitelist",
103 | "message": incomingMessage
104 | }
105 | )
106 |
107 | else:
108 | print("No ACM whitelisting requested for", laAccountId)
109 |
110 | # Update task end status
111 | taskStatus.put_item(
112 | Item={
113 | "requestId": requestId,
114 | "eventTimestamp": str(time.time()),
115 | "period": "end",
116 | "taskName": "ACMWHITELIST",
117 | "function": "talr-acmwhitelist",
118 | "message": incomingMessage
119 | }
120 | )
121 |
122 | return
123 |
124 |
125 | def initialize_la_services(account_cb_id, la_account_id):
126 |
127 | """
128 | :param account_cb_id: Account number of the consolidated billing (payer) account
129 | :param la_account_id: Account number of the Linked Account
130 | :return: access key, secret key and session token used to assume a session into the Linked Account.
131 | """
132 |
133 | # Payer account credentials
134 | payerAssumeRole = sts.assume_role(
135 | RoleArn="arn:aws:iam::" + account_cb_id + ":role/tailor",
136 | RoleSessionName="talrIamPayerAssumeRole"
137 | )
138 | payerCredentials = payerAssumeRole['Credentials']
139 | payer_aws_access_key_id = payerCredentials['AccessKeyId']
140 | payer_aws_secret_access_key = payerCredentials['SecretAccessKey']
141 | payer_aws_session_token = payerCredentials['SessionToken']
142 |
143 | # Linked account credentials
144 | laSts = boto3.client(
145 | 'sts',
146 | aws_access_key_id=payer_aws_access_key_id,
147 | aws_secret_access_key=payer_aws_secret_access_key,
148 | aws_session_token=payer_aws_session_token,
149 | )
150 |
151 | laAssumeRole = laSts.assume_role(
152 | RoleArn="arn:aws:iam::" + la_account_id + ":role/PayerAccountAccessRole",
153 | RoleSessionName="talrIamLaAssumeRole"
154 | )
155 | laCredentials = laAssumeRole['Credentials']
156 | la_aws_access_key_id = laCredentials['AccessKeyId']
157 | la_aws_secret_access_key = laCredentials['SecretAccessKey']
158 | la_aws_session_token = laCredentials['SessionToken']
159 |
160 | return la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token
161 |
--------------------------------------------------------------------------------
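This function is SNS-triggered and expects the message body to carry a 'lambda' object with the account email address. A minimal sketch of a matching test event; the address is a placeholder:

    import json

    # Hypothetical SNS event for talr-acmwhitelist; the email address is illustrative only.
    sample_event = {
        "Records": [{
            "Sns": {
                "Message": json.dumps({"lambda": {"accountEmailAddress": "project-team@example.com"}})
            }
        }]
    }
    # handler(sample_event, None)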
/sam/functions/talr-acmwhitelist/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-acmwhitelist/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-cla/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | import time
10 | import datetime
11 | import hashlib
12 | import hmac
13 |
14 | # Path to modules needed to package local lambda function for upload
15 | currentdir = os.path.dirname(os.path.realpath(__file__))
16 | sys.path.append(os.path.join(currentdir, "./vendored"))
17 |
18 | # Modules downloaded into the vendored directory
19 | import requests
20 |
21 | # Logging for Serverless
22 | log = logging.getLogger()
23 | log.setLevel(logging.DEBUG)
24 |
25 | # Initializing AWS services
26 | sns = boto3.client('sns')
27 | sts = boto3.client('sts')
28 | dynamodb = boto3.resource('dynamodb')
29 |
30 |
31 | def handler(event, context):
32 | log.debug("Received event {}".format(json.dumps(event)))
33 |
34 | taskStatus = dynamodb.Table(os.environ['TAILOR_TABLENAME_TASKSTATUS'])
35 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
36 | claStatus = dynamodb.Table(os.environ['TAILOR_TABLENAME_CLASTATUS'])
37 | incomingMessage = json.loads(event['Records'][0]['Sns']['Message'])
38 | requestId = incomingMessage['lambda']['requestId']
39 | accountTagLongProjectName = incomingMessage['lambda']['accountTagLongProjectName']
40 | accountCbAlias = incomingMessage['lambda']['accountCbAlias']
41 | accountEmailAddress = incomingMessage['lambda']['accountEmailAddress']
42 |
43 | # Update task start status
44 | updateStatus = taskStatus.put_item(
45 | Item={
46 | "requestId": requestId,
47 | "eventTimestamp": str(time.time()),
48 | "period": "start",
49 | "taskName": "CLA_SUBMISSION",
50 | "function": "talr-cla",
51 | "message": "-"
52 | }
53 | )
54 |
55 | # Querying cbInfo to extract other cb related values like accountNumber
56 | getCbInfo = cbInfo.get_item(
57 | Key={
58 | 'accountCbAlias': accountCbAlias
59 | }
60 | )
61 | accountCbId = getCbInfo['Item']['accountCbId']
62 |
63 | # Assuming Payer/CB role and extracting credentials to be used by the CLA call
64 | payerAssumeRole = sts.assume_role(
65 | RoleArn="arn:aws:iam::" + accountCbId + ":role/tailor",
66 | RoleSessionName="talrClaPayerAssumeRole"
67 | )
68 | payerCredentials = payerAssumeRole['Credentials']
69 | aws_access_key_id = payerCredentials['AccessKeyId']
70 | aws_secret_access_key = payerCredentials['SecretAccessKey']
71 | aws_session_token = payerCredentials['SessionToken']
72 |
73 | accountRequest = {
74 | "AccountName": accountTagLongProjectName,
75 | "Email": accountEmailAddress,
76 | "IamUserAccessToBilling": "ALLOW",
77 | "RoleName": "PayerAccountAccessRole"
78 | }
79 |
80 | endpoint, headers, data = sig_v4_post(aws_access_key_id=aws_access_key_id,
81 | aws_secret_access_key=aws_secret_access_key,
82 | aws_session_token=aws_session_token,
83 | payload=accountRequest)
84 | createAccountsResponse = requests.post(endpoint, headers=headers, data=data)
85 | responseData = json.loads(createAccountsResponse.content)
86 | print(responseData)
87 |
88 | updateClaStatus = claStatus.put_item(
89 | Item={
90 | "requestId": requestId,
91 | "claRequestId": responseData['CreateAccountStatus']['Id'],
92 | "accountName": responseData['CreateAccountStatus']['AccountName'],
93 | "requestedTimestamp": str(responseData['CreateAccountStatus']['RequestedTimestamp']),
94 | "state": responseData['CreateAccountStatus']['State']
95 | }
96 | )
97 |
98 | # Update task end status
99 | updateStatus = taskStatus.put_item(
100 | Item={
101 | "requestId": requestId,
102 | "eventTimestamp": str(time.time()),
103 | "period": "end",
104 | "taskName": "CLA_SUBMISSION",
105 | "function": "talr-cla",
106 | "message": "-"
107 | }
108 | )
109 |
110 |
111 | # -------------------------------------------------------------------------------------------------
112 | # This version makes a POST request and passes request parameters
113 | # in the body (payload) of the request. Auth information is passed in
114 | # an Authorization header.
115 | def sig_v4_post(payload, aws_access_key_id, aws_secret_access_key, aws_session_token):
116 | if aws_access_key_id is None or aws_secret_access_key is None or aws_session_token is None:
117 | print('No credentials available.')
118 | sys.exit()
119 |
120 | data = json.dumps(payload)
121 |
122 | method = 'POST'
123 | service = 'organizations'
124 | host = 'organizations.us-east-1.amazonaws.com'
125 | region = 'us-east-1'
126 | endpoint = 'https://organizations.us-east-1.amazonaws.com'
127 | content_type = 'application/x-amz-json-1.1'
128 | amz_target = 'AWSOrganizationsV20161128.CreateAccount'
129 |
130 | # Create a date for headers and the credential string
131 | t = datetime.datetime.utcnow()
132 | amz_date = t.strftime('%Y%m%dT%H%M%SZ')
133 | date_stamp = t.strftime('%Y%m%d') # Date w/o time, used in credential scope
134 |
135 | # ************* TASK 1: CREATE A CANONICAL REQUEST *************
136 | # http://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
137 |
138 | # Step 1 is to define the verb (GET, POST, etc.)--already done.
139 |
140 | # Step 2: Create canonical URI--the part of the URI from domain to query
141 | # string (use '/' if no path)
142 | canonical_uri = '/'
143 |
144 |     # Step 3: Create the canonical query string. In this example, request
145 | # parameters are passed in the body of the request and the query string
146 | # is blank.
147 | canonical_querystring = ''
148 |
149 | # Step 4: Create the canonical headers. Header names and values
150 | # must be trimmed and lowercase, and sorted in ASCII order.
151 | # Note that there is a trailing \n.
152 | canonical_headers = 'content-type:' + content_type + '\n' + 'host:' + host + '\n' + 'x-amz-date:' + amz_date + '\n' + 'x-amz-target:' + amz_target + '\n'
153 |
154 | # Step 5: Create the list of signed headers. This lists the headers
155 | # in the canonical_headers list, delimited with ";" and in alpha order.
156 | # Note: The request can include any headers; canonical_headers and
157 | # signed_headers include those that you want to be included in the
158 | # hash of the request. "Host" and "x-amz-date" are always required.
159 | # For DynamoDB, content-type and x-amz-target are also required.
160 | signed_headers = 'content-type;host;x-amz-date;x-amz-target'
161 |
162 | # Step 6: Create payload hash. In this example, the payload (body of
163 | # the request) contains the request parameters.
164 | payload_hash = hashlib.sha256(data).hexdigest()
165 |
166 |     # Step 7: Combine elements to create canonical request
167 | canonical_request = method + '\n' + canonical_uri + '\n' + canonical_querystring + '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash
168 |
169 | # ************* TASK 2: CREATE THE STRING TO SIGN*************
170 | # Match the algorithm to the hashing algorithm you use, either SHA-1 or
171 | # SHA-256 (recommended)
172 | algorithm = 'AWS4-HMAC-SHA256'
173 | credential_scope = date_stamp + '/' + region + '/' + service + '/' + 'aws4_request'
174 | string_to_sign = algorithm + '\n' + amz_date + '\n' + credential_scope + '\n' + hashlib.sha256(
175 | canonical_request).hexdigest()
176 |
177 | # ************* TASK 3: CALCULATE THE SIGNATURE *************
178 | # Create the signing key using the function defined below.
179 | signing_key = getSignatureKey(aws_secret_access_key, date_stamp, region, service)
180 |
181 | # Sign the string_to_sign using the signing_key
182 | signature = hmac.new(signing_key, (string_to_sign).encode('utf-8'), hashlib.sha256).hexdigest()
183 |
184 | # ************* TASK 4: ADD SIGNING INFORMATION TO THE REQUEST *************
185 | # Put the signature information in a header named Authorization.
186 | authorization_header = algorithm + ' ' + 'Credential=' + aws_access_key_id + '/' + credential_scope + ', ' + 'SignedHeaders=' + signed_headers + ', ' + 'Signature=' + signature
187 |
188 |     # For CreateAccount, the request can include any headers, but MUST include "host", "x-amz-date",
189 | # "x-amz-target", "content-type", and "Authorization". Except for the authorization
190 | # header, the headers must be included in the canonical_headers and signed_headers values, as
191 | # noted earlier. Order here is not significant.
192 |     # Python note: The 'host' header is added automatically by the Python 'requests' library.
193 | headers = {'Content-Type': content_type,
194 | 'X-Amz-Date': amz_date,
195 | 'X-Amz-Target': amz_target,
196 | 'X-Amz-security-token': aws_session_token,
197 | 'Authorization': authorization_header}
198 |
199 | return endpoint, headers, data
200 |
201 |
202 | # Key derivation functions. See:
203 | # http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python
204 | def sign(key, msg):
205 | return hmac.new(key, msg.encode("utf-8"), hashlib.sha256).digest()
206 |
207 |
208 | def getSignatureKey(key, date_stamp, regionName, serviceName):
209 | kDate = sign(('AWS4' + key).encode('utf-8'), date_stamp)
210 | kRegion = sign(kDate, regionName)
211 | kService = sign(kRegion, serviceName)
212 | kSigning = sign(kService, 'aws4_request')
213 | return kSigning
--------------------------------------------------------------------------------
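The hand-rolled SigV4 POST above calls the Organizations CreateAccount API directly. For comparison only (this is not what the repository does), the same request could be issued through boto3's Organizations client once the payer credentials are in hand; a minimal sketch, assuming a boto3 version that ships the organizations service and using placeholder credential values:

    import boto3

    # Sketch only: boto3 equivalent of the signed CreateAccount request built by sig_v4_post.
    org = boto3.client(
        'organizations',
        aws_access_key_id='<payer-access-key>',        # stand-ins for the assume_role output above
        aws_secret_access_key='<payer-secret-key>',
        aws_session_token='<payer-session-token>',
    )
    response = org.create_account(
        Email='<account-email-address>',
        AccountName='<long-project-name>',
        RoleName='PayerAccountAccessRole',
        IamUserAccessToBilling='ALLOW',
    )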
/sam/functions/talr-cla/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/sam/functions/talr-cloudability/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 | retry
--------------------------------------------------------------------------------
/sam/functions/talr-cloudtrail/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | import time
10 |
11 | # Path to modules needed to package local lambda function for upload
12 | currentdir = os.path.dirname(os.path.realpath(__file__))
13 | sys.path.append(os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 |
17 | # Logging for Serverless
18 | log = logging.getLogger()
19 | log.setLevel(logging.DEBUG)
20 |
21 | # Initializing AWS services
22 | dynamodb = boto3.resource('dynamodb')
23 | sts = boto3.client('sts')
24 |
25 |
26 | def handler(event, context):
27 | log.debug("Received event {}".format(json.dumps(event)))
28 |
29 | taskStatus = dynamodb.Table(os.environ['TAILOR_TABLENAME_TASKSTATUS'])
30 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
31 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
32 |
33 | accountEmailAddress = None
34 | try:
35 | if event['Records'][0]['Sns']['Message']:
36 | incomingMessage = json.loads(event['Records'][0]['Sns']['Message'])
37 | accountEmailAddress = incomingMessage['lambda']['accountEmailAddress']
38 | except KeyError as e:
39 | print(e)
40 | try:
41 | if event['message']:
42 | accountEmailAddress = event['message']['accountEmailAddress']
43 | except KeyError as e:
44 | print(e)
45 | return "Could not interpret event."
46 |
47 | getAccountInfo = accountInfo.get_item(
48 | Key={
49 | 'accountEmailAddress': accountEmailAddress
50 | }
51 | )
52 | laAccountId = getAccountInfo['Item']['accountId']
53 | accountIamAlias = getAccountInfo['Item']['accountIamAlias']
54 | requestId = getAccountInfo['Item']['requestId']
55 | accountCbAlias = getAccountInfo['Item']['accountCbAlias']
56 |
57 | # Update task start status
58 | taskStatus.put_item(
59 | Item={
60 | "requestId": requestId,
61 | "eventTimestamp": str(time.time()),
62 | "period": "start",
63 | "taskName": "CLOUDTRAIL",
64 | "function": "talr-cloudtrail",
65 | "message": accountEmailAddress
66 | }
67 | )
68 |
69 | getCbInfo = cbInfo.get_item(
70 | Key={
71 | 'accountCbAlias': accountCbAlias
72 | }
73 | )
74 | accountCbId = getCbInfo['Item']['accountCbId']
75 | accountCloudtrailS3Bucket = getCbInfo['Item']['accountCloudtrailS3Bucket']
76 |
77 | laCredentials = initialize_la_services(account_cb_id=accountCbId, la_account_id=laAccountId)
78 |
79 | if check_trails(la_credentials=laCredentials, s3_bucket=accountCloudtrailS3Bucket) is True:
80 | # Update task end status
81 | taskStatus.put_item(
82 | Item={
83 | "requestId": requestId,
84 | "eventTimestamp": str(time.time()),
85 | "period": "end",
86 | "taskName": "CLOUDTRAIL",
87 | "function": "talr-cloudtrail",
88 | "message": accountEmailAddress
89 | }
90 | )
91 | return
92 | else:
93 |         cleanup_resources(la_credentials=laCredentials, regions=all_regions(la_credentials=laCredentials))
94 | create_trails(la_credentials=laCredentials,
95 | la_account_id=laAccountId,
96 | s3_bucket=accountCloudtrailS3Bucket,
97 | account_alias=accountIamAlias)
98 |
99 | # Update task end status
100 | taskStatus.put_item(
101 | Item={
102 | "requestId": requestId,
103 |                 "eventTimestamp": str(time.time()),
104 |                 "period": "end",
105 |                 "taskName": "CLOUDTRAIL",
106 |                 "function": "talr-cloudtrail",
107 |                 "message": accountEmailAddress
108 | }
109 | )
110 |
111 | return
112 |
113 |
114 | def create_trails(la_credentials, la_account_id, s3_bucket, account_alias):
115 |
116 | laCloudtrail = boto3.client(
117 | 'cloudtrail',
118 | region_name='us-east-1',
119 | aws_access_key_id=la_credentials[0],
120 | aws_secret_access_key=la_credentials[1],
121 | aws_session_token=la_credentials[2],
122 | )
123 | # Create Cloudtrail trail
124 | createTrail = laCloudtrail.create_trail(
125 | Name='default',
126 | S3BucketName=s3_bucket,
127 | S3KeyPrefix=account_alias,
128 | IncludeGlobalServiceEvents=True,
129 | IsMultiRegionTrail=True,
130 | EnableLogFileValidation=True
131 | )
132 |
133 | # Start Cloudtrail trail logging
134 | startLogging = laCloudtrail.start_logging(
135 | Name='arn:aws:cloudtrail:us-east-1:' + la_account_id + ':trail/default'
136 | )
137 |
138 | # Describe trail
139 | describeTrail = laCloudtrail.describe_trails()
140 | print(describeTrail)
141 |
142 | return
143 |
144 |
145 | def cleanup_resources(la_credentials, regions):
146 |
147 | # Clean up resources
148 | try:
149 | for region in regions:
150 | laCloudtrail = boto3.client(
151 | 'cloudtrail',
152 | region_name=region,
153 | aws_access_key_id=la_credentials[0],
154 | aws_secret_access_key=la_credentials[1],
155 | aws_session_token=la_credentials[2],
156 | )
157 | describeTrail = laCloudtrail.describe_trails()
158 | for trail in describeTrail['trailList']:
159 | deleteTrail = laCloudtrail.delete_trail(
160 | Name=trail['TrailARN']
161 | )
162 | print(deleteTrail)
163 | except Exception as e:
164 | print(e)
165 | print("No trails to delete")
166 |
167 | return
168 |
169 |
170 | def all_regions(la_credentials):
171 |
172 | # Initialize a Session object in order to look up service regions
173 | boto3Session = boto3.Session(
174 | aws_access_key_id=la_credentials[0],
175 | aws_secret_access_key=la_credentials[1],
176 | aws_session_token=la_credentials[2]
177 | )
178 | regions = boto3Session.get_available_regions(
179 | service_name='cloudtrail',
180 | partition_name='aws',
181 | )
182 |
183 | return regions
184 |
185 |
186 | def check_trails(la_credentials, s3_bucket):
187 |
188 | laCloudtrail = boto3.client(
189 | 'cloudtrail',
190 | region_name='us-east-1',
191 | aws_access_key_id=la_credentials[0],
192 | aws_secret_access_key=la_credentials[1],
193 | aws_session_token=la_credentials[2],
194 | )
195 |
196 | checkTrail = laCloudtrail.describe_trails(
197 | trailNameList=['default'],
198 | )
199 |
200 | if len(checkTrail['trailList']) == 1 \
201 | and checkTrail['trailList'][0]['IsMultiRegionTrail'] is True \
202 | and checkTrail['trailList'][0]['S3BucketName'] == s3_bucket:
203 | return True
204 | else:
205 | return False
206 |
207 |
208 | def initialize_la_services(account_cb_id, la_account_id):
209 | # Payer account credentials
210 | payerAssumeRole = sts.assume_role(
211 | RoleArn="arn:aws:iam::" + account_cb_id + ":role/tailor",
212 | RoleSessionName="talrIamPayerAssumeRole"
213 | )
214 | payerCredentials = payerAssumeRole['Credentials']
215 | payer_aws_access_key_id = payerCredentials['AccessKeyId']
216 | payer_aws_secret_access_key = payerCredentials['SecretAccessKey']
217 | payer_aws_session_token = payerCredentials['SessionToken']
218 |
219 | # Linked account credentials
220 | laSts = boto3.client(
221 | 'sts',
222 | aws_access_key_id=payer_aws_access_key_id,
223 | aws_secret_access_key=payer_aws_secret_access_key,
224 | aws_session_token=payer_aws_session_token,
225 | )
226 |
227 | laAssumeRole = laSts.assume_role(
228 | RoleArn="arn:aws:iam::" + la_account_id + ":role/PayerAccountAccessRole",
229 | RoleSessionName="talrIamLaAssumeRole"
230 | )
231 | laCredentials = laAssumeRole['Credentials']
232 | la_aws_access_key_id = laCredentials['AccessKeyId']
233 | la_aws_secret_access_key = laCredentials['SecretAccessKey']
234 | la_aws_session_token = laCredentials['SessionToken']
235 |
236 | # Initialize IAM client with Linked Account credentials
237 | laIam = boto3.client(
238 | 'iam',
239 | aws_access_key_id=la_aws_access_key_id,
240 | aws_secret_access_key=la_aws_secret_access_key,
241 | aws_session_token=la_aws_session_token,
242 | )
243 |
244 | return la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token
245 |
--------------------------------------------------------------------------------
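Note that this function accepts two event shapes: a real SNS notification whose message carries a 'lambda' object, and the direct-invoke form built by talr-accountupdate-cloudtrail. Minimal sketches of both, with placeholder values:

    import json

    # Illustrative only: the two event shapes the talr-cloudtrail handler can interpret.
    sns_event = {
        "Records": [{
            "Sns": {"Message": json.dumps({"lambda": {"accountEmailAddress": "<email>"}})}
        }]
    }
    direct_event = {"message": {"accountEmailAddress": "<email>"}}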
/sam/functions/talr-cloudtrail/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-cloudtrail/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-config-complianceaggregator/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Key, Attr
8 | import os
9 | import sys
10 | import time
11 | import zipfile
12 |
13 | # Path to modules needed to package local lambda function for upload
14 | currentdir = os.path.dirname(os.path.realpath(__file__))
15 | sys.path.append(os.path.join(currentdir, "./vendored"))
16 |
17 | # Modules downloaded into the vendored directory
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | sts = boto3.client('sts')
25 | dynamodb = boto3.resource('dynamodb')
26 |
27 |
28 | def handler(event, context):
29 | log.debug("Received event {}".format(json.dumps(event)))
30 |
31 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
32 | configRulesCompliance = dynamodb.Table(os.environ['TAILOR_TABLENAME_CONFIGRULESCOMPLIANCE'])
33 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
34 | accountId = event['accountId']
35 |
36 | getAccountId = accountInfo.scan(
37 | ProjectionExpression='accountId, accountCbAlias',
38 | FilterExpression=Attr('accountId').eq(accountId)
39 | )
40 | accountCbAlias = getAccountId['Items'][0]['accountCbAlias']
41 |
42 | # Lookup payer account number
43 | getCbInfo = cbInfo.get_item(
44 | Key={
45 | 'accountCbAlias': accountCbAlias
46 | }
47 | )
48 | accountCbId = getCbInfo['Item']['accountCbId']
49 |
50 |     # Initialize Config session
51 | la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token = \
52 | initialize_la_services(account_cb_id=accountCbId, la_account_id=accountId)
53 |
54 | # Only deploy in regions where Config Rules exist
55 | boto3Session = boto3.Session()
56 | configRegions = boto3Session.get_available_regions(
57 | service_name='config',
58 | partition_name='aws',
59 | )
60 |
61 | for region in configRegions:
62 | if region != 'ca-central-1' and region != 'sa-east-1' and region != 'ap-south-1':
63 | # Linked account credentials
64 | laConfig = boto3.client(
65 | 'config',
66 | region_name=region,
67 | aws_access_key_id=la_aws_access_key_id,
68 | aws_secret_access_key=la_aws_secret_access_key,
69 | aws_session_token=la_aws_session_token,
70 | )
71 |
72 | # Get list of rules in account
73 | getRules = laConfig.describe_config_rules()
74 |
75 | configRules = list()
76 | for i in getRules['ConfigRules']:
77 | if i['ConfigRuleState'] == 'ACTIVE':
78 | configRules.append(i['ConfigRuleName'])
79 | else:
80 | pass
81 |
82 | # Check rule compliance status
83 | checkComplianceStatus = laConfig.describe_compliance_by_config_rule(
84 | ConfigRuleNames=configRules
85 | )
86 |
87 | # Check evaluation status
88 | checkEvaluationStatus = laConfig.describe_config_rule_evaluation_status(
89 | ConfigRuleNames=configRules
90 | )
91 |
92 | for i in checkComplianceStatus['ComplianceByConfigRules']:
93 | pollTimestamp = str(time.time())
94 | updateConfigRulesCompliance = configRulesCompliance.put_item(
95 | Item={
96 | "accountId": accountId,
97 | "pollTimestamp": pollTimestamp,
98 | "ruleName": i['ConfigRuleName'],
99 | "complianceStatus": i['Compliance']['ComplianceType']
100 | }
101 | )
102 |
103 | for ii in checkEvaluationStatus['ConfigRulesEvaluationStatus']:
104 | if ii['ConfigRuleName'] == i['ConfigRuleName']:
105 | try:
106 | updateConfigRulesCompliance = configRulesCompliance.update_item(
107 | Key={
108 | 'accountId': accountId,
109 | 'pollTimestamp': pollTimestamp
110 | },
111 | UpdateExpression='SET #ruleName = :val1, '
112 | '#lastSuccessfulInvocationTime = :val2, '
113 | '#ruleArn = :val3, '
114 | '#lastSuccessfulEvaluationTime = :val4, '
115 | '#region = :val5, '
116 | '#lastErrorMessage = :val6, '
117 | '#lastErrorCode = :val7',
118 | ExpressionAttributeNames={'#ruleName': 'ruleName',
119 | '#lastSuccessfulInvocationTime': 'lastSuccessfulInvocationTime',
120 | '#ruleArn': 'ruleArn',
121 | '#lastSuccessfulEvaluationTime': 'lastSuccessfulEvaluationTime',
122 | '#region': 'region',
123 | '#lastErrorMessage': 'lastErrorMessage',
124 | '#lastErrorCode': 'lastErrorCode'},
125 | ExpressionAttributeValues={':val1': ii['ConfigRuleName'],
126 | ':val2': ii['LastSuccessfulInvocationTime'].strftime("%Y-%m-%dT%H:%M:%SZ"),
127 | ':val3': ii['ConfigRuleArn'],
128 | ':val4': ii['LastSuccessfulEvaluationTime'].strftime("%Y-%m-%dT%H:%M:%SZ"),
129 | ':val5': ii['ConfigRuleArn'].split(":")[3],
130 | ':val6': ii['LastErrorMessage'],
131 | ':val7': ii['LastErrorCode']}
132 | )
133 | except KeyError:
134 | updateConfigRulesCompliance = configRulesCompliance.update_item(
135 | Key={
136 | 'accountId': accountId,
137 | 'pollTimestamp': pollTimestamp
138 | },
139 | UpdateExpression='SET #ruleName = :val1, '
140 | '#ruleArn = :val3, '
141 | '#region = :val5',
142 | ExpressionAttributeNames={'#ruleName': 'ruleName',
143 | '#ruleArn': 'ruleArn',
144 | '#region': 'region'},
145 | ExpressionAttributeValues={':val1': ii['ConfigRuleName'],
146 | ':val3': ii['ConfigRuleArn'],
147 | ':val5': ii['ConfigRuleArn'].split(":")[3]}
148 | )
149 |
150 | return
151 |
152 |
153 | def initialize_la_services(account_cb_id, la_account_id):
154 | # Payer account credentials
155 | payerAssumeRole = sts.assume_role(
156 | RoleArn="arn:aws:iam::" + account_cb_id + ":role/tailor",
157 | RoleSessionName="talrIamPayerAssumeRole"
158 | )
159 | payerCredentials = payerAssumeRole['Credentials']
160 | payer_aws_access_key_id = payerCredentials['AccessKeyId']
161 | payer_aws_secret_access_key = payerCredentials['SecretAccessKey']
162 | payer_aws_session_token = payerCredentials['SessionToken']
163 |
164 | # Linked account credentials
165 | laSts = boto3.client(
166 | 'sts',
167 | aws_access_key_id=payer_aws_access_key_id,
168 | aws_secret_access_key=payer_aws_secret_access_key,
169 | aws_session_token=payer_aws_session_token,
170 | )
171 |
172 | laAssumeRole = laSts.assume_role(
173 | RoleArn="arn:aws:iam::" + la_account_id + ":role/PayerAccountAccessRole",
174 | RoleSessionName="talrIamLaAssumeRole"
175 | )
176 | laCredentials = laAssumeRole['Credentials']
177 | la_aws_access_key_id = laCredentials['AccessKeyId']
178 | la_aws_secret_access_key = laCredentials['SecretAccessKey']
179 | la_aws_session_token = laCredentials['SessionToken']
180 |
181 | return (la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token)
182 |
--------------------------------------------------------------------------------
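Note on initialize_la_services() above: Tailor never calls a linked account directly. It first assumes the `tailor` role in the payer (CB) account, then uses those temporary credentials to assume PayerAccountAccessRole in the linked account. A minimal sketch of the same two-hop pattern in isolation (account IDs and session names below are placeholders, not values from the repository):

import boto3

def assume_linked_account(payer_account_id, linked_account_id):
    # Hop 1: current (Tailor) credentials -> "tailor" role in the payer account
    sts = boto3.client('sts')
    payer_creds = sts.assume_role(
        RoleArn="arn:aws:iam::" + payer_account_id + ":role/tailor",
        RoleSessionName="exampleTailorPayerHop"
    )['Credentials']

    # Hop 2: payer credentials -> PayerAccountAccessRole in the linked account
    payer_sts = boto3.client(
        'sts',
        aws_access_key_id=payer_creds['AccessKeyId'],
        aws_secret_access_key=payer_creds['SecretAccessKey'],
        aws_session_token=payer_creds['SessionToken'],
    )
    return payer_sts.assume_role(
        RoleArn="arn:aws:iam::" + linked_account_id + ":role/PayerAccountAccessRole",
        RoleSessionName="exampleTailorLinkedHop"
    )['Credentials']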
/sam/functions/talr-config-complianceaggregator/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-config-complianceaggregator/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-config-deployrulefunctions/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | import shutil
10 |
11 | # Path to modules needed to package local lambda function for upload
12 | currentdir = os.path.dirname(os.path.realpath(__file__))
13 | sys.path.append(os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 |
17 | # Logging for Serverless
18 | log = logging.getLogger()
19 | log.setLevel(logging.DEBUG)
20 |
21 | # Initializing AWS services
22 | sts = boto3.client('sts')
23 | config = boto3.client('config')
24 | s3 = boto3.client('s3')
25 | dynamodb = boto3.resource('dynamodb')
26 |
27 |
28 | def handler(event, context):
29 | log.debug("Received event {}".format(json.dumps(event)))
30 |
31 | accountId = sts.get_caller_identity()['Account']
32 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
33 | getCbInfo = cbInfo.get_item(
34 | Key={
35 | 'accountCbAlias': event['accountCbAlias']
36 | }
37 | )
38 | accountTailorConfigBucket = getCbInfo['Item']['accountTailorConfigBucket']
39 |
40 | # Only deploy in regions where Config Rules exist
41 | boto3Session = boto3.Session()
42 | configRegions = boto3Session.get_available_regions(
43 | service_name='config',
44 | partition_name='aws',
45 | )
46 |
47 | functionName = 'talr-configrule-ec2notinpublicsubnet'
48 | # List files
49 | listObjects = s3.list_objects_v2(
50 | Bucket=accountTailorConfigBucket,
51 | Prefix='lambda/' + functionName,
52 | )
53 |
54 | # Directory Cleanup
55 | shutil.rmtree('/tmp/lambda/{}'.format(functionName), ignore_errors=True)
56 | os.makedirs('/tmp/lambda/{}'.format(functionName))
57 |
58 | # Download files
59 | for i in listObjects['Contents']:
60 | if i['Key'].endswith('/'):
61 | os.makedirs('/tmp/' + i['Key'])  # recreate the key's folder structure under /tmp
62 | elif not i['Key'].endswith('/'):
63 | s3.download_file(accountTailorConfigBucket, i['Key'], '/tmp/' + i['Key'])
64 |
65 | # Zip up files
66 | shutil.make_archive('/tmp/' + functionName, 'zip', '/tmp/lambda/' + functionName)
67 |
68 | for region in configRegions:
69 | if region != 'ca-central-1' and region != 'sa-east-1':
70 | # Set AWS Lambda client and region
71 | awslambda = boto3.client('lambda', region_name=region)
72 |
73 | # Delete function if it already exists
74 | try:
75 | deleteFunction = awslambda.delete_function(
76 | FunctionName='talr-configrule-ec2notinpublicsubnet'
77 | )
78 | except Exception as e:
79 | print(e)
80 |
81 | # Create function
82 | createFunctionEc2NotInPublicSubnet = awslambda.create_function(
83 | FunctionName='talr-configrule-ec2notinpublicsubnet',
84 | Runtime='python2.7',
85 | Role='arn:aws:iam::' + accountId + ':role/tailor-' + context.invoked_function_arn.split(':')[7] + '-ConfigRuleLambdaFunctionRole',
86 | Handler='handler.handler',
87 | Code={
88 | 'ZipFile': open('/tmp/talr-configrule-ec2notinpublicsubnet.zip', 'rb').read(),
89 | },
90 | Description='Config Rule: EC2 not in Public subnet',
91 | Timeout=30,
92 | MemorySize=128,
93 | Environment={
94 | 'Variables': {
95 | 'TAILOR_TABLENAME_CBINFO': os.environ['TAILOR_TABLENAME_CBINFO'],
96 | 'TAILOR_TABLENAME_ACCOUNTINFO': os.environ['TAILOR_TABLENAME_ACCOUNTINFO']
97 | }
98 | },
99 | )
100 | addPermissionFunctionEc2NotInPublicSubnet = awslambda.add_permission(
101 | FunctionName='talr-configrule-ec2notinpublicsubnet',
102 | StatementId='ConfigAccess',
103 | Action='lambda:InvokeFunction',
104 | Principal='config.amazonaws.com'
105 | )
106 |
--------------------------------------------------------------------------------
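The handler above only ships the rule function's code into every supported region and grants config.amazonaws.com permission to invoke it; the Config rule that actually points at the function is not created in this file. Purely as a hypothetical illustration (the function ARN, account ID, and resource scope are placeholders, and Tailor may wire this up elsewhere), the per-region registration could look like:

import boto3

config = boto3.client('config', region_name='us-east-1')
config.put_config_rule(
    ConfigRule={
        'ConfigRuleName': 'talr-configrule-ec2notinpublicsubnet',
        'Source': {
            'Owner': 'CUSTOM_LAMBDA',
            # Placeholder ARN: the function created by the handler above
            'SourceIdentifier': 'arn:aws:lambda:us-east-1:111111111111:function:talr-configrule-ec2notinpublicsubnet',
            'SourceDetails': [{
                'EventSource': 'aws.config',
                'MessageType': 'ConfigurationItemChangeNotification'
            }]
        },
        # Assumed scope: evaluate EC2 instances on configuration change
        'Scope': {'ComplianceResourceTypes': ['AWS::EC2::Instance']}
    }
)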
/sam/functions/talr-config-deployrulefunctions/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-config-deployrulefunctions/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-config/requirements.txt:
--------------------------------------------------------------------------------
1 | retrying
2 |
--------------------------------------------------------------------------------
/sam/functions/talr-configrule-ec2notinpublicsubnet/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Key, Attr
8 | import os
9 | import StringIO
10 | import sys
11 | import time
12 | import zipfile
13 |
14 | # Path to modules needed to package local lambda function for upload
15 | currentdir = os.path.dirname(os.path.realpath(__file__))
16 | sys.path.append(os.path.join(currentdir, "./vendored"))
17 |
18 | # Modules downloaded into the vendored directory
19 |
20 | # Logging for Serverless
21 | log = logging.getLogger()
22 | log.setLevel(logging.DEBUG)
23 |
24 | # Initializing AWS services
25 | sts = boto3.client('sts')
26 | dynamodb = boto3.resource('dynamodb')
27 |
28 |
29 | def handler(event, context):
30 | log.debug("Received event {}".format(json.dumps(event)))
31 |
32 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
33 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
34 | accountId = event['accountId']
35 | resultToken = event['resultToken']
36 | invokingEvent = json.loads(event['invokingEvent'])
37 | configurationItem = invokingEvent['configurationItem']
38 | isPrivate = True
39 |
40 | try:
41 | subnetId = configurationItem['configuration']['subnetId']
42 | vpcId = configurationItem['configuration']['vpcId']
43 | except TypeError as e:
44 | print(e)
45 |
46 | getAccountId = accountInfo.scan(
47 | ProjectionExpression='accountId, accountCbAlias',
48 | FilterExpression=Attr('accountId').eq(accountId)
49 | )
50 | accountCbAlias = getAccountId['Items'][0]['accountCbAlias']
51 |
52 | # Lookup payer account number
53 | getCbInfo = cbInfo.get_item(
54 | Key={
55 | 'accountCbAlias': accountCbAlias
56 | }
57 | )
58 | accountCbId = getCbInfo['Item']['accountCbId']
59 |
60 | # If the resource has been deleted, mark the evaluation as compliant
61 | if configurationItem['configurationItemStatus'] == 'ResourceDeleted':
62 | evaluation = {
63 | "compliance_type": "COMPLIANT",
64 | "annotation": 'Resource was deleted'
65 | }
66 | put_evaluation(account_cb_id=accountCbId,
67 | la_account_id=accountId,
68 | invoking_event=invokingEvent,
69 | evaluation=evaluation,
70 | result_token=resultToken)
71 | return  # a deleted resource needs no further evaluation
72 | # Initialize EC2 session
73 | la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token = \
74 | initialize_la_services(account_cb_id=accountCbId, la_account_id=accountId)
75 |
76 | # Linked account credentials
77 | laEc2 = boto3.client(
78 | 'ec2',
79 | aws_access_key_id=la_aws_access_key_id,
80 | aws_secret_access_key=la_aws_secret_access_key,
81 | aws_session_token=la_aws_session_token,
82 | )
83 |
84 | lookupRouteTables = laEc2.describe_route_tables(
85 | Filters=[
86 | {
87 | 'Name': 'route.destination-cidr-block',
88 | 'Values': ['0.0.0.0/0']
89 | }
90 | ]
91 | )
92 | # If only the default (main) route table exists, all subnets are implicitly
93 | # associated with it; otherwise check whether the subnet is explicitly
94 | # associated with another route table. The subnet counts as private only when
95 | # no route sends 0.0.0.0/0 to an Internet Gateway (igw-*), which is what the
96 | # checks below look for.
97 | for i in lookupRouteTables['RouteTables']:
98 | if i['VpcId'] == vpcId:
99 | for ii in i['Associations']:
100 | if ii['Main'] is True:
101 | for iii in i['Routes']:
102 | try:
103 | if iii['DestinationCidrBlock'] == '0.0.0.0/0' and iii['GatewayId'].startswith('igw-'):
104 | isPrivate = False
105 | except KeyError as e:
106 | print(e)
107 | else:
108 | if ii['SubnetId'] == subnetId:
109 | try:
110 | for iii in i['Routes']:
111 | if iii['DestinationCidrBlock'] == '0.0.0.0/0' and iii['GatewayId'].startswith('igw-'):
112 | isPrivate = False
113 | except KeyError as e:
114 | print(e)
115 |
116 | if isPrivate:
117 | evaluation = {
118 | "compliance_type": "COMPLIANT",
119 | "annotation": 'Its in private subnet'
120 | }
121 | else:
122 | evaluation = {
123 | "compliance_type": "NON_COMPLIANT",
124 | "annotation": 'Not in private subnet'
125 | }
126 |
127 | put_evaluation(account_cb_id=accountCbId,
128 | la_account_id=accountId,
129 | invoking_event=invokingEvent,
130 | evaluation=evaluation,
131 | result_token=resultToken)
132 |
133 |
134 | def put_evaluation(account_cb_id, la_account_id, invoking_event, evaluation, result_token):
135 |
136 | # Initialize Config session
137 | la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token = \
138 | initialize_la_services(account_cb_id=account_cb_id, la_account_id=la_account_id)
139 |
140 | # Linked account credentials
141 | laConfig = boto3.client(
142 | 'config',
143 | aws_access_key_id=la_aws_access_key_id,
144 | aws_secret_access_key=la_aws_secret_access_key,
145 | aws_session_token=la_aws_session_token,
146 | )
147 |
148 | pushToConfig = laConfig.put_evaluations(
149 | Evaluations=[
150 | {
151 | 'ComplianceResourceType': invoking_event['configurationItem']['resourceType'],
152 | 'ComplianceResourceId': invoking_event['configurationItem']['resourceId'],
153 | 'ComplianceType': evaluation['compliance_type'],
154 | "Annotation": evaluation['annotation'],
155 | 'OrderingTimestamp': invoking_event['configurationItem']['configurationItemCaptureTime']
156 | },
157 | ],
158 | ResultToken=result_token)
159 | return
160 |
161 |
162 | def initialize_la_services(account_cb_id, la_account_id):
163 | # Payer account credentials
164 | payerAssumeRole = sts.assume_role(
165 | RoleArn="arn:aws:iam::" + account_cb_id + ":role/tailor",
166 | RoleSessionName="talrIamPayerAssumeRole"
167 | )
168 | payerCredentials = payerAssumeRole['Credentials']
169 | payer_aws_access_key_id = payerCredentials['AccessKeyId']
170 | payer_aws_secret_access_key = payerCredentials['SecretAccessKey']
171 | payer_aws_session_token = payerCredentials['SessionToken']
172 |
173 | # Linked account credentials
174 | laSts = boto3.client(
175 | 'sts',
176 | aws_access_key_id=payer_aws_access_key_id,
177 | aws_secret_access_key=payer_aws_secret_access_key,
178 | aws_session_token=payer_aws_session_token,
179 | )
180 |
181 | laAssumeRole = laSts.assume_role(
182 | RoleArn="arn:aws:iam::" + la_account_id + ":role/PayerAccountAccessRole",
183 | RoleSessionName="talrIamLaAssumeRole"
184 | )
185 | laCredentials = laAssumeRole['Credentials']
186 | la_aws_access_key_id = laCredentials['AccessKeyId']
187 | la_aws_secret_access_key = laCredentials['SecretAccessKey']
188 | la_aws_session_token = laCredentials['SessionToken']
189 |
190 | return (la_aws_access_key_id, la_aws_secret_access_key, la_aws_session_token)
191 |
--------------------------------------------------------------------------------
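For reference, this is roughly the event shape the custom Config rule handler above consumes. Note that invokingEvent arrives as a JSON string, which is why the handler json.loads() it before reading configurationItem. All values below are synthetic:

import json

sample_event = {
    "accountId": "222222222222",            # linked account under evaluation
    "resultToken": "example-result-token",  # passed through to put_evaluations()
    "invokingEvent": json.dumps({
        "configurationItem": {
            "configurationItemStatus": "OK",
            "resourceType": "AWS::EC2::Instance",
            "resourceId": "i-0abc123def4567890",
            "configurationItemCaptureTime": "2017-01-01T00:00:00.000Z",
            "configuration": {
                "subnetId": "subnet-11111111",
                "vpcId": "vpc-22222222"
            }
        }
    })
}
# handler(sample_event, context)  # context is supplied by the Lambda runtime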
/sam/functions/talr-configrule-ec2notinpublicsubnet/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-configrule-ec2notinpublicsubnet/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-cresource-sns/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 |
10 | # Path to modules needed to package local lambda function for upload
11 | currentdir = os.path.dirname(os.path.realpath(__file__))
12 | sys.path.append(os.path.join(currentdir, "./vendored"))
13 |
14 | # Modules downloaded into the vendored directory
15 | import requests
16 |
17 | # Logging for Serverless
18 | log = logging.getLogger()
19 | log.setLevel(logging.DEBUG)
20 |
21 | # Initializing AWS services
22 | sts = boto3.client('sts')
23 | awslambda = boto3.client('lambda')
24 |
25 | def handler(event, context):
26 | log.debug("Received event {}".format(json.dumps(event)))
27 |
28 | stage = event['ResourceProperties']['Stage']
29 | topicNamePrefix = event['ResourceProperties']['TopicNamePrefix']
30 | topicName = topicNamePrefix + '-' + stage
31 | requestType = event['RequestType']
32 |
33 | # Initialize a Session object in order to look up SNS regions
34 | boto3Session = boto3.Session()
35 |
36 | # All SNS regions
37 | snsRegions = boto3Session.get_available_regions(
38 | service_name='sns',
39 | partition_name='aws',
40 | )
41 |
42 | if "Create" in requestType:
43 | create_topics(snsRegions, topicName, context, event, stage)
44 |
45 | elif "Update" in requestType:
46 | create_topics(snsRegions, topicName, context, event, stage)
47 |
48 | elif "Delete" in requestType:
49 | delete_topics(snsRegions, topicName, context, event)
50 |
51 | def cfn_response(response_status, response_data, reason, physical_resource_id, event, context):
52 | # Put together the response to be sent to the S3 pre-signed URL
53 |
54 | # Use the caller-supplied reason when one is given; otherwise fall back to
55 | # pointing at the CloudWatch log stream for details
56 | if not reason:
57 | reason = 'See the details in CloudWatch Log Stream: ' + context.log_stream_name
58 |
59 | responseBody = {'Status': response_status,
60 | 'Reason': reason,
61 | 'PhysicalResourceId': physical_resource_id,
62 | 'StackId': event['StackId'],
63 | 'RequestId': event['RequestId'],
64 | 'LogicalResourceId': event['LogicalResourceId'],
65 | 'Data': response_data
66 | }
67 | print('Response Body:', responseBody)
68 | response = requests.put(event['ResponseURL'], data=json.dumps(responseBody))
69 | if response.status_code != 200:
70 | print(response.text)
71 | raise Exception('Response error received.')
72 | return
73 |
74 | def list_lambda_functions(stage):
75 |
76 | functionArns = list()
77 | whitelist = {'talr-vpciam-' + stage,
78 | 'talr-vpcflowlogs-' + stage,
79 | 'talr-vpcdns-' + stage,
80 | 'talr-directconnect-' + stage}
81 |
82 | lambdaFunctions = awslambda.list_functions()
83 | for i in lambdaFunctions['Functions']:
84 | if i['FunctionName'] in whitelist:
85 | functionArns.append(i['FunctionArn'])
86 |
87 | marker = True
88 | while marker is True:
89 | try:
90 | if lambdaFunctions['NextMarker']:
91 | lambdaFunctions = awslambda.list_functions(
92 | Marker=lambdaFunctions['NextMarker']
93 | )
94 | for i in lambdaFunctions['Functions']:
95 | if i['FunctionName'] in whitelist:
96 | functionArns.append(i['FunctionArn'])
97 | except KeyError as e:
98 | print(e)
99 | marker = False
100 |
101 | return functionArns
102 |
103 |
104 | def create_topics(sns_regions, topic_name, context, event, stage):
105 |
106 | # Get list of Lambda functions to subscribe
107 | functionArns = list_lambda_functions(stage)
108 | print(functionArns)
109 |
110 | failures = 0
111 | for region in sns_regions:
112 |
113 | # Set region to create topic
114 | sns = boto3.client(
115 | 'sns',
116 | region_name=region
117 | )
118 |
119 | createTopic = sns.create_topic(
120 | Name=topic_name
121 | )
122 | print('CreateTopic response: ', createTopic)
123 |
124 | createTopicPolicy = sns.add_permission(
125 | TopicArn=createTopic['TopicArn'],
126 | Label='LinkedAccountPublishAccess',
127 | AWSAccountId=[
128 | '*',
129 | ],
130 | ActionName=[
131 | 'Publish',
132 | ]
133 | )
134 | print('CreateTopicPolicy response: ', createTopicPolicy)
135 |
136 | # Subscribe each function to the SNS topic created
137 | if isinstance(functionArns, list):
138 | for i in functionArns:
139 | # Subscribe Lambda functions
140 | sns.subscribe(
141 | TopicArn=createTopic['TopicArn'],
142 | Protocol='lambda',
143 | Endpoint=i
144 | )
145 |
146 | # Add invoke permission for each SNS topic created
147 | for i in functionArns:
148 | awslambda.add_permission(
149 | FunctionName=i,
150 | StatementId=region + 'SnsTopicPermission',
151 | Action='lambda:InvokeFunction',
152 | Principal='sns.amazonaws.com',
153 | SourceArn=createTopic['TopicArn'],
154 | )
155 |
156 | # Check if topic creation was successful
157 | if 'TopicArn' not in createTopic or createTopicPolicy['ResponseMetadata']['HTTPStatusCode'] != 200:
158 | failures = failures + 1
159 |
160 | if failures == 0:
161 | responseStatus = "SUCCESS"
162 | responseData = {"TopicName": topic_name}
163 | physicalResourceId = topic_name
164 | reason = ""
165 | else:
166 | reason = "At least one region failed to provision, check logs " + context.log_stream_name
167 | responseStatus = "FAILED"
168 | responseData = {}
169 | physicalResourceId = context.log_stream_name
170 |
171 | # Send response to CloudFormation
172 | cfn_response(response_status=responseStatus,
173 | response_data=responseData,
174 | physical_resource_id=physicalResourceId,
175 | reason=reason,
176 | event=event,
177 | context=context)
178 | return
179 |
180 | def delete_topics(sns_regions, topic_name, context, event):
181 |
182 | accountId = sts.get_caller_identity()['Account']
183 | failures = 0
184 | for region in sns_regions:
185 |
186 | # Set region to delete topic
187 | sns = boto3.client(
188 | 'sns',
189 | region_name=region
190 | )
191 |
192 | deleteTopic = sns.delete_topic(
193 | TopicArn='arn:aws:sns:' + region + ':' + accountId + ':' + topic_name
194 | )
195 | print('DeleteTopic response: ', deleteTopic)
196 |
197 | # Check if topic deletion was successful
198 | if deleteTopic['ResponseMetadata']['HTTPStatusCode'] != 200:
199 | failures = failures + 1
200 |
201 | if failures == 0:
202 | responseStatus = "SUCCESS"
203 | responseData = {}
204 | reason = ""
205 | else:
206 | reason = "At least one region failed to delete, check logs " + context.log_stream_name
207 | responseStatus = "FAILED"
208 | responseData = deleteTopic
209 |
210 | # Send response to CloudFormation
211 | cfn_response(response_status=responseStatus,
212 | response_data=responseData,
213 | physical_resource_id=context.log_stream_name,
214 | reason=reason,
215 | event=event,
216 | context=context)
217 | return
218 |
--------------------------------------------------------------------------------
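talr-cresource-sns is the backing function for a CloudFormation custom resource, so its event follows the custom-resource request shape rather than an SNS notification. A synthetic example of the Create request it handles (every value below is a placeholder):

sample_event = {
    "RequestType": "Create",                    # also "Update" or "Delete"
    "ResponseURL": "https://example-presigned-s3-url",  # where cfn_response() PUTs the result
    "StackId": "arn:aws:cloudformation:us-east-1:111111111111:stack/tailor/example",
    "RequestId": "example-request-id",
    "LogicalResourceId": "RegionalSnsTopics",
    "ResourceProperties": {
        "Stage": "prod",                        # appended to the topic name
        "TopicNamePrefix": "talr-dispatch"      # placeholder prefix
    }
}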
/sam/functions/talr-cresource-sns/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
--------------------------------------------------------------------------------
/sam/functions/talr-directconnect/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-directconnect/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-director/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | import time
10 |
11 | # Path to modules needed to package local lambda function for upload
12 | currentdir = os.path.dirname(os.path.realpath(__file__))
13 | sys.path.append(os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 |
17 | # Logging for Serverless
18 | log = logging.getLogger()
19 | log.setLevel(logging.DEBUG)
20 |
21 | # Initializing AWS services
22 | dynamodb = boto3.resource('dynamodb')
23 | sts = boto3.client('sts')
24 | sns = boto3.client('sns')
25 |
26 |
27 | def handler(event, context):
28 | log.debug("Received event {}".format(json.dumps(event)))
29 |
30 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
31 | taskStatus = dynamodb.Table(os.environ['TAILOR_TABLENAME_TASKSTATUS'])
32 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
33 | dispatchRequestArn = os.environ['TAILOR_SNSARN_DISPATCH_REQUEST']
34 | incomingMessage = json.loads(event['Records'][0]['Sns']['Message'])
35 |
36 | try:
37 | if incomingMessage['info'] == "LinkedAccountCreationStarted":
38 | getAccountInfo = accountInfo.get_item(
39 | Key={
40 | 'accountEmailAddress': incomingMessage['email']
41 | }
42 | )
43 | requestId = getAccountInfo['Item']['requestId']
44 |
45 | # Update task start status
46 | updateStatus = taskStatus.put_item(
47 | Item={
48 | "requestId": requestId,
49 | "eventTimestamp": str(time.time()),
50 | "period": "start",
51 | "taskName": "CLA_CREATION",
52 | "function": "talr-director",
53 | "message": incomingMessage
54 | }
55 | )
56 | return
57 | except KeyError:
58 | pass
59 |
60 | # Look up email address and other account fields in accountInfo table
61 | accountEmailAddress = incomingMessage['linkedAccountEmail']
62 | getAccountInfo = accountInfo.get_item(
63 | Key={
64 | 'accountEmailAddress': accountEmailAddress
65 | }
66 | )
67 | requestId = getAccountInfo['Item']['requestId']
68 | accountTagShortProjectName = getAccountInfo['Item']['accountTagShortProjectName']
69 | accountTagEnvironment = getAccountInfo['Item']['accountTagEnvironment']
70 | accountCbAlias = getAccountInfo['Item']['accountCbAlias']
71 |
72 | # Look up account division
73 | getCbInfo = cbInfo.get_item(
74 | Key={
75 | 'accountCbAlias': accountCbAlias
76 | }
77 | )
78 | accountDivision = getCbInfo['Item']['accountDivision'].lower()
79 | accountCompanyCode = getCbInfo['Item']['accountCompanyCode']
80 | accountCbId = getCbInfo['Item']['accountCbId']
81 |
82 | if "linkedAccountId" in incomingMessage and getAccountInfo['Item']['accountEmailAddress'] == accountEmailAddress:
83 |
84 | # Update task end status
85 | updateStatus = taskStatus.put_item(
86 | Item={
87 | "requestId": requestId,
88 | "eventTimestamp": str(time.time()),
89 | "period": "end",
90 | "taskName": "CLA_CREATION",
91 | "function": "talr-director",
92 | "message": incomingMessage
93 | }
94 | )
95 |
96 | laAccountId = incomingMessage['linkedAccountId']
97 | print("New linked account: " + laAccountId)
98 |
99 | updateAccountInfo = accountInfo.update_item(
100 | Key={
101 | 'accountEmailAddress': accountEmailAddress
102 | },
103 | UpdateExpression='SET #accountId = :val1',
104 | ExpressionAttributeNames={'#accountId': "accountId"},
105 | ExpressionAttributeValues={':val1': incomingMessage['linkedAccountId']}
106 | )
107 | else:
108 | # Update task failure status
109 | updateStatus = taskStatus.put_item(
110 | Item={
111 | "requestId": requestId,
112 | "eventTimestamp": str(time.time()),
113 | "period": "failed",
114 | "taskName": "CLA_CREATION",
115 | "function": "talr-director",
116 | "message": incomingMessage
117 | }
118 | )
119 |
120 | return {"code": "601", "requestId": requestId, "message": "ERROR: Linked account failed to create"}
121 |
122 | # Start linked account validation
123 | updateStatus = taskStatus.put_item(
124 | Item={
125 | "requestId": requestId,
126 | "eventTimestamp": str(time.time()),
127 | "period": "start",
128 | "taskName": "CLA_VALIDATION",
129 | "function": "talr-director",
130 | "message": "Linked account: " + laAccountId
131 | }
132 | )
133 |
134 | # Payer account credentials
135 | payerAssumeRole = sts.assume_role(
136 | RoleArn="arn:aws:iam::" + accountCbId + ":role/tailor",
137 | RoleSessionName="talrDirectorPayerAssumeRole"
138 | )
139 | payerCredentials = payerAssumeRole['Credentials']
140 | payer_aws_access_key_id = payerCredentials['AccessKeyId']
141 | payer_aws_secret_access_key = payerCredentials['SecretAccessKey']
142 | payer_aws_session_token = payerCredentials['SessionToken']
143 |
144 | # Linked account credentials
145 | laSts = boto3.client(
146 | 'sts',
147 | aws_access_key_id=payer_aws_access_key_id,
148 | aws_secret_access_key=payer_aws_secret_access_key,
149 | aws_session_token=payer_aws_session_token,
150 | )
151 |
152 | laAssumeRole = laSts.assume_role(
153 | RoleArn="arn:aws:iam::" + laAccountId + ":role/PayerAccountAccessRole",
154 | RoleSessionName="talrDirectorLaAssumeRole"
155 | )
156 | laCredentials = laAssumeRole['Credentials']
157 | la_aws_access_key_id = laCredentials['AccessKeyId']
158 | la_aws_secret_access_key = laCredentials['SecretAccessKey']
159 | la_aws_session_token = laCredentials['SessionToken']
160 |
161 | # List roles in linked account to validate access
162 | laIam = boto3.client(
163 | 'iam',
164 | aws_access_key_id=la_aws_access_key_id,
165 | aws_secret_access_key=la_aws_secret_access_key,
166 | aws_session_token=la_aws_session_token,
167 | )
168 |
169 | laListRoles = laIam.list_roles()
170 | print(laListRoles)
171 |
172 | # Create IAM Account Alias in Linked Account
173 | accountIamAlias = accountCompanyCode + "-" + accountDivision.lower() + "-" + \
174 | accountTagShortProjectName + "-" + accountTagEnvironment
175 | laCreateAccountIamAlias = laIam.create_account_alias(
176 | AccountAlias=accountIamAlias
177 | )
178 |
179 | # Add account IAM alias to accountInfo table
180 | updateAccountInfo = accountInfo.update_item(
181 | Key={
182 | 'accountEmailAddress': accountEmailAddress
183 | },
184 | UpdateExpression='SET #accountIamAlias = :val1',
185 | ExpressionAttributeNames={'#accountIamAlias': "accountIamAlias"},
186 | ExpressionAttributeValues={':val1': accountIamAlias}
187 | )
188 |
189 | # Update task end status
190 | updateStatus = taskStatus.put_item(
191 | Item={
192 | "requestId": requestId,
193 | "eventTimestamp": str(time.time()),
194 | "period": "end",
195 | "taskName": "CLA_VALIDATION",
196 | "function": "talr-director",
197 | "message": "Linked account: " + laAccountId
198 | }
199 | )
200 |
201 | publishToTalrDispatchRequest = sns.publish(
202 | TopicArn=dispatchRequestArn,
203 | Message=json.dumps({"default": {"requestId": requestId, "accountEmailAddress": accountEmailAddress},
204 | "lambda": {"requestId": requestId,
205 | "accountEmailAddress": accountEmailAddress}})
206 | )
207 |
--------------------------------------------------------------------------------
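talr-director handles two different SNS payloads from the account-creation workflow: an early "LinkedAccountCreationStarted" notice keyed by email, and the completion message carrying the new linked account id. Sketches of both shapes, based on the fields the handler reads, with placeholder values:

creation_started_message = {
    "info": "LinkedAccountCreationStarted",
    "email": "project-dev@example.com"        # accountEmailAddress key in accountInfo
}

creation_completed_message = {
    "linkedAccountEmail": "project-dev@example.com",
    "linkedAccountId": "333333333333"         # recorded as accountId in accountInfo
}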
/sam/functions/talr-director/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-director/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-entsupport/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | import time
10 |
11 | # Path to modules needed to package local lambda function for upload
12 | currentdir = os.path.dirname(os.path.realpath(__file__))
13 | sys.path.append(os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 |
17 | # Logging for Serverless
18 | log = logging.getLogger()
19 | log.setLevel(logging.DEBUG)
20 |
21 | # Initializing AWS services
22 | dynamodb = boto3.resource('dynamodb')
23 | sts = boto3.client('sts')
24 |
25 |
26 | def handler(event, context):
27 | log.debug("Received event {}".format(json.dumps(event)))
28 |
29 | taskStatus = dynamodb.Table(os.environ['TAILOR_TABLENAME_TASKSTATUS'])
30 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
31 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
32 | incomingMessage = json.loads(event['Records'][0]['Sns']['Message'])
33 | accountEmailAddress = incomingMessage['lambda']['accountEmailAddress']
34 |
35 | getAccountInfo = accountInfo.get_item(
36 | Key={
37 | 'accountEmailAddress': accountEmailAddress
38 | }
39 | )
40 | laAccountId = getAccountInfo['Item']['accountId']
41 | requestId = getAccountInfo['Item']['requestId']
42 | accountCbAlias = getAccountInfo['Item']['accountCbAlias']
43 | accountTagEnvironment = getAccountInfo['Item']['accountTagEnvironment']
44 |
45 | # Update task start status
46 | updateStatus = taskStatus.put_item(
47 | Item={
48 | "requestId": requestId,
49 | "eventTimestamp": str(time.time()),
50 | "period": "start",
51 | "taskName": "ENTSUPPORT",
52 | "function": "talr-entsupport",
53 | "message": incomingMessage
54 | }
55 | )
56 |
57 | getCbInfo = cbInfo.get_item(
58 | Key={
59 | 'accountCbAlias': accountCbAlias
60 | }
61 | )
62 | accountCompanyName = getCbInfo['Item']['accountCompanyName']
63 | accountCbId = getCbInfo['Item']['accountCbId']
64 | accountSupportTeamEmail = getCbInfo['Item']['accountSupportTeamEmail']
65 |
66 | if accountTagEnvironment != 'tst':
67 |
68 | # Payer account credentials
69 | payerAssumeRole = sts.assume_role(
70 | RoleArn="arn:aws:iam::" + accountCbId + ":role/tailor",
71 | RoleSessionName="talrEntsupportPayerAssumeRole"
72 | )
73 | payerCredentials = payerAssumeRole['Credentials']
74 | payer_aws_access_key_id = payerCredentials['AccessKeyId']
75 | payer_aws_secret_access_key = payerCredentials['SecretAccessKey']
76 | payer_aws_session_token = payerCredentials['SessionToken']
77 |
78 | # Linked account credentials
79 | paSupport = boto3.client(
80 | 'support',
81 | aws_access_key_id=payer_aws_access_key_id,
82 | aws_secret_access_key=payer_aws_secret_access_key,
83 | aws_session_token=payer_aws_session_token,
84 | )
85 |
86 | # Create case in the Payer Account requesting Enterprise Support on the Linked Account
87 | createCase = paSupport.create_case(
88 | subject='Enable Enterprise Support',
89 | serviceCode='account-management',
90 | severityCode='normal',
91 | categoryCode='billing',
92 | communicationBody='Please enable Enterprise Support on Linked Account: ' + laAccountId + '.',
93 | ccEmailAddresses=[
94 | accountSupportTeamEmail,
95 | ],
96 | language='en',
97 | issueType='customer-service'
98 | )
99 | print(createCase)
100 |
101 | # Update task end status
102 | updateStatus = taskStatus.put_item(
103 | Item={
104 | "requestId": requestId,
105 | "eventTimestamp": str(time.time()),
106 | "period": "end",
107 | "taskName": "ENTSUPPORT",
108 | "function": "talr-entsupport",
109 | "message": incomingMessage
110 | }
111 | )
112 |
113 | else:
114 | print("No Enterprise Support enablement requested for", laAccountId)
115 |
116 | # Update task end status
117 | updateStatus = taskStatus.put_item(
118 | Item={
119 | "requestId": requestId,
120 | "eventTimestamp": str(time.time()),
121 | "period": "end",
122 | "taskName": "ENTSUPPORT",
123 | "function": "talr-entsupport",
124 | "message": incomingMessage
125 | }
126 | )
127 |
128 | return
129 |
--------------------------------------------------------------------------------
/sam/functions/talr-entsupport/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-entsupport/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-iam/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-iam/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-inquirer/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Key, Attr
8 | import os
9 | import sys
10 | import re
11 |
12 | # Path to modules needed to package local lambda function for upload
13 | currentdir = os.path.dirname(os.path.realpath(__file__))
14 | sys.path.append(os.path.join(currentdir, "./vendored"))
15 |
16 | # Modules downloaded into the vendored directory
17 | from netaddr import IPNetwork, IPAddress
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | sns = boto3.client('sns')
25 | dynamodb = boto3.resource('dynamodb')
26 |
27 |
28 | def handler(event, context):
29 | log.debug("Received event {}".format(json.dumps(event)))
30 |
31 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
32 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
33 | accountIdFound = None
34 | ipAddressFound = None
35 | accountEmailAddressFound = None
36 |
37 | # Header validation
38 | try:
39 | print('header:accountCbAlias', re.match("^[a-z]{3,4}-[a-z]{3,5}$", event['params']['header']['accountCbAlias']))
40 |
41 | # Test if the accountCbAlias key exists
42 | getCbInfo = cbInfo.get_item(
43 | Key={
44 | 'accountCbAlias': event['params']['header']['accountCbAlias']
45 | }
46 | )
47 |
48 | # Test if the value of accountCbAlias is valid; it will be if cbInfo returns an entry.
49 | accountCbAlias = getCbInfo['Item']['accountCbAlias']
50 |
51 | except Exception as e:
52 | print(e)
53 | print("regex not matching any values passed in request")
54 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
55 |
56 | # accountId validation
57 | try:
58 | if event['context']['resource-path'] == '/accounts' and event['params']['querystring']['accountid']:
59 | if re.match("^[0-9]{12}$", event['params']['querystring']['accountid']) or \
60 | re.match("^[0-9]{4}-[0-9]{4}-[0-9]{4}$", event['params']['querystring']['accountid']):
61 |
62 | accountId = re.sub('-', '', event['params']['querystring']['accountid'])
63 | accountIdFound = True
64 | print('accountIdFound', accountIdFound)
65 | else:
66 | accountIdFound = False
67 | print('accountIdFound', accountIdFound)
68 | except KeyError as e:
69 | print(e)
70 | print("No accountId or bad accountId passed")
71 | accountIdFound = False
72 | print('accountIdFound', accountIdFound)
73 |
74 | # email address validation
75 | try:
76 | if event['context']['resource-path'] == '/accounts' and event['params']['querystring']['emailaddress']:
77 | if re.match("^([a-zA-Z0-9_\-\.]+)@([a-zA-Z0-9_\-\.]+)\.([a-zA-Z]{2,5})$",
78 | event['params']['querystring']['emailaddress']):
79 |
80 | accountEmailAddress = event['params']['querystring']['emailaddress']
81 | accountEmailAddressFound = True
82 | print('accountEmailAddressFound', accountEmailAddressFound)
83 | else:
84 | accountEmailAddressFound = False
85 | print('accountEmailAddressFound', accountEmailAddressFound)
86 | except KeyError as e:
87 | print(e)
88 | print("No emailaddress or bad emailaddress passed")
89 | accountEmailAddressFound = False
90 | print('accountEmailAddressFound', accountEmailAddressFound)
91 |
92 | # ip address validation
93 | try:
94 | if event['context']['resource-path'] == '/accounts' and event['params']['querystring']['ipaddress']:
95 | if re.match("^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$",
96 | event['params']['querystring']['ipaddress']):
97 |
98 | ipAddress = event['params']['querystring']['ipaddress']
99 | ipAddressFound = True
100 | print('ipAddressFound', ipAddressFound)
101 | else:
102 | ipAddressFound = False
103 | print('ipAddressFound', ipAddressFound)
104 | except KeyError as e:
105 | print(e)
106 | print("No ipaddress or bad ipaddress passed")
107 | ipAddressFound = False
108 | print('ipAddressFound', ipAddressFound)
109 |
110 | # test whether no query parameters were passed
111 | if accountIdFound is False and accountEmailAddressFound is False and ipAddressFound is False:
112 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
113 | elif accountIdFound is True:
114 | getAccountInfo = accountInfo.query(
115 | IndexName='gsiAccountId',
116 | KeyConditionExpression=Key('accountId').eq(accountId)
117 | )
118 | if getAccountInfo['Count'] >= 1:
119 | return {'accountId': getAccountInfo['Items'][0]['accountId'],
120 | 'accountStatus': getAccountInfo['Items'][0]['accountStatus'],
121 | 'emailAddress': getAccountInfo['Items'][0]['accountEmailAddress'],
122 | 'regulated': getAccountInfo['Items'][0]['accountRegulated'],
123 | 'accountName': getAccountInfo['Items'][0]['accountTagLongProjectName'],
124 | 'costCenter': getAccountInfo['Items'][0]['accountTagCostCenter'],
125 | 'environment': getAccountInfo['Items'][0]['accountTagEnvironment'],
126 | 'department': getAccountInfo['Items'][0]['requestorDepartment'],
127 | 'requestorName': getAccountInfo['Items'][0]['requestorFullName'],
128 | 'technicalContactName': getAccountInfo['Items'][0]['accountTechnicalContactFullName']
129 | }
130 | elif getAccountInfo['Count'] == 0:
131 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
132 | elif accountEmailAddressFound is True:
133 | try:
134 | getAccountInfo = accountInfo.get_item(
135 | Key={
136 | 'accountEmailAddress': accountEmailAddress
137 | }
138 | )
139 | return {'accountId': getAccountInfo['Item']['accountId'],
140 | 'accountStatus': getAccountInfo['Item']['accountStatus'],
141 | 'emailAddress': getAccountInfo['Item']['accountEmailAddress'],
142 | 'regulated': getAccountInfo['Item']['accountRegulated'],
143 | 'accountName': getAccountInfo['Item']['accountTagLongProjectName'],
144 | 'costCenter': getAccountInfo['Item']['accountTagCostCenter'],
145 | 'environment': getAccountInfo['Item']['accountTagEnvironment'],
146 | 'department': getAccountInfo['Item']['requestorDepartment'],
147 | 'requestorName': getAccountInfo['Item']['requestorFullName'],
148 | 'technicalContactName': getAccountInfo['Item']['accountTechnicalContactFullName']
149 | }
150 | except KeyError as e:
151 | print(e)
152 | print("No account found for given email address")
153 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
154 | elif ipAddressFound is True:
155 |
156 | getAccountInfo = accountInfo.scan(
157 | ProjectionExpression='#accountVpcCidr,'
158 | 'accountId,accountEmailAddress,'
159 | 'accountRegulated,'
160 | 'accountStatus,'
161 | 'accountTagLongProjectName,'
162 | 'requestorFullName,'
163 | 'accountTechnicalContactFullName',
164 | FilterExpression='attribute_exists (#accountVpcCidr)',
165 | ExpressionAttributeNames={'#accountVpcCidr': 'accountVpcCidr'}
166 | )
167 |
168 | for i in getAccountInfo['Items']:
169 | if i['accountVpcCidr'].get('us-west-1'):
170 | if IPAddress(ipAddress) in IPNetwork(i['accountVpcCidr']['us-west-1']):
171 | return {'accountId': i['accountId'],
172 | 'accountStatus': i['accountStatus'],
173 | 'emailAddress': i['accountEmailAddress'],
174 | 'regulated': i['accountRegulated'],
175 | 'accountName': i['accountTagLongProjectName'],
176 | 'requestorName': i['requestorFullName'],
177 | 'technicalContactName': i['accountTechnicalContactFullName'],
178 | 'vpcCidr': i['accountVpcCidr']
179 | }
180 | else:
181 | pass
182 | elif i['accountVpcCidr'].get('us-west-2'):
183 | if IPAddress(ipAddress) in IPNetwork(i['accountVpcCidr']['us-west-2']):
184 | return {'accountId': i['accountId'],
185 | 'accountStatus': i['accountStatus'],
186 | 'emailAddress': i['accountEmailAddress'],
187 | 'regulated': i['accountRegulated'],
188 | 'accountName': i['accountTagLongProjectName'],
189 | 'requestorName': i['requestorFullName'],
190 | 'technicalContactName': i['accountTechnicalContactFullName'],
191 | 'vpcCidr': i['accountVpcCidr']
192 | }
193 | else:
194 | pass
195 | elif i['accountVpcCidr'].get('us-east-1'):
196 | if IPAddress(ipAddress) in IPNetwork(i['accountVpcCidr']['us-east-1']):
197 | return {'accountId': i['accountId'],
198 | 'accountStatus': i['accountStatus'],
199 | 'emailAddress': i['accountEmailAddress'],
200 | 'regulated': i['accountRegulated'],
201 | 'accountName': i['accountTagLongProjectName'],
202 | 'requestorName': i['requestorFullName'],
203 | 'technicalContactName': i['accountTechnicalContactFullName'],
204 | 'vpcCidr': i['accountVpcCidr']
205 | }
206 | else:
207 | pass
208 |
209 | if event['context']['resource-path'] == '/accounts/ids':
210 | getAccountInfo = accountInfo.scan(
211 | ProjectionExpression='accountId',
212 | FilterExpression=Attr('accountId').exists() & Attr('accountStatus').eq('ACTIVE')
213 | )
214 | accountIds = list()
215 | for i in getAccountInfo['Items']:
216 | accountIds.append(i['accountId'])
217 |
218 | return {'accountCbAlias': accountCbAlias,
219 | 'accountIds': accountIds,
220 | 'count': getAccountInfo['Count']}
221 |
--------------------------------------------------------------------------------
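talr-inquirer expects the API Gateway mapping-template shape used by Tailor's API: headers and query string parameters nested under params, and the resource path under context. A synthetic /accounts lookup by account id could look like this (header and id values are placeholders that satisfy the regexes above):

sample_event = {
    "context": {"resource-path": "/accounts"},
    "params": {
        "header": {"accountCbAlias": "abc-corp"},        # must match ^[a-z]{3,4}-[a-z]{3,5}$
        "querystring": {"accountid": "1234-5678-9012"}   # dashes are stripped by the handler
    }
}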
/sam/functions/talr-inquirer/requirements.txt:
--------------------------------------------------------------------------------
1 | netaddr
--------------------------------------------------------------------------------
/sam/functions/talr-lex-accountrequest/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-lex-accountrequest/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-nipap/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | import time
10 |
11 | # Path to modules needed to package local lambda function for upload
12 | currentdir = os.path.dirname(os.path.realpath(__file__))
13 | sys.path.append(os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 |
17 | # Logging for Serverless
18 | log = logging.getLogger()
19 | log.setLevel(logging.DEBUG)
20 |
21 | # Initializing AWS services
22 | dynamodb = boto3.resource('dynamodb')
23 | cloudformation = boto3.resource('cloudformation')
24 | s3 = boto3.client('s3')
25 |
26 |
27 | def handler(event, context):
28 | log.debug("Received event {}".format(json.dumps(event)))
29 |
30 | taskStatus = dynamodb.Table(os.environ['TAILOR_TABLENAME_TASKSTATUS'])
31 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
32 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
33 | nipapCfn = dynamodb.Table(os.environ['TAILOR_TABLENAME_NIPAPCFN'])
34 | nipapcfnResponseArn = os.environ['TAILOR_SNSARN_NIPAPCFN_RESPONSE']
35 | incomingMessage = json.loads(event['Records'][0]['Sns']['Message'])
36 | accountEmailAddress = incomingMessage['lambda']['accountEmailAddress']
37 |
38 | getAccountInfo = accountInfo.get_item(
39 | Key={
40 | 'accountEmailAddress': accountEmailAddress
41 | }
42 | )
43 | requestId = getAccountInfo['Item']['requestId']
44 | accountCbAlias = getAccountInfo['Item']['accountCbAlias']
45 | accountIamAlias = getAccountInfo['Item']['accountIamAlias']
46 |
47 | # Update task start status
48 | updateStatus = taskStatus.put_item(
49 | Item={
50 | "requestId": requestId,
51 | "eventTimestamp": str(time.time()),
52 | "period": "start",
53 | "taskName": "NIPAP_DAEMON",
54 | "function": "talr-nipap",
55 | "message": incomingMessage
56 | }
57 | )
58 |
59 | # Lookup cbInfo variables
60 | getCbInfo = cbInfo.get_item(
61 | Key={
62 | 'accountCbAlias': accountCbAlias
63 | }
64 | )
65 | accountTailorConfigBucket = getCbInfo['Item']['accountTailorConfigBucket']
66 |
67 | getNipapCfn = nipapCfn.get_item(
68 | Key={
69 | 'nipapAlias': accountCbAlias
70 | }
71 | )
72 |
73 | cfnDaemonTemplateObjectKey = getNipapCfn['Item']['cfnDaemonTemplateObjectKey']
74 | cfnAppName = getNipapCfn['Item']['cfnDaemonAppName']
75 | cfnEnvironmentName = getNipapCfn['Item']['cfnDaemonEnvironment']
76 | cfnApplicationSubnetAZ1 = getNipapCfn['Item']['cfnDaemonApplicationSubnetAZ1']
77 | cfnDaemonInstanceType = getNipapCfn['Item']['cfnDaemonInstanceType']
78 | cfnDaemonAmi = getNipapCfn['Item']['cfnDaemonAmi']
79 | cfnDaemonComponentsSecurityGroup = getNipapCfn['Item']['cfnDaemonComponentsSecurityGroup']
80 | cfnVpcId = getNipapCfn['Item']['cfnDaemonVpcId']
81 |
82 | # Download CFN template from S3 and pass contents to function to be used in Linked Account.
83 | getCfnTemplate = s3.get_object(
84 | Bucket=accountTailorConfigBucket,
85 | Key=cfnDaemonTemplateObjectKey
86 | )
87 | templateBody = getCfnTemplate['Body'].read()
88 |
89 | createDaemonInstance = cloudformation.create_stack(
90 | StackName='tailor-nipap-daemon-' + accountIamAlias + '-' + str(int(time.time())),
91 | TemplateBody=templateBody,
92 | Parameters=[
93 | {
94 | 'ParameterKey': 'TailorRequestId',
95 | 'ParameterValue': requestId
96 | },
97 | {
98 | 'ParameterKey': 'AppName',
99 | 'ParameterValue': cfnAppName
100 | },
101 | {
102 | 'ParameterKey': 'EnvironmentName',
103 | 'ParameterValue': cfnEnvironmentName
104 | },
105 | {
106 | 'ParameterKey': 'ApplicationSubnetAZ1',
107 | 'ParameterValue': cfnApplicationSubnetAZ1
108 | },
109 | {
110 | 'ParameterKey': 'TailorNipapDaemonInstanceType',
111 | 'ParameterValue': cfnDaemonInstanceType
112 | },
113 | {
114 | 'ParameterKey': 'TailorNipapDaemonAmi',
115 | 'ParameterValue': cfnDaemonAmi
116 | },
117 | {
118 | 'ParameterKey': 'TailorComponentsSecurityGroup',
119 | 'ParameterValue': cfnDaemonComponentsSecurityGroup
120 | },
121 | {
122 | 'ParameterKey': 'VPCID',
123 | 'ParameterValue': cfnVpcId
124 | }
125 | ],
126 | TimeoutInMinutes=15,
127 | NotificationARNs=[
128 | nipapcfnResponseArn,
129 | ],
130 | OnFailure='ROLLBACK'
131 | )
132 | print(createDaemonInstance)
133 |
134 | # Update task end status
135 | updateStatus = taskStatus.put_item(
136 | Item={
137 | "requestId": requestId,
138 | "eventTimestamp": str(time.time()),
139 | "period": "end",
140 | "taskName": "NIPAP_DAEMON",
141 | "function": "talr-nipap",
142 | "message": incomingMessage
143 | }
144 | )
145 |
146 | return
147 |
--------------------------------------------------------------------------------
/sam/functions/talr-nipap/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-nipap/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-notify/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Key
8 | import os
9 | import sys
10 | import time
11 |
12 | # Path to modules needed to package local lambda function for upload
13 | currentdir = os.path.dirname(os.path.realpath(__file__))
14 | sys.path.append(os.path.join(currentdir, "./vendored"))
15 |
16 | # Modules downloaded into the vendored directory
17 |
18 | # Logging for Serverless
19 | log = logging.getLogger()
20 | log.setLevel(logging.DEBUG)
21 |
22 | # Initializing AWS services
23 | dynamodb = boto3.resource('dynamodb')
24 | ses = boto3.client('ses')
25 |
26 |
27 | def handler(event, context):
28 | log.debug("Received event {}".format(json.dumps(event)))
29 |
30 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
31 | taskStatus = dynamodb.Table(os.environ['TAILOR_TABLENAME_TASKSTATUS'])
32 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
33 | incomingMessage = json.loads(event['Records'][0]['Sns']['Message'])
34 | requestId = incomingMessage['lambda']['requestId']
35 | requestorEmailAddress = incomingMessage['lambda']['requestorEmailAddress']
36 | emailContentText = incomingMessage['lambda']['emailContentText']
37 | emailContentHtml = incomingMessage['lambda']['emailContentHtml']
38 |
39 | # Update task status
40 | updateStatus = taskStatus.put_item(
41 | Item={
42 | "requestId": requestId,
43 | "eventTimestamp": str(time.time()),
44 | "period": "start",
45 | "taskName": "NOTIFY",
46 | "function": "talr-notify",
47 | "message": "-"
48 | }
49 | )
50 |
51 | # Look up the accountEmailAddress from the known requestId
52 | getAccountEmailAddress = accountInfo.query(
53 | IndexName='gsiRequestId',
54 | KeyConditionExpression=Key('requestId').eq(requestId)
55 | )
56 | accountEmailAddress = getAccountEmailAddress['Items'][0]['accountEmailAddress']
57 |
58 | # Look up account info
59 | getAccountInfo = accountInfo.get_item(
60 | Key={
61 | 'accountEmailAddress': accountEmailAddress
62 | }
63 | )
64 | requestorFullName = getAccountInfo['Item']['requestorFullName']
65 | accountTagLongProjectName = getAccountInfo['Item']['accountTagLongProjectName']
66 | accountCbAlias = getAccountInfo['Item']['accountCbAlias']
67 |
68 | getCbInfo = cbInfo.get_item(
69 | Key={
70 | 'accountCbAlias': accountCbAlias
71 | }
72 | )
73 | accountSupportTeamEmail = getCbInfo['Item']['accountSupportTeamEmail']
74 | accountNotificationsFromEmail = getCbInfo['Item']['accountNotificationsFromEmail']
75 |
76 | sendNotification = ses.send_email(
77 | Source=accountNotificationsFromEmail,
78 | Destination={
79 | 'ToAddresses': [
80 | requestorEmailAddress,
81 | ],
82 | 'BccAddresses': [
83 | accountSupportTeamEmail,
84 | ]
85 | },
86 | Message={
87 | 'Subject': {
88 | 'Data': 'AWS Account Request',
89 | 'Charset': 'UTF-8'
90 | },
91 | 'Body': {
92 | 'Text': {
93 | 'Data': emailContentText,
94 | 'Charset': 'UTF-8'
95 | },
96 | 'Html': {
97 | 'Data': emailContentHtml,
98 | 'Charset': 'UTF-8'
99 | }
100 | }
101 | },
102 | ReplyToAddresses=[
103 | accountSupportTeamEmail,
104 | ]
105 | )
106 |
107 | print(sendNotification)
108 |
109 | # Update task status
110 | updateStatus = taskStatus.put_item(
111 | Item={
112 | "requestId": requestId,
113 | "eventTimestamp": str(time.time()),
114 | "period": "end",
115 | "taskName": "NOTIFY",
116 | "function": "talr-notify",
117 | "message": "-"
118 | }
119 | )
120 |
--------------------------------------------------------------------------------
/sam/functions/talr-notify/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-notify/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-ops-ddb-backups/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import os
7 | import sys
8 | from time import gmtime, strftime
9 |
10 | # Path to modules needed to package local lambda function for upload
11 | currentdir = os.path.dirname(os.path.realpath(__file__))
12 | # sys.path.append(os.path.join(currentdir, "./vendored"))
13 | sys.path.insert(0, os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 | import boto3
17 |
18 | # Logging for Serverless
19 | log = logging.getLogger()
20 | log.setLevel(logging.DEBUG)
21 |
22 | # Initializing AWS services
23 | dynamodb = boto3.resource('dynamodb')
24 | s3 = boto3.client('s3')
25 |
26 |
27 | def handler(event, context):
28 | log.debug("Received event {}".format(json.dumps(event)))
29 |
30 | opsTbl = dynamodb.Table(os.environ['TAILOR_TABLENAME_OPS'])
31 | newImage = event['Records'][0]['dynamodb']['NewImage']
32 | tableName = event['Records'][0]['eventSourceARN'].split(':')[5].split('/')[1]
33 | sequenceNumber = event['Records'][0]['dynamodb']['SequenceNumber']
34 | changeDate = event['Records'][0]['dynamodb']['ApproximateCreationDateTime']
35 |
36 | getOpsTbl = opsTbl.get_item(
37 | Key={
38 | 'layer': 'dynamodb'
39 | }
40 | )
41 | s3Bucket = getOpsTbl['Item']['backupBucket']
42 |
43 | putObject = s3.put_object(
44 | Body=str(event),
45 | Bucket=s3Bucket,
46 | Key=tableName + '/' + sequenceNumber,
47 | Tagging='ApproximateCreationDateTime=' + str(changeDate) +
48 | '&Month=' + str(strftime("%m", gmtime())) +
49 | '&Year=' + str(strftime("%Y", gmtime()))
50 | )
51 |
52 | """
53 | listChanges = s3.list_objects(
54 | Bucket=s3Bucket,
55 | Prefix=tableName,
56 | )
57 | """
58 |
59 | return
60 |
--------------------------------------------------------------------------------
/sam/functions/talr-ops-ddb-backups/requirements.txt:
--------------------------------------------------------------------------------
1 | botocore
2 | boto3
3 |
--------------------------------------------------------------------------------
/sam/functions/talr-ops-slack-notifications/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | from base64 import b64decode
10 |
11 | # Path to modules needed to package local lambda function for upload
12 | currentdir = os.path.dirname(os.path.realpath(__file__))
13 | sys.path.append(os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 | import requests
17 |
18 | # Logging for Serverless
19 | log = logging.getLogger()
20 | log.setLevel(logging.DEBUG)
21 |
22 | # Initializing AWS services
23 | dynamodb = boto3.resource('dynamodb')
24 | kms = boto3.client('kms')
25 |
26 |
27 | def handler(event, context):
28 | log.debug("Received event {}".format(json.dumps(event)))
29 |
30 | opsTbl = dynamodb.Table(os.environ['TAILOR_TABLENAME_OPS'])
31 | incomingMessage = json.loads(event['Records'][0]['Sns']['Message'])
32 |
33 | getOpsTbl = opsTbl.get_item(
34 | Key={
35 | 'layer': 'slack'
36 | }
37 | )
38 | slackChannelName = getOpsTbl['Item']['slackChannelName']
39 | slackWebhookEncrypted = getOpsTbl['Item']['slackWebhookEncrypted']
40 | slackHookUrl = "https://" + kms.decrypt(CiphertextBlob=b64decode(slackWebhookEncrypted))['Plaintext']
41 |
42 | try:
43 | if "Errors" in incomingMessage['Trigger']['MetricName'] \
44 | and "AWS/Lambda" in incomingMessage['Trigger']['Namespace']:
45 | newStateValue = incomingMessage['NewStateValue']
46 | reasonStateReason = incomingMessage['NewStateReason']
47 | functionName = incomingMessage['Trigger']['Dimensions'][0]['value']
48 | slackMessage = {
49 | 'channel': slackChannelName,
50 | 'username': "tailorbot",
51 | 'icon_emoji': ":robot_face:",
52 | "attachments": [
53 | {
54 | "color": "danger",
55 | "title": functionName,
56 | "text": "Has errors and is now in state %s: %s" % (newStateValue, reasonStateReason),
57 | "mrkdwn_in": ["text"]
58 | }
59 | ]
60 | }
61 |
62 | # Send notification
63 | slackWebhookResponse = requests.post(slackHookUrl, data=json.dumps(slackMessage))
64 | print(slackWebhookResponse)
65 | return
66 | except Exception as e:
67 | print(e)
68 | print("Input not a Lambda error metric")
69 |
70 | try:
71 | if "Duration" in incomingMessage['Trigger']['MetricName'] \
72 | and "AWS/Lambda" in incomingMessage['Trigger']['Namespace']:
73 | reasonStateReason = incomingMessage['NewStateReason']
74 | functionName = incomingMessage['Trigger']['Dimensions'][0]['value']
75 | slackMessage = {
76 | 'channel': slackChannelName,
77 | 'username': "tailorbot",
78 | 'icon_emoji': ":robot_face:",
79 | "attachments": [
80 | {
81 | "color": "warning",
82 | "title": functionName,
83 | "text": "Took longer than threshold: %s" % (reasonStateReason),
84 | "mrkdwn_in": ["text"]
85 | }
86 | ]
87 | }
88 |
89 | # Send notification
90 | slackWebhookResponse = requests.post(slackHookUrl, data=json.dumps(slackMessage))
91 | print(slackWebhookResponse)
92 | return
93 | except Exception as e:
94 | print(e)
95 | print("Input not a Lambda duration metric")
96 |
97 | try:
98 | if "ReadThrottleEvents" in incomingMessage['Trigger']['MetricName'] \
99 | and "AWS/DynamoDB" in incomingMessage['Trigger']['Namespace']:
100 | reasonStateReason = incomingMessage['NewStateReason']
101 | tableName = incomingMessage['Trigger']['Dimensions'][0]['value']
102 | slackMessage = {
103 | 'channel': slackChannelName,
104 | 'username': "tailorbot",
105 | 'icon_emoji': ":robot_face:",
106 | "attachments": [
107 | {
108 | "color": "danger",
109 | "title": tableName,
110 | "text": "Table %s is being throttled. Alert: %s" % (tableName, reasonStateReason),
111 | "mrkdwn_in": ["text"]
112 | }
113 | ]
114 | }
115 |
116 | # Send notification
117 | slackWebhookResponse = requests.post(slackHookUrl, data=json.dumps(slackMessage))
118 | print(slackWebhookResponse)
119 | return
120 | except Exception as e:
121 | print(e)
122 | print("Input not a DynamoDB ReadThrottleEvents metric")
123 |
124 | try:
125 | if "newAccount" in incomingMessage:
126 | requestorFullName = incomingMessage['newAccount']['requestorFullName']
127 | accountTagLongProjectName = incomingMessage['newAccount']['accountTagLongProjectName']
128 | accountId = incomingMessage['newAccount']['accountId']
129 | requestId = incomingMessage['newAccount']['requestId']
130 | accountEmailAddress = incomingMessage['newAccount']['accountEmailAddress']
131 | slackMessage = {
132 | 'channel': slackChannelName,
133 | 'username': "tailorbot",
134 | 'icon_emoji': ":robot_face:",
135 | "attachments": [
136 | {
137 | "color": "good",
138 | "title": "New Account",
139 | "text": "%s created account %s (%s) for %s via requestId %s" % (requestorFullName,
140 | accountId,
141 | accountEmailAddress,
142 | accountTagLongProjectName,
143 | requestId),
144 | "mrkdwn_in": ["text"]
145 | }
146 | ]
147 | }
148 |
149 | # Send notification
150 | slackWebhookResponse = requests.post(slackHookUrl, data=json.dumps(slackMessage))
151 | print(slackWebhookResponse)
152 | return
153 | except Exception as e:
154 | print(e)
155 | print("Input not a newAccount notification")
156 |
157 | try:
158 | slackMessage = {
159 | 'channel': slackChannelName,
160 | 'username': "tailorbot",
161 | 'icon_emoji': ":robot_face:",
162 | "attachments": [
163 | {
164 | "title": "Untrapped Error",
165 | "text": "Message: %s" % (incomingMessage),
166 | "mrkdwn_in": ["text"]
167 | }
168 | ]
169 | }
170 |
171 | # Send notification
172 | slackWebhookResponse = requests.post(slackHookUrl, data=json.dumps(slackMessage))
173 | print(slackWebhookResponse)
174 | return
175 | except Exception as e:
176 | print(e)
177 | print("Cannot recognize event input")
178 |
179 | return
180 |
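181 | # Illustrative note (not part of the original code): the SNS message parsed above is
182 | # typically either a CloudWatch alarm notification or a Tailor "newAccount" notification.
183 | # The shapes below only show the keys read by this handler; all values are placeholders.
184 | #
185 | # CloudWatch alarm style message:
186 | #   {
187 | #     "NewStateValue": "ALARM",
188 | #     "NewStateReason": "Threshold Crossed: 1 datapoint was greater than the threshold",
189 | #     "Trigger": {
190 | #       "MetricName": "Errors",
191 | #       "Namespace": "AWS/Lambda",
192 | #       "Dimensions": [{"name": "FunctionName", "value": "talr-director-prod"}]
193 | #     }
194 | #   }
195 | #
196 | # Tailor newAccount style message:
197 | #   {
198 | #     "newAccount": {
199 | #       "requestorFullName": "...",
200 | #       "accountTagLongProjectName": "...",
201 | #       "accountId": "...",
202 | #       "requestId": "...",
203 | #       "accountEmailAddress": "..."
204 | #     }
205 | #   }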
--------------------------------------------------------------------------------
/sam/functions/talr-ops-slack-notifications/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/sam/functions/talr-poll-accountcompliance/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | from base64 import b64decode
10 |
11 | # Path to modules needed to package local lambda function for upload
12 | currentdir = os.path.dirname(os.path.realpath(__file__))
13 | sys.path.append(os.path.join(currentdir, "./vendored"))
14 |
15 | # Modules downloaded into the vendored directory
16 | from aws_xray_sdk.core import xray_recorder
17 | import requests
18 | from requests_aws4auth import AWS4Auth
19 |
20 | # Logging for Serverless
21 | log = logging.getLogger()
22 | log.setLevel(logging.DEBUG)
23 |
24 | # Initializing AWS services
25 | dynamodb = boto3.resource('dynamodb')
26 | sts = boto3.client('sts')
27 | kms = boto3.client('kms')
28 | awslambda = boto3.client('lambda')
29 |
30 |
31 | def handler(event, context):
32 | log.debug("Received event {}".format(json.dumps(event)))
33 |
34 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
35 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
36 | tailorApiDomain = os.environ['TAILOR_API_DOMAINNAME']
37 | stage = context.function_name.split('-')[-1]
38 |
39 | # Look up all CBs in talr-cbInfo and process based on each.
40 | scanCbInfo = cbInfo.scan(
41 | ProjectionExpression='accountCbAlias'
42 | )
43 |
44 | for i in scanCbInfo['Items']:
45 | tailorApiAccessKey, tailorApiSecretKey = getTailorCreds(cbInfo, i['accountCbAlias'])
46 | accountIds = getAccountIds(tailorApiAccessKey, tailorApiSecretKey, tailorApiDomain, i['accountCbAlias'])
47 |
48 | if event['api'] == 'cloudability':
49 | invokeCloudablity(accountIds, stage)
50 |
51 | return
52 |
53 | @xray_recorder.capture()
54 | def invokeCloudablity(account_ids, stage):
55 |
56 | for i in account_ids:
57 | print("Updating account", i)
58 |
59 | # Build Lambda invoke payload
60 | payload = {
61 | "body-json": {},
62 | "params": {
63 | "path": {
64 | "accountId": i
65 | },
66 | "querystring": {},
67 | "header": {}
68 | },
69 | "stage-variables": {
70 | "stage": stage
71 | },
72 | "context": {
73 | "resource-path": "/cloudability/{accountId}",
74 | }
75 | }
76 |
77 | # Call Lambda
78 | awslambda.invoke(
79 | FunctionName='talr-accountupdate-cloudability-' + stage,
80 | InvocationType='Event',
81 | Payload=json.dumps(payload),
82 | )
83 |
84 | return
85 |
86 | @xray_recorder.capture()
87 | def getAccountIds(access_key, secret_key, domain, cb_alias):
88 |
89 | boto3Session = boto3.Session()
90 | region = boto3Session.region_name
91 |
92 | tailorEndpoint = 'https://' + domain + '/accounts/ids'
93 | auth = AWS4Auth(access_key, secret_key, region, 'execute-api')
94 | headers = {
95 | 'host': domain,
96 | 'accountCbAlias': cb_alias
97 | }
98 | tailorResponse = requests.get(tailorEndpoint, auth=auth, headers=headers)
99 |
100 | return json.loads(tailorResponse.content)['accountIds']
101 |
102 | @xray_recorder.capture()
103 | def getTailorCreds(cb_object, cb_alias):
104 | getCbInfo = cb_object.get_item(
105 | Key={
106 | 'accountCbAlias': cb_alias
107 | }
108 | )
109 |
110 | tailorApiAccessKey = kms.decrypt(CiphertextBlob=b64decode(getCbInfo['Item']['tailorApiAccessKeyEncrypted']))['Plaintext']
111 | tailorApiSecretKey = kms.decrypt(CiphertextBlob=b64decode(getCbInfo['Item']['tailorApiSecretKeyEncrypted']))['Plaintext']
112 |
113 | return tailorApiAccessKey, tailorApiSecretKey
114 |
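115 | # Illustrative note (not part of the original code): this poller is meant to run on a
116 | # schedule with a payload that selects the per-account API to fan out, matching the
117 | # event['api'] check in handler() above. A minimal test event (assumed shape):
118 | #
119 | #   {"api": "cloudability"}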
--------------------------------------------------------------------------------
/sam/functions/talr-poll-accountcompliance/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 | requests-aws4auth
3 | aws-xray-sdk
--------------------------------------------------------------------------------
/sam/functions/talr-poll-accountreconcile/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Key, Attr
8 | import os
9 | import sys
10 | import datetime
11 | import hashlib
12 | import hmac
13 |
14 | # Path to modules needed to package local lambda function for upload
15 | currentdir = os.path.dirname(os.path.realpath(__file__))
16 | sys.path.append(os.path.join(currentdir, "./vendored"))
17 |
18 | # Modules downloaded into the vendored directory
19 | import requests
20 |
21 | # Logging for Serverless
22 | log = logging.getLogger()
23 | log.setLevel(logging.DEBUG)
24 |
25 | # Initializing AWS services
26 | dynamodb = boto3.resource('dynamodb')
27 | sts = boto3.client('sts')
28 |
29 |
30 | def handler(event, context):
31 | log.debug("Received event {}".format(json.dumps(event)))
32 |
33 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
34 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
35 |
36 | try:
37 | if event['message'] == 'scaleup':
38 | updateTable = boto3.client('dynamodb').update_table(
39 | TableName=os.environ['TAILOR_TABLENAME_ACCOUNTINFO'],
40 | ProvisionedThroughput={
41 | 'WriteCapacityUnits': 10
42 | }
43 | )
44 | return updateTable
45 | except Exception as e:
46 | print("Failure in try/catch ", e)
47 |
48 | # Look up all CBs in talr-cbInfo and process based on each.
49 | scanCbInfo = cbInfo.scan(
50 | ProjectionExpression='accountCbAlias'
51 | )
52 |
53 | for i in scanCbInfo['Items']:
54 | getCbInfo = cbInfo.get_item(
55 | Key={
56 | 'accountCbAlias': i['accountCbAlias']
57 | }
58 | )
59 |
60 | accountCbId = getCbInfo['Item']['accountCbId']
61 |
62 | # Assuming Payer/CB role and extract credentials to be used by the Organizations calls
63 | payerAssumeRole = sts.assume_role(
64 | RoleArn="arn:aws:iam::" + accountCbId + ":role/tailor",
65 | RoleSessionName="talrClaPayerAssumeRole"
66 | )
67 | payerCredentials = payerAssumeRole['Credentials']
68 |
69 | organizations = boto3.client(
70 | 'organizations',
71 | region_name='us-east-1',
72 | aws_access_key_id=payerCredentials['AccessKeyId'],
73 | aws_secret_access_key=payerCredentials['SecretAccessKey'],
74 | aws_session_token=payerCredentials['SessionToken'],
75 | )
76 |
77 | count = 0
78 | accounts = list()
79 | nextToken = None
80 |
81 | # Call the listAccounts API to get a list of all accounts, then store the output in a list
82 | while True:
83 | if nextToken:
84 | listAccounts = organizations.list_accounts(
85 | NextToken=nextToken
86 | )
87 | else:
88 | listAccounts = organizations.list_accounts()
89 | count += 1
90 | for i in listAccounts['Accounts']:
91 | accounts.append(i)
92 |
93 | try:
94 | if listAccounts['NextToken']:
95 | nextToken = listAccounts['NextToken']
96 | except KeyError:
97 | break
98 |
99 | # For each account, update its status in talr-accountInfo. First, get all accounts from talr-accountInfo.
100 | getTailorAccounts = accountInfo.scan(
101 | ProjectionExpression='accountId, accountEmailAddress'
102 | )
103 |
104 | tailorAccountIds = list()
105 | for i in getTailorAccounts['Items']:
106 | # Try catch for cases where there's an email address with no accountId.
107 | # Should be rare but has been known to happen.
108 | try:
109 | tailorAccountIds.append(i['accountId'])
110 | except KeyError as e:
111 | print(e)
112 |
113 | tailorEmailAddresses = list()
114 | for i in getTailorAccounts['Items']:
115 | # Try catch for cases where there's an email address with no accountId.
116 | # Should be rare but has been known to happen.
117 | try:
118 | tailorEmailAddresses.append(i['accountEmailAddress'])
119 | except KeyError as e:
120 | print(e)
121 |
122 | # Loop through all the accounts received from Organizations
123 | for i in accounts:
124 | accountId = i['Id']
125 | emailAddress = i['Email']
126 | status = i['Status']
127 | # Loop through all the accounts from Organizations and compare with what's known to Tailor
128 | for ii in getTailorAccounts['Items']:
129 | # Try catch for cases where there's an email address with no accountId.
130 | # Should be rare but has been known to happen.
131 | try:
132 | if accountId == ii['accountId']:
133 | accountEmailAddress = ii['accountEmailAddress']
134 | if 'accountStatus' not in ii or ii['accountStatus'] != status:
135 | accountInfo.update_item(
136 | Key={
137 | 'accountEmailAddress': accountEmailAddress
138 | },
139 | UpdateExpression='SET #accountStatus = :val1',
140 | ExpressionAttributeNames={'#accountStatus': "accountStatus"},
141 | ExpressionAttributeValues={':val1': status}
142 | )
143 | break
144 | except KeyError as e:
145 | print(e)
146 | break
147 | # Loop for accounts in Organizations that are not in Tailor and add them.
148 | if accountId not in tailorAccountIds:
149 | accountInfo.put_item(
150 | Item={
151 | "accountEmailAddress": emailAddress,
152 | "accountId": accountId,
153 | "accountStatus": status,
154 | "addedViaAccountReconcile": True,
155 | "comment": "Account found in Organizations. Added by Tailor Account Reconcile function."
156 | }
157 | )
158 |
159 | # Add accounts whose email address is not yet known to Tailor
160 | if emailAddress not in tailorEmailAddresses:
161 | accountInfo.put_item(
162 | Item={
163 | "accountEmailAddress": emailAddress,
164 | "accountId": accountId,
165 | "accountStatus": status,
166 | "addedViaAccountReconcile": True,
167 | "comment": "Account found in Organizations. Added by Tailor Account Reconcile function."
168 | }
169 | )
170 | return
171 |
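172 | # Illustrative note (not part of the original code): each entry appended to the accounts
173 | # list is an item from the Organizations ListAccounts response; the reconcile logic above
174 | # only relies on its Id, Email and Status fields, e.g. (placeholder values):
175 | #
176 | #   {"Id": "123456789012", "Email": "aws-account@example.com", "Status": "ACTIVE", ...}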
--------------------------------------------------------------------------------
/sam/functions/talr-poll-accountreconcile/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/sam/functions/talr-poll-cla/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/sam/functions/talr-poll-configcompliance/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | import os
8 | import sys
9 | import datetime
10 | import hashlib
11 | import hmac
12 | import base64
13 |
14 | # Path to modules needed to package local lambda function for upload
15 | currentdir = os.path.dirname(os.path.realpath(__file__))
16 | sys.path.append(os.path.join(currentdir, "./vendored"))
17 |
18 | # Modules downloaded into the vendored directory
19 | import requests
20 |
21 | # Logging for Serverless
22 | log = logging.getLogger()
23 | log.setLevel(logging.DEBUG)
24 |
25 | # Initializing AWS services
26 | dynamodb = boto3.resource('dynamodb')
27 | sts = boto3.client('sts')
28 |
29 |
30 | def handler(event, context):
31 | log.debug("Received event {}".format(json.dumps(event)))
32 |
33 | # AWS Version 4 signing
34 |
35 | # ************* REQUEST VALUES *************
36 | method = 'GET'
37 | service = 'execute-api'
38 | host = os.environ['TAILOR_API_DOMAINNAME']
39 | region = os.environ['AWS_REGION']  # Lambda runtime region, used in the credential scope and signing key below
40 | # Key derivation functions. See:
41 | # http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python
42 | def sign(key, msg):
43 | return hmac.new(key, msg.encode('utf-8'), hashlib.sha256).digest()
44 |
45 | def getSignatureKey(key, dateStamp, regionName, serviceName):
46 | kDate = sign(('AWS4' + key).encode('utf-8'), dateStamp)
47 | kRegion = sign(kDate, regionName)
48 | kService = sign(kRegion, serviceName)
49 | kSigning = sign(kService, 'aws4_request')
50 | return kSigning
51 |
52 | # Read AWS access key from env. variables or configuration file. Best practice is NOT
53 | # to embed credentials in code.
54 | access_key = os.environ.get('AWS_ACCESS_KEY_ID')
55 | secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
56 | if access_key is None or secret_key is None:
57 | print('No access key is available.')
58 | # Abort since the request cannot be signed without credentials
59 | sys.exit()
60 |
61 | # Create a date for headers and the credential string
62 | t = datetime.datetime.utcnow()
63 | amzdate = t.strftime('%Y%m%dT%H%M%SZ')
64 | datestamp = t.strftime('%Y%m%d') # Date w/o time, used in credential scope
65 |
66 | # ************* TASK 1: CREATE A CANONICAL REQUEST *************
67 | # http://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
68 |
69 | # Step 1 is to define the verb (GET, POST, etc.)--already done.
70 |
71 | # Step 2: Create canonical URI--the part of the URI from domain to query
72 | # string (use '/' if no path)
73 | canonical_uri = '/'
74 |
75 | # Step 3: Create the canonical query string. In this example (a GET request),
76 | # request parameters are in the query string. Query string values must
77 | # be URL-encoded (space=%20). The parameters must be sorted by name.
78 | # This request sends no query string parameters, so the canonical query string is empty.
79 | canonical_querystring = ''
80 |
81 | # Step 4: Create the canonical headers and signed headers. Header names
82 | # must be trimmed and lowercase, and sorted in code point order from
83 | # low to high. Note that there is a trailing \n.
84 | canonical_headers = 'host:' + host + '\n' + 'x-amz-date:' + amzdate + '\n'
85 |
86 | # Step 5: Create the list of signed headers. This lists the headers
87 | # in the canonical_headers list, delimited with ";" and in alpha order.
88 | # Note: The request can include any headers; canonical_headers and
89 | # signed_headers lists those that you want to be included in the
90 | # hash of the request. "Host" and "x-amz-date" are always required.
91 | signed_headers = 'host;x-amz-date'
92 |
93 | # Step 6: Create payload hash (hash of the request body content). For GET
94 | # requests, the payload is an empty string ("").
95 | payload_hash = hashlib.sha256('').hexdigest()
96 |
97 | # Step 7: Combine elements to create the canonical request
98 | canonical_request = method + '\n' + canonical_uri + '\n' + canonical_querystring + '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash
99 |
100 | # ************* TASK 2: CREATE THE STRING TO SIGN*************
101 | # Match the algorithm to the hashing algorithm you use, either SHA-1 or
102 | # SHA-256 (recommended)
103 | algorithm = 'AWS4-HMAC-SHA256'
104 | credential_scope = datestamp + '/' + region + '/' + service + '/' + 'aws4_request'
105 | string_to_sign = algorithm + '\n' + amzdate + '\n' + credential_scope + '\n' + hashlib.sha256(
106 | canonical_request).hexdigest()
107 |
108 | # ************* TASK 3: CALCULATE THE SIGNATURE *************
109 | # Create the signing key using the function defined above.
110 | signing_key = getSignatureKey(secret_key, datestamp, region, service)
111 |
112 | # Sign the string_to_sign using the signing_key
113 | signature = hmac.new(signing_key, (string_to_sign).encode('utf-8'), hashlib.sha256).hexdigest()
114 |
115 | # ************* TASK 4: ADD SIGNING INFORMATION TO THE REQUEST *************
116 | # The signing information can be either in a query string value or in
117 | # a header named Authorization. This code shows how to use a header.
118 | # Create authorization header and add to request headers
119 | authorization_header = algorithm + ' ' + 'Credential=' + access_key + '/' + credential_scope + ', ' + 'SignedHeaders=' + signed_headers + ', ' + 'Signature=' + signature
120 |
121 | # The request can include any headers, but MUST include "host", "x-amz-date",
122 | # and (for this scenario) "Authorization". "host" and "x-amz-date" must
123 | # be included in the canonical_headers and signed_headers, as noted
124 | # earlier. Order here is not significant.
125 | # Python note: The 'host' header is added automatically by the Python 'requests' library.
126 | headers = {'x-amz-date': amzdate, 'Authorization': authorization_header}
127 |
128 | # ************* SEND THE REQUEST *************
129 | request_url = 'https://' + host + canonical_uri + '?' + canonical_querystring
130 |
131 | print('\nBEGIN REQUEST++++++++++++++++++++++++++++++++++++')
132 | print('Request URL = ' + request_url)
133 | r = requests.get(request_url, headers=headers)
134 |
135 | print('\nRESPONSE++++++++++++++++++++++++++++++++++++')
136 | print('Response code: %d\n' % r.status_code)
137 | print(r.text)
138 |
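139 | # For comparison (illustrative only, not part of the original code): the manual SigV4
140 | # construction above can also be expressed with requests_aws4auth, which the
141 | # talr-poll-accountcompliance function in this repo already uses:
142 | #
143 | #   from requests_aws4auth import AWS4Auth
144 | #   auth = AWS4Auth(access_key, secret_key, region, 'execute-api')
145 | #   r = requests.get('https://' + host + '/', auth=auth)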
--------------------------------------------------------------------------------
/sam/functions/talr-poll-configcompliance/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/sam/functions/talr-receptionist/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-receptionist/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-requeststatus/handler.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from __future__ import (absolute_import, division, print_function, unicode_literals)
3 |
4 | import json
5 | import logging
6 | import boto3
7 | from boto3.dynamodb.conditions import Key, Attr
8 | import os
9 | import sys
10 | import time
11 | import re
12 |
13 | # Path to modules needed to package local lambda function for upload
14 | currentdir = os.path.dirname(os.path.realpath(__file__))
15 | sys.path.append(os.path.join(currentdir, "./vendored"))
16 |
17 | # Modules downloaded into the vendored directory
18 |
19 | # Logging for Serverless
20 | log = logging.getLogger()
21 | log.setLevel(logging.DEBUG)
22 |
23 | # Initializing AWS services
24 | dynamodb = boto3.resource('dynamodb')
25 |
26 |
27 | def handler(event, context):
28 | log.debug("Received event {}".format(json.dumps(event)))
29 |
30 | taskStatus = dynamodb.Table(os.environ['TAILOR_TABLENAME_TASKSTATUS'])
31 | accountInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_ACCOUNTINFO'])
32 | cbInfo = dynamodb.Table(os.environ['TAILOR_TABLENAME_CBINFO'])
33 |
34 | try:
35 | print('path:requestId',
36 | re.match("^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
37 | event['params']['path']['requestId']))
38 | except Exception as e:
39 | print(e)
40 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
41 |
42 | if re.match("^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
43 | event['params']['path']['requestId']):
44 |
45 | requestId = event['params']['path']['requestId']
46 |
47 | # Lookup all task info
48 | getTaskStatus = taskStatus.query(
49 | KeyConditionExpression=Key('requestId').eq(requestId)
50 | )
51 |
52 | if getTaskStatus['Count'] == 0:
53 | raise Exception({"code": "4040", "message": "ERROR: Not found"})
54 |
55 | # Lookup email address for requestId
56 | getAccountEmailAddress = accountInfo.query(
57 | IndexName='gsiRequestId',
58 | KeyConditionExpression=Key('requestId').eq(requestId)
59 | )
60 | accountEmailAddress = getAccountEmailAddress['Items'][0]['accountEmailAddress']
61 |
62 | # Lookup accountInfo variables
63 | getAccountInfo = accountInfo.get_item(
64 | Key={
65 | 'accountEmailAddress': accountEmailAddress
66 | }
67 | )
68 |
69 | # Try to populate the accountId if it's present.
70 | # It would typically not be present if talr-poll-cla hasn't run yet.
71 | try:
72 | accountId = getAccountInfo['Item']['accountId']
73 | except KeyError as e:
74 | print("no accountId present")
75 | print(e)
76 | accountId = "Unknown"
77 |
78 | accountName = getAccountInfo['Item']['accountTagLongProjectName']
79 |
80 | taskAccountCreation = 'in_progress'
81 | taskAccountValidation = 'in_progress'
82 | taskAccountAdDl = 'in_progress'
83 | taskAccountAdSecGroups = 'in_progress'
84 | taskIam = 'in_progress'
85 | taskCloudtrail = 'in_progress'
86 | taskConfig = 'in_progress'
87 | taskCloudability = 'in_progress'
88 | taskEnterpriseSupport = 'in_progress'
89 | taskVpc = 'in_progress'
90 | taskVpcFlowLogs = 'in_progress'
91 | taskVpcDns = 'in_progress'
92 | taskDirectconnect = 'in_progress'
93 | taskNotify = 'in_progress'
94 |
95 | try:
96 | for i in getTaskStatus['Items']:
97 | if i['taskName'] == 'CLA_CREATION' and i['period'] == 'end':
98 | taskAccountCreation = 'complete'
99 | except KeyError as e:
100 | print('Account creation not yet complete')
101 | print(e)
102 |
103 | try:
104 | for i in getTaskStatus['Items']:
105 | if i['taskName'] == 'CLA_VALIDATION' and i['period'] == 'end':
106 | taskAccountValidation = 'complete'
107 | except KeyError as e:
108 | print('Account validation not yet complete')
109 | print(e)
110 |
111 | try:
112 | for i in getTaskStatus['Items']:
113 | if i['taskName'] == 'AD_DL' and i['period'] == 'end':
114 | taskAccountAdDl = 'complete'
115 | except KeyError as e:
116 | print('AD DL not yet complete')
117 | print(e)
118 |
119 | try:
120 | for i in getTaskStatus['Items']:
121 | if i['taskName'] == 'AD_SEC_GROUPS' and i['period'] == 'end':
122 | taskAccountAdSecGroups = 'complete'
123 | except KeyError as e:
124 | print('AD Sec Groups not yet complete')
125 | print(e)
126 |
127 | try:
128 | for i in getTaskStatus['Items']:
129 | if i['taskName'] == 'IAM' and i['period'] == 'end':
130 | taskIam = 'complete'
131 | except KeyError as e:
132 | print('IAM not yet complete')
133 | print(e)
134 |
135 | try:
136 | for i in getTaskStatus['Items']:
137 | if i['taskName'] == 'CLOUDTRAIL' and i['period'] == 'end':
138 | taskCloudtrail = 'complete'
139 | except KeyError as e:
140 | print('Cloudtrail not yet complete')
141 | print(e)
142 |
143 | try:
144 | for i in getTaskStatus['Items']:
145 | if i['taskName'] == 'CONFIG' and i['period'] == 'end':
146 | taskConfig = 'complete'
147 | except KeyError as e:
148 | print('Config not yet complete')
149 | print(e)
150 |
151 | try:
152 | for i in getTaskStatus['Items']:
153 | if i['taskName'] == 'ENTSUPPORT' and i['period'] == 'end':
154 | taskEnterpriseSupport = 'complete'
155 | except KeyError as e:
156 | print('Enterprise Support not yet complete')
157 | print(e)
158 |
159 | try:
160 | for i in getTaskStatus['Items']:
161 | if i['taskName'] == 'VPC' and i['period'] == 'end':
162 | taskVpc = 'complete'
163 | except KeyError as e:
164 | print('VPC not yet complete')
165 | print(e)
166 |
167 | try:
168 | for i in getTaskStatus['Items']:
169 | if i['taskName'] == 'VPCFLOWLOGS' and i['period'] == 'end':
170 | taskVpcFlowLogs = 'complete'
171 | except KeyError as e:
172 | print('VPC Flow Logs not yet complete')
173 | print(e)
174 |
175 | try:
176 | for i in getTaskStatus['Items']:
177 | if i['taskName'] == 'VPCDNS' and i['period'] == 'end':
178 | taskVpcDns = 'complete'
179 | except KeyError as e:
180 | print('VPC DNS not yet complete')
181 | print(e)
182 |
183 | try:
184 | for i in getTaskStatus['Items']:
185 | if i['taskName'] == 'CLOUDABILITY' and i['period'] == 'end':
186 | taskCloudability = 'complete'
187 | except KeyError as e:
188 | print('Cloudability not yet complete')
189 | print(e)
190 |
191 | try:
192 | for i in getTaskStatus['Items']:
193 | if i['taskName'] == 'DIRECTCONNECT' and i['period'] == 'end':
194 | taskDirectconnect = 'complete'
195 | except KeyError as e:
196 | print('Direct Connect not yet complete')
197 | print(e)
198 |
199 | try:
200 | for i in getTaskStatus['Items']:
201 | if i['taskName'] == 'NOTIFY' and i['period'] == 'end':
202 | taskNotify = 'complete'
203 | except KeyError as e:
204 | print('Notify not yet complete')
205 | print(e)
206 |
207 | # return output for new account creations
208 | for i in getTaskStatus['Items']:
209 | if "CLA_SUBMISSION" in i['taskName']:
210 | return {"status": taskNotify,
211 | "accountName": accountName,
212 | "accountId": accountId,
213 | "taskStatus": {
214 | "accountCreation": taskAccountCreation,
215 | "accountValidation": taskAccountValidation,
216 | "accountAdDl": taskAccountAdDl,
217 | "accountAdSecGroups": taskAccountAdSecGroups,
218 | "iam": taskIam,
219 | "cloudtrail": taskCloudtrail,
220 | "config": taskConfig,
221 | "cloudability": taskCloudability,
222 | "enterpriseSupport": taskEnterpriseSupport,
223 | "vpc": taskVpc,
224 | "vpcFlowLogs": taskVpcFlowLogs,
225 | "vpcDns": taskVpcDns,
226 | "directConnect": taskDirectconnect,
227 | "notify": taskNotify
228 | }
229 | }
230 |
231 | # return output for account updates
232 | for i in getTaskStatus['Items']:
233 | if getTaskStatus['Count'] <= 2 and i['taskName'] == 'VPCFLOWLOGS':
234 | return {"status": taskVpcFlowLogs,
235 | "accountName": accountName,
236 | "accountId": accountId,
237 | "taskStatus": {
238 | "vpcFlowLogs": taskVpcFlowLogs
239 | }
240 | }
241 | # return output for account updates
242 | for i in getTaskStatus['Items']:
243 | if getTaskStatus['Count'] <= 2 and i['taskName'] == 'VPCDNS':
244 | return {"status": taskVpcDns,
245 | "accountName": accountName,
246 | "accountId": accountId,
247 | "taskStatus": {
248 | "vpcDns": taskVpcDns
249 | }
250 | }
251 | # return output for account updates
252 | for i in getTaskStatus['Items']:
253 | if getTaskStatus['Count'] <= 2 and i['taskName'] == 'CLOUDABILITY':
254 | return {"status": taskCloudability,
255 | "accountName": accountName,
256 | "accountId": accountId,
257 | "taskStatus": {
258 | "cloudability": taskCloudability
259 | }
260 | }
261 | else:
262 | print("Bad requestId was provided")
263 | raise Exception({"code": "4000", "message": "ERROR: Bad request"})
264 |
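265 | # Illustrative note (not part of the original code): API Gateway invokes this handler with
266 | # a request mapping carrying the requestId path parameter read at the top of handler(),
267 | # so a minimal test event looks like (placeholder UUID):
268 | #
269 | #   {"params": {"path": {"requestId": "0f1e2d3c-4b5a-6978-8796-a5b4c3d2e1f0"}}}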
--------------------------------------------------------------------------------
/sam/functions/talr-requeststatus/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-requeststatus/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-validator/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-validator/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-vpc/requirements.txt:
--------------------------------------------------------------------------------
1 | pynipap
2 | netaddr
3 |
--------------------------------------------------------------------------------
/sam/functions/talr-vpcdns/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alanwill/aws-tailor/439043e124b662f8657becb495ed5c344dcc0a08/sam/functions/talr-vpcdns/requirements.txt
--------------------------------------------------------------------------------
/sam/functions/talr-vpcflowlogs/requirements.txt:
--------------------------------------------------------------------------------
1 | retry
2 |
--------------------------------------------------------------------------------
/sam/functions/talr-vpciam/requirements.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------