├── .gitignore ├── ACKNOWLEDGEMENTS ├── CONTRIBUTING.md ├── LICENSE ├── NOTICE ├── README.md ├── SkyWrapper.py ├── config.yml ├── exceptions ├── AthenaBadQueryException.py ├── CloudTrailBucketMissingLogsTableException.py ├── SingletonClassException.py └── ZeroBucketsFoundException.py ├── handlers ├── AthenaHandler.py ├── CloudTrailHandler.py ├── ConfigHandler.py ├── ExportStsHistoryHandler.py ├── PrivilegedRolesScanHandler.py ├── RolePermissionsHandler.py ├── StsHistoryHandler.py └── __init__.py ├── parsers ├── PoliciesPermissionsParser.py ├── UserIdentityParser.py └── __init__.py ├── requirements.txt ├── structures ├── AthenaTrailRow.py ├── StsToken.py ├── TrailBucket.py ├── UserIdentity.py └── __init__.py └── utilities ├── ArgParseUtilities.py ├── AwsAccountMetadataUtilities.py ├── Boto3Utilities.py ├── ExcelUtilities.py ├── FileUtilities.py ├── SkyWrapperConstants.py ├── StsTreeStructureUtilities.py └── __init__.py /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | *.py[cod] 3 | *$py.class 4 | .idea/ -------------------------------------------------------------------------------- /ACKNOWLEDGEMENTS: -------------------------------------------------------------------------------- 1 | SkyWrapper is using the following open source components: 2 | 3 | Boto 3 - The AWS SDK for Python (https://github.com/boto/boto3/): Apache License 2.0 4 | Copyright 2013-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
5 | 6 | PyYAML - The next generation YAML parser and emitter for Python: 7 | Copyright (c) 2017-2019 Ingy döt Net 8 | Copyright (c) 2006-2016 Kirill Simonov 9 | 10 | Permission is hereby granted, free of charge, to any person obtaining a copy of 11 | this software and associated documentation files (the "Software"), to deal in 12 | the Software without restriction, including without limitation the rights to 13 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies 14 | of the Software, and to permit persons to whom the Software is furnished to do 15 | so, subject to the following conditions: 16 | 17 | The above copyright notice and this permission notice shall be included in all 18 | copies or substantial portions of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 21 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 22 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 23 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 24 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 25 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 26 | SOFTWARE. 27 | 28 | XlsxWriter - A Python module for creating Excel XLSX files: 29 | Copyright (c) 2013, John McNamara 30 | All rights reserved. 31 | 32 | Redistribution and use in source and binary forms, with or without 33 | modification, are permitted provided that the following conditions are met: 34 | 35 | 1. Redistributions of source code must retain the above copyright notice, this 36 | list of conditions and the following disclaimer. 37 | 2. Redistributions in binary form must reproduce the above copyright notice, 38 | this list of conditions and the following disclaimer in the documentation 39 | and/or other materials provided with the distribution. 
40 | 41 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 42 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 43 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 44 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 45 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 46 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 47 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 48 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 49 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 50 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 51 | 52 | The views and conclusions contained in the software and documentation are those 53 | of the authors and should not be interpreted as representing official policies, 54 | either expressed or implied, of the FreeBSD Project. 55 | 56 | policyuniverse - Parse and Process AWS IAM Policies, Statements, ARNs, and wildcards: 57 | Apache License 58 | Version 2.0, January 2004 59 | http://www.apache.org/licenses/ 60 | 61 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 62 | 63 | 1. Definitions. 64 | 65 | "License" shall mean the terms and conditions for use, reproduction, 66 | and distribution as defined by Sections 1 through 9 of this document. 67 | 68 | "Licensor" shall mean the copyright owner or entity authorized by 69 | the copyright owner that is granting the License. 70 | 71 | "Legal Entity" shall mean the union of the acting entity and all 72 | other entities that control, are controlled by, or are under common 73 | control with that entity. 
For the purposes of this definition, 74 | "control" means (i) the power, direct or indirect, to cause the 75 | direction or management of such entity, whether by contract or 76 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 77 | outstanding shares, or (iii) beneficial ownership of such entity. 78 | 79 | "You" (or "Your") shall mean an individual or Legal Entity 80 | exercising permissions granted by this License. 81 | 82 | "Source" form shall mean the preferred form for making modifications, 83 | including but not limited to software source code, documentation 84 | source, and configuration files. 85 | 86 | "Object" form shall mean any form resulting from mechanical 87 | transformation or translation of a Source form, including but 88 | not limited to compiled object code, generated documentation, 89 | and conversions to other media types. 90 | 91 | "Work" shall mean the work of authorship, whether in Source or 92 | Object form, made available under the License, as indicated by a 93 | copyright notice that is included in or attached to the work 94 | (an example is provided in the Appendix below). 95 | 96 | "Derivative Works" shall mean any work, whether in Source or Object 97 | form, that is based on (or derived from) the Work and for which the 98 | editorial revisions, annotations, elaborations, or other modifications 99 | represent, as a whole, an original work of authorship. For the purposes 100 | of this License, Derivative Works shall not include works that remain 101 | separable from, or merely link (or bind by name) to the interfaces of, 102 | the Work and Derivative Works thereof. 
103 | 104 | "Contribution" shall mean any work of authorship, including 105 | the original version of the Work and any modifications or additions 106 | to that Work or Derivative Works thereof, that is intentionally 107 | submitted to Licensor for inclusion in the Work by the copyright owner 108 | or by an individual or Legal Entity authorized to submit on behalf of 109 | the copyright owner. For the purposes of this definition, "submitted" 110 | means any form of electronic, verbal, or written communication sent 111 | to the Licensor or its representatives, including but not limited to 112 | communication on electronic mailing lists, source code control systems, 113 | and issue tracking systems that are managed by, or on behalf of, the 114 | Licensor for the purpose of discussing and improving the Work, but 115 | excluding communication that is conspicuously marked or otherwise 116 | designated in writing by the copyright owner as "Not a Contribution." 117 | 118 | "Contributor" shall mean Licensor and any individual or Legal Entity 119 | on behalf of whom a Contribution has been received by Licensor and 120 | subsequently incorporated within the Work. 121 | 122 | 2. Grant of Copyright License. Subject to the terms and conditions of 123 | this License, each Contributor hereby grants to You a perpetual, 124 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 125 | copyright license to reproduce, prepare Derivative Works of, 126 | publicly display, publicly perform, sublicense, and distribute the 127 | Work and such Derivative Works in Source or Object form. 128 | 129 | 3. Grant of Patent License. 
Subject to the terms and conditions of 130 | this License, each Contributor hereby grants to You a perpetual, 131 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 132 | (except as stated in this section) patent license to make, have made, 133 | use, offer to sell, sell, import, and otherwise transfer the Work, 134 | where such license applies only to those patent claims licensable 135 | by such Contributor that are necessarily infringed by their 136 | Contribution(s) alone or by combination of their Contribution(s) 137 | with the Work to which such Contribution(s) was submitted. If You 138 | institute patent litigation against any entity (including a 139 | cross-claim or counterclaim in a lawsuit) alleging that the Work 140 | or a Contribution incorporated within the Work constitutes direct 141 | or contributory patent infringement, then any patent licenses 142 | granted to You under this License for that Work shall terminate 143 | as of the date such litigation is filed. 144 | 145 | 4. Redistribution. 
You may reproduce and distribute copies of the 146 | Work or Derivative Works thereof in any medium, with or without 147 | modifications, and in Source or Object form, provided that You 148 | meet the following conditions: 149 | 150 | (a) You must give any other recipients of the Work or 151 | Derivative Works a copy of this License; and 152 | 153 | (b) You must cause any modified files to carry prominent notices 154 | stating that You changed the files; and 155 | 156 | (c) You must retain, in the Source form of any Derivative Works 157 | that You distribute, all copyright, patent, trademark, and 158 | attribution notices from the Source form of the Work, 159 | excluding those notices that do not pertain to any part of 160 | the Derivative Works; and 161 | 162 | (d) If the Work includes a "NOTICE" text file as part of its 163 | distribution, then any Derivative Works that You distribute must 164 | include a readable copy of the attribution notices contained 165 | within such NOTICE file, excluding those notices that do not 166 | pertain to any part of the Derivative Works, in at least one 167 | of the following places: within a NOTICE text file distributed 168 | as part of the Derivative Works; within the Source form or 169 | documentation, if provided along with the Derivative Works; or, 170 | within a display generated by the Derivative Works, if and 171 | wherever such third-party notices normally appear. The contents 172 | of the NOTICE file are for informational purposes only and 173 | do not modify the License. You may add Your own attribution 174 | notices within Derivative Works that You distribute, alongside 175 | or as an addendum to the NOTICE text from the Work, provided 176 | that such additional attribution notices cannot be construed 177 | as modifying the License. 
178 | 179 | You may add Your own copyright statement to Your modifications and 180 | may provide additional or different license terms and conditions 181 | for use, reproduction, or distribution of Your modifications, or 182 | for any such Derivative Works as a whole, provided Your use, 183 | reproduction, and distribution of the Work otherwise complies with 184 | the conditions stated in this License. 185 | 186 | 5. Submission of Contributions. Unless You explicitly state otherwise, 187 | any Contribution intentionally submitted for inclusion in the Work 188 | by You to the Licensor shall be under the terms and conditions of 189 | this License, without any additional terms or conditions. 190 | Notwithstanding the above, nothing herein shall supersede or modify 191 | the terms of any separate license agreement you may have executed 192 | with Licensor regarding such Contributions. 193 | 194 | 6. Trademarks. This License does not grant permission to use the trade 195 | names, trademarks, service marks, or product names of the Licensor, 196 | except as required for reasonable and customary use in describing the 197 | origin of the Work and reproducing the content of the NOTICE file. 198 | 199 | 7. Disclaimer of Warranty. Unless required by applicable law or 200 | agreed to in writing, Licensor provides the Work (and each 201 | Contributor provides its Contributions) on an "AS IS" BASIS, 202 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 203 | implied, including, without limitation, any warranties or conditions 204 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 205 | PARTICULAR PURPOSE. You are solely responsible for determining the 206 | appropriateness of using or redistributing the Work and assume any 207 | risks associated with Your exercise of permissions under this License. 208 | 209 | 8. Limitation of Liability. 
In no event and under no legal theory, 210 | whether in tort (including negligence), contract, or otherwise, 211 | unless required by applicable law (such as deliberate and grossly 212 | negligent acts) or agreed to in writing, shall any Contributor be 213 | liable to You for damages, including any direct, indirect, special, 214 | incidental, or consequential damages of any character arising as a 215 | result of this License or out of the use or inability to use the 216 | Work (including but not limited to damages for loss of goodwill, 217 | work stoppage, computer failure or malfunction, or any and all 218 | other commercial damages or losses), even if such Contributor 219 | has been advised of the possibility of such damages. 220 | 221 | 9. Accepting Warranty or Additional Liability. While redistributing 222 | the Work or Derivative Works thereof, You may choose to offer, 223 | and charge a fee for, acceptance of support, warranty, indemnity, 224 | or other liability obligations and/or rights consistent with this 225 | License. However, in accepting such obligations, You may act only 226 | on Your own behalf and on Your sole responsibility, not on behalf 227 | of any other Contributor, and only if You agree to indemnify, 228 | defend, and hold each Contributor harmless for any liability 229 | incurred by, or claims asserted against, such Contributor by reason 230 | of your accepting any such warranty or additional liability. 231 | 232 | END OF TERMS AND CONDITIONS 233 | 234 | APPENDIX: How to apply the Apache License to your work. 235 | 236 | To apply the Apache License to your work, attach the following 237 | boilerplate notice, with the fields enclosed by brackets "{}" 238 | replaced with your own identifying information. (Don't include 239 | the brackets!) The text should be enclosed in the appropriate 240 | comment syntax for the file format. 
We also recommend that a 241 | file or class name and description of purpose be included on the 242 | same "printed page" as the copyright notice for easier 243 | identification within third-party archives. 244 | 245 | Copyright {yyyy} {name of copyright owner} 246 | 247 | Licensed under the Apache License, Version 2.0 (the "License"); 248 | you may not use this file except in compliance with the License. 249 | You may obtain a copy of the License at 250 | 251 | http://www.apache.org/licenses/LICENSE-2.0 252 | 253 | Unless required by applicable law or agreed to in writing, software 254 | distributed under the License is distributed on an "AS IS" BASIS, 255 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 256 | See the License for the specific language governing permissions and 257 | limitations under the License. -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | We are not accepting contributions at this time -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 CyberArk 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | SkyWrapper 2 | 3 | © 2019 CyberArk Software Ltd. All rights reserved 4 | 5 | License - CyberArk is licensed under the following Internal Use License: 6 | 7 | Internal Use License 8 | 9 | 1. This license grants you (whether you are a person or a corporation) a non-exclusive and non-transferable right to 10 | 11 | use the software which is licensed under this license (the “Software”) for Internal Use only. 12 | 13 | “Internal Use” is defined as use of the Software (whether in its binary form or its source code form) in any manner 14 | 15 | (including modifying it, enhancing it, upgrading or improving it) provided that it is only being used – if you are an 16 | 17 | individual - for your own personal use, and if you are a corporation, then for your organization’s own internal use. 18 | 19 | 20 | 21 | For clarity, Internal Use forbids “Commercial Use”, which means sale, lease, license, distribution, commercialization 22 | 23 | (including by way of SaaS), or any other form of making the Software (whether in a stand-alone form or 24 | 25 | incorporated/combined/bundled etc. with other software/hardware) available to third parties, except if it is done for the 26 | 27 | sole purpose of “Contribution” under the terms of section 2 below. 
If you wish to make Commercial Use of the Software, 28 | 29 | please contact CyberArk. 30 | 31 | 32 | 33 | 2. Contribution – We welcome and encourage any feedback, as well as contribution and share of modifications, 34 | 35 | enhancements, upgrades, improvements, and bug-fixes of the Software. By submitting/sharing a feedback or contribution (for 36 | 37 | convenience – both are referred as contribution), you hereby assign to CyberArk, on behalf of yourself and your 38 | 39 | organization, all right, title and interest in any copyrights and other proprietary rights you and your organization have 40 | 41 | in or to the contribution, and you waive any such and other rights under the laws of any applicable jurisdiction, 42 | 43 | including without limitation any moral rights, database rights, rights of attribution, authorship, integrity, paternity, 44 | 45 | reversion, etc., that may affect CyberArk's exclusive rights of ownership, transfer, enforcement, or exploitation of your 46 | 47 | contribution, including the right to license it under any terms it deems fit). Without derogating from CyberArk’s 48 | 49 | aforesaid rights and title in your contributions, your contribution will be licensed to third parties under this Internal 50 | 51 | Use License, and any contributions thereto made by such third parties, will also be licensed by them under this Internal 52 | 53 | Use License, and so forth. 54 | 55 | 56 | 57 | 3. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, 58 | 59 | INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTYIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND 60 | 61 | NONINFRINGEMENT ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 62 | 63 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 64 | 65 | OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 66 | 67 | WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF OR IN 68 | 69 | CONNECTION WITH THE USE OR DEALING OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![SkyWrapper](https://raw.githubusercontent.com/omer-ts/Images/master/skywrapper.png) 2 | 3 | ![GitHub release](https://img.shields.io/badge/version-1.0-blue) 4 | [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://choosealicense.com/licenses/mit/) 5 | ## Overview 6 | 7 | SkyWrapper is an open-source project which analyzes behaviors of temporary tokens created in a given AWS account. 8 | The tool is aiming to find suspicious creation forms and uses of temporary tokens to detect malicious activity in the account. 9 | The tool analyzes the AWS account, and creating an excel sheet includes all the currently living temporary tokens. 10 | A summary of the finding printed to the screen after each run. 11 | 12 | SkyWrapper DEMO: 13 | 14 | ![SkyWrapper](https://raw.githubusercontent.com/omer-ts/Images/master/skywrapper_demo.gif) 15 | 16 | --- 17 | 18 | ## Usage 19 | 20 | 1. Fill the required data in the **config** file 21 | 2. Make sure your users have the satisfied permissions for running the script (You can check this in the IAM at the summary page of the user) 22 | 3. 
Run the python script 23 | ```bash 24 | python SkyWrapper.py 25 | ``` 26 | 27 | ## Permissions 28 | 29 | For running this script, you will need at least the following permissions policy: 30 | ``` 31 | { 32 | "Version": "2012-10-17", 33 | "Statement": [ 34 | { 35 | "Sid": "S3TrailBucketPermissions", 36 | "Effect": "Allow", 37 | "Action": [ 38 | "s3:GetObject", 39 | "s3:ListBucketMultipartUploads", 40 | "s3:ListBucket", 41 | "s3:GetBucketLocation", 42 | "s3:ListMultipartUploadParts" 43 | ], 44 | "Resource": [ 45 | "arn:aws:s3:::{cloudtrail_bucket_name}/*", 46 | "arn:aws:s3:::{cloudtrail_bucket_name} 47 | ] 48 | }, 49 | { 50 | "Sid": "IAMReadPermissions", 51 | "Effect": "Allow", 52 | "Action": [ 53 | "iam:ListAttachedRolePolicies", 54 | "iam:ListRolePolicies", 55 | "iam:GetRolePolicy", 56 | "iam:GetPolicyVersion", 57 | "iam:GetPolicy", 58 | "iam:ListRoles" 59 | ], 60 | "Resource": [ 61 | "arn:aws:iam::*:policy/*", 62 | "arn:aws:iam::*:role/*" 63 | ] 64 | }, 65 | { 66 | "Sid": "GLUEReadWritePermissions", 67 | "Effect": "Allow", 68 | "Action": [ 69 | "glue:CreateTable", 70 | "glue:CreateDatabase", 71 | "glue:GetTable", 72 | "glue:GetDatabase" 73 | ], 74 | "Resource": "*" 75 | }, 76 | { 77 | "Sid": "CLOUDTRAILReadPermissions", 78 | "Effect": "Allow", 79 | "Action": [ 80 | "cloudtrail:DescribeTrails" 81 | ], 82 | "Resource": "*" 83 | }, 84 | { 85 | "Sid": "ATHENAReadPermissions", 86 | "Effect": "Allow", 87 | "Action": [ 88 | "athena:GetQueryResults", 89 | "athena:StartQueryExecution", 90 | "athena:GetQueryExecution" 91 | ], 92 | "Resource": "arn:aws:athena:*:*:workgroup/*" 93 | }, 94 | { 95 | "Sid": "S3AthenaResultsBucketPermissions", 96 | "Effect": "Allow", 97 | "Action": [ 98 | "s3:PutObject", 99 | "s3:GetObject", 100 | "s3:ListBucketMultipartUploads", 101 | "s3:CreateBucket", 102 | "s3:ListBucket", 103 | "s3:GetBucketLocation", 104 | "s3:ListMultipartUploadParts" 105 | ], 106 | "Resource": "arn:aws:s3:::aws-athena-query-results-*" 107 | } 108 | ] 109 | } 110 | ``` 111 
| **Make sure you change the "{trail_bucket}" with your trail's bucket name!** 112 | 113 | In case you have more than one trail, which you want to use the script also on them, you have to add them as well to the policy permissions resource section. 114 | 115 | ## Configuration 116 | 117 | **"config.yaml"** is the configuration file. 118 | In most cases, you can leave the configuration as is. In case you need to change it, the configuration file is documented. 119 | 120 | ```yaml 121 | athena: # Athena configuration 122 | database_name: default # The name of the database Athena uses for querying the trail bucket. 123 | table_name: cloudtrail_logs_{table_name} # The table name of the trail bucket name 124 | output_location: s3://aws-athena-query-results-{account_id}-{region}/ # The default output location bucket for the query results 125 | output: 126 | excel_output_file: run_results_{trail}_{account_id}-{date}.xlsx # Excel results file 127 | summary_output_file: run_summary_{trail}_{account_id}-{date}.txt # Summary text results file 128 | verify_https: True # Enable/ Disable verification of SSL certificates for HTTP requests 129 | account: 130 | account_id: 0 # The account id - Keep it as 0 in case you don't know it 131 | aws_access_key_id: # If you keep it empty, the script will look after the default AWS credentials stored in ~/.aws/credentials 132 | aws_secret_access_key: # If you keep it empty, the script will look after the default AWS credentials stored in ~/.aws/credentials 133 | aws_session_token: # If you keep it empty, the script will look after the default AWS credentials stored in ~/.aws/credentials 134 | ``` 135 | 136 | --- 137 | 138 | ## References: 139 | 140 | For more comments, suggestions, or questions, you can contact Omer Tsarfati ([@OmerTsarfati](https://twitter.com/OmerTsarfati)) and CyberArk Labs. 141 | You can find more projects developed by us in https://github.com/cyberark/. 
142 | -------------------------------------------------------------------------------- /SkyWrapper.py: -------------------------------------------------------------------------------- 1 | from handlers.StsHistoryHandler import StsHistoryHandler, ROOT_AKIA_TOKENS_USED_FOR_REFRESH_STS 2 | from handlers.ExportStsHistoryHandler import ExportStsHistoryHandler 3 | from utilities.AwsAccountMetadataUtilities import get_account_id 4 | from utilities.StsTreeStructureUtilities import print_root_access_key_sts_tree 5 | from handlers.ConfigHandler import ConfigHandler 6 | from handlers.CloudTrailHandler import CloudTrailHandler 7 | from exceptions.ZeroBucketsFoundException import ZeroBucketsFoundException 8 | from utilities.ArgParseUtilities import str2bool 9 | from utilities.FileUtilities import get_project_root 10 | import traceback 11 | import argparse 12 | import logging 13 | import os 14 | import time 15 | from utilities.SkyWrapperConstants import SKYWRAPPER_INTRO 16 | 17 | def logger_setup(): 18 | config = ConfigHandler.get_instance().get_config() 19 | logging.root.setLevel(logging.INFO) 20 | formatter = logging.basicConfig(format='%(message)s', datefmt='%Y-%m-%d %H:%M:%S') 21 | fh = logging.FileHandler( 22 | os.path.join(get_project_root(), "run_log_account_{0}-{1}.log".format(config["account"]["account_id"], config["run_timestamp"]))) 23 | logging.root.addHandler(fh) 24 | for handler in logging.root.handlers: 25 | handler.setFormatter(formatter) 26 | 27 | 28 | def get_user_cloudtrail_bucket(logger): 29 | cloudtrail_handler = CloudTrailHandler() 30 | account_cloudtrails = cloudtrail_handler.get_account_cloud_trails() 31 | 32 | if len(account_cloudtrails) == 0: 33 | raise ZeroBucketsFoundException("No cloudtrail buckets found!\nFor runing this script you must to have at least one bucket trail") 34 | logger.info("The CloudTrail's trails in your account:") 35 | for index, cloudtrail in enumerate(account_cloudtrails): 36 | logger.info("{index}. 
Trail name: {cloudtail_name} Trail's S3 Bucket name: {s3_bucket_name}".format(index=index+1, cloudtail_name=cloudtrail.trail_name, s3_bucket_name=cloudtrail.s3_bucket_name)) 37 | 38 | user_cloudtrail_bucket_choice = None 39 | while user_cloudtrail_bucket_choice is None: 40 | try: 41 | user_index_input = int(input("Enter the bucket number for the script to run on: ")) - 1 42 | if user_index_input > len(account_cloudtrails) - 1 or user_index_input < 0: 43 | # Invalid input - raising ValueError exception 44 | raise ValueError() 45 | else: 46 | user_cloudtrail_bucket_choice = account_cloudtrails[user_index_input] 47 | except ValueError: 48 | logger.warning("Invalid bucket number") 49 | 50 | logger.info("[+] Validating CloudTrail table for the chosen trail") 51 | cloudtrail_handler.is_cloudtrail_logs_table_exists(user_cloudtrail_bucket_choice) 52 | 53 | return user_cloudtrail_bucket_choice 54 | 55 | 56 | def main(): 57 | try: 58 | parser = argparse.ArgumentParser() 59 | parser.add_argument("--export-results", "-er", type=str2bool, default=True, 60 | required=False) 61 | parser.add_argument("--print-sts-refresh-tree", required=False, type=str2bool, 62 | default=False) 63 | options = parser.parse_args() 64 | 65 | # Settings configuration 66 | config = ConfigHandler.get_instance().get_config() 67 | run_timestamp = int(time.time()) 68 | account_id = get_account_id() 69 | config["account"]["account_id"] = account_id 70 | config["run_timestamp"] = run_timestamp 71 | logger_setup() 72 | logger = logging.getLogger("Main") 73 | logger.info(SKYWRAPPER_INTRO) 74 | # Get the user's CloudTrail Bucket 75 | user_cloudtrail_bucket = get_user_cloudtrail_bucket(logger) 76 | 77 | # Continue the settings configuration 78 | config["athena"]["table_name"] = config["athena"]["table_name"].format(table_name=user_cloudtrail_bucket.get_converted_s3_bucket_name_to_table_name()) 79 | config["athena"]["output_location"] = config["athena"]["output_location"].format(account_id=account_id, 
region=user_cloudtrail_bucket.home_region) 80 | logger.info("[+] Getting the temporary tokens history") 81 | 82 | # Main program logic 83 | sts_history = StsHistoryHandler(user_cloudtrail_bucket) 84 | 85 | # Export results options 86 | if options.print_sts_refresh_tree: 87 | print_root_access_key_sts_tree(sts_history.root_tokens[ROOT_AKIA_TOKENS_USED_FOR_REFRESH_STS]) 88 | 89 | if options.export_results: 90 | export_handler = ExportStsHistoryHandler(sts_history) 91 | export_handler.export_results() 92 | 93 | except Exception as e: 94 | logger = logging.getLogger("Main") 95 | logger.warning("SkyWrapper failed to run - Exception was raised") 96 | logger.warning("Exception details: {0}".format(e.args[0])) 97 | if "Unable to verify/create output bucket" in e.args[0]: 98 | logger.warning("Couldn't access the trail bucket. It might be insufficient permissions issue.") 99 | logger.warning(traceback.format_exc()) 100 | 101 | 102 | if __name__ == "__main__": 103 | main() 104 | -------------------------------------------------------------------------------- /config.yml: -------------------------------------------------------------------------------- 1 | athena: # Athena configuration 2 | database_name: default # The name of the database Athena uses for querying the trail bucket. 
3 | table_name: cloudtrail_logs_{table_name} # The table name of the trail bucket name 4 | output_location: s3://aws-athena-query-results-{account_id}-{region}/ # The default output location bucket for the query results 5 | output: 6 | excel_output_file: run_results_{trail}_{account_id}-{date}.xlsx # Excel results file 7 | summary_output_file: run_summary_{trail}_{account_id}-{date}.txt # Summary text results file 8 | verify_https: True # Enable/ Disable verification of SSL certificates for HTTP requests 9 | account: 10 | account_id: 0 # The account id - Keep it as 0 in case you don't know it 11 | aws_access_key_id: # If you keep it empty, the script will look after the default AWS credentials stored in ~/.aws/credentials 12 | aws_secret_access_key: # If you keep it empty, the script will look after the default AWS credentials stored in ~/.aws/credentials 13 | aws_session_token: # If you keep it empty, the script will look after the default AWS credentials stored in ~/.aws/credentials -------------------------------------------------------------------------------- /exceptions/AthenaBadQueryException.py: -------------------------------------------------------------------------------- 1 | class AthenaBadQueryException(Exception): 2 | pass -------------------------------------------------------------------------------- /exceptions/CloudTrailBucketMissingLogsTableException.py: -------------------------------------------------------------------------------- 1 | class CloudTrailBucketMissingLogsTableException(Exception): 2 | pass -------------------------------------------------------------------------------- /exceptions/SingletonClassException.py: -------------------------------------------------------------------------------- 1 | class SingletonClassException(Exception): 2 | pass -------------------------------------------------------------------------------- /exceptions/ZeroBucketsFoundException.py: 
-------------------------------------------------------------------------------- 1 | class ZeroBucketsFoundException(Exception): 2 | pass -------------------------------------------------------------------------------- /handlers/AthenaHandler.py: -------------------------------------------------------------------------------- 1 | from utilities.Boto3Utilities import client_session_creator 2 | import time 3 | from structures.AthenaTrailRow import AthenaTrailRow 4 | from exceptions.AthenaBadQueryException import AthenaBadQueryException 5 | import logging 6 | from handlers.ConfigHandler import ConfigHandler 7 | 8 | # Query Result Fields 9 | DATA_VALUE_TYPE = "VarCharValue" 10 | 11 | class AthenaHandler(object): 12 | def __init__(self, athena_bucket_region): 13 | self.__config = ConfigHandler.get_instance().get_config() 14 | self.athena_client = client_session_creator('athena', region_name=athena_bucket_region) 15 | self.__logger = logging.getLogger(__name__) 16 | 17 | 18 | def run_query(self, query_string, database_name, output_location): 19 | # Code inspired by https://gist.github.com/schledererj/b2e2a800998d61af2bbdd1cd50e08b76 20 | if database_name is not None: 21 | query_id = self.athena_client.start_query_execution( 22 | QueryString=query_string, 23 | QueryExecutionContext={'Database': database_name}, 24 | ResultConfiguration={'OutputLocation': output_location} 25 | )['QueryExecutionId'] 26 | else: 27 | query_id = self.athena_client.start_query_execution( 28 | QueryString=query_string, 29 | ResultConfiguration={'OutputLocation': output_location} 30 | )['QueryExecutionId'] 31 | 32 | self.__logger.debug("Running the following SQL query: {0}".format(query_string)) 33 | self.__logger.info("Athena is running a query, it might take a while") 34 | 35 | query_status = None 36 | while query_status == 'QUEUED' or query_status == 'RUNNING' or query_status is None: 37 | query_status_data = self.athena_client.get_query_execution(QueryExecutionId=query_id) 38 | query_status = 
query_status_data['QueryExecution']['Status']['State'] 39 | if query_status == 'FAILED' or query_status == 'CANCELLED': 40 | raise AthenaBadQueryException( 41 | 'Athena query with the string "{}" failed or was cancelled.\nReason: {}'.format(query_string, 42 | query_status_data[ 43 | 'QueryExecution'][ 44 | 'Status'][ 45 | 'StateChangeReason'])) 46 | # In order to prevent spamming the athena's servers, 47 | # we create a time gap between each status request 48 | time.sleep(2) 49 | 50 | return query_id 51 | 52 | def fetchall_athena(self, query_string, database_name, output_location): 53 | # Code inspired by https://gist.github.com/schledererj/b2e2a800998d61af2bbdd1cd50e08b76 54 | query_id = self.run_query(query_string, database_name, output_location) 55 | self.__logger.info("Fetching the query result") 56 | 57 | results_paginator = self.athena_client.get_paginator('get_query_results') 58 | results_iter = results_paginator.paginate( 59 | QueryExecutionId=query_id, 60 | PaginationConfig={'PageSize': 1000} 61 | 62 | ) 63 | results = [] 64 | data_list = [] 65 | for results_page in results_iter: 66 | for row in results_page['ResultSet']['Rows']: 67 | data_list.append(row['Data']) 68 | object_fields_descriptor = data_list[0] 69 | for datum in data_list[1:]: 70 | row = {} 71 | for column_id, column_data in enumerate(datum): 72 | column_name = object_fields_descriptor[column_id][DATA_VALUE_TYPE] 73 | if DATA_VALUE_TYPE in column_data: 74 | row[column_name] = column_data[DATA_VALUE_TYPE] 75 | else: 76 | row[column_name] = None 77 | results.append(AthenaTrailRow(row)) 78 | 79 | self.__logger.info("Fetched {0} rows".format(len(results))) 80 | return results -------------------------------------------------------------------------------- /handlers/CloudTrailHandler.py: -------------------------------------------------------------------------------- 1 | from utilities.Boto3Utilities import client_session_creator 2 | from structures.TrailBucket import TrailBucket 3 | from 
handlers.AthenaHandler import AthenaHandler 4 | from handlers.ConfigHandler import ConfigHandler 5 | from exceptions.AthenaBadQueryException import AthenaBadQueryException 6 | from exceptions.CloudTrailBucketMissingLogsTableException import CloudTrailBucketMissingLogsTableException 7 | import logging 8 | 9 | class CloudTrailHandler(object): 10 | def __init__(self): 11 | self.__logger = logging.getLogger(__name__) 12 | self.__config = ConfigHandler.get_instance().get_config() 13 | self.trails = None 14 | self.__raw_trails_list = [] 15 | 16 | def get_account_cloud_trails(self): 17 | if self.trails is None: 18 | self.trails = [] 19 | cloudtrail_client = client_session_creator('cloudtrail') 20 | trails_dict = cloudtrail_client.describe_trails() 21 | trail_list = trails_dict["trailList"] 22 | self.__raw_trails_list = trail_list 23 | self.parse_cloudtrail_list(trail_list) 24 | return self.trails 25 | 26 | def parse_cloudtrail_list(self, trail_list): 27 | for trail in trail_list: 28 | self.trails.append(TrailBucket( 29 | trail["Name"], 30 | trail["IncludeGlobalServiceEvents"], 31 | trail["IsOrganizationTrail"], 32 | trail["TrailARN"], 33 | trail["LogFileValidationEnabled"], 34 | trail["IsMultiRegionTrail"], 35 | trail["HasCustomEventSelectors"], 36 | trail["S3BucketName"], 37 | trail["HomeRegion"] 38 | )) 39 | 40 | def handle_creation_cloudtrail_logs_table(self, trail_object): 41 | self.__logger.warning( 42 | "There is no existing logs table for the trail {trail_name}.".format(trail_name=trail_object.trail_name)) 43 | create_logs_table = None 44 | while create_logs_table is None: 45 | user_answer = input("Would you like to create one? 
(Y=yes / N=No)").lower() 46 | if user_answer == "y": 47 | create_logs_table = True 48 | elif user_answer == "n": 49 | create_logs_table = False 50 | else: 51 | self.__logger.warning("Incorrect input!") 52 | if not create_logs_table: 53 | raise CloudTrailBucketMissingLogsTableException() 54 | else: 55 | self.create_cloudtrail_logs_table(trail_object) 56 | 57 | def is_cloudtrail_logs_table_exists(self, trail_object): 58 | table_name = self.__config["athena"]["table_name"].format(table_name=trail_object.get_converted_s3_bucket_name_to_table_name()) 59 | output_location = self.__config["athena"]["output_location"].format(account_id=self.__config["account"]["account_id"], region=trail_object.home_region) 60 | check_cloudtrail_existing_query = "select * from \"{table_name}\" limit 1".format(table_name=table_name) 61 | athena_handler = AthenaHandler(trail_object.home_region) 62 | try: 63 | athena_handler.fetchall_athena(check_cloudtrail_existing_query, 64 | self.__config["athena"]["database_name"], 65 | output_location 66 | ) 67 | except AthenaBadQueryException as e: 68 | if " does not exist" in e.args[0]: 69 | self.handle_creation_cloudtrail_logs_table(trail_object) 70 | 71 | def create_cloudtrail_logs_table(self, trail_object): 72 | account_id = self.__config["account"]["account_id"] 73 | creating_default_database = "CREATE DATABASE IF NOT EXISTS default;" 74 | cloudtrails_logs_create_table_query = """ 75 | CREATE EXTERNAL TABLE IF NOT EXISTS cloudtrail_logs_{cloudtrail_table_name} ( 76 | eventVersion STRING, 77 | userIdentity STRUCT< 78 | type: STRING, 79 | principalId: STRING, 80 | arn: STRING, 81 | accountId: STRING, 82 | invokedBy: STRING, 83 | accessKeyId: STRING, 84 | userName: STRING, 85 | sessionContext: STRUCT< 86 | attributes: STRUCT< 87 | mfaAuthenticated: STRING, 88 | creationDate: STRING>, 89 | sessionIssuer: STRUCT< 90 | type: STRING, 91 | principalId: STRING, 92 | arn: STRING, 93 | accountId: STRING, 94 | userName: STRING>>>, 95 | eventTime STRING, 96 | 
eventSource STRING, 97 | eventName STRING, 98 | awsRegion STRING, 99 | sourceIpAddress STRING, 100 | userAgent STRING, 101 | errorCode STRING, 102 | errorMessage STRING, 103 | requestParameters STRING, 104 | responseElements STRING, 105 | additionalEventData STRING, 106 | requestId STRING, 107 | eventId STRING, 108 | resources ARRAY>, 112 | eventType STRING, 113 | apiVersion STRING, 114 | readOnly STRING, 115 | recipientAccountId STRING, 116 | serviceEventDetails STRING, 117 | sharedEventID STRING, 118 | vpcEndpointId STRING 119 | ) 120 | COMMENT 'CloudTrail table for {cloudtrail_bucket_name} bucket' 121 | ROW FORMAT SERDE 'com.amazon.emr.hive.serde.CloudTrailSerde' 122 | STORED AS INPUTFORMAT 'com.amazon.emr.cloudtrail.CloudTrailInputFormat' 123 | OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' 124 | LOCATION 's3://{cloudtrail_bucket_name}/AWSLogs/{account_id}/CloudTrail/' 125 | TBLPROPERTIES ('classification'='cloudtrail'); 126 | """.format(account_id=account_id, cloudtrail_bucket_name= trail_object.s3_bucket_name, cloudtrail_table_name=trail_object.get_converted_s3_bucket_name_to_table_name()) 127 | 128 | athena_handler = AthenaHandler(trail_object.home_region) 129 | output_location = self.__config["athena"]["output_location"].format(account_id=self.__config["account"]["account_id"], region=trail_object.home_region) 130 | self.__logger.info("Creating logs tables to the selected trail") 131 | athena_handler.run_query(creating_default_database, 132 | None, 133 | output_location 134 | ) 135 | athena_handler.run_query(cloudtrails_logs_create_table_query, 136 | self.__config["athena"]["database_name"], 137 | output_location 138 | ) 139 | -------------------------------------------------------------------------------- /handlers/ConfigHandler.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | from exceptions.SingletonClassException import SingletonClassException 3 | from 
utilities.FileUtilities import get_project_root 4 | import os 5 | 6 | class ConfigHandler(object): 7 | __instance = None 8 | 9 | @staticmethod 10 | def get_instance(): 11 | """ Static access method. """ 12 | if ConfigHandler.__instance == None: 13 | ConfigHandler() 14 | return ConfigHandler.__instance 15 | 16 | def __init__(self): 17 | """ Virtually private constructor. """ 18 | if ConfigHandler.__instance != None: 19 | raise SingletonClassException("This class is a singleton!") 20 | else: 21 | self.config = self.__load_config_file() 22 | ConfigHandler.__instance = self 23 | 24 | def __load_config_file(self): 25 | config_path = os.path.join(get_project_root(), "config.yml") 26 | with open(config_path, 'r') as yaml_config: 27 | config = yaml.load(yaml_config, Loader=yaml.FullLoader) 28 | return config 29 | 30 | def get_config(self): 31 | return self.config 32 | 33 | def save_config(self): 34 | config_path = os.path.join(get_project_root(), "config.yml") 35 | with open(config_path, 'w') as yaml_config: 36 | yaml.dump(self.config, yaml_config, default_flow_style=False) -------------------------------------------------------------------------------- /handlers/ExportStsHistoryHandler.py: -------------------------------------------------------------------------------- 1 | import xlsxwriter 2 | from handlers.ConfigHandler import ConfigHandler 3 | from utilities.FileUtilities import get_project_root 4 | from utilities.StsTreeStructureUtilities import count_node_children_and_live_nodes 5 | import os 6 | from datetime import datetime 7 | from structures.StsToken import MANUAL_TOKEN_SOURCE, EC2_TOKEN_SOURCE, LAMBDA_TOKEN_SOURCE, OTHER_TOKEN_SOURCE 8 | from utilities.ExcelUtilities import * 9 | from operator import methodcaller, attrgetter 10 | from utilities.SkyWrapperConstants import SKYWRAPPER_INTRO 11 | from handlers.PrivilegedRolesScanHandler import PrivilegedRolesScanHandler 12 | 13 | TOKENS_SHEET_COLUMNS = [ 14 | "Token", 15 | "Suspected Key", 16 | "Suspicion 
Description", 17 | "Token Age", 18 | "Token Root Source", 19 | "Role Name", 20 | "Role Session Name", 21 | "Permissions Summary", 22 | "Token TTL", 23 | "Event Time", 24 | "Expiration Time", 25 | "Event Name", 26 | "Aws Region", 27 | "Source Ip Address", 28 | "User Agent", 29 | "Event Type", 30 | "Event Source", 31 | "Request Id", 32 | "Event Id", 33 | "Role Arn", 34 | "Detailed Role Permissions" 35 | ] 36 | 37 | AKIA_SHEET_COLUMNS = [ 38 | "User", 39 | "Akia Token", 40 | "Suspicion Description" 41 | ] 42 | 43 | class ExportStsHistoryHandler(object): 44 | def __init__(self, sts_history_object): 45 | self.__sts_history_object = sts_history_object 46 | self.worksheets_columns_max_size = {} 47 | self.__inserted_index = 1 48 | 49 | def write_row(self, worksheet, row_index, col_index, data): 50 | if worksheet.name not in self.worksheets_columns_max_size.keys(): 51 | self.worksheets_columns_max_size[worksheet.name] = {} 52 | if col_index not in self.worksheets_columns_max_size[worksheet.name].keys() or\ 53 | self.worksheets_columns_max_size[worksheet.name][col_index] < len(str(data)): 54 | self.worksheets_columns_max_size[worksheet.name][col_index] = len(str(data)) 55 | worksheet.write(row_index, col_index, data) 56 | 57 | def set_columns_headers(self, worksheet, columns): 58 | for column_index, column_name in enumerate(columns, start=0): 59 | self.write_row(worksheet, 0, column_index, column_name) 60 | 61 | def __add_token_to_tokens_sheet(self, row_index, token, tokens_sheet): 62 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Token"), token.token) 63 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Suspected Key"), token.is_suspicious_token()) 64 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Suspicion Description"), token.suspicious_reason) 65 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Token Age"), token.get_living_days()) 66 | self.write_row(tokens_sheet, row_index, 
TOKENS_SHEET_COLUMNS.index("Token Root Source"), token.get_token_source_string()) 67 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Role Name"), token.role_name) 68 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Role Session Name"), token.role_session_name) 69 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Permissions Summary"), token.get_token_privileged_information()) 70 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Token TTL"), "{0} minutes".format((token.expiration_time - datetime.utcnow()).seconds // 60)) 71 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Event Time"), token.event_time.strftime("%Y-%m-%dT%H:%M:%SZ")) 72 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Expiration Time"), token.expiration_time.strftime("%Y-%m-%dT%H:%M:%SZ")) 73 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Event Name"), token.event_name) 74 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Aws Region"), token.aws_region) 75 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Source Ip Address"), token.source_ip_address) 76 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("User Agent"), token.user_agent) 77 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Event Type"), token.event_type) 78 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Event Source"), token.event_source) 79 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Request Id"), token.request_id) 80 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Event Id"), token.event_id) 81 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Role Arn"), token.role_arn) 82 | self.write_row(tokens_sheet, row_index, TOKENS_SHEET_COLUMNS.index("Detailed Role Permissions"), 
token.get_token_privileged_information(detailed=True)) 83 | 84 | def export_results(self): 85 | config_handler = ConfigHandler.get_instance() 86 | config = config_handler.config 87 | project_path = get_project_root() 88 | excel_output_file_location = os.path.join(project_path, config["output"]["excel_output_file"].format(trail=self.__sts_history_object.cloudwatch_trail_object.trail_name, account_id=config["account"]["account_id"], date=config["run_timestamp"])) 89 | summary_file_location = os.path.join(project_path, config["output"]["summary_output_file"].format( 90 | trail=self.__sts_history_object.cloudwatch_trail_object.trail_name, 91 | account_id=config["account"]["account_id"], date=config["run_timestamp"])) 92 | self.export_data_to_excel(excel_output_file_location) 93 | self.create_summary_file(summary_file_location) 94 | 95 | def create_summary_file(self, summary_file_location): 96 | summary_data_file = SKYWRAPPER_INTRO 97 | summary_data_file += "\nSkyWrapper run summary:\n" 98 | data_template = "\t{data}\n" 99 | 100 | privileged_tokens = [] 101 | live_ec2_tokens = [] 102 | live_lambda_tokens = [] 103 | live_manual_tokens = [] 104 | live_other_tokens = [] 105 | oldest_token = None 106 | 107 | for live_token in self.__sts_history_object.live_temporary_tokens: 108 | if oldest_token is None or oldest_token.living_days < live_token.living_days: 109 | oldest_token = live_token 110 | token_privileges = live_token.get_token_privileged_information() 111 | 112 | if token_privileges != "": 113 | privileged_tokens.append(live_token) 114 | if EC2_TOKEN_SOURCE is live_token.token_source: 115 | live_ec2_tokens.append(live_token) 116 | elif LAMBDA_TOKEN_SOURCE is live_token.token_source: 117 | live_lambda_tokens.append(live_token) 118 | elif MANUAL_TOKEN_SOURCE is live_token.token_source: 119 | live_manual_tokens.append(live_token) 120 | else: 121 | live_other_tokens.append(live_token) 122 | 123 | if oldest_token is None: 124 | oldest_token_living_days = 0 125 | else: 126 
| oldest_token_living_days = oldest_token.living_days 127 | 128 | privileged_roles_handler = PrivilegedRolesScanHandler() 129 | privileged_roles_handler.scan_for_privileged_roles() 130 | 131 | with open(summary_file_location, "w") as output_file: 132 | summary_data_file += data_template.format(data="Live temporary tokens found: " + str(len(self.__sts_history_object.live_temporary_tokens))) 133 | summary_data_file += data_template.format( 134 | data="The number of privileged tokens: " + str(len(privileged_tokens))) 135 | summary_data_file += data_template.format(data="The oldest token is live for {days_number} days".format(days_number=oldest_token_living_days)) 136 | summary_data_file += data_template.format(data="The number of suspicious live temporary tokens discovered: " + str(len(self.__sts_history_object.suspicious_tokens))) 137 | summary_data_file += data_template.format( 138 | data="The number of live ec2 tokens: " + str(len(live_ec2_tokens))) 139 | summary_data_file += data_template.format( 140 | data="The number of live lambda tokens: " + str(len(live_lambda_tokens))) 141 | summary_data_file += data_template.format( 142 | data="The number of live manual tokens: " + str(len(live_manual_tokens))) 143 | summary_data_file += data_template.format( 144 | data="The number of other live tokens: " + str(len(live_other_tokens))) 145 | if len(privileged_tokens) > 0: 146 | summary_data_file += data_template.format( 147 | data="") 148 | summary_data_file += data_template.format( 149 | data="List of the found privileges tokens:") 150 | for privilege_token in privileged_tokens: 151 | summary_data_file += data_template.format( 152 | data="Privileged token: {token_key_id} | Privileges: {privileges} | Token source: {token_source}".format(token_key_id=privilege_token.token, privileges=privilege_token.get_token_privileged_information(), token_source=privilege_token.token_source)) 153 | if len(privileged_roles_handler.privileged_roles) > 0: 154 | summary_data_file += 
data_template.format( 155 | data="") 156 | summary_data_file += data_template.format( 157 | data="List of the privilege roles:") 158 | for role in privileged_roles_handler.privileged_roles: 159 | summary_data_file += data_template.format( 160 | data="Role name: {role_name} | Role ARN: {role_arn}".format( 161 | role_name=role["RoleName"], 162 | role_arn=role["Arn"])) 163 | if len(privileged_roles_handler.refresh_privileged_roles) > 0: 164 | summary_data_file += data_template.format( 165 | data="") 166 | summary_data_file += data_template.format( 167 | data="List of the roles can be use for refreshing tokens:") 168 | for role in privileged_roles_handler.refresh_privileged_roles: 169 | summary_data_file += data_template.format( 170 | data="Role name: {role_name} | Role ARN: {role_arn}".format( 171 | role_name=role["RoleName"], 172 | role_arn=role["Arn"])) 173 | 174 | output_file.write(summary_data_file) 175 | 176 | def export_data_to_excel(self, excel_output_file_location): 177 | wb = xlsxwriter.Workbook(excel_output_file_location) 178 | live_temporary_tokens_sheet = wb.add_worksheet('Live temporary tokens') 179 | flagged_temporary_tokens_sheet = wb.add_worksheet('Suspected live temporary tokens') 180 | refresh_tokens_akia_sheet = wb.add_worksheet('Suspected permanent tokens') 181 | # Live temporary Tokens Sheet Columns 182 | self.set_columns_headers(live_temporary_tokens_sheet, TOKENS_SHEET_COLUMNS) 183 | 184 | # Flagged temporary Tokens Sheet Columns 185 | self.set_columns_headers(flagged_temporary_tokens_sheet, TOKENS_SHEET_COLUMNS) 186 | 187 | # User and Akia Tokens Sheet Columns q 188 | self.set_columns_headers(refresh_tokens_akia_sheet, AKIA_SHEET_COLUMNS) 189 | 190 | flagged_tokens_counter = 0 191 | 192 | live_temporary_tokens = sorted(self.__sts_history_object.live_temporary_tokens, key=methodcaller('number_of_suspicious_reasons'), reverse=True) 193 | live_temporary_tokens = sorted(live_temporary_tokens,key=attrgetter('living_days'), reverse=True) 194 | 
live_temporary_tokens = sorted(live_temporary_tokens, 195 | key=methodcaller('rate_of_privilege_token'), reverse=True) 196 | live_temporary_tokens = sorted(live_temporary_tokens, key=methodcaller('is_suspicious_token'), reverse=True) 197 | 198 | for row_index, live_token in enumerate(live_temporary_tokens, start=1): 199 | if live_token.is_suspicious_token(): 200 | self.__add_token_to_tokens_sheet(flagged_tokens_counter + 1, live_token, flagged_temporary_tokens_sheet) 201 | flagged_tokens_counter += 1 202 | self.__add_token_to_tokens_sheet(row_index, live_token, live_temporary_tokens_sheet) 203 | 204 | flagged_temporary_tokens_sheet.autofilter( 205 | "A1:W{amount_of_tokens}".format(amount_of_tokens=len(live_temporary_tokens))) 206 | live_temporary_tokens_sheet.autofilter( 207 | "A1:W{amount_of_tokens}".format(amount_of_tokens=len(live_temporary_tokens))) 208 | 209 | akia_tokens = {} 210 | for persistence_token in self.__sts_history_object.root_temporary_tokens: 211 | akia_owner_user = self.__sts_history_object.root_temporary_tokens[persistence_token].user 212 | akia_key = self.__sts_history_object.root_temporary_tokens[persistence_token].parent_access_key_id 213 | count_results = count_node_children_and_live_nodes( 214 | self.__sts_history_object.root_temporary_tokens[persistence_token]) 215 | 216 | if akia_key in akia_tokens: 217 | akia_tokens[akia_key]["number_of_child_tokens"] += count_results[0] 218 | akia_tokens[akia_key]["live_created_tokens"] += count_results[1] 219 | if self.__sts_history_object.root_temporary_tokens[persistence_token].role_arn is not None: 220 | akia_tokens[akia_key]["role_arn_token"].add( 221 | self.__sts_history_object.root_temporary_tokens[persistence_token].role_arn) 222 | else: 223 | roles_arn_set = set() 224 | if self.__sts_history_object.root_temporary_tokens[persistence_token].role_arn is not None: 225 | roles_arn_set.add(self.__sts_history_object.root_temporary_tokens[persistence_token].role_arn) 226 | akia_tokens[akia_key] = { 227 
| "akia_owner_user": akia_owner_user, 228 | "number_of_child_tokens": count_results[0], 229 | "live_created_tokens": count_results[1], 230 | "role_arn_token": roles_arn_set 231 | } 232 | 233 | for row_index, akia_key in enumerate(akia_tokens, start=1): 234 | self.write_row(refresh_tokens_akia_sheet, row_index, 0, akia_tokens[akia_key]["akia_owner_user"]) 235 | self.write_row(refresh_tokens_akia_sheet, row_index, 1, akia_key) 236 | suspicion_reason = """This token created {number_of_child_tokens} tokens. \r\n{live_created_tokens} are live tokens. \r\nThe roles arn of the created tokens: \n{roles_arn} 237 | """.format(number_of_child_tokens=akia_tokens[akia_key]["number_of_child_tokens"], 238 | live_created_tokens=akia_tokens[akia_key]["live_created_tokens"], 239 | roles_arn="\n".join(akia_tokens[akia_key]["role_arn_token"])) 240 | self.write_row(refresh_tokens_akia_sheet, row_index, 2, suspicion_reason) 241 | 242 | set_sheet_columns_sizes(live_temporary_tokens_sheet, self.worksheets_columns_max_size) 243 | set_sheet_columns_sizes(flagged_temporary_tokens_sheet, self.worksheets_columns_max_size) 244 | set_sheet_columns_sizes(refresh_tokens_akia_sheet, self.worksheets_columns_max_size) 245 | 246 | wb.close() 247 | 248 | -------------------------------------------------------------------------------- /handlers/PrivilegedRolesScanHandler.py: -------------------------------------------------------------------------------- 1 | from utilities.Boto3Utilities import client_session_creator 2 | from handlers.RolePermissionsHandler import RolePermissionsHandler 3 | from parsers.PoliciesPermissionsParser import PoliciesPermissionsParser 4 | from handlers.RolePermissionsHandler import ATTACHED_POLICIES, INLINE_POLICIES 5 | 6 | 7 | class PrivilegedRolesScanHandler(object): 8 | def __init__(self): 9 | self._role_permission_handler = RolePermissionsHandler.get_instance() 10 | self.privileged_roles = [] 11 | self.refresh_privileged_roles = [] 12 | 13 | def 
scan_for_privileged_roles(self): 14 | """ 15 | Scan for privileged roles in the account 16 | :return: 17 | """ 18 | iam_client = client_session_creator('iam') 19 | role_list_response = None 20 | marker = None 21 | while (role_list_response is None or role_list_response['IsTruncated'] is True): 22 | if marker is None: 23 | role_list_response = iam_client.list_roles() 24 | else: 25 | role_list_response = iam_client.list_roles(Marker=marker) 26 | if "Roles" in role_list_response: 27 | roles_list = role_list_response 28 | for role in roles_list["Roles"]: 29 | if self.__is_a_privileged_role(role): 30 | self.privileged_roles.append(role) 31 | if self.__is_role_can_be_use_for_persistence(role): 32 | self.refresh_privileged_roles.append(role) 33 | 34 | 35 | if role_list_response['IsTruncated']: 36 | marker = role_list_response['Marker'] 37 | 38 | def get_privileges_roles(self): 39 | return self.privileged_roles 40 | 41 | def get_refresh_privileges_roles(self): 42 | return self.refresh_privileged_roles 43 | 44 | def __is_a_privileged_role(self, role): 45 | role_permissions = self._role_permission_handler.get_role_policy_permissions(role["RoleName"]) 46 | role_policies_permissions = role_permissions[INLINE_POLICIES].copy() 47 | role_policies_permissions.update(role_permissions[ATTACHED_POLICIES]) 48 | policies_permission_parser = PoliciesPermissionsParser(role_policies_permissions) 49 | policies_permission_parser.parse() 50 | if len(policies_permission_parser.get_permissions_status()) > 0: 51 | return True 52 | return False 53 | 54 | def __is_role_can_be_use_for_persistence(self, role): 55 | role_permissions = self._role_permission_handler.get_role_policy_permissions(role["RoleName"]) 56 | role_policies_permissions = role_permissions[INLINE_POLICIES].copy() 57 | role_policies_permissions.update(role_permissions[ATTACHED_POLICIES]) 58 | policies_permission_parser = PoliciesPermissionsParser(role_policies_permissions) 59 | policies_permission_parser.parse() 60 | if 
policies_permission_parser.is_permission_allowed(["sts:AssumeRole", "sts:*"]): 61 | return True 62 | return False 63 | -------------------------------------------------------------------------------- /handlers/RolePermissionsHandler.py: -------------------------------------------------------------------------------- 1 | from utilities.Boto3Utilities import client_session_creator 2 | import logging 3 | from handlers.ConfigHandler import ConfigHandler 4 | from exceptions.SingletonClassException import SingletonClassException 5 | from policyuniverse.policy import Policy 6 | 7 | ATTACHED_POLICIES = "AttachedPolicies" 8 | INLINE_POLICIES = "InlinePolicies" 9 | 10 | class RolePermissionsHandler(object): 11 | __instance = None 12 | @staticmethod 13 | def get_instance(): 14 | if RolePermissionsHandler.__instance is None: 15 | RolePermissionsHandler() 16 | return RolePermissionsHandler.__instance 17 | 18 | def __init__(self): 19 | if RolePermissionsHandler.__instance is not None: 20 | raise SingletonClassException("This class is a singleton!") 21 | else: 22 | self.__config = ConfigHandler.get_instance().get_config() 23 | self.__logger = logging.getLogger(__name__) 24 | self.__roles = {} 25 | self.__attached_policies = {} 26 | RolePermissionsHandler.__instance = self 27 | 28 | def get_role_policy_permissions(self, role): 29 | if role not in self.__roles: 30 | try: 31 | iam_client = client_session_creator('iam') 32 | 33 | self.__logger.debug("Getting the permissions attached to the role: {0}".format(role)) 34 | 35 | attached_role_policies = iam_client.list_attached_role_policies(RoleName=role) 36 | role_policies = iam_client.list_role_policies(RoleName=role) 37 | 38 | 39 | attached_role_policies_list = attached_role_policies['AttachedPolicies'] 40 | policy_permissions = { 41 | INLINE_POLICIES:{}, 42 | ATTACHED_POLICIES:{} 43 | } 44 | for attached_policy in attached_role_policies_list: 45 | attached_role_arn = attached_policy["PolicyArn"] 46 | if attached_role_arn not in 
self.__attached_policies: 47 | current_policy_version = iam_client.get_policy(PolicyArn=attached_role_arn)['Policy']['DefaultVersionId'] 48 | policy_permissions_statement_list = iam_client.get_policy_version(PolicyArn=attached_role_arn, VersionId=current_policy_version)['PolicyVersion']['Document'] 49 | policy_object = Policy(policy_permissions_statement_list) 50 | policy_permissions_statement_list = policy_object.statements 51 | self.__attached_policies[attached_role_arn] = policy_permissions_statement_list 52 | policy_permissions[ATTACHED_POLICIES][attached_role_arn] = self.__attached_policies[attached_role_arn] 53 | 54 | role_policies_list = role_policies["PolicyNames"] 55 | for policy in role_policies_list: 56 | policy_data = iam_client.get_role_policy(RoleName=role, PolicyName=policy) 57 | policy_object = Policy(policy_data["PolicyDocument"]) 58 | policy_statement_list = policy_object.statements 59 | policy_permissions[INLINE_POLICIES][policy] = policy_statement_list 60 | except Exception as e: 61 | policy_permissions = { 62 | INLINE_POLICIES: {}, 63 | ATTACHED_POLICIES: {} 64 | } 65 | self.__roles[role] = policy_permissions 66 | return self.__roles[role] 67 | 68 | 69 | 70 | 71 | -------------------------------------------------------------------------------- /handlers/StsHistoryHandler.py: -------------------------------------------------------------------------------- 1 | from handlers.AthenaHandler import AthenaHandler 2 | from handlers.ConfigHandler import ConfigHandler 3 | from structures.StsToken import StsToken, EC2_ASIA_REFRESHED_MANUAL_FLAG, LIVE_REFRESHED_TOKEN_FLAG 4 | import logging 5 | from utilities.Boto3Utilities import instance_id_validator 6 | 7 | # Query Constants 8 | GET_ACCESS_TOKENS_FROM_STS_QUERY = """SELECT * FROM \"{0}\" WHERE useridentity.accesskeyid LIKE '%ASIA%' and requestparameters LIKE '%"roleArn"%' and responseelements LIKE '%"accessKeyId":"ASIA%' order by eventtime desc;""" 9 | GET_ORIGIN_ACCESS_TOKENS_FROM_STS_QUERY = """SELECT 
* FROM \"{0}\" WHERE useridentity.accesskeyid LIKE '%AKIA%' and requestparameters LIKE '%"roleArn"%' And responseelements LIKE '%"accessKeyId":"ASIA%' order by eventtime desc;"""
# Finds temporary (ASIA*) tokens issued during the last 36 hours whose expiration
# timestamp is still in the future, i.e. tokens that are live right now.  The CTE
# strips commas from the credentials "expiration" field so date_parse() can read it.
GET_LIVE_TEMPORARY_TOKENS_QUERY = """with temporary_tokens as (SELECT *, REPLACE(json_extract_scalar(responseelements, '$.credentials.expiration'), ',', '') AS ext FROM \"{0}\" WHERE responseelements LIKE '%"accessKeyId":"ASIA%' and requestparameters LIKE '%"roleArn"%' and eventTime > to_iso8601(current_timestamp - interval '36' hour) order by eventtime desc)
select * from temporary_tokens where date_parse(ext, '%b %e %Y %l:%i:%s %p')> date(current_timestamp)"""

# constants for extraction of keys
USER_IDENTITY_FILED = "useridentity"
ACCESS_KEY_ID_FILED = "accessKeyId"
# Constants for suspicious tokens - human readable reasons attached to flagged tokens
EC2_ASIA_REFRESHED_MANUAL = "Token generated for EC2 machine refreshed manually"
LIVE_REFRESHED_TOKEN = "This token is a refreshed token"
# ROOTS KEYS CONSTANTS - indexes into the 3-tuple returned by
# parse_athena_rows_akia_access_key_id_for_root_sts()
ROOT_AKIA_TOKENS_USED_FOR_REFRESH_STS = 0
REGULAR_AKIA_TOKENS = 1
ROOT_STS_TOKENS_USED_TO_REFRESH_STS = 2

class StsHistoryHandler(object):
    """Scans the account's CloudTrail data (through Athena) for STS token history
    and flags tokens that look like they were used for persistence, i.e. temporary
    tokens that were refreshed into new temporary tokens.
    """

    def __init__(self, cloudwatch_trail_object):
        """Run the whole scan pipeline against the given trail.

        :param cloudwatch_trail_object: TrailBucket describing the trail to scan.
        """
        config_handler = ConfigHandler.get_instance()
        config = config_handler.config
        self.__logger = logging.getLogger(__name__)
        self.cloudwatch_trail_object = cloudwatch_trail_object
        self.athena_handler = AthenaHandler(cloudwatch_trail_object.home_region)
        # Accumulates every StsToken instance that gets a suspicion flag below.
        self.suspicious_tokens = []

        # Gets all the temporary (Role Access tokens) used to create other Access tokens
        self.__logger.info("[+] Searching for refreshed temporary tokens")
        self.tokens_created_by_temporary_token_athena_rows = self.athena_handler.fetchall_athena(
            GET_ACCESS_TOKENS_FROM_STS_QUERY.format(config["athena"]["table_name"]),
            config["athena"]["database_name"],
            config["athena"]["output_location"]
        )
        # Maps access key id -> StsToken for every ASIA token that minted another ASIA token.
        self.access_keys_to_check = self.access_keys_ids_dict_generator(self.tokens_created_by_temporary_token_athena_rows)

        # Pair nodes to their parents
        self.__match_parent_node_to_child(self.access_keys_to_check)

        # Gets all the AKIA (User Access tokens) used to create other Access tokens
        self.__logger.info(
            "[+] Searching after users that their keys used for creating temporary tokens")
        self.created_sts_tokens_from_main_access_keys_athena_rows = self.athena_handler.fetchall_athena(
            GET_ORIGIN_ACCESS_TOKENS_FROM_STS_QUERY.format(config["athena"]["table_name"]),
            config["athena"]["database_name"],
            config["athena"]["output_location"]
        )
        self.sts_persistence_root_temporary_keys_id_set = self.parse_athena_rows_sts_persistence_root_temporary_keys(self.access_keys_to_check)
        self.root_tokens = self.parse_athena_rows_akia_access_key_id_for_root_sts(self.created_sts_tokens_from_main_access_keys_athena_rows,
                                                                                 self.sts_persistence_root_temporary_keys_id_set)
        self.root_temporary_tokens = self.root_tokens[ROOT_STS_TOKENS_USED_TO_REFRESH_STS]

        # Get all the live temporary tokens in the account
        self.__logger.info(
            "[+] Searching after live temporary tokens under the AWS account")
        self.live_temporary_tokens_athena_rows = self.athena_handler.fetchall_athena(
            GET_LIVE_TEMPORARY_TOKENS_QUERY.format(config["athena"]["table_name"]),
            config["athena"]["database_name"],
            config["athena"]["output_location"]
        )
        self.live_temporary_tokens = self.parse_athena_rows_live_temporary_tokens(self.live_temporary_tokens_athena_rows)
        self.get_info_for_live_temporary_tokens()
        self.flag_suspicious_tokens()

    def flag_suspicious_tokens(self):
        """Flag tokens refreshed from (presumably stolen) EC2 keys and live refreshed tokens."""
        self.__logger.info("[+] Examining the scraped tokens")
        self.__logger.info("Has there been a token refresh process in the account according to the trail bucket? - {status}".
                           format(status=(len(self.root_temporary_tokens) > 0)))
        # EC2 STS tokens which used for persistent
        ec2_refreshed_keys_counter = 0

        for root_token_key_id in self.sts_persistence_root_temporary_keys_id_set:
            for child_token in self.sts_persistence_root_temporary_keys_id_set[root_token_key_id]:
                # principalid is split on ":" - presumably "<id>:<session-name>"; a token
                # whose session name equals the issuer role's instance id, but was NOT
                # requested by ec2.amazonaws.com itself, looks manually refreshed.
                principal = child_token.athena_row.data["useridentity"].object["principalid"].split(":")
                issuer_arn = child_token.athena_row.data["useridentity"].arn.split("/")
                if len(principal) == 2 and principal[1] == issuer_arn[-1] and child_token.source_ip_address != "ec2.amazonaws.com" and instance_id_validator(issuer_arn[-1]):
                    child_token.suspicious_token[EC2_ASIA_REFRESHED_MANUAL_FLAG] = True
                    child_token.set_suspicious_reason(EC2_ASIA_REFRESHED_MANUAL)
                    ec2_refreshed_keys_counter += 1
                    self.suspicious_tokens.append(child_token)
                    # Every descendant of a flagged token inherits the suspicion.
                    ec2_refreshed_keys_counter += self.flag_token_children(child_token, EC2_ASIA_REFRESHED_MANUAL)

        live_ec2_refreshed_keys_counter = 0
        for suspected_key in self.suspicious_tokens:
            if suspected_key.is_expired() is False and suspected_key.suspicious_token[EC2_ASIA_REFRESHED_MANUAL_FLAG] is True:
                live_ec2_refreshed_keys_counter += 1

        self.__logger.info("The number of refreshed tokens created from stolen EC2 access keys: {0}, while {1} out of them are live tokens".format(ec2_refreshed_keys_counter, live_ec2_refreshed_keys_counter))

        # STS tokens which used for persistent that origin from AKIA
        sts_refreshed_keys_counter = 0
        for live_temporary_token in self.live_temporary_tokens:
            if live_temporary_token.parent_node is not None:
                live_temporary_token.suspicious_token[LIVE_REFRESHED_TOKEN_FLAG] = True
                live_temporary_token.set_suspicious_reason(LIVE_REFRESHED_TOKEN)
                sts_refreshed_keys_counter += 1
                self.suspicious_tokens.append(live_temporary_token)
        self.__logger.info("The number of live refreshed tokens: {0}".format(
            sts_refreshed_keys_counter + live_ec2_refreshed_keys_counter))

    def flag_token_children(self, node, flag_reason):
        """Recursively flag every descendant of *node* as suspicious.

        NOTE(review): the EC2_ASIA_REFRESHED_MANUAL_FLAG key is set regardless of
        *flag_reason* (only the textual reason varies) - confirm this is intended.

        :param node: token node
        :param flag_reason: string
        :return: the number of flagged tokens
        """
        counter = 0
        for child in node.children:
            child.suspicious_token[EC2_ASIA_REFRESHED_MANUAL_FLAG] = True
            child.set_suspicious_reason(flag_reason)
            counter += 1
            self.suspicious_tokens.append(child)
            counter += self.flag_token_children(child, flag_reason)
        return counter

    def parse_athena_rows_live_temporary_tokens(self, data):
        """Build StsToken objects for the still-valid tokens, reusing/linking nodes
        already discovered by the refresh scan when possible.

        :param data: iterable of AthenaTrailRow
        :return: list of non-expired StsToken instances
        """
        live_temporary_tokens = []
        for row in data:
            sts_row_token = StsToken(row)
            if sts_row_token.token in self.access_keys_to_check:
                # Same token already has a node in the refresh tree - reuse it.
                sts_row_token = self.access_keys_to_check[sts_row_token.token]
            elif sts_row_token.parent_access_key_id is not None and sts_row_token.parent_access_key_id in self.access_keys_to_check:
                sts_row_token.parent_node = self.access_keys_to_check[sts_row_token.parent_access_key_id]
            if not sts_row_token.is_expired():
                live_temporary_tokens.append(sts_row_token)
        return live_temporary_tokens

    def parse_athena_rows_sts_persistence_root_temporary_keys(self, root_temporary_keys):
        """Group parent-less tokens by the access key id that created them.

        :param root_temporary_keys: dict of access key id -> StsToken
        :return: dict of creator access key id -> list of orphan StsToken children
        """
        sts_persistence_root_keys_ids = {}
        for key in root_temporary_keys:
            if root_temporary_keys[key].parent_node is None:
                root_key = root_temporary_keys[key].parent_access_key_id
                if root_key in sts_persistence_root_keys_ids:
                    sts_persistence_root_keys_ids[root_key].append(root_temporary_keys[key])
                else:
                    sts_persistence_root_keys_ids[root_key] = [root_temporary_keys[key]]
        return sts_persistence_root_keys_ids

    def __set_root_token_to_node_children(self, node, root_key=None):
        """Propagate the root token reference down the whole child subtree."""
        if root_key is None:
            root_key = node
        for child_node in node.children:
            child_node.root_parent_node = root_key
            self.__set_root_token_to_node_children(child_node, root_key)

    def parse_athena_rows_akia_access_key_id_for_root_sts(self, data, root_persistence_temporary_keys):
        """Match AKIA-created temporary tokens to the orphan refresh-tree roots.

        :param data: Athena rows of AssumeRole calls made with AKIA (user) keys
        :param root_persistence_temporary_keys: dict from
            parse_athena_rows_sts_persistence_root_temporary_keys()
        :return: (akia_tokens, regular_sts_token_created_by_akia, root_temporary_tokens)
            - index with the ROOT_*/REGULAR_* module constants.
        """
        akia_tokens = {}
        root_temporary_tokens = {}
        regular_sts_token_created_by_akia = {}
        # Copy so we can pop matched keys and report the leftovers at the end.
        temp_root_persistence_temporary_keys = root_persistence_temporary_keys.copy()
        for row in data:
            response_elements = row.data["responseelements"]
            credentials_object = response_elements["credentials"]
            sts_row_token = StsToken(row)
            if credentials_object["accessKeyId"] in temp_root_persistence_temporary_keys:
                # Set to all of the token child the root temporary token
                # Saving every root temporary tokens to a key value format
                root_temporary_tokens[credentials_object["accessKeyId"]] = sts_row_token
                temp_root_persistence_temporary_keys.pop(credentials_object["accessKeyId"])
                for key in self.access_keys_to_check:
                    if self.access_keys_to_check[key].parent_access_key_id == sts_row_token.token:
                        sts_row_token.children.append(self.access_keys_to_check[key])
                        self.access_keys_to_check[key].parent_node = sts_row_token
                self.__set_root_token_to_node_children(sts_row_token)
                if sts_row_token.parent_access_key_id in akia_tokens:
                    akia_tokens[sts_row_token.parent_access_key_id].append(sts_row_token)
                else:
                    akia_tokens[sts_row_token.parent_access_key_id] = [sts_row_token]
            else:
                if sts_row_token.parent_access_key_id in regular_sts_token_created_by_akia:
                    regular_sts_token_created_by_akia[sts_row_token.parent_access_key_id].append(sts_row_token)
                else:
                    regular_sts_token_created_by_akia[sts_row_token.parent_access_key_id] = [sts_row_token]

        if len(temp_root_persistence_temporary_keys) > 0:
            self.__logger.warning(
                "Couldn't find the Akia token used to generate the following temporary tokens: {0}".format(", ".join(temp_root_persistence_temporary_keys.keys())))

        return akia_tokens, regular_sts_token_created_by_akia, root_temporary_tokens

    def is_temporary_token_used_for_persistence(self, athena_row):
        """Return True when the row's caller identity is itself a temporary (ASIA) token."""
        if "IAMUser" == athena_row.data[USER_IDENTITY_FILED].type:
            return False
        if "ASIA" not in athena_row.data[USER_IDENTITY_FILED].access_key_id:
            return False
        return True

    def __match_parent_node_to_child(self, dict_of_nodes):
        """Link every token node to its parent node within *dict_of_nodes* (in place)."""
        iterate_token_nodes = dict_of_nodes
        for sts_token_access_key_id in iterate_token_nodes:
            parent_access_key_id = iterate_token_nodes[sts_token_access_key_id].parent_access_key_id
            for sts_token_to_check in iterate_token_nodes:
                suspected_parent = iterate_token_nodes[sts_token_to_check]
                if parent_access_key_id == suspected_parent.token:
                    suspected_parent.children.append(iterate_token_nodes[sts_token_access_key_id])
                    iterate_token_nodes[sts_token_access_key_id].parent_node = suspected_parent
                    iterate_token_nodes[sts_token_access_key_id].parent_access_key_id = suspected_parent.token
                    break

    def access_keys_ids_dict_generator(self, data):
        """Build a dict of access key id -> StsToken for rows that represent
        temporary tokens refreshing other tokens (first occurrence wins)."""
        access_key_dict = {}
        for row in data:
            if self.is_temporary_token_used_for_persistence(row):
                sts_token = StsToken(row)
                if sts_token.token not in access_key_dict.keys():
                    access_key_dict[sts_token.token] = sts_token

        return access_key_dict

    def get_info_for_live_temporary_tokens(self):
        """Fetch the role permissions for every live token (network calls to IAM)."""
        if len(self.live_temporary_tokens) > 0:
            self.__logger.info("[+] Getting the permissions for the live tokens")
            for live_temporary_token in self.live_temporary_tokens:
                live_temporary_token.fetch_token_permissions()
        else:
            self.__logger.info("No live temporary token has found")
import re

# Regex rules marking an action as highly privileged; the value is the human
# readable description (optionally templated with the matched service name).
PRIVILEGED_STATEMENT_RULES = {
    r"\*:\*": "Full AWS Account Admin",
    r"^[A-Za-z0-9]+:\*$": "Full {service} Admin"
}

# Captures the service prefix of an IAM action, e.g. "s3" out of "s3:GetObject".
FIND_SERVICE_REGEX = r"^([A-Za-z0-9]+)(?=:)"

# Matches a resource ARN belonging to a given service (regular or China partition).
RESOURCE_ARN_WITH_SERVICE_REGEX = "^arn:aws(-cn|):{service}:.+"

class PoliciesPermissionsParser(object):
    """Flattens a {policy_arn: [statements]} mapping into effective permissions
    (Allow minus Deny) per resource, and renders privilege summaries."""

    def __init__(self, policies):
        """:param policies: dict of policy identifier -> list of statement objects
        (each statement exposes .effect, .actions and .resources)."""
        self.__policies = policies
        # resource -> set of effective (allowed and not denied) actions; filled by parse()
        self.__permissions = {}
        self.__disallowed_permissions = {}
        self.__allowed_permissions = {}

    @staticmethod
    def __push_resource_permission(permissions_dict, statement):
        """Record each of the statement's actions under every resource that
        belongs to the action's service (or is the "*" wildcard)."""
        resources = statement.resources
        for action in statement.actions:
            action_service = re.findall(FIND_SERVICE_REGEX, action)
            if len(action_service) > 0:
                for resource in resources:
                    is_arn_service_resource = re.match(RESOURCE_ARN_WITH_SERVICE_REGEX.format(service=action_service[0]), resource)
                    if is_arn_service_resource or resource == "*":
                        if resource not in permissions_dict:
                            permissions_dict[resource] = []
                        permissions_dict[resource].append(action)

    def is_action_disallowed(self, deny_statement_rules, action_permission_rule):
        """Return True when the action (or its whole service via "service:*")
        appears in the deny rules.

        NOTE(review): the deny dict is keyed by resource in parse(), while this
        lookup uses the action's service name as the key - confirm the intended
        deny-matching semantics before relying on Deny statements here.
        """
        action_service_matches = re.findall(FIND_SERVICE_REGEX, action_permission_rule)
        if len(action_service_matches) > 0:
            action_service = action_service_matches[0]
            # Not matching permissions like ec2:list* - It doesn't support
            if action_service in deny_statement_rules and (action_permission_rule in deny_statement_rules[action_service]
                                                           or action_service + ":*" in deny_statement_rules[action_service]):
                return True
        return False

    def is_permission_allowed(self, permissions_name, permission_resource=None):
        """Return True when any of the given action names is allowed, optionally
        restricted to a specific resource.

        :param permissions_name: a single action name or a list of action names.
        :param permission_resource: optional resource to restrict the check to.
        """
        # BUG FIX: this used to be "permissions_name is str", which is never true
        # for a string instance, so a bare-string argument was iterated character
        # by character and the check always failed.
        if isinstance(permissions_name, str):
            permissions_name = [permissions_name]
        if permission_resource is None:
            for permission_resource in self.__permissions.keys():
                for permission_name in permissions_name:
                    if permission_name in self.__permissions[permission_resource]:
                        return True
        else:
            if permission_resource in self.__permissions:
                for permission_name in permissions_name:
                    if permission_name in self.__permissions[permission_resource]:
                        return True
        return False

    def parse(self):
        """Populate the effective permission map from the supplied policies."""
        for policy_arn, attached_policy_statement in self.__policies.items():
            for statement in attached_policy_statement:
                if statement.effect == "Deny":
                    self.__push_resource_permission(self.__disallowed_permissions, statement)
                elif statement.effect == "Allow":
                    # Goes to function which parse the permissions and the resources (Get a statement)
                    self.__push_resource_permission(self.__allowed_permissions, statement)
        for resource, actions in self.__allowed_permissions.items():
            for action in actions:
                if not self.is_action_disallowed(self.__disallowed_permissions, action):
                    if resource in self.__permissions:
                        self.__permissions[resource].add(action)
                    else:
                        self.__permissions[resource] = set()
                        self.__permissions[resource].add(action)

    def __statement_policy_privilege_parser(self, action_permission):
        """
        The function takes an action permission as an input and returns in a string
        any high privileged permissions it has (empty string when none matched).
        """
        action_permission_overview = ""
        for rule in PRIVILEGED_STATEMENT_RULES:
            if re.search(rule, action_permission):
                service = re.findall(FIND_SERVICE_REGEX, action_permission)
                if len(service) > 0:
                    action_permission_overview = PRIVILEGED_STATEMENT_RULES[rule].format(service=service[0])
                else:
                    action_permission_overview = PRIVILEGED_STATEMENT_RULES[rule].format(service=action_permission)
                break

        return action_permission_overview

    def get_detailed_permissions_status(self):
        """Render a multi-line report of allowed and disallowed actions per resource."""
        permissions_status = ""
        if 0 < len(self.__permissions.keys()):
            permissions_status += "Allowed permissions\r\n"
            for resource in self.__permissions.keys():
                permissions_status += "  {resource}:\r\n".format(resource=resource)
                for action_permission in self.__permissions[resource]:
                    permissions_status += "    {action_permission}\n".format(action_permission=action_permission)
        if 0 < len(self.__disallowed_permissions):
            permissions_status += "Disallowed permissions:\r\n"
            for resource in self.__disallowed_permissions:
                permissions_status += "  {resource}:\r\n".format(resource=resource)
                for permission in self.__disallowed_permissions[resource]:
                    permissions_status += "    {permission}\n".format(permission=permission)
        return permissions_status

    def get_permissions_status(self):
        """Return a comma separated summary of the privileged permissions found."""
        permissions_status = set()
        for resource in self.__permissions.keys():
            for action_permission in self.__permissions[resource]:
                policy_privilege_parser = self.__statement_policy_privilege_parser(action_permission)
                if policy_privilege_parser != "":
                    permissions_status.add(policy_privilege_parser)
        return ", ".join(permissions_status)


# ---------------------------------------------------------------------------
# parsers/UserIdentityParser.py - parser for CloudTrail's "useridentity" column,
# which Athena returns in a "{key=value, key={...}}" struct notation (not JSON).
# ---------------------------------------------------------------------------

# Constants for user identity filed parsing
OBJECT_START = '{'
OBJECT_END = '}'
FILED_DELIMITER = ','
FILED_VALUE_NAME = ','

def parse_user_identity_filed(user_identity):
    """Parse the struct-notation string into a (possibly nested) dict."""
    return __parse_user_identity_filed(user_identity, 0, {})[0]

def __parse_user_identity_filed(user_identity, index, result_object):
    """Recursive worker: returns (parsed dict, index of the last consumed char).

    NOTE(review): field separators are assumed to be ", " (comma followed by a
    space) because the cursor jumps two characters past each delimiter, and a
    nested object is only handled correctly when it is the last field - confirm
    against real Athena output before relying on mid-struct nesting.
    """
    filed_name = ""
    value_filed = ""
    if user_identity[index] == OBJECT_START:
        index += 1
    while user_identity[index] != OBJECT_END:
        filed_name = ""
        value_filed = ""
        while user_identity[index] != "=":
            filed_name += user_identity[index]
            index += 1
        # Skip the "=" sign to get to the value
        index += 1
        while user_identity[index] != "," and user_identity[index] != OBJECT_END:
            if user_identity[index] == OBJECT_START:
                value_filed, index = __parse_user_identity_filed(user_identity, index, {})
                break
            value_filed += user_identity[index]
            if user_identity[index] != OBJECT_END:
                index += 1
        result_object[filed_name] = value_filed
        if user_identity[index] == ",":
            # Skip to the next key-value element
            index += 2
    return result_object, index
from structures.UserIdentity import UserIdentity
import json

# Columns with a dedicated parser type; everything else is treated as JSON text.
COLUMN_PARSERS = {"useridentity": UserIdentity}

class AthenaTrailRow(object):
    """One CloudTrail row as returned by Athena, with each column decoded:
    columns listed in COLUMN_PARSERS are wrapped by their parser class, the
    rest are JSON-decoded when possible and kept as raw text otherwise."""

    def __init__(self, raw_row):
        """:param raw_row: dict of column name -> raw string value from Athena."""
        self.raw_row = raw_row
        self.data = {}
        self.__parse_raw_row()

    def __parse_raw_row(self):
        """Decode every raw column into self.data."""
        for column_name, raw_value in self.raw_row.items():
            parser = COLUMN_PARSERS.get(column_name)
            if parser is not None:
                decoded = parser(raw_value)
            else:
                try:
                    decoded = json.loads(raw_value)
                except (ValueError, TypeError):
                    # Not valid JSON (or not a string at all) - keep the raw value.
                    decoded = raw_value
            self.data[column_name] = decoded

    def __repr__(self):
        return str(self.data)
from datetime import datetime
from datetime import date
from handlers.RolePermissionsHandler import RolePermissionsHandler
from handlers.RolePermissionsHandler import ATTACHED_POLICIES, INLINE_POLICIES
from parsers.PoliciesPermissionsParser import PoliciesPermissionsParser
import re

USER_IDENTITY_FILED = "useridentity"
# Suspicion Flags - these constants are the keys of StsToken.suspicious_token.
EC2_ASIA_REFRESHED_MANUAL_FLAG = "EC2_ASIA_REFRESHED_MANUAL"
# BUG FIX: this constant previously duplicated the EC2 flag value
# ("EC2_ASIA_REFRESHED_MANUAL"), so setting the "live refreshed" flag silently
# overwrote the EC2 flag and the "LIVE_REFRESHED_TOKEN" entry of
# suspicious_token could never be set.
LIVE_REFRESHED_TOKEN_FLAG = "LIVE_REFRESHED_TOKEN"
# Token Source Constants
EC2_TOKEN_SOURCE = "EC2"
LAMBDA_TOKEN_SOURCE = "Lambda"
MANUAL_TOKEN_SOURCE = "Manual"
OTHER_TOKEN_SOURCE = "Other"


class StsToken(object):
    """A node in the STS token refresh tree, built from one Athena CloudTrail row.

    Tracks the token's credentials, its parent token (the key that created it),
    its children (tokens it created), and the suspicion flags set by the scan.
    """

    def __init__(self, athena_row=None):
        """:param athena_row: optional AthenaTrailRow; when given, the token
        fields are populated from it immediately."""
        self.token = ""
        self.children = []
        self.athena_row = athena_row
        self.parent_access_key_id = None
        self.parent_node = None
        self.root_parent_node = None
        self.expiration_time = None
        self.expired = False
        self.event_name = None
        self.event_time = None
        self.user = None
        self.source_ip_address = None
        self.user_agent = None
        self.permissions = None
        self.aws_region = None
        self.request_id = None
        self.event_id = None
        self.event_type = None
        self.event_source = None
        self.role_permissions = None
        self.role_arn = None
        self.role_name = None
        self.role_session_name = None
        self.parent_token = None
        self.living_days = 0
        self.token_source = None
        # Keyed by the *_FLAG constants above so flag writers and readers agree.
        self.suspicious_token = {
            EC2_ASIA_REFRESHED_MANUAL_FLAG: False,
            LIVE_REFRESHED_TOKEN_FLAG: False,
        }
        self.__suspicious_reason = []

        if athena_row is not None:
            self.__parse()
    def get_suspicious_reason(self):
        """Return all recorded suspicion reasons as one comma separated string."""
        return ", ".join(self.__suspicious_reason)

    def set_suspicious_reason(self, val):
        """Record a suspicion reason once (duplicates are ignored)."""
        if val not in self.__suspicious_reason:
            self.__suspicious_reason.append(val)

    def get_root_parent_node(self, return_also_assumed=True):
        """
        Return (and cache) the root token of this token's refresh chain, handling
        the case where the root was not assigned during the initial scan.
        In case the token was created after our root token scan, the StsToken
        instance won't have a root token reference yet, so we walk up the
        child-parent chain to re-assign it.

        :param return_also_assumed: when True, fall back to the topmost reachable
            ancestor even though it cannot be proven to be the true root.
        """
        if self.root_parent_node is None and self.parent_node is not None:
            root_token_found = False
            parent_token_instance = self.parent_node
            root_token_instance = None
            while not root_token_found:
                if parent_token_instance.root_parent_node is not None:
                    root_token_instance = parent_token_instance.root_parent_node
                    root_token_found = True
                    break
                if parent_token_instance.parent_access_key_id is None or parent_token_instance.parent_node is None:
                    # Finish going up in the reference child-parent tree
                    # The root parent not found and we can't assume that the top node
                    # is for sure the Root token which created the current token
                    if return_also_assumed:
                        root_token_instance = parent_token_instance
                    break
                parent_token_instance = parent_token_instance.parent_node
            self.root_parent_node = root_token_instance
        return self.root_parent_node

    suspicious_reason = property(get_suspicious_reason)

    def __parse(self):
        """Populate the token fields from the raw Athena row."""
        response_elements = self.athena_row.data["responseelements"]
        request_parameters = self.athena_row.data["requestparameters"]
        credentials_object = response_elements["credentials"]
        self.event_name = self.athena_row.data["eventname"]
        self.event_time = datetime.strptime(self.athena_row.data["eventtime"], "%Y-%m-%dT%H:%M:%SZ")
        if self.event_name == "AssumeRole":
            self.role_arn = request_parameters["roleArn"]
            self.role_name = self.role_arn.split("/")[-1]
            self.role_session_name = self.athena_row.data["requestparameters"]["roleSessionName"]
            # Tokens requested by the EC2 service carry the instance id as the
            # session name - presumably; confirm against real trail data.
            if self.athena_row.data["sourceipaddress"] == "ec2.amazonaws.com":
                self.user = self.role_session_name
        if self.athena_row.data["useridentity"].type == "IAMUser" or self.athena_row.data["useridentity"].type == "Root":
            self.user = self.athena_row.data["useridentity"].username
        self.token = credentials_object["accessKeyId"]
        self.parent_access_key_id = self.athena_row.data[USER_IDENTITY_FILED].access_key_id
        # The expiration field contains a comma (e.g. "Dec 2, 2019 1:00:00 PM"),
        # which is stripped before parsing with '%b %d %Y %I:%M:%S %p'.
        self.expiration_time = datetime.strptime(credentials_object["expiration"].replace(",", ""), '%b %d %Y %I:%M:%S %p')
        # Snapshot taken at parse time; is_expired() re-evaluates against "now".
        self.expired = self.expiration_time < datetime.utcnow()
        self.source_ip_address = self.athena_row.data["sourceipaddress"]
        self.user_agent = self.athena_row.data["useragent"]
        # Classify where the token was requested from, based on the source address.
        if "ec2.amazonaws.com" == self.source_ip_address:
            self.token_source = EC2_TOKEN_SOURCE
        elif "lambda.amazonaws.com" == self.source_ip_address:
            self.token_source = LAMBDA_TOKEN_SOURCE
        elif re.match(".+\.amazonaws\.com$", self.source_ip_address):
            # Some other AWS service endpoint - use the service prefix as the source.
            aws_service = re.findall("^(.*?)\.amazonaws\.com$", self.source_ip_address)
            self.token_source = aws_service[0] if len(aws_service) > 0 else OTHER_TOKEN_SOURCE
        elif "amazonaws.com" not in self.source_ip_address:
            # A plain IP address / hostname - the token was requested manually.
            self.token_source = MANUAL_TOKEN_SOURCE
        else:
            self.token_source = OTHER_TOKEN_SOURCE
        self.aws_region = self.athena_row.data["awsregion"]
        self.request_id = self.athena_row.data["requestid"]
        self.event_id = self.athena_row.data["eventid"]
        self.event_type = self.athena_row.data["eventtype"]
        self.event_source = self.athena_row.data["eventsource"]

    def get_token_source_string(self):
        """Return a human readable description of where this token came from."""
        token_source_string = ""
        if self.token_source == EC2_TOKEN_SOURCE:
            token_source_string = "EC2: {ec2_machine_id}".format(ec2_machine_id=self.role_session_name)
        elif self.token_source == LAMBDA_TOKEN_SOURCE:
            token_source_string = "Lambda: {lambda_name}".format(lambda_name=self.role_session_name)
        elif self.suspicious_token[EC2_ASIA_REFRESHED_MANUAL_FLAG] is True:
            root_parent_node = self.get_root_parent_node()
            if root_parent_node is None:
                # In case the current token is the root token, then the root_parent_node points to None.
                # Therefore, we set the root_parent_token to point on self.
                root_parent_node = self
            principal = root_parent_node.athena_row.data["useridentity"].object["principalid"].split(":")
            ec2_machine_id = "N/A" if len(principal) != 2 else principal[1]
            token_source_string = "EC2: {ec2_machine_id}".format(ec2_machine_id=ec2_machine_id)
        elif self.token_source == MANUAL_TOKEN_SOURCE:
            # Try to trace the chain back to the long-lived AKIA user key.
            root_parent_node = self.get_root_parent_node()
            if root_parent_node is None and self.user is not None and self.user != "N/A" and "AKIA" in self.parent_access_key_id:
                user_access_key_id = self.parent_access_key_id
            elif root_parent_node is not None and root_parent_node.parent_access_key_id is not None and "AKIA" in root_parent_node.parent_access_key_id:
                user_access_key_id = root_parent_node.parent_access_key_id
            else:
                user_access_key_id = "N/A"
            token_source_string = "User: {user_name} User's Akia: {user_access_key_id}".format(
                user_name=self.get_user_or_ec2_name(), user_access_key_id=user_access_key_id)
        elif self.token_source is OTHER_TOKEN_SOURCE:
            token_source_string = "Other/Unknown"
        elif self.token_source is not None:
            token_source_string = self.token_source
        return token_source_string

    def get_user_or_ec2_name(self):
        """Return (and cache) the user name behind the token, inherited from the
        root token when this one has none; "N/A" when it cannot be determined."""
        if self.user is None:
            parent_node = self.get_root_parent_node()
            if parent_node is not None and parent_node.user is not None:
                self.user = parent_node.user
            else:
                # Couldn't find the user which created the token or the token didn't
                # created by a user (most likely by some service)
                self.user = "N/A"
        return self.user

    def get_living_days(self):
        """Return how many days the refresh chain has existed (0 when no root is known)."""
        root_parent_node = self.get_root_parent_node()
        if root_parent_node is not None:
            return (date.today() - self.get_root_parent_node().event_time.date()).days
        else:
            return 0

    def fetch_token_permissions(self):
        """Fetch the assumed role's policy permissions via IAM (network call)."""
        if self.role_name is not None:
            role_permissions_handler = RolePermissionsHandler.get_instance()
            self.role_permissions = role_permissions_handler.get_role_policy_permissions(self.role_name)

    def get_token_privileged_information(self, detailed=False):
        """
        The function parses the permissions that the token has, then it returns in
        a string whether the token has "Admin" access to any AWS service and which.
        The function doesn't check the condition part in the statement.

        :param detailed: when True, return the full per-resource permission report.
        :return: summary string ("" for non-AssumeRole events).
        """
        # For now we only supporting AssumeRole event for permissions parsing
        if self.event_name != "AssumeRole":
            return ""
        token_permissions_policies = self.get_token_permissions()
        # Merge inline and attached policies into one mapping for the parser.
        token_policies_permissions = token_permissions_policies[INLINE_POLICIES].copy()
        token_policies_permissions.update(token_permissions_policies[ATTACHED_POLICIES])
        policies_permission_parser = PoliciesPermissionsParser(token_policies_permissions)
        policies_permission_parser.parse()
        if not detailed:
            return policies_permission_parser.get_permissions_status()
        else:
            return policies_permission_parser.get_detailed_permissions_status()

    def get_token_permissions(self):
        """Return the role permissions, fetching them lazily on first access."""
        if self.role_permissions is None:
            self.fetch_token_permissions()
        return self.role_permissions

    def is_suspicious_token(self):
        """Return True when any suspicion flag is set."""
        for suspicion_reason in self.suspicious_token:
            if self.suspicious_token[suspicion_reason]:
                return True
        return False

    def number_of_suspicious_reasons(self):
        """Return how many distinct suspicion reasons were recorded."""
        return len(self.__suspicious_reason)

    def rate_of_privilege_token(self):
        """Score the token's privilege level for sorting (higher = more privileged)."""
        rate = 0
        privilege_token_string = self.get_token_privileged_information()
        if "AWS Account" in privilege_token_string:
            # If the token has full admin access set to high number for setting this token high in the sort process
            rate = 10000
        privileges = privilege_token_string.split(",")
        # It won't add point to non privileged tokens
        if privileges != [""]:
            rate += len(privileges)
        return rate

    def is_expired(self):
        """Return True when the token's expiration time has passed (UTC)."""
        return self.expiration_time < datetime.utcnow()

    def __repr__(self):
        return str(self.__dict__)
class TrailBucket(object):
    """Metadata describing one CloudTrail trail and its S3 log bucket."""

    def __init__(self, trail_name,
                 include_global_service_events,
                 is_organization_trail,
                 trail_arn,
                 log_file_validation_enabled,
                 is_multi_region_trail,
                 has_custom_events_selectors,
                 s3_bucket_name,
                 home_region
                 ):
        self.trail_name = trail_name
        self.include_global_service_events = include_global_service_events
        self.is_organization_trail = is_organization_trail
        self.trail_arn = trail_arn
        self.log_file_validation_enabled = log_file_validation_enabled
        self.is_multi_region_trail = is_multi_region_trail
        self.has_custom_events_selectors = has_custom_events_selectors
        self.s3_bucket_name = s3_bucket_name
        self.home_region = home_region

    def get_converted_s3_bucket_name_to_table_name(self):
        """Return the bucket name sanitized into a valid Athena table name
        ("-" becomes "_", "." is removed)."""
        return self.s3_bucket_name.replace("-", "_").replace(".", "")

    def get_athena_result_bucket(self):
        """Build the default Athena query-results bucket URL for this trail's region.

        BUG FIX: this method previously read ``self.__config``, an attribute that
        is never assigned anywhere on TrailBucket, so it always raised
        AttributeError.  The account id is now resolved through the project's
        own STS-based helper.
        """
        # Imported lazily so this module stays free of AWS dependencies at import time.
        from utilities.AwsAccountMetadataUtilities import get_account_id
        account_id = get_account_id()
        return "s3://aws-athena-query-results-{account_id}-{region}/".format(account_id=account_id, region=self.home_region)

    def get_bucket_url(self):
        """Return the s3:// URL of the trail's log bucket."""
        return "s3://{s3_bucket_name}/".format(s3_bucket_name=self.s3_bucket_name)


# ---------------------------------------------------------------------------
# structures/UserIdentity.py - typed wrapper over the parsed "useridentity" column.
# ---------------------------------------------------------------------------

# User identity properties (keys of the parsed struct)
USER_IDENTITY_TYPE = "type"
USER_IDENTITY_PRINCIPAL_ID = "principalid"
USER_IDENTITY_ARN = "arn"
USER_IDENTITY_ACCOUNT_ID = "accountid"
USER_IDENTITY_INVOKED_BY = "invokedby"
USER_IDENTITY_ACCESS_KEY_ID = "accesskeyid"
USER_IDENTITY_USERNAME = "username"
USER_IDENTITY_SESSION_CONTEXT = "sessioncontext"

class UserIdentity(object):
    """CloudTrail userIdentity element parsed into attributes."""

    def __init__(self, user_identity_data):
        """:param user_identity_data: the raw struct-notation string from Athena."""
        self.user_identity_data = user_identity_data
        self.type = None
        self.principal_id = None
        self.arn = None
        self.account_id = None
        self.invoked_by = None
        self.access_key_id = None
        self.username = None
        self.session_context = None
        # The full parsed dict, for fields without a dedicated attribute.
        self.object = None
        self.__parse()

    def __parse(self):
        """Parse the raw string and map the well-known keys onto attributes."""
        # Imported lazily to keep this module importable in isolation and to
        # avoid a structures <-> parsers import dependency at module load time.
        from parsers import UserIdentityParser
        self.object = UserIdentityParser.parse_user_identity_filed(self.user_identity_data)
        self.type = self.object[USER_IDENTITY_TYPE]
        self.principal_id = self.object[USER_IDENTITY_PRINCIPAL_ID]
        self.arn = self.object[USER_IDENTITY_ARN]
        self.account_id = self.object[USER_IDENTITY_ACCOUNT_ID]
        self.invoked_by = self.object[USER_IDENTITY_INVOKED_BY]
        self.access_key_id = self.object[USER_IDENTITY_ACCESS_KEY_ID]
        self.username = self.object[USER_IDENTITY_USERNAME]
        self.session_context = self.object[USER_IDENTITY_SESSION_CONTEXT]

    def __repr__(self):
        return str(self.__dict__)
import argparse


def str2bool(v):
    """Convert an argparse string argument into a bool.

    Booleans pass straight through; otherwise the (case-insensitive) strings
    yes/true/t/y/1 map to True and no/false/f/n/0 map to False.

    :raises argparse.ArgumentTypeError: for any other value.
    """
    if isinstance(v, bool):
        return v
    normalized = v.lower()
    if normalized in ('yes', 'true', 't', 'y', '1'):
        return True
    if normalized in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
# --- utilities/AwsAccountMetadataUtilities.py ---

from utilities.Boto3Utilities import client_session_creator
from handlers.ConfigHandler import ConfigHandler


def get_account_id():
    """Return the AWS account id of the current credentials, via STS GetCallerIdentity."""
    # FIX: removed an unused `config` local (the ConfigHandler lookup result
    # was never read).
    sts_client = client_session_creator('sts')
    aws_account_id = sts_client.get_caller_identity().get('Account')
    return aws_account_id


# --- utilities/Boto3Utilities.py ---

import boto3
from handlers.ConfigHandler import ConfigHandler
import re


def client_session_creator(client_name, **kwargs):
    """
    Creates client session based on the credentials the users entered in the config file
    or it goes to the default AWS credentials.

    FIX: a caller-supplied ``verify=`` keyword used to be popped into
    ``overridden_verify_value`` and then silently discarded; it now takes
    precedence over ``config["verify_https"]``.
    """
    config = ConfigHandler.get_instance().get_config()
    # An explicit verify from the caller wins over the config default.
    verify = kwargs.pop("verify", config["verify_https"])
    account = config["account"]
    # Only inject static credentials when both key id and secret are set;
    # otherwise boto3 falls back to its default credential chain.
    if account["aws_access_key_id"] is not None and account["aws_access_key_id"] != "" \
            and account["aws_secret_access_key"] is not None and account["aws_secret_access_key"] != "":
        kwargs["aws_access_key_id"] = account["aws_access_key_id"]
        kwargs["aws_secret_access_key"] = account["aws_secret_access_key"]
        if account["aws_session_token"] is not None and account["aws_session_token"] != "":
            kwargs["aws_session_token"] = account["aws_session_token"]

    return boto3.client(client_name, verify=verify, **kwargs)


def instance_id_validator(instance_string):
    """Return True if *instance_string* looks like an EC2 (``i-``) or managed (``mi-``) instance id.

    FIX: the original pattern ended with a stray ``]`` after the anchored
    ``mi-`` alternative (``...$)]``), so ``mi-...`` ids could never match;
    the bracket is removed. The pattern is also a raw string now, avoiding
    invalid-escape warnings for ``\\w``.
    """
    return bool(re.match(r"(^i-(\w{8}|\w{17})$)|(^mi-\w{17}$)", instance_string))
# --- utilities/ExcelUtilities.py ---

def get_col_widths(dataframe):
    """Return auto-fit widths: [index column width] + one width per data column.

    Each width is the max string length over the column's values and its header.
    Taken from https://stackoverflow.com/questions/29463274/simulate-autofit-column-in-xslxwriter
    """
    # First we find the maximum length of the index column
    idx_max = max([len(str(s)) for s in dataframe.index.values] + [len(str(dataframe.index.name))])
    # Then, we concatenate this to the max of the lengths of column name and its values for each column, left to right
    return [idx_max] + [max([len(str(s)) for s in dataframe[col].values] + [len(col)]) for col in dataframe.columns]


def set_sheet_columns_sizes(worksheet, worksheets_columns_max_size):
    """Apply pre-computed column widths (plus 2 chars of padding) to *worksheet*.

    :param worksheets_columns_max_size: mapping of sheet name -> {column index: width}.
    """
    for i, width in worksheets_columns_max_size[worksheet.name].items():
        worksheet.set_column(i, i, width + 2)


def set_sheet_columns_headers(sheet, columns):
    """Write *columns* as the header row (row 0) of *sheet*, left to right."""
    for column_index, column_name in enumerate(columns, start=0):
        sheet.write(0, column_index, column_name)


# --- utilities/FileUtilities.py ---

from pathlib import Path


def get_project_root() -> Path:
    """Returns project root folder (two levels above this utilities module)."""
    return Path(__file__).parent.parent


# --- utilities/SkyWrapperConstants.py ---

# FIX: every literal backslash in the banner is now written as an explicit
# "\\" escape; the original relied on invalid escape sequences (e.g. "\ ",
# "\_"), which raise DeprecationWarning/SyntaxWarning on modern Pythons.
# NOTE(review): the art's exact spacing was reconstructed from a mangled
# source -- verify it still renders as intended.
SKYWRAPPER_INTRO = """
 _____ _          _    _
/  ___| |        | |  | |
\\ `--.| | ___   _| |  | |_ __ __ _ _ __  _ __   ___ _ __
 `--. \\ |/ / | | | |/\\| | '__/ _` | '_ \\| '_ \\ / _ \\ '__|
/\\__/ /   <| |_| \\  /\\  / | | (_| | |_) | |_) |  __/ |
\\____/|_|\\_\\__, |\\/  \\/|_|  \\__,_| .__/| .__/ \\___|_|
             __/ |                | |   | |
            |___/                 |_|   |_|

"""
# --- utilities/StsTreeStructureUtilities.py ---

from structures.StsToken import StsToken


def count_node_children_and_live_nodes(node):
    """Recursively count the subtree rooted at *node*.

    :returns: tuple ``(total node count including *node*,
        count of nodes whose ``expired`` flag is False)``.
    """
    nodes_count = 1
    live_nodes_count = 0 if node.expired else 1
    for child in node.children:
        child_total, child_live = count_node_children_and_live_nodes(child)
        nodes_count += child_total
        live_nodes_count += child_live
    return nodes_count, live_nodes_count


def print_tree(current_node, indent="", last='updown'):
    """Pretty-print the token tree rooted at *current_node* sideways.

    Adapted from https://stackoverflow.com/users/4768304/clemtoy

    FIX: the original compared ints with ``is`` (``up.index(child) is 0``
    and ``down.index(child) is len(down) - 1``); that only works by accident
    of CPython's small-int caching and emits SyntaxWarning on modern
    Pythons. Replaced with ``enumerate`` and ``==``, which is also correct
    when the same child object appears more than once in a branch list.
    """
    def nb_children(node):
        # Size of the subtree rooted at `node`, including the node itself.
        return sum(nb_children(child) for child in node.children) + 1

    size_branch = {child: nb_children(child) for child in current_node.children}

    # Creation of balanced lists for "up" branch and "down" branch.
    up = sorted(current_node.children, key=nb_children)
    down = []
    while up and sum(size_branch[node] for node in down) < sum(size_branch[node] for node in up):
        down.append(up.pop())

    # Printing of "up" branch.
    for child_index, child in enumerate(up):
        next_last = 'up' if child_index == 0 else ''
        next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up' in last else '│', " " * len(current_node.token))
        print_tree(child, indent=next_indent, last=next_last)

    # Printing of current node.
    if last == 'up':
        start_shape = '┌'
    elif last == 'down':
        start_shape = '└'
    elif last == 'updown':
        start_shape = ' '
    else:
        start_shape = '├'

    if up:
        end_shape = '┤'
    elif down:
        end_shape = '┐'
    else:
        end_shape = ''

    print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.token, end_shape))

    # Printing of "down" branch.
    for child_index, child in enumerate(down):
        next_last = 'down' if child_index == len(down) - 1 else ''
        next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down' in last else '│', " " * len(current_node.token))
        print_tree(child, indent=next_indent, last=next_last)


def print_root_access_key_sts_tree(root_tokens):
    """Print one STS token tree per root token.

    :param root_tokens: mapping of root token -> iterable of child StsToken nodes.
    """
    for root_token in root_tokens:
        print("Print the tokens generated by the root token {}:".format(root_token))
        # Wrap the raw root token in a temporary StsToken node so that
        # print_tree can treat it like any other tree node.
        temp_root_sts_token = StsToken()
        temp_root_sts_token.token = root_token
        for child_token in root_tokens[root_token]:
            temp_root_sts_token.children.append(child_token)
        print_tree(temp_root_sts_token)