├── .idea ├── .gitignore ├── aws-integrations-cloudops.iml ├── inspectionProfiles │ └── profiles_settings.xml ├── misc.xml ├── modules.xml └── vcs.xml ├── LICENSE ├── README.md ├── aws-cloudformation-for-storage-integration ├── README.md └── images │ ├── CFT.png │ ├── descInt.png │ └── stage.png ├── aws-controltower ├── LICENSE ├── README.md ├── cft │ ├── aws-snowflake-controltower.yaml │ ├── aws-snowflake-ssm.yml │ └── s3bucketpolicy.json ├── images │ └── snowflake-controltower-arch-diagram.PNG ├── lambda │ ├── SnowflakeIntegration_Lambda_SSM.py │ └── SnowflakeIntegration_Lambda_SSM.zip └── layer │ └── snowflakelayer.zip ├── aws-servicecatalog └── apigw-sample │ ├── APIGW_README.md │ ├── create-resources-1.0.zip │ ├── snowflake-connector-python-1.0.zip │ └── template │ ├── aws-snowflake-apigw-integrationobject.yml │ └── aws-snowflake-apigw-servicecatalog.yml ├── aws-systemsmanager ├── LICENSE ├── README.md ├── cft │ ├── aws-snowflake-ssm.yml │ └── s3bucketpolicy.json ├── images │ ├── snowflake-controltower-arch-diagram.PNG │ └── snowflake-systemsmanager-arch-diagram.PNG ├── lambda │ ├── SnowflakeIntegration_Lambda_SSM.py │ └── SnowflakeIntegration_Lambda_SSM.zip └── layer │ └── snowflakelayer.zip ├── cft ├── aws-snowflake-integrationobject.yml └── aws-snowflakeintobj-servicecatalog.yml ├── images └── snowflake-arch.png ├── lambda ├── SnowflakeIntegration_Lambda.py └── SnowflakeIntegration_Lambda.zip └── layer └── snowflakelayer.zip /.idea/.gitignore: -------------------------------------------------------------------------------- 1 | # Default ignored files 2 | /shelf/ 3 | /workspace.xml 4 | -------------------------------------------------------------------------------- /.idea/aws-integrations-cloudops.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 12 | -------------------------------------------------------------------------------- /.idea/inspectionProfiles/profiles_settings.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | -------------------------------------------------------------------------------- /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 |

3 | 4 | # Use AWS Service Catalog to automate Snowflake storage integration to access Amazon S3 5 | 6 | 1. Snowflake storage integrations are Snowflake objects that allow Snowflake to read and write data to Amazon S3. Snowflake storage integrations leverage AWS IAM to access S3. The S3 bucket is referenced by the Snowflake integration from an external (i.e. S3) Snowflake stage object 7 | 2. This solution provides full automation for Snowflake to access S3 using AWS Service Catalog. The solution implements an AWS Service Catalog product that automates Snowflake access to S3. 8 | 1. The Service Catalog product provisions a Snowflake integration object, attaches an IAM role to it and creates a Snowflake stage for it that references S3. 9 | 10 | 11 | ## How it Works 12 | 13 | 1. Provisions a Service Catalog Portfolio with a Service Catalog Product 14 | 2. The Snowflake Service Catalog Product takes a) Snowflake Connection information and b) S3 bucketname and prefix as input parameters and uses the *aws-snowflake-integrationobject.yml* CloudFormation template to create a Snowflake external stage object that enables access to S3. 15 | 1. The Snowflake Service Catalog Product can be invoked as many times as needed. Each time it creates a Snowflake external stage object to access an S3 object/prefix based on the 2 input parameters (a and b) supplied above. 16 | 3. The template from 2: 17 | 1. Provisions AWS Secrets Manager to store and retrieve Snowflake connection information 18 | 2. Provisions a Lambda function that uses the Snowflake python connector: 19 | 1. Creates a Snowflake integration object and obtains the Snowflake generated *AWS_IAM_USER_ARN* and *AWS_EXTERNAL_ID* from the Snowflake integration 20 | 2. Provisions an AWS IAM role that uses the Snowflake generated IAM Principal and External ID from 1 above 21 | 3. 
Creates a Snowflake stage object that leverages the snowflake integration 22 | 23 | 24 | ## Solution Design 25 | 26 | ![](images/snowflake-arch.png) 27 | 28 | 29 | ## Prerequisites 30 | 31 | 1. Create an S3 bucket: ***s3-snowflakeintegration-accountId-region***. Replace accountId and region with the AWS Account ID and region of your AWS account. 32 | 1. Upload the [snowflakelayer.zip](https://github.com/Snowflake-Labs/aws-integrations-cloudops/blob/master/layer/snowflakelayer.zip) in the root folder of this S3 object. This zip file packages the Snowflake connector as an AWS Lambda layer 33 | 2. Create a folder called *template* and upload the [aws-snowflake-integrationobject.yml](https://github.com/Snowflake-Labs/aws-integrations-cloudops/blob/master/cft/aws-snowflake-integrationobject.yml) CloudFormation template. This template is provisioned when the Service Catalog Product is launched and it automates this integration for Snowflake to access S3 34 | 2. Create a Snowflake user and role with the ability to create Integrations in your Snowflake account. Below are sample SQL Commands that can be used. 35 | ```use role accountadmin; 36 | create or replace role store_rl; 37 | grant role store_rl to role sysadmin; 38 | grant create integration on account to role store_rl; 39 | 40 | CREATE OR REPLACE USER store_admin PASSWORD = '' 41 | LOGIN_NAME = 'store_admin' 42 | DISPLAY_NAME = 'store_admin' 43 | DEFAULT_ROLE = "store_rl" 44 | MUST_CHANGE_PASSWORD = FALSE; 45 | GRANT ROLE store_rl TO USER store_admin; 46 | ``` 47 | 3. Option - Have an AWS User Group with privileges to create an IAM Role, Create and access AWS Secrets, Create Lambda Functions/Layer, Relevant S3 bucket access and KMS Key creation 48 | 49 | ## How to Install 50 | 51 | **1-step install** 52 | 1. Launch the [aws-snowflakeintobj-servicecatalog](https://github.com/Snowflake-Labs/aws-integrations-cloudops/blob/master/cft/aws-snowflakeintobj-servicecatalog.yml) template. 
The template takes the S3 prerequisites bucket as a single parameter. 53 | 54 | ## Test and Run 55 | 56 | 1. The Snowflake solution creates a Snowflake Service Catalog Portfolio, a ‘SnowflakeEnduserGroup’ AWS IAM group and provides this IAM group with access to the Portfolio. In order to launch the Snowflake Service Catalog Product, you have 2 options – 57 | 1. Option 1 - Grant your current logged in AWS IAM user/role permissions to access the Snowflake Service Catalog Portfolio by following steps [here](https://docs.aws.amazon.com/servicecatalog/latest/adminguide/getstarted-deploy.html) and launch the Snowflake Service Catalog product using your current logged in IAM user/role. 58 | 2. Option 2 – Add an IAM user to the ‘SnowflakeEnduserGroup’ IAM group. Log in as this IAM user to launch the Snowflake Service Catalog Product 59 | 2. Make sure the user that accesses Service Catalog also has access to the User Group or Privileges outlined in the Prerequisites Step 3 60 | 3. Navigate to the Service Catalog Console and launch the Snowflake Service Catalog Product. 61 | 1. Provide Snowflake connection details (note that the Snowflake Account ID is the [Account Identifier](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html)), the name of the Storage Integration in Snowflake, the S3 bucket created in the prerequisites with the code and the S3 bucket name for the data bucket 62 | 4. From your Snowflake account (snowsql or console)- 63 | 1. Validate that a new Snowflake integration object has been created - the name of the integration object will be the input parameter in the step above and *_STORAGE_INTEGRATION* appended to it (DESC INTEGRATION *'integrationobjectname'*) 64 | 2. Obtain the *AWS_IAM_USER_ARN* and *AWS_EXTERNAL_ID* parameters from above and check that the AWS IAM role uses those as the trust relationship and external id parameters 65 | 3. Validate that a new storage object has been created in Snowflake that references the S3 bucket 66 | 4. 
You can now create [Snowflake Stages](https://docs.snowflake.com/en/sql-reference/sql/create-stage.html) to various folders in the bucket and assign priviliges to other roles in Snowflake to read & write data to S3 67 | ## Cleanup 68 | 69 | To clean up your account after deploying the solution perform the following steps: 70 | 71 | 1. Terminate the Snowflake Service Catalog Provisioned Product. Follow steps [here](https://docs.aws.amazon.com/servicecatalog/latest/userguide/enduser-delete.html) to terminate Service Catalog provisioned products 72 | 2. If you followed Step 1a (Option 1) in the Test and Run section then remove the access of your logged in AWS user from the Snowflake Service Catalog Portfolio. If you followed Step 1b (Option 2) in the Test and Run section, then remove the IAM user from the ‘SnowflakeEnduserGroup’ IAM group 73 | 3. Delete the CloudFormation stack for the [aws-snowflakeintobj-servicecatalog](https://github.com/aws-samples/aws-datadog-controltower/blob/main/snowflake/cft/aws-snowflakeintobj-servicecatalog.yml) template 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /aws-cloudformation-for-storage-integration/README.md: -------------------------------------------------------------------------------- 1 |

2 |

3 | 4 | # Use an AWS Cloudformation template to automate Snowflake storage integration to access Amazon S3 5 | 6 | 1. Snowflake storage integrations are Snowflake objects that allow Snowflake to read and write data to Amazon S3. Snowflake storage integrations leverage AWS IAM to access S3. The S3 bucket is referenced by the Snowflake integration from an external (i.e. S3) Snowflake stage object 7 | 2. This solution provides automation for Snowflake to access a S3 bucket by deploying a Cloudformation template. The template takes outputs from the 'desc integration' command from Snowflake and creates 8 | an IAM role that can be assumed by Snowflake and has the permissions to access the S3 bucket. 9 | 3. Note this approach is for a single storage integration only, it is different from the [Service Catalog](https://github.com/sfc-gh-jsun/aws-integrations-cloudops) that should be used for multiple storage integrations with the Snowflake and AWS Account. 10 | 11 | 12 | ## How to use the template 13 | 14 | 1. Create a S3 bucket that you want to integrate with Snowflake if it doesn't exist, then copy your datasets to the bucket. 15 | 2. 
In your Snowflake UI, open a worksheet and run the following command to create a storage integration 16 | ```commandline 17 | CREATE or REPLACE STORAGE INTEGRATION 18 | TYPE = EXTERNAL_STAGE 19 | STORAGE_PROVIDER = 'S3' 20 | STORAGE_AWS_ROLE_ARN = 'arn:aws:iam:::role/' 21 | ENABLED = TRUE 22 | STORAGE_ALLOWED_LOCATIONS = ('s3:///'); 23 | 24 | DESC INTEGRATION ; 25 | ``` 26 | For example: 27 | ```commandline 28 | CREATE or REPLACE STORAGE INTEGRATION myS3Integration 29 | TYPE = EXTERNAL_STAGE 30 | STORAGE_PROVIDER = 'S3' 31 | STORAGE_AWS_ROLE_ARN = 'arn:aws:iam::111222333444:role/myIntRole' 32 | ENABLED = TRUE 33 | STORAGE_ALLOWED_LOCATIONS = ('s3://myawesomesnowflakebucket/'); 34 | 35 | DESC INTEGRATION myS3Integration; 36 | ``` 37 | You should see an output similar to the screen capture below: 38 | 39 | ![desc integration](images/descInt.png) 40 | 41 | You will need the information in the red boxes as the inputs for the Cloudformation template. 42 | 43 | 3. Now click [here](https://console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/new?stackName=Snowflake-storage-integration&templateURL=https://snowflake-corp-se-workshop.s3.us-west-1.amazonaws.com/CFTs/storageInt.json) to create the Cloudformation stack. 44 | Click `Next`, and you will see the page where you need to type in the values from the step above. 45 | 46 | ![cloudformation stack](images/CFT.png) 47 | 48 | Continue clicking through a couple of pages and 49 | leave everything as default, then submit the stack. In a few minutes, the stack is deployed. 50 | 51 | 4. Go back to the Snowflake UI and issue the follow command to create a stage: 52 | 53 | For example 54 | ```commandline 55 | CREATE or REPLACE STAGE my_stg STORAGE_INTEGRATION = myS3Integration 56 | URL = 's3://myawesomesnowflakebucket/' 57 | FILE_FORMAT = (TYPE = 'parquet'); 58 | ``` 59 | 60 | Now list the external stage: 61 | ``` 62 | LIST @my_stg; 63 | ``` 64 | 65 | You should be able to see the content of the stage. 
For example: 66 | 67 | ![stage](images/stage.png) 68 | 69 | 70 | 71 | -------------------------------------------------------------------------------- /aws-cloudformation-for-storage-integration/images/CFT.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-cloudformation-for-storage-integration/images/CFT.png -------------------------------------------------------------------------------- /aws-cloudformation-for-storage-integration/images/descInt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-cloudformation-for-storage-integration/images/descInt.png -------------------------------------------------------------------------------- /aws-cloudformation-for-storage-integration/images/stage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-cloudformation-for-storage-integration/images/stage.png -------------------------------------------------------------------------------- /aws-controltower/LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /aws-controltower/README.md: -------------------------------------------------------------------------------- 1 |

2 |

3 | 4 | # Use AWS Control Tower to automate multi account Snowflake storage integrations in AWS 5 | 6 | 7 | ## Overview 8 | 9 | 1. Snowflake storage integrations are Snowflake objects that allow Snowflake to read and write data to Amazon S3. This Control Tower integration with Snowflake solution enables Snowflake storage integrations with Amazon S3 to be automatically available for all newly added AWS accounts in an AWS Control Tower environment. 10 | 11 | 3. Each time AWS Control Tower provisions a new account in AWS, the account is automatically setup with an AWS Systems Manager automation runbook for creating Snowflake storage integrations with S3 in that account. The administrator for the new AWS account launches the runbook to create Snowflake integrations with S3 buckets in that account. 12 | 13 | 14 | 15 | ## How it Works 16 | 17 | 1. The solution is deployed using AWS CloudFormation templates and integrates with AWS Control Tower lifecycle events. When a new account is created or an existing one is enrolled using the AWS Control Tower Account Factory, the lifecycle event triggers a Lambda function. The Lambda function creates new CloudFormation stack instances in the newly added Control Tower managed account. 18 | 2. The stack instance in the newly added Control Tower managed account provisions an AWS Systems Manager Automation runbook in the managed account. The runbook is then launched by account administrators of this Control Tower managed account to create Snowflake integrations with S3 buckets in that account. 19 | 3. The AWS Systems Manager Automation runbook in the managed account automates all the steps required by Snowflake to create a storage integration with S3 in that account - it provisions a Snowflake integration object, attaches an IAM role to it, and creates a Snowflake stage object for it that references S3. 20 | 1. The runbook uses AWS Secrets Manager to store and retrieve Snowflake connection information. 
21 | 22 | ## Solution Design 23 | 24 | ![](images/snowflake-controltower-arch-diagram.PNG) 25 | 26 | 27 | ## Setup 28 | 29 | **Shared services account:** 30 | 31 | 1. Create an S3 bucket: *s3-snowflakeintegration-accountId-region*. Replace accountId and region with the AWS Account ID and region of your shared services AWS account. 32 | 2. Create a folder called *SnowflakeIntegration_Lambda_SSM* and upload the [SnowflakeIntegration_Lambda_SSM.zip](https://github.com/Snowflake-Labs/aws-integrations-cloudops/blob/master/aws-controltower/lambda/SnowflakeIntegration_Lambda_SSM.zip) file. This lambda uses the Snowflake Python Connector to query and update Snowflake 33 | 3. Upload the [snowflakelayer.zip](https://github.com/aws-samples/aws-datadog-controltower/blob/main/snowflake/layer/snowflakelayer.zip) in the root folder of this S3 object. This zip file packages the Snowflake connector as an AWS Lambda layer 34 | 4. Provide organization level read access to this S3 bucket: 35 | 1. Download the [s3bucketpolicy.json](https://github.com/Snowflake-Labs/aws-integrations-cloudops/blob/master/aws-controltower/cft/s3bucketpolicy.json) permissions policy file. Replace accountid and region with the AWS Account ID and region of your shared services AWS account. Sign in to your Control Tower organization's management account. Navigate to the AWS Organizations console and choose Settings. Note down the organization ID of your Control Tower organization and replace organizationid in the s3bucketpolicy.json file with this identifier. 36 | 2. Add the s3bucketpolicy.json as a bucket policy to your s3-snowflakeintegration-accountId-region S3 bucket from the AWS console. 37 | 38 | 39 | **Control Tower Management account:** 40 | 41 | 1. Launch the [aws-snowflake-controltower.yaml](https://github.com/Snowflake-Labs/aws-integrations-cloudops/blob/master/aws-controltower/cft/aws-snowflake-controltower.yaml) template. 
The template takes connection information for your Snowflake account as parameters. 42 | 43 | ## Test and Validate 44 | 45 | **Control Tower Management account:** 46 | 47 | 1. Use Control Tower Account Factory to create a new managed account in your AWS Organizations. This can take up to 30 mins for the account to be successfully created and the AWS Control Tower Lifecycle Event to trigger the account creation event. 48 | 49 | **Control Tower Managed account:** 50 | 51 | 1. Navigate to the AWS Systems Manager console in your AWS account. Select Documents from the left panel. Select Owned by me on the right panel and search for the ‘Custom-Snowflakestorageintegration’ document. 52 | 1. Launch your Systems Manager document from the console. Provide Snowflake connection details and an S3 bucket name as parameters 53 | 2. Navigate to the AWS IAM console and check that a new IAM role has been provisioned that ends with *S3INTxxxxx* suffix. This suffix will also be the name of your new Snowflake integration object 54 | 3. From your Snowflake account (snowsql or console)- 55 | 1. Validate that a new Snowflake integration object has been created (DESC INTEGRATION *'integrationobjectname'*) 56 | 2. Obtain the *AWS_IAM_USER_ARN* and *AWS_EXTERNAL_ID* parameters from above and check that the AWS IAM role uses those as the trust relationship and external id parameters 57 | 3. 
Validate that a new storage object has been created in Snowflake that references the S3 bucket and uses the integration object (SHOW STAGES IN ACCOUNT) 58 | 59 | 60 | 61 | -------------------------------------------------------------------------------- /aws-controltower/cft/aws-snowflake-controltower.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: AWS Control Tower Lifecycle Events for Snowflake(MPCT-jqowxsqq) 3 | 4 | # ---------------------------------------------------------------------------------------------------------- 5 | # 6 | # Enables Snowflake storage integrations to be automatically extended to newly added Control Tower accounts 7 | # 8 | # During an account lifecycle creation event - 9 | # - An SSM Automation that creates Snowflake Storage Integrations is provisioned in the newly addded account. 10 | # 11 | ## 12 | ## @kmmahaj 13 | ## 14 | # 15 | # ------------------------------------------------------------............................................... 16 | 17 | Parameters: 18 | 19 | SnowflakeSSMAutomationURL: 20 | Description: Base URL for Snowflake CloudFormation template for managed accounts 21 | Type: String 22 | Default: 'https://snowflake-controltower-managedaccount.s3.amazonaws.com/aws-snowflake-ssm.yml' 23 | SourceBucket: 24 | Description: REQUIRED. S3 Bucket that contains the Snowflake integration Lambda 25 | Type: String 26 | Default: 's3-snowflakeintegration-SharedServicesAccountId-Region' 27 | MinLength: '1' 28 | MaxLength: '255' 29 | snowaccount: 30 | Description: REQUIRED. Snowflake Account Identifier 31 | Type: String 32 | AllowedPattern: .+ 33 | ConstraintDescription: snowaccount is required 34 | snowuser: 35 | Description: REQUIRED. Snowflake account user 36 | Type: String 37 | NoEcho: true 38 | AllowedPattern: .+ 39 | ConstraintDescription: snowuser is required 40 | snowpass: 41 | Description: REQUIRED. 
Snowflake password for the Snowflake account user 42 | Type: String 43 | NoEcho: true 44 | AllowedPattern: .+ 45 | ConstraintDescription: snowpass is required 46 | snowdb: 47 | Description: REQUIRED. Snowflake Database 48 | Type: String 49 | Default: 'SF_TUTS' 50 | AllowedPattern: .+ 51 | ConstraintDescription: snowdb is required 52 | snowschema: 53 | Description: REQUIRED. Snowflake Schema 54 | Type: String 55 | Default: 'PUBLIC' 56 | AllowedPattern: .+ 57 | ConstraintDescription: snowschema is required 58 | 59 | 60 | Resources: 61 | 62 | # --------------------------------------------------------------------------------------------------- 63 | # Create a Snowflake StackSet in the Control Tower Management Account 64 | # - The Snowflake StackSet is the basis for the template to be provisioned in the managed accounts 65 | # -------------------------------------------------------------------------------------------------- 66 | 67 | SnowflakeStackSet: 68 | Type: AWS::CloudFormation::StackSet 69 | Properties: 70 | Description: StackSet for creating SSM Automation Execution Role to run Snowflake Storage integration 71 | StackSetName: 'SnowflakeSSMAutomationStackset' 72 | Parameters: 73 | - ParameterKey: SourceBucket 74 | ParameterValue: !Ref SourceBucket 75 | - ParameterKey: snowaccount 76 | ParameterValue: !Ref snowaccount 77 | - ParameterKey: snowuser 78 | ParameterValue: !Ref snowuser 79 | - ParameterKey: snowpass 80 | ParameterValue: !Ref snowpass 81 | - ParameterKey: snowdb 82 | ParameterValue: !Ref snowdb 83 | - ParameterKey: snowschema 84 | ParameterValue: !Ref snowschema 85 | PermissionModel: SELF_MANAGED 86 | AdministrationRoleARN: !Join [':', ['arn:aws:iam:', !Ref 'AWS::AccountId', 'role/service-role/AWSControlTowerStackSetRole']] 87 | ExecutionRoleName: "AWSControlTowerExecution" 88 | Capabilities: 89 | - CAPABILITY_NAMED_IAM 90 | - CAPABILITY_IAM 91 | - CAPABILITY_AUTO_EXPAND 92 | TemplateURL: !Ref SnowflakeSSMAutomationURL 93 | 94 | # 
-------------------------------------------------------------------------------------------------- 95 | # 96 | # 1- Provisions a CloudWatchEvents Rule that is triggered based on a Control Tower Lifecycle Event 97 | # 2- Provisions a Lifecyle Lambda as a target for the CloudWatch Events Rule. 98 | # -------------------------------------------------------------------------------------------------- 99 | 100 | SnowflakeCaptureControlTowerLifeCycleEvents: 101 | Type: AWS::Events::Rule 102 | Properties: 103 | Description: Capture Control Tower LifeCycle Events for Snowflake and Trigger an Action 104 | EventPattern: 105 | detail: 106 | eventName: 107 | - CreateManagedAccount 108 | - UpdateManagedAccount 109 | eventSource: 110 | - controltower.amazonaws.com 111 | detail-type: 112 | - AWS Service Event via CloudTrail 113 | source: 114 | - aws.controltower 115 | Name: SnowflakeCaptureControlTowerLifeCycleEvents 116 | State: ENABLED 117 | Targets: 118 | - Arn: !GetAtt "SnowflakeTriggerCustomizationsOnLifeCycleEvent.Arn" 119 | Id: IDCaptureControlTowerLifeCycleEvents 120 | 121 | 122 | #Snowflake TriggerLifecyleEvent Lambda 123 | SnowflakeTriggerCustomizationsOnLifeCycleEvent: 124 | Type: AWS::Lambda::Function 125 | Properties: 126 | Code: 127 | ZipFile: | 128 | import json 129 | import os 130 | import boto3 131 | import logging 132 | 133 | logger = logging.getLogger() 134 | logger.setLevel(logging.INFO) 135 | stackset_list = ['SnowflakeSSMAutomationStackset'] 136 | result = {"ResponseMetadata":{"HTTPStatusCode":"400"}} 137 | 138 | def lambda_handler(event, context): 139 | 140 | masterAcct = event['account'] 141 | eventDetails = event['detail'] 142 | regionName = eventDetails['awsRegion'] 143 | eventName = eventDetails['eventName'] 144 | srvEventDetails = eventDetails['serviceEventDetails'] 145 | if eventName == 'CreateManagedAccount' or eventName == 'UpdateManagedAccount': 146 | newAccInfo = {} 147 | logger.info('Event Processed Sucessfully') 148 | if eventName == 
'CreateManagedAccount': 149 | newAccInfo = srvEventDetails['createManagedAccountStatus'] 150 | if eventName == 'UpdateManagedAccount': 151 | newAccInfo = srvEventDetails['updateManagedAccountStatus'] 152 | cmdStatus = newAccInfo['state'] 153 | if cmdStatus == 'SUCCEEDED': 154 | '''Sucessful event recieved''' 155 | accId = newAccInfo['account']['accountId'] 156 | cloudformation = boto3.client('cloudformation') 157 | for item in stackset_list: 158 | try: 159 | result = cloudformation.create_stack_instances(StackSetName=item, Accounts=[accId], Regions=[regionName]) 160 | logger.info('Processed {} Sucessfully'.format(item)) 161 | except Exception as e: 162 | logger.error('Unable to launch in:{}, REASON: {}'.format(item, e)) 163 | else: 164 | '''Unsucessful event recieved''' 165 | logger.info('Unsucessful Event Recieved. SKIPPING :{}'.format(event)) 166 | return(False) 167 | else: 168 | logger.info('Control Tower Event Captured :{}'.format(event)) 169 | Handler: index.lambda_handler 170 | MemorySize: 256 171 | Role: !GetAtt "SnowflakeTriggerLifecycleEventLambdaRole.Arn" 172 | Runtime: python3.7 173 | Timeout: 60 174 | 175 | 176 | #Snowflake Trigger LifecyleEvent Lambda Role 177 | SnowflakeTriggerLifecycleEventLambdaRole: 178 | Type: 'AWS::IAM::Role' 179 | Properties: 180 | AssumeRolePolicyDocument: 181 | Version: 2012-10-17 182 | Statement: 183 | - Sid: AllowLambdaAssumeRole 184 | Effect: Allow 185 | Principal: 186 | Service: lambda.amazonaws.com 187 | Action: 'sts:AssumeRole' 188 | Policies: 189 | - PolicyName: !Sub snowflakelifecyclepolicy-${AWS::Region} 190 | PolicyDocument: 191 | Version: 2012-10-17 192 | Statement: 193 | - Sid: '1' 194 | Effect: Allow 195 | Action: 196 | - 'cloudformation:CreateStackInstances' 197 | Resource: !Join [':',['arn:aws:cloudformation', !Ref 'AWS::Region', !Ref 'AWS::AccountId', 'stackset/SnowflakeSSMAutomationStackset:*']] 198 | - Sid: '2' 199 | Action: 200 | - 'logs:CreateLogGroup' 201 | - 'logs:CreateLogStream' 202 | - 
'logs:PutLogEvents' 203 | - 'logs:DescribeLogStreams' 204 | Effect: Allow 205 | Resource: !Join [':',['arn:aws:logs', !Ref 'AWS::Region', !Ref 'AWS::AccountId', 'log-group', '/aws/lambda/SnowflakeTriggerCustomizationsOnLifeCycleEvent:*']] 206 | ManagedPolicyArns: 207 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/ReadOnlyAccess' 208 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole' 209 | 210 | 211 | PermissionForEventsToInvokeLambdachk: 212 | Type: AWS::Lambda::Permission 213 | Properties: 214 | Action: lambda:InvokeFunction 215 | FunctionName: !GetAtt "SnowflakeTriggerCustomizationsOnLifeCycleEvent.Arn" 216 | Principal: events.amazonaws.com 217 | SourceArn: !GetAtt "SnowflakeCaptureControlTowerLifeCycleEvents.Arn" 218 | -------------------------------------------------------------------------------- /aws-controltower/cft/aws-snowflake-ssm.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: MIT-0 3 | # 4 | # - Provisions an SSM Automation Document to create a Snowflake Storage Integration 5 | # 6 | # 7 | # kmmahaj 8 | 9 | AWSTemplateFormatVersion: 2010-09-09 10 | Description: >- 11 | AWS CloudFormation template to create Snowflake integration object for S3 access. 12 | Parameters: 13 | SourceBucket: 14 | Description: REQUIRED. S3 Bucket that contains the Snowflake integration Lambda 15 | Type: String 16 | Default: 's3-snowflakeintegration-AccountId-Region' 17 | MinLength: '1' 18 | MaxLength: '255' 19 | snowaccount: 20 | Description: REQUIRED. Snowflake Account Identifier 21 | Type: String 22 | AllowedPattern: .+ 23 | ConstraintDescription: snowaccount is required 24 | snowuser: 25 | Description: REQUIRED. Snowflake account user 26 | Type: String 27 | NoEcho: true 28 | AllowedPattern: .+ 29 | ConstraintDescription: snowuser is required 30 | snowpass: 31 | Description: REQUIRED. 
Snowflake password for the Snowflake account user 32 | Type: String 33 | NoEcho: true 34 | AllowedPattern: .+ 35 | ConstraintDescription: snowpass is required 36 | snowdb: 37 | Description: REQUIRED. Snowflake Database 38 | Type: String 39 | Default: 'SF_TUTS' 40 | AllowedPattern: .+ 41 | ConstraintDescription: snowdb is required 42 | snowschema: 43 | Description: REQUIRED. Snowflake Schema 44 | Type: String 45 | Default: 'PUBLIC' 46 | AllowedPattern: .+ 47 | ConstraintDescription: snowschema is required 48 | 49 | Resources: 50 | 51 | #--------------------------------------------------------------------------------------------------- 52 | # 1- Secrets Manager to store Snowflake credentials 53 | # -------------------------------------------------------------------------------------------------- 54 | 55 | # Secrets Management - Snowflake Credentials 56 | SnowflakeSecretString: 57 | Type: AWS::SecretsManager::Secret 58 | Properties: 59 | Description: Credentials required for Snowflake 60 | Name: !Sub snowflakesecret-${AWS::Region} 61 | SecretString: 62 | Fn::Join: 63 | - '' 64 | - - '{"snowaccount":"' 65 | - Ref: snowaccount 66 | - '","snowuser": "' 67 | - Ref: snowuser 68 | - '","snowpass": "' 69 | - Ref: snowpass 70 | - '","snowdb": "' 71 | - Ref: snowdb 72 | - '","snowschema": "' 73 | - Ref: snowschema 74 | - '"}' 75 | 76 | #--------------------------------------------------------------------------------------------------- 77 | # 2- Lambda Function that creates the Snowflake integration with S3 78 | # -------------------------------------------------------------------------------------------------- 79 | 80 | #Lambda Function that creates the Snowflake integration with S3 81 | SnowflakeIntegrationLambda: 82 | Type: 'AWS::Lambda::Function' 83 | Properties: 84 | FunctionName: !Join 85 | - '' 86 | - - SnowflakeIntegration_ 87 | - Lambda_ 88 | - SSM 89 | Role: !GetAtt SnowflakeIntegrationLambdaRole.Arn 90 | Code: 91 | S3Bucket: !Ref SourceBucket 92 | S3Key: !Join 93 | - 
'' 94 | - - SnowflakeIntegration_Lambda_SSM 95 | - / 96 | - SnowflakeIntegration_Lambda_SSM 97 | - .zip 98 | Description: SnowflakeIntegrationLambdaSSM 99 | Handler: SnowflakeIntegration_Lambda_SSM.lambda_handler 100 | MemorySize: '256' 101 | Runtime: python3.7 102 | Layers: 103 | - !Ref SnowflakeLayer 104 | Environment: 105 | Variables: 106 | SNOW_SECRET: !Ref SnowflakeSecretString 107 | AWSACCOUNT: !Ref 'AWS::AccountId' 108 | Timeout: 500 109 | 110 | #Lambda Layer for Snowflake Python Connector 111 | SnowflakeLayer: 112 | Type: AWS::Lambda::LayerVersion 113 | Properties: 114 | CompatibleRuntimes: 115 | - python3.6 116 | - python3.7 117 | - python3.8 118 | Content: 119 | S3Bucket: !Ref SourceBucket 120 | S3Key: snowflakelayer.zip 121 | Description: Lambda layer for Snowflake Python Connector 122 | LayerName: snowflakelayer 123 | LicenseInfo: MIT 124 | 125 | #IAM Role for the SnowflakeIntegration Lambda 126 | SnowflakeIntegrationLambdaRole: 127 | Type: 'AWS::IAM::Role' 128 | Properties: 129 | RoleName: !Sub snowflakeintegrationlamdarole-${AWS::Region} 130 | AssumeRolePolicyDocument: 131 | Version: 2012-10-17 132 | Statement: 133 | - Sid: AllowLambdaAssumeRole 134 | Effect: Allow 135 | Principal: 136 | Service: lambda.amazonaws.com 137 | Action: 'sts:AssumeRole' 138 | Policies: 139 | - PolicyName: SnowflakeIntegrationLambdaPolicy 140 | PolicyDocument: 141 | Version: 2012-10-17 142 | Statement: 143 | - Sid: '1' 144 | Action: 145 | - s3:GetObjectVersion 146 | - s3:PutObject 147 | - s3:GetObject 148 | - s3:GetObjectVersionTagging 149 | - s3:GetObjectVersionAcl 150 | Effect: Allow 151 | Resource: 152 | - !Sub arn:${AWS::Partition}:s3:::${SourceBucket} 153 | - !Sub arn:${AWS::Partition}:s3:::${SourceBucket}/* 154 | - Sid: '2' 155 | Action: 156 | - 'logs:CreateLogGroup' 157 | - 'logs:CreateLogStream' 158 | - 'logs:PutLogEvents' 159 | - 'logs:DescribeLogStreams' 160 | Effect: Allow 161 | Resource: '*' 162 | - Sid: '3' 163 | Action: 164 | - 'secretsmanager:GetSecretValue' 
165 | - 'secretsmanager:ListSecrets' 166 | Effect: Allow 167 | Resource: !Join [':',['arn:aws:secretsmanager', !Ref 'AWS::Region', !Ref 'AWS::AccountId','secret','snowflakesecret-*']] 168 | ManagedPolicyArns: 169 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/IAMFullAccess' 170 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess' 171 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole' 172 | 173 | #--------------------------------------------------------------------------------------------------- 174 | # 3- SSM Automation that invokes Lambda 175 | # -------------------------------------------------------------------------------------------------- 176 | 177 | # SSM Automation Role 178 | SnowflakeSSMAutomationAssumeRole: 179 | Type: 'AWS::IAM::Role' 180 | Properties: 181 | RoleName: !Sub snowflake-automationassumerole-${AWS::Region} 182 | AssumeRolePolicyDocument: 183 | Version: 2012-10-17 184 | Statement: 185 | - Effect: Allow 186 | Principal: 187 | Service: 188 | - ssm.amazonaws.com 189 | - events.amazonaws.com 190 | - ec2.amazonaws.com 191 | Action: 192 | - 'sts:AssumeRole' 193 | Path: / 194 | ManagedPolicyArns: 195 | - !Sub "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" 196 | 197 | 198 | SSMPermissionToCallSnowflakeLambda: 199 | Type: 'AWS::Lambda::Permission' 200 | Properties: 201 | FunctionName: !GetAtt 202 | - SnowflakeIntegrationLambda 203 | - Arn 204 | Action: 'lambda:InvokeFunction' 205 | Principal: !GetAtt SnowflakeSSMAutomationAssumeRole.Arn 206 | 207 | 208 | # [Snowflake storage integration SSM] 209 | SnowflakeStorageIntegrationSSM: 210 | Type: AWS::SSM::Document 211 | DependsOn: SSMPermissionToCallSnowflakeLambda 212 | Properties: 213 | DocumentType: Automation 214 | Name: Custom-Snowflakestorageintegration 215 | Content: 216 | schemaVersion: '0.3' 217 | assumeRole: !GetAtt SnowflakeSSMAutomationAssumeRole.Arn 218 | parameters: 219 | S3BUCKET: 220 | type: String 221 | default: 
'mybucket1' 222 | AutomationAssumeRole: 223 | type: String 224 | default: !GetAtt SnowflakeSSMAutomationAssumeRole.Arn 225 | mainSteps: 226 | - name: createstorageintegration 227 | action: 'aws:invokeLambdaFunction' 228 | maxAttempts: 3 229 | timeoutSeconds: 180 230 | inputs: 231 | FunctionName: !GetAtt SnowflakeIntegrationLambda.Arn 232 | InvocationType: RequestResponse 233 | Payload: '{"parameterName":"S3BUCKET", "parameterValue":"{{S3BUCKET}}"}' 234 | 235 | -------------------------------------------------------------------------------- /aws-controltower/cft/s3bucketpolicy.json: -------------------------------------------------------------------------------- 1 | { 2 | "Version": "2012-10-17", 3 | "Statement": [ 4 | { 5 | "Effect": "Allow", 6 | "Principal": "*", 7 | "Action": "s3:GetObject", 8 | "Resource": "arn:aws:s3:::s3-snowflakeintegration--/*", 9 | "Condition": { 10 | "StringEquals": { 11 | "aws:PrincipalOrgID": "" 12 | } 13 | } 14 | } 15 | ] 16 | } -------------------------------------------------------------------------------- /aws-controltower/images/snowflake-controltower-arch-diagram.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-controltower/images/snowflake-controltower-arch-diagram.PNG -------------------------------------------------------------------------------- /aws-controltower/lambda/SnowflakeIntegration_Lambda_SSM.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # Lambda that creates Snowflake integration object in Snowflake and the corresponding IAM role in AWS 5 | # - Uses Snowflake Python Connector 6 | # 7 | # @kmmahaj 8 | 9 | import json 10 | import urllib 11 | import boto3 12 | import os 13 | import string 14 | import random 15 | import snowflake.connector 16 | import logging 17 | import urllib3 18 | from snowflake.connector import DictCursor 19 | 20 | AWS_EXTERNAL_ID = "" 21 | AWS_IAM_USER_ARN = "" 22 | 23 | logger = logging.getLogger(__name__) 24 | logging.getLogger().setLevel(logging.INFO) 25 | http = urllib3.PoolManager() 26 | 27 | session = boto3.session.Session() 28 | 29 | sf_config_name = '' 30 | allowed_sf_config = ('snowaccount', 'snowuser', 'snowpass', 'snowdb', 'snowschema') 31 | 32 | def get_secret_value(secret_name): 33 | """ 34 | get secret value from AWS Secrets Manager 35 | :param secret_name: name of the secret passed 36 | :return secret_value: value of the secret passed 37 | """ 38 | client = session.client(service_name='secretsmanager') 39 | secret_value = '' 40 | try: 41 | get_secret_value_response = client.get_secret_value(SecretId=secret_name) 42 | except ClientError as e: 43 | logger.error(f"error while executing get_secret_value, {e}") 44 | raise Exception() 45 | else: 46 | secret_value = get_secret_value_response['SecretString'] if 'SecretString' in get_secret_value_response else '' 47 | finally: 48 | return secret_value 49 | 50 | 51 | def get_snowflake_config(sf_config_name): 52 | """ 53 | get snowflake config, throws exception if invalid 54 | :return config: snowflake config dict 55 | """ 56 | config = json.loads(get_secret_value(sf_config_name)) 57 | for key in config: 58 | if not config.get(key, ''): 59 | logger.error(f"either key {key} do not exist, or non empty value found") 60 | raise Exception() 61 | return config 62 | 63 | 64 | def create_iam_policy(externalid, iamrolearn,SNOW_S3_BUCKET,SNOW_INT,SNOW_ROLE): 65 | iam = boto3.client('iam') 66 
| s3fullresourcearn = "arn:aws:s3:::" + SNOW_S3_BUCKET + '/*' 67 | s3bucketresourcearn = "arn:aws:s3:::" + SNOW_S3_BUCKET 68 | s3prefix = SNOW_S3_BUCKET + '/*' 69 | s3_access_policy = { 70 | "Version": "2012-10-17", 71 | "Statement": [ 72 | { 73 | "Effect": "Allow", 74 | "Action": [ 75 | "s3:PutObject", 76 | "s3:GetObject", 77 | "s3:GetObjectVersion", 78 | "s3:DeleteObject", 79 | "s3:DeleteObjectVersion" 80 | ], 81 | "Resource": s3fullresourcearn 82 | }, 83 | { 84 | "Effect": "Allow", 85 | "Action": "s3:ListBucket", 86 | "Resource": s3bucketresourcearn 87 | } 88 | ] 89 | } 90 | snowpolicy = "SnowflakeS3AccessPolicy-" + SNOW_S3_BUCKET + SNOW_INT 91 | response_policy = iam.create_policy( 92 | PolicyName=snowpolicy, 93 | PolicyDocument=json.dumps(s3_access_policy) 94 | ) 95 | 96 | policyArn = response_policy['Policy']['Arn'] 97 | 98 | trust_relationship_policy = { 99 | "Version": "2012-10-17", 100 | "Statement": [ 101 | { 102 | "Effect": "Allow", 103 | "Principal": { 104 | "AWS": iamrolearn 105 | }, 106 | "Action": "sts:AssumeRole", 107 | "Condition": { 108 | "StringEquals": { 109 | "sts:ExternalId": externalid 110 | } 111 | } 112 | } 113 | ] 114 | } 115 | 116 | AssumeRolePolicyDocument = json.dumps(trust_relationship_policy) 117 | print(AssumeRolePolicyDocument) 118 | 119 | snowrole = SNOW_ROLE 120 | response_role = iam.create_role( 121 | RoleName=snowrole, 122 | AssumeRolePolicyDocument=AssumeRolePolicyDocument 123 | ) 124 | print(response_role) 125 | 126 | response = iam.attach_role_policy( 127 | RoleName=snowrole, 128 | PolicyArn=policyArn 129 | ) 130 | 131 | print(response) 132 | 133 | 134 | def lambda_handler(event, context): 135 | 136 | logger.info('EVENT Received: {}'.format(event)) 137 | 138 | CURRENT_AWS_ACCOUNT = os.environ['AWSACCOUNT'] 139 | sf_config_name = os.environ['SNOW_SECRET'] 140 | sf_config = get_snowflake_config(sf_config_name) 141 | logger.info(f'snowflake config successfully retrieved from secrets') 142 | 143 | assert isinstance(sf_config, 
dict), 'sf_config config must be of type dict' 144 | 145 | ctx = snowflake.connector.connect( 146 | user=sf_config['snowuser'], 147 | password=sf_config['snowpass'], 148 | role='ACCOUNTADMIN', 149 | account=sf_config['snowaccount'], 150 | database=sf_config['snowdb'], 151 | schema=sf_config['snowschema'], 152 | ocsp_response_cache_filename="/tmp/ocsp_response_cache" 153 | ) 154 | cs = ctx.cursor() 155 | 156 | SNOW_S3_BUCKET = event['parameterValue'] 157 | 158 | letters = string.ascii_lowercase 159 | randomstr = ''.join(random.choice(letters) for i in range(3)) 160 | randomnum = str(random.randrange(2,100)) 161 | SNOW_INT = "S3INT" + randomstr + randomnum 162 | SNOW_ROLE = "SFAccessRole-" + SNOW_INT 163 | 164 | 165 | SNOW_S3_LOCATION = 's3://' + SNOW_S3_BUCKET +'/' 166 | try: 167 | 168 | sql_1 = 'create storage integration ' + SNOW_INT + ' type = external_stage storage_provider = s3 enabled = true' \ 169 | + ' storage_aws_role_arn = ' + "'" + "arn:aws:iam::" + CURRENT_AWS_ACCOUNT + ":role/" + SNOW_ROLE + "'" + ' storage_allowed_locations = (' + "'" + SNOW_S3_LOCATION + "'" +')' 170 | print(sql_1) 171 | cs.execute(sql_1) 172 | 173 | sql_2 = 'desc integration ' + SNOW_INT 174 | print(sql_2) 175 | cs.execute(sql_2) 176 | 177 | query_id_desc = cs.sfqid 178 | 179 | sql_3 = 'select "property", "property_value" from table(result_scan(' + "'" + query_id_desc + "'" + '))' + ' where "property" = ' + "'" + "STORAGE_AWS_EXTERNAL_ID" + "'" 180 | print(sql_3) 181 | cs.execute(sql_3) 182 | for (property, property_value) in cs: 183 | AWS_EXTERNAL_ID = property_value 184 | print('{0}, {1}'.format(property, AWS_EXTERNAL_ID)) 185 | 186 | 187 | sql_4 = 'select "property", "property_value" from table(result_scan(' + "'" + query_id_desc + "'" + '))' + ' where "property" = ' + "'" + "STORAGE_AWS_IAM_USER_ARN" + "'" 188 | print(sql_4) 189 | cs.execute(sql_4) 190 | for (property, property_value) in cs: 191 | AWS_IAM_USER_ARN = property_value 192 | print('{0}, {1}'.format(property, 
AWS_IAM_USER_ARN)) 193 | 194 | create_iam_policy(AWS_EXTERNAL_ID,AWS_IAM_USER_ARN,SNOW_S3_BUCKET,SNOW_INT,SNOW_ROLE) 195 | 196 | sql_5 = 'create stage ' + "S3STAGE" + SNOW_INT + ' storage_integration = ' + SNOW_INT + ' url = (' + "'" + SNOW_S3_LOCATION + "'" +')' 197 | print(sql_5) 198 | cs.execute(sql_5) 199 | 200 | finally: 201 | cs.close() 202 | ctx.close() 203 | 204 | return 'SUCCESS' 205 | 206 | -------------------------------------------------------------------------------- /aws-controltower/lambda/SnowflakeIntegration_Lambda_SSM.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-controltower/lambda/SnowflakeIntegration_Lambda_SSM.zip -------------------------------------------------------------------------------- /aws-controltower/layer/snowflakelayer.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-controltower/layer/snowflakelayer.zip -------------------------------------------------------------------------------- /aws-servicecatalog/apigw-sample/APIGW_README.md: -------------------------------------------------------------------------------- 1 |

2 |

3 | 4 | # Use AWS Service Catalog to automate Snowflake integration with Amazon API GW and AWS Lambda to access Amazon service APIs 5 | 6 | 1. Snowflake API integrations are Snowflake objects that allow Snowflake to read and write data to Amazon S3. Snowflake storage integrations leverage AWS IAM to access S3. The S3 bucket is referenced by the Snowflake integration from an external (i.e. S3) Snowflake stage object 7 | An API integration object stores information about an HTTPS proxy service, including information about the Cloud platform provider e.g. AWS, AWS role ARN (Amazon Resource Name). 8 | 9 | 2. This solution provides an integration design pattern and the building blocks for automating Snowflake access to Amazon service APIs using AWS Service Catalog. The solution implements an AWS Service Catalog product that automates Snowflake access to Amazon service API using an API GW and Lambda. 10 | 1. The Service Catalog product provisions a Snowflake API integration object, attaches an IAM role to it and creates a Lambda function that calls the required AWS service. 11 | 12 | ## How it Works 13 | 14 | 1. Provision a Service Catalog Portfolio with a Service Catalog Product 15 | 2. The Snowflake Service Catalog Product takes a) Snowflake Connection information and b) S3 bucketname and prefix as input parameters and uses the *aws-snowflake-apigw-integrationobject.yml* CloudFormation template to create a Snowflake API integration object. 16 | 1. The Snowflake Service Catalog Product can be invoked as many times as needed. Each time it creates a Snowflake API integration object to access an API defined in the create-resources-1.0.zip using input parameters supplied above. 17 | 3. The template from 2: 18 | 1. Provisions AWS Secrets Manager to store and retrieve Snowflake connection information 19 | 2. Provisions a Lambda function that uses the Snowflake python connector: 20 | 1. 
Creates a Snowflake API integration object and obtains the Snowflake generated *AWS_IAM_USER_ARN* and *AWS_EXTERNAL_ID* from the Snowflake integration 21 | 2. Provisions an AWS IAM role that uses the Snowflake generated IAM Principal and External ID from 1 above 22 | 23 | 24 | 25 | ## Solution Design 26 | 27 | ![](images/snowflake-arch.png) 28 | 29 | 30 | ## Prerequisites 31 | 32 | 1. Create an S3 bucket for the source files: ***s3-snowflakeintegration-accountId-region***. Replace accountId and region with the AWS Account ID and region of your AWS account. These source files will be copied to a destination S3 bucket specified as one of the parameters of the 2nd CloudFormation template 33 | 1. Edit the CloudFormation template in github folder aws-servicecatalog/apigw-sample/template/aws-snowflake-apigw-integrationobject.yml and in the mapping section update the codebucket to the S3 bucket created above. 34 | 2. Upload the contents of the github folder aws-servicecatalog/apigw-sample to the above S3 bucket. This zip file packages the Snowflake connector as an AWS Lambda layer 35 | 3. After completing the upload the above S3 bucket for source files should contain a create-resources-1.0.zip file which contains sample Lambda code. You should also see the Snowflake Python connector zip file and a template folder containing the two CloudFormation templates. 36 | 2. Create a Snowflake user and role with the ability to create Integrations in your Snowflake account. Below are sample SQL Commands that can be used. 37 | ```use role accountadmin; 38 | create or replace role apigw_role; 39 | grant role apigw_role to role sysadmin; 40 | grant create integration on account to role apigw_role; 41 | 42 | CREATE OR REPLACE USER apigw_admin PASSWORD = '' 43 | LOGIN_NAME = 'apigw_admin' 44 | DISPLAY_NAME = 'apigw_admin' 45 | DEFAULT_ROLE = "apigw_admin" 46 | MUST_CHANGE_PASSWORD = FALSE; 47 | GRANT ROLE apigw_role TO USER apigw_admin; 48 | ``` 49 | 3. 
Optional - Have an AWS User Group with privileges to create an IAM Role, Create and access AWS Secrets, Create Lambda Functions/Layer, Relevant S3 bucket access and KMS Key creation 50 | 51 | ## How to Install 52 | 53 | **1-step install** 54 | 1. Launch the [aws-snowflake-apigw-servicecatalog](https://github.com/Snowflake-Labs/aws-integrations-cloudops/aws-servicecatalog/apigw-sample/template/aws-snowflake-apigw-servicecatalog.yml) template. The template takes the S3 prerequisites bucket (with source files) as a single parameter. 55 | 56 | ## Test and Run 57 | 58 | 1. The Snowflake solution creates a Snowflake Service Catalog Portfolio, a ‘SnowflakeEnduserGroup’ AWS IAM group and provides this IAM group with access to the Portfolio. In order to launch the Snowflake Service Catalog Product, you have 2 options – 59 | 1. Option 1 - Grant your current logged in AWS IAM user/role permissions to access the Snowflake Service Catalog Portfolio by following steps [here](https://docs.aws.amazon.com/servicecatalog/latest/adminguide/getstarted-deploy.html) and launch the Snowflake Service Catalog product using your current logged in IAM user/role. 60 | 2. Option 2 – Add an IAM user to the ‘SnowflakeEnduserGroup’ IAM group. Log in as this IAM user to launch the Snowflake Service Catalog Product 61 | 2. Make sure the user that accesses Service Catalog also has access to the User Group or Privileges outlined in the Prerequisites 62 | 3. Navigate to the Service Catalog Console and launch the Snowflake Service Catalog Product. 63 | 1. Provide Snowflake connection details (note that the Snowflake Account ID is the [Account Identifier](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html)), the name of the Storage Integration in Snowflake, the S3 bucket created in the prerequisites with the code and the S3 bucket name for the data bucket 64 | 5. From your Snowflake account (snowsql or console)- 65 | 1. 
Validate that a new Snowflake API integration object has been created - the name of the integration object will be the input parameter in the step above and *_* appended to it (DESC INTEGRATION *'integrationobjectname'*). You can sort the output by date to identify the latest object created. 66 | 2. Obtain the *AWS_IAM_USER_ARN* and *AWS_EXTERNAL_ID* parameters from above and check that the AWS IAM role uses those as the trust relationship and external id parameters 67 | 3. Validate that a new external function has been created in Snowflake with this command by providing values for Snowflake database, schema, suffix used: DESCRIBE FUNCTION ..AWS_AUTOPILOT_CREATE_MODEL_(VARCHAR, VARCHAR, VARCHAR); 68 | 4. The sample create-resources-1.0.zip contains the code for the 'create_model' API call to Amazon SageMaker AutoPilot. You should be able to run the following SQL to invoke this API call : 69 | SELECT ..AWS_AUTOPILOT_CREATE_MODEL_('model-name','ABALONE', 'RINGS'); where ABALONE is the table name and RINGS is the column to be predicted. Please see: https://archive.ics.uci.edu/ml/datasets/abalone 70 | 71 | ## References 72 | You can find the full implementation of the Amazon SageMaker integration with Snowflake here: https://github.com/aws-samples/amazon-sagemaker-integration-with-snowflake/blob/main/snowflake-integration-overview.md 73 | 74 | 75 | ## Cleanup 76 | 77 | To clean up your account after deploying the solution perform the following steps: 78 | 79 | 1. Terminate the Snowflake Service Catalog Provisioned Product. Follow steps [here](https://docs.aws.amazon.com/servicecatalog/latest/userguide/enduser-delete.html) to terminate Service Catalog provisioned products 80 | 2. If you followed Step 1a (Option 1) in the Test and Run section then remove the access of your logged in AWS user from the Snowflake Service Catalog Portfolio. 
If you followed Step 1b (Option 2) in the Test and Run section, then remove the IAM user from the ‘SnowflakeEnduserGroup’ IAM group 81 | 3. Delete the CloudFormation stack for the aws-snowflake-apigw-servicecatalog template 82 | 83 | 84 | 85 | -------------------------------------------------------------------------------- /aws-servicecatalog/apigw-sample/create-resources-1.0.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-servicecatalog/apigw-sample/create-resources-1.0.zip -------------------------------------------------------------------------------- /aws-servicecatalog/apigw-sample/snowflake-connector-python-1.0.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-servicecatalog/apigw-sample/snowflake-connector-python-1.0.zip -------------------------------------------------------------------------------- /aws-servicecatalog/apigw-sample/template/aws-snowflake-apigw-integrationobject.yml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Parameters: 3 | s3BucketName: 4 | Type: String 5 | Description: "Name of the S3 bucket to be created" 6 | MinLength: 1 7 | snowflakeSecretArn: 8 | Type: String 9 | Description: "ARN of the AWS Secret containing the Snowflake login information" 10 | MinLength: 1 11 | kmsKeyArn: 12 | Type: String 13 | AllowedPattern: "^(arn:aws[a-z-]*:kms:[a-z0-9-]*:[0-9]{12}:key\\/.+)?$" 14 | Default: "" 15 | Description: "(Optional) ARN of the AWS Key Management Service key that Amazon SageMaker uses to encrypt job outputs. The KmsKeyId is applied to all outputs." 
16 | snowflakeRole: 17 | Type: String 18 | Description: "Snowflake Role with permissions to create Storage Integrations, API Integrations and Functions" 19 | Default: "ACCOUNTADMIN" 20 | MinLength: 1 21 | snowflakeDatabaseName: 22 | Type: String 23 | Description: "Snowflake Database in which external functions will be created" 24 | MinLength: 1 25 | snowflakeSchemaName: 26 | Type: String 27 | Description: "Snowflake Database Schema in which external functions will be created" 28 | MinLength: 1 29 | apiGatewayName: 30 | Type: "String" 31 | AllowedPattern: "^[a-zA-Z0-9]+[-a-zA-Z0-9-]+[-a-zA-Z0-9]+$" 32 | Default: "snowflake-autopilot-api" 33 | Description: "API Gateway name" 34 | apiGatewayStageName: 35 | Type: "String" 36 | AllowedPattern: "^[-a-zA-Z0-9]+$" 37 | Default: "main" 38 | Description: "API deployment stage" 39 | MinLength: 1 40 | snowflakeResourceSuffix: 41 | Type: String 42 | Description: "(Optional) Suffix for resources created in Snowflake. This suffix will be added to all function names created in the database schema." 
43 | Default: "" 44 | Mappings: 45 | Package: 46 | Attributes: 47 | Identifier: "'SagemakerProxy/1.0'" 48 | Locations: 49 | CodeBucket: "apigw--" 50 | PathToLayerCode: "snowflake-connector-python-1.0.zip" 51 | PathToLambdaCode: "create-resources-1.0.zip" 52 | Conditions: 53 | KMSKeyArnProvided: !Not 54 | - !Equals 55 | - !Ref kmsKeyArn 56 | - "" 57 | Metadata: 58 | AWS::CloudFormation::Interface: 59 | ParameterGroups: 60 | - 61 | Label: "" 62 | Parameters: 63 | - apiGatewayName 64 | - apiGatewayStageName 65 | - s3BucketName 66 | - kmsKeyArn 67 | - snowflakeDatabaseName 68 | - snowflakeSchemaName 69 | - snowflakeResourceSuffix 70 | - snowflakeRole 71 | - snowflakeSecretArn 72 | Resources: 73 | S3Bucket: 74 | Type: 'AWS::S3::Bucket' 75 | DeletionPolicy: Delete 76 | Properties: 77 | BucketName: !Ref s3BucketName 78 | SnowflakeAutoMLExecutionRole: 79 | Type: 'AWS::IAM::Role' 80 | Properties: 81 | Description: IAM Role used to execute the AutoML jobs from Snowflake 82 | AssumeRolePolicyDocument: 83 | Version: '2012-10-17' 84 | Statement: 85 | - Effect: Allow 86 | Principal: 87 | Service: 88 | - sagemaker.amazonaws.com 89 | Action: 90 | - 'sts:AssumeRole' 91 | Path: / 92 | ManagedPolicyArns: 93 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/AmazonSageMakerFullAccess' 94 | Policies: 95 | - PolicyName: s3-permissions 96 | PolicyDocument: 97 | Version: 2012-10-17 98 | Statement: 99 | - Effect: Allow 100 | Action: 101 | - 's3:GetObject' 102 | - 's3:PutObject' 103 | - 's3:DeleteObject' 104 | - 's3:ListBucket' 105 | Resource: 106 | - !Join 107 | - '' 108 | - - !GetAtt S3Bucket.Arn 109 | - '/*' 110 | - PolicyName: kms-permissions 111 | PolicyDocument: 112 | Version: 2012-10-17 113 | Statement: 114 | - Effect: Allow 115 | Action: 116 | - 'kms:CreateGrant' 117 | - "kms:Decrypt" 118 | - "kms:DescribeKey" 119 | - "kms:Encrypt" 120 | - "kms:GenerateDataKey*" 121 | - "kms:ReEncrypt*" 122 | Resource: 123 | - !Join 124 | - ":" 125 | - - "arn" 126 | - !Ref AWS::Partition 127 | - "kms" 
128 | - !Ref AWS::Region 129 | - !Ref AWS::AccountId 130 | - "alias/aws/secretsmanager" 131 | - !If 132 | - KMSKeyArnProvided 133 | - !Ref kmsKeyArn 134 | - !Ref AWS::NoValue 135 | - PolicyName: secrets-permissions 136 | PolicyDocument: 137 | Version: 2012-10-17 138 | Statement: 139 | - Effect: Allow 140 | Action: 141 | - 'secretsmanager:GetSecretValue' 142 | Resource: !Ref snowflakeSecretArn 143 | SnowflakeAPIGatewayExecutionRole: 144 | Type: 'AWS::IAM::Role' 145 | Properties: 146 | Description: IAM Role used to call SageMaker from API Gateway for SnowFlake 147 | AssumeRolePolicyDocument: 148 | Version: '2012-10-17' 149 | Statement: 150 | - Effect: Allow 151 | Principal: 152 | Service: 153 | - apigateway.amazonaws.com 154 | Action: 155 | - 'sts:AssumeRole' 156 | Path: / 157 | Policies: 158 | - PolicyName: root 159 | PolicyDocument: 160 | Version: 2012-10-17 161 | Statement: 162 | - Effect: Allow 163 | Action: 164 | - 'sagemaker:CreateAutoMLJob' 165 | Resource: '*' 166 | - PolicyName: passRoleToExecute 167 | PolicyDocument: 168 | Version: 2012-10-17 169 | Statement: 170 | - Effect: Allow 171 | Action: 172 | - 'iam:PassRole' 173 | Resource: !GetAtt "SnowflakeAutoMLExecutionRole.Arn" 174 | - PolicyName: kms-permissions 175 | PolicyDocument: 176 | Version: 2012-10-17 177 | Statement: 178 | - Effect: Allow 179 | Action: 180 | - 'kms:CreateGrant' 181 | - "kms:Decrypt" 182 | - "kms:DescribeKey" 183 | - "kms:Encrypt" 184 | - "kms:GenerateDataKey*" 185 | - "kms:ReEncrypt*" 186 | Resource: 187 | - !Join 188 | - ":" 189 | - - "arn" 190 | - !Ref AWS::Partition 191 | - "kms" 192 | - !Ref AWS::Region 193 | - !Ref AWS::AccountId 194 | - "alias/aws/secretsmanager" 195 | - !If 196 | - KMSKeyArnProvided 197 | - !Ref kmsKeyArn 198 | - !Ref AWS::NoValue 199 | CopyZipsRole: 200 | Type: AWS::IAM::Role 201 | Properties: 202 | Description: IAM Role used to copy Snowflake libraries form the shared repository 203 | AssumeRolePolicyDocument: 204 | Version: '2012-10-17' 205 | Statement: 206 
| - Effect: Allow 207 | Principal: 208 | Service: 209 | - lambda.amazonaws.com 210 | Action: 211 | - sts:AssumeRole 212 | Path: '/' 213 | ManagedPolicyArns: 214 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/CloudWatchLogsFullAccess' 215 | Policies: 216 | - PolicyName: s3-dest-permissions 217 | PolicyDocument: 218 | Version: 2012-10-17 219 | Statement: 220 | - Effect: Allow 221 | Action: 222 | - 's3:PutObject' 223 | - 's3:DeleteObject' 224 | Resource: 225 | - !Join 226 | - '' 227 | - - !GetAtt S3Bucket.Arn 228 | - '/*' 229 | - PolicyName: s3-src-permissions 230 | PolicyDocument: 231 | Version: 2012-10-17 232 | Statement: 233 | - Effect: Allow 234 | Action: 235 | - 's3:GetObject' 236 | - 's3:ListBucket' 237 | Resource: '*' 238 | CreateSnowflakeResourcesExecutionRole: 239 | Type: AWS::IAM::Role 240 | Properties: 241 | Description: IAM Role used to create Snowflake resources from the CloudFormation template 242 | AssumeRolePolicyDocument: 243 | Version: '2012-10-17' 244 | Statement: 245 | - Effect: Allow 246 | Principal: 247 | Service: 248 | - lambda.amazonaws.com 249 | Action: 250 | - sts:AssumeRole 251 | Path: '/' 252 | ManagedPolicyArns: 253 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/CloudWatchLogsFullAccess' 254 | Policies: 255 | - PolicyName: secrets-permissions 256 | PolicyDocument: 257 | Version: 2012-10-17 258 | Statement: 259 | - Effect: Allow 260 | Action: 261 | - 'secretsmanager:GetSecretValue' 262 | Resource: !Ref snowflakeSecretArn 263 | - PolicyName: update-iam-role 264 | PolicyDocument: 265 | Version: 2012-10-17 266 | Statement: 267 | - Effect: Allow 268 | Action: 269 | - 'iam:UpdateAssumeRolePolicy' 270 | Resource: 271 | - !GetAtt SnowflakeAPIGatewayExecutionRole.Arn 272 | - !GetAtt SnowflakeAutoMLExecutionRole.Arn 273 | SnowflakeApiGateway: 274 | Type: "AWS::ApiGateway::RestApi" 275 | DependsOn: SnowflakeAPIGatewayExecutionRole 276 | Properties: 277 | Name: !Ref apiGatewayName 278 | Description: "Snowflake external functions Gateway" 279 | 
Policy: !Sub 280 | - '{ "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Principal": { "AWS": "arn:${AWS::Partition}:sts::${AWS::AccountId}:assumed-role/${SnowflakeAPIGatewayExecutionRole}/snowflake" }, "Action": "execute-api:Invoke", "Resource": "${resourceArn}" } ] }' 281 | - resourceArn: !Join [ "", [ "execute-api:/", "*" ] ] 282 | EndpointConfiguration: 283 | Types: 284 | - REGIONAL 285 | SnowflakeApiGatewayDeployment: 286 | Type: "AWS::ApiGateway::Deployment" 287 | DependsOn: 288 | - "CreateModelPostMethod" 289 | Properties: 290 | RestApiId: !Ref "SnowflakeApiGateway" 291 | StageName: !Ref apiGatewayStageName 292 | RootApiResource: 293 | Type: 'AWS::ApiGateway::Resource' 294 | Properties: 295 | RestApiId: !Ref SnowflakeApiGateway 296 | ParentId: !GetAtt 297 | - SnowflakeApiGateway 298 | - RootResourceId 299 | PathPart: sagemaker 300 | CreateModelApiResource: 301 | Type: 'AWS::ApiGateway::Resource' 302 | Properties: 303 | RestApiId: !Ref SnowflakeApiGateway 304 | ParentId: !Ref RootApiResource 305 | PathPart: createmodel 306 | CreateModelPostMethod: 307 | Type: "AWS::ApiGateway::Method" 308 | Properties: 309 | AuthorizationType: "AWS_IAM" 310 | HttpMethod: "POST" 311 | Integration: 312 | IntegrationHttpMethod: "POST" 313 | Type: "AWS" 314 | Credentials: !GetAtt SnowflakeAPIGatewayExecutionRole.Arn 315 | Uri: 316 | Fn::Join: 317 | - ":" 318 | - - "arn" 319 | - Ref: AWS::Partition 320 | - "apigateway" 321 | - Ref: AWS::Region 322 | - "sagemaker:action/CreateAutoMLJob" 323 | RequestParameters: 324 | integration.request.header.X-Amz-Target: "'SageMaker.CreateAutoMLJob'" 325 | integration.request.header.Content-Type: "'application/x-amz-json-1.1'" 326 | integration.request.header.X-Proxy-Agent: !FindInMap [Package, Attributes, Identifier] 327 | PassthroughBehavior: WHEN_NO_MATCH 328 | IntegrationResponses: 329 | - StatusCode: 200 330 | SelectionPattern: '2..' 331 | - StatusCode: 400 332 | SelectionPattern: '4..' 
333 | - StatusCode: 500 334 | SelectionPattern: '5..' 335 | MethodResponses: 336 | - StatusCode: 200 337 | - StatusCode: 400 338 | - StatusCode: 500 339 | ResourceId: !Ref "CreateModelApiResource" 340 | RestApiId: !Ref "SnowflakeApiGateway" 341 | CopyZipsLambda: 342 | Type: AWS::Lambda::Function 343 | Properties: 344 | Code: 345 | ZipFile: | 346 | # Inspired by https://aws.amazon.com/blogs/infrastructure-and-automation/deploying-aws-lambda-functions-using-aws-cloudformation-the-portable-way/ 347 | import boto3 348 | import json 349 | import logging 350 | import os 351 | import requests 352 | import time 353 | 354 | EMPTY_RESPONSE_DATA = {} 355 | FAILED = 'FAILED' 356 | SUCCESS = 'SUCCESS' 357 | 358 | logger = logging.getLogger(__name__) 359 | logger.setLevel(logging.INFO) 360 | 361 | def lambda_handler(event, context): 362 | logger.info('Starting CopyZipsLambda') 363 | 364 | try: 365 | s3_destination_bucket_name = event['ResourceProperties']['DestBucket'] 366 | s3_source_bucket_name = event['ResourceProperties']['SourceBucket'] 367 | object_keys = event['ResourceProperties']['ObjectKeys'] 368 | 369 | if event['RequestType'] != 'Delete': 370 | copy_objects(s3_source_bucket_name, s3_destination_bucket_name, object_keys) 371 | logger.info("Files copied successfully") 372 | else: 373 | delete_objects(s3_destination_bucket_name, object_keys) 374 | logger.info("Files deleted successfully") 375 | 376 | sendResponse(event, context, SUCCESS, EMPTY_RESPONSE_DATA) 377 | logger.info('CopyZipsLambda finished') 378 | except: 379 | logger.exception("There was a problem running CopyZipsLambda") 380 | sendResponse(event, context, FAILED, EMPTY_RESPONSE_DATA) 381 | return 382 | 383 | def copy_objects(s3_source_bucket_name, s3_destination_bucket_name, object_keys): 384 | s3 = boto3.resource('s3') 385 | destination_bucket = s3.Bucket(s3_destination_bucket_name) 386 | 387 | for object_key in object_keys: 388 | copy_object(s3_source_bucket_name, destination_bucket, object_key) 389 | 390 
| def copy_object(s3_source_bucket_name, destination_bucket, object_key): 391 | logger.info('Copying object key: ' + object_key) 392 | copy_source = { 393 | 'Bucket': s3_source_bucket_name, 394 | 'Key': object_key 395 | } 396 | destination_bucket.copy(copy_source, object_key) 397 | 398 | def delete_objects(s3_destination_bucket_name, object_keys): 399 | s3 = boto3.client('s3') 400 | 401 | for object_key in object_keys: 402 | delete_object(s3, s3_destination_bucket_name, object_key) 403 | 404 | def delete_object(s3, s3_destination_bucket_name, object_key): 405 | logger.info('Deleting object key: ' + object_key) 406 | s3.delete_object(Bucket=s3_destination_bucket_name, Key=object_key) 407 | 408 | def sendResponse(event, context, responseStatus, responseData): 409 | responseBody = {'Status': responseStatus, 410 | 'Reason': 'See the details in CloudWatch Log Stream: ' + context.log_stream_name, 411 | 'PhysicalResourceId': context.log_stream_name, 412 | 'StackId': event['StackId'], 413 | 'RequestId': event['RequestId'], 414 | 'LogicalResourceId': event['LogicalResourceId'], 415 | 'Data': responseData} 416 | req = requests.put(event['ResponseURL'], data=json.dumps(responseBody)) 417 | if req.status_code != 200: 418 | raise Exception('Received a non-200 HTTP response while sending response to CloudFormation.') 419 | return 420 | Handler: index.lambda_handler 421 | Role: !GetAtt CopyZipsRole.Arn 422 | Runtime: python3.7 423 | Timeout: 600 424 | CopyZips: 425 | Type: Custom::CopyZips 426 | DependsOn: 427 | - S3Bucket 428 | - CopyZipsRole 429 | Properties: 430 | ServiceToken: !GetAtt CopyZipsLambda.Arn 431 | DestBucket: !Ref s3BucketName 432 | SourceBucket: !FindInMap [Package, Locations, CodeBucket] 433 | ObjectKeys: 434 | - !FindInMap [Package, Locations, PathToLayerCode] 435 | - !FindInMap [Package, Locations, PathToLambdaCode] 436 | CreateSnowflakeResourcesLambdaLayer: 437 | Type: AWS::Lambda::LayerVersion 438 | DependsOn: 439 | - CopyZips 440 | Properties: 441 | 
CompatibleRuntimes: 442 | - python3.7 443 | Content: 444 | S3Bucket: !Ref s3BucketName 445 | S3Key: !FindInMap [Package, Locations, PathToLayerCode] 446 | Description: 'Layer to download Snowflake driver' 447 | CreateSnowflakeResourcesLambda: 448 | Type: AWS::Lambda::Function 449 | Properties: 450 | Code: 451 | S3Bucket: !Ref s3BucketName 452 | S3Key: !FindInMap [Package, Locations, PathToLambdaCode] 453 | Layers: 454 | - Ref: CreateSnowflakeResourcesLambdaLayer 455 | Handler: create-resources.lambda_handler 456 | Role: !GetAtt CreateSnowflakeResourcesExecutionRole.Arn 457 | Runtime: python3.7 458 | Timeout: 600 459 | Environment: 460 | Variables: 461 | ApiGatewayURL: !Sub "https://${SnowflakeApiGateway}.execute-api.${AWS::Region}.amazonaws.com/${apiGatewayStageName}/sagemaker" 462 | ApiGatewayRoleARN: !GetAtt SnowflakeAPIGatewayExecutionRole.Arn 463 | ApiGatewayRoleName: !Ref SnowflakeAPIGatewayExecutionRole 464 | AutoMLRoleARN: !GetAtt SnowflakeAutoMLExecutionRole.Arn 465 | AutoMLRoleName: !Ref SnowflakeAutoMLExecutionRole 466 | Region: !Sub "${AWS::Region}" 467 | S3BucketName: !Ref s3BucketName 468 | SecretArn: !Ref snowflakeSecretArn 469 | KmsKeyArn: !Ref kmsKeyArn 470 | # Future consideration 471 | VpcSecurityGroupIds: "" # !Ref vpcSecurityGroupIds 472 | VpcSubnetIds: "" # !Ref vpcSubnetIds 473 | SnowflakeRole: !Ref snowflakeRole 474 | StackName: !Sub "${AWS::StackName}" 475 | DatabaseName: !Ref snowflakeDatabaseName 476 | SchemaName: !Ref snowflakeSchemaName 477 | SnowflakeResourceSuffix: !Ref snowflakeResourceSuffix 478 | SnowflakeResources: 479 | Type: Custom::SnowflakeResources 480 | DependsOn: 481 | - SnowflakeAPIGatewayExecutionRole 482 | - SnowflakeAutoMLExecutionRole 483 | Properties: 484 | ServiceToken: !Sub 485 | - "${lambdaArn}" 486 | - lambdaArn: !GetAtt CreateSnowflakeResourcesLambda.Arn 487 | PackageIdentifier: !FindInMap [Package, Attributes, Identifier] 488 | -------------------------------------------------------------------------------- 
/aws-servicecatalog/apigw-sample/template/aws-snowflake-apigw-servicecatalog.yml: -------------------------------------------------------------------------------- 1 | # ------------------------------------------------------------------------------------------------- 2 | # 3 | # Service Catalog Product that launches Snowflake API-GW Integration Objects 4 | # 5 | # Updated: @boscoaws 6 | # --------------------------------------------------------------------------------------------------- 7 | 8 | 9 | Parameters: 10 | S3StagingBucketURL: 11 | Type: String 12 | Description: S3 Staging Bucket Prefix that contains the Snowflake API-GW Integration template 13 | Default: 'https://apigw-accountid-region.s3.amazonaws.com/' 14 | 15 | Resources: 16 | 17 | #-------------------------------------------------------------------------------------------- 18 | # Service Catalog Portfolio that contains the Snowflake API-GW Integration Product 19 | #-------------------------------------------------------------------------------------------- 20 | 21 | SnowflakeAPIGWIntegrationPortfolio: 22 | Type: 'AWS::ServiceCatalog::Portfolio' 23 | Properties: 24 | AcceptLanguage: en 25 | Description: Snowflake API-GW Integration Portfolio 26 | DisplayName: Snowflake API-GW Integration Portfolio 27 | ProviderName: AWS 28 | 29 | SnowflakeAPIGWIntegrationProduct: 30 | Type: 'AWS::ServiceCatalog::CloudFormationProduct' 31 | Properties: 32 | AcceptLanguage: en 33 | Description: This Service Catalog product deploys the Snowflake API-GW Integration Product 34 | Distributor: AWS 35 | Name: SnowflakeAPIGWIntegrationProduct 36 | Owner: AWS 37 | SupportEmail: email@mycompany.com 38 | SupportUrl: 'https://www.mycompany.com' 39 | SupportDescription: >- 40 | SnowflakeAPIGWIntegrationProduct 41 | ProvisioningArtifactParameters: 42 | - Description: This is version 1.0 of the Snowflake API-GW Integration Product 43 | Name: Version - 1.0 44 | Info: 45 | LoadTemplateFromURL: !Sub 
"${S3StagingBucketURL}template/aws-snowflake-apigw-integrationobject.yml" 46 | 47 | SnowflakeAPIGWIntegrationPortfolioAssociation: 48 | Type: 'AWS::ServiceCatalog::PortfolioProductAssociation' 49 | Properties: 50 | PortfolioId: !Ref SnowflakeAPIGWIntegrationPortfolio 51 | ProductId: !Ref SnowflakeAPIGWIntegrationProduct 52 | 53 | SnowflakeEnduserGroup: 54 | Type: AWS::IAM::Group 55 | Properties: 56 | GroupName: SnowflakeEnduserGroup 57 | ManagedPolicyArns: 58 | - arn:aws:iam::aws:policy/AWSServiceCatalogEndUserFullAccess 59 | 60 | 61 | SnowflakeEnduserGroupPortfolioAssociation: 62 | Type: 'AWS::ServiceCatalog::PortfolioPrincipalAssociation' 63 | Properties: 64 | PrincipalARN: !Sub 'arn:aws:iam::${AWS::AccountId}:group/SnowflakeEnduserGroup' 65 | PortfolioId: !Ref SnowflakeAPIGWIntegrationPortfolio 66 | PrincipalType: IAM 67 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /aws-systemsmanager/LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /aws-systemsmanager/README.md: -------------------------------------------------------------------------------- 1 |

2 |

3 | 4 | # Automate Snowflake storage integrations in AWS using an AWS Systems Manager Automation runbook 5 | 6 | 7 | ## Overview 8 | 9 | 1. Snowflake storage integrations are Snowflake objects that allow Snowflake to read and write data to Amazon S3. This Control Tower integration with Snowflake solution enables Snowflake storage integrations with Amazon S3 to be automatically available for all newly added AWS accounts in an AWS Control Tower environment. 10 | 2. This solution provisions an AWS Systems Manager Automation runbook that automates all the steps required by Snowflake to create a storage integration with S3 in an AWS account. 11 | 12 | 13 | ## How it Works 14 | 15 | 1. Each time you launch the AWS Systems Manager Automation runbook in your account, it provisions a Snowflake storage integration object, attaches an IAM role to it and creates an external Snowflake stage object for Amazon S3 by leveraging the integration object and your supplied S3 bucket as parameters. The runbook uses AWS Secrets Manager to store and retrieve Snowflake connection information. You can launch the runbook as many times as needed to create new integrations between Snowflake and additional S3 buckets in your account. 16 | 2. The AWS Identity and Access Management (IAM) role that is created by the runbook provides trusted access to Snowflake to reference the S3 bucket in your account. The Principal element and external ID in the role's trust policy are extracted by the runbook from the Snowflake integration object. 17 | 3. The runbook deployment itself is fully automated using 1-click automation via AWS CloudFormation. The CloudFormation template first takes your Snowflake connection information and stores it in AWS Secrets Manager. 
It then provisions an AWS Lambda Layer that wraps the Snowflake connector for Python, provisions an AWS Lambda function that uses the connector to create the Snowflake integration and finally provisions the Systems Manager runbook in your account that leverages this Lambda. 18 | 19 | 20 | ## Solution Design 21 | 22 | ![](images/snowflake-systemsmanager-arch-diagram.PNG) 23 | 24 | 25 | ## Setup 26 | 27 | **Prerequisites:** 28 | 29 | 1. Create an S3 bucket: *s3-snowflakeintegration-accountId-region*. Replace accountId and region with the AWS Account ID and region of your shared services AWS account. 30 | 2. Create a folder called *SnowflakeIntegration_Lambda_SSM* and upload the [SnowflakeIntegration_Lambda_SSM.zip](https://github.com/Snowflake-Labs/aws-integrations-cloudops/blob/master/aws-systemsmanager/lambda/SnowflakeIntegration_Lambda_SSM.zip) file. This lambda uses the Snowflake Python Connector to query and update Snowflake 31 | 3. Upload the [snowflakelayer.zip](https://github.com/Snowflake-Labs/aws-integrations-cloudops/blob/master/aws-systemsmanager/layer/snowflakelayer.zip) in the root folder of this S3 bucket. This zip file packages the Snowflake connector as an AWS Lambda layer 32 | 33 | **Install** 34 | 35 | 1. 1 step install. Launch the [aws-snowflake-ssm.yml](https://github.com/Snowflake-Labs/aws-integrations-cloudops/blob/master/aws-systemsmanager/cft/aws-snowflake-ssm.yml) template. The template takes connection information for your Snowflake account as parameters. 36 | 37 | ## Test and Validate 38 | 39 | 1. Navigate to the AWS Systems Manager console in your AWS account. Select Documents from the left panel and then select the Owned by me tab on the console. Search for the ‘Custom-Snowflakestorageintegration’ document in the search filter. Click on this document and then select Execute automation from the right corner of your console.
On the Execute automation document screen, select Simple execution, provide the S3 bucket name in the Input parameters section and click on Execute 40 | 2. Navigate back to the AWS Systems Manager console, select Automation from the left panel from where you can track the execution of your automation runbook on the Automation executions screen to ensure that the status column displays Success. 41 | 3. Navigate to the AWS IAM console and check that a new IAM role has been provisioned that ends with *S3INTxxxxx* suffix. This suffix will also be the name of your new Snowflake integration object 42 | 4. From your Snowflake account (snowsql or console)- 43 | 1. Validate that a new Snowflake integration object has been created (DESC INTEGRATION *'integrationobjectname'*) 44 | 2. Obtain the *AWS_IAM_USER_ARN* and *AWS_EXTERNAL_ID* parameters from above and check that the AWS IAM role uses those as the trust relationship and external id parameters 45 | 3. Validate that a new storage object has been created in Snowflake that references the S3 bucket and uses the integration object (SHOW STAGES IN ACCOUNT) -------------------------------------------------------------------------------- /aws-systemsmanager/cft/aws-snowflake-ssm.yml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: MIT-0 3 | # 4 | # - Provisions an SSM Automation Document to create a Snowflake Storage Integration 5 | # 6 | # 7 | # kmmahaj 8 | 9 | AWSTemplateFormatVersion: 2010-09-09 10 | Description: >- 11 | AWS CloudFormation template to create Snowflake integration object for S3 access. 12 | Parameters: 13 | SourceBucket: 14 | Description: REQUIRED. S3 Bucket that contains the Snowflake integration Lambda 15 | Type: String 16 | Default: 's3-snowflakeintegration-AccountId-Region' 17 | MinLength: '1' 18 | MaxLength: '255' 19 | snowaccount: 20 | Description: REQUIRED. 
Snowflake Account Identifier 21 | Type: String 22 | AllowedPattern: .+ 23 | ConstraintDescription: snowaccount is required 24 | snowuser: 25 | Description: REQUIRED. Snowflake account user 26 | Type: String 27 | NoEcho: true 28 | AllowedPattern: .+ 29 | ConstraintDescription: snowuser is required 30 | snowpass: 31 | Description: REQUIRED. Snowflake password for the Snowflake account user 32 | Type: String 33 | NoEcho: true 34 | AllowedPattern: .+ 35 | ConstraintDescription: snowpass is required 36 | snowdb: 37 | Description: REQUIRED. Snowflake Database 38 | Type: String 39 | Default: 'SF_TUTS' 40 | AllowedPattern: .+ 41 | ConstraintDescription: snowdb is required 42 | snowschema: 43 | Description: REQUIRED. Snowflake Schema 44 | Type: String 45 | Default: 'PUBLIC' 46 | AllowedPattern: .+ 47 | ConstraintDescription: snowschema is required 48 | 49 | Resources: 50 | 51 | #--------------------------------------------------------------------------------------------------- 52 | # 1- Secrets Manager to store Snowflake credentials 53 | # -------------------------------------------------------------------------------------------------- 54 | 55 | # Secrets Management - Snowflake Credentials 56 | SnowflakeSecretString: 57 | Type: AWS::SecretsManager::Secret 58 | Properties: 59 | Description: Credentials required for Snowflake 60 | Name: !Sub snowflakesecret-${AWS::Region} 61 | SecretString: 62 | Fn::Join: 63 | - '' 64 | - - '{"snowaccount":"' 65 | - Ref: snowaccount 66 | - '","snowuser": "' 67 | - Ref: snowuser 68 | - '","snowpass": "' 69 | - Ref: snowpass 70 | - '","snowdb": "' 71 | - Ref: snowdb 72 | - '","snowschema": "' 73 | - Ref: snowschema 74 | - '"}' 75 | 76 | #--------------------------------------------------------------------------------------------------- 77 | # 2- Lambda Function that creates the Snowflake integration with S3 78 | # -------------------------------------------------------------------------------------------------- 79 | 80 | #Lambda Function that 
creates the Snowflake integration with S3 81 | SnowflakeIntegrationLambda: 82 | Type: 'AWS::Lambda::Function' 83 | Properties: 84 | FunctionName: !Join 85 | - '' 86 | - - SnowflakeIntegration_ 87 | - Lambda_ 88 | - SSM 89 | Role: !GetAtt SnowflakeIntegrationLambdaRole.Arn 90 | Code: 91 | S3Bucket: !Ref SourceBucket 92 | S3Key: !Join 93 | - '' 94 | - - SnowflakeIntegration_Lambda_SSM 95 | - / 96 | - SnowflakeIntegration_Lambda_SSM 97 | - .zip 98 | Description: SnowflakeIntegrationLambdaSSM 99 | Handler: SnowflakeIntegration_Lambda_SSM.lambda_handler 100 | MemorySize: '256' 101 | Runtime: python3.7 102 | Layers: 103 | - !Ref SnowflakeLayer 104 | Environment: 105 | Variables: 106 | SNOW_SECRET: !Ref SnowflakeSecretString 107 | AWSACCOUNT: !Ref 'AWS::AccountId' 108 | Timeout: 500 109 | 110 | #Lambda Layer for Snowflake Python Connector 111 | SnowflakeLayer: 112 | Type: AWS::Lambda::LayerVersion 113 | Properties: 114 | CompatibleRuntimes: 115 | - python3.6 116 | - python3.7 117 | - python3.8 118 | Content: 119 | S3Bucket: !Ref SourceBucket 120 | S3Key: snowflakelayer.zip 121 | Description: Lambda layer for Snowflake Python Connector 122 | LayerName: snowflakelayer 123 | LicenseInfo: MIT 124 | 125 | #IAM Role for the SnowflakeIntegration Lambda 126 | SnowflakeIntegrationLambdaRole: 127 | Type: 'AWS::IAM::Role' 128 | Properties: 129 | RoleName: !Sub snowflakeintegrationlamdarole-${AWS::Region} 130 | AssumeRolePolicyDocument: 131 | Version: 2012-10-17 132 | Statement: 133 | - Sid: AllowLambdaAssumeRole 134 | Effect: Allow 135 | Principal: 136 | Service: lambda.amazonaws.com 137 | Action: 'sts:AssumeRole' 138 | Policies: 139 | - PolicyName: SnowflakeIntegrationLambdaPolicy 140 | PolicyDocument: 141 | Version: 2012-10-17 142 | Statement: 143 | - Sid: '1' 144 | Action: 145 | - s3:GetObjectVersion 146 | - s3:PutObject 147 | - s3:GetObject 148 | - s3:GetObjectVersionTagging 149 | - s3:GetObjectVersionAcl 150 | Effect: Allow 151 | Resource: 152 | - !Sub 
arn:${AWS::Partition}:s3:::${SourceBucket} 153 | - !Sub arn:${AWS::Partition}:s3:::${SourceBucket}/* 154 | - Sid: '2' 155 | Action: 156 | - 'logs:CreateLogGroup' 157 | - 'logs:CreateLogStream' 158 | - 'logs:PutLogEvents' 159 | - 'logs:DescribeLogStreams' 160 | Effect: Allow 161 | Resource: '*' 162 | - Sid: '3' 163 | Action: 164 | - 'secretsmanager:GetSecretValue' 165 | - 'secretsmanager:ListSecrets' 166 | Effect: Allow 167 | Resource: !Join [':',['arn:aws:secretsmanager', !Ref 'AWS::Region', !Ref 'AWS::AccountId','secret','snowflakesecret-*']] 168 | ManagedPolicyArns: 169 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/IAMFullAccess' 170 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess' 171 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole' 172 | 173 | #--------------------------------------------------------------------------------------------------- 174 | # 3- SSM Automation that invokes Lambda 175 | # -------------------------------------------------------------------------------------------------- 176 | 177 | # SSM Automation Role 178 | SnowflakeSSMAutomationAssumeRole: 179 | Type: 'AWS::IAM::Role' 180 | Properties: 181 | RoleName: !Sub snowflake-automationassumerole-${AWS::Region} 182 | AssumeRolePolicyDocument: 183 | Version: 2012-10-17 184 | Statement: 185 | - Effect: Allow 186 | Principal: 187 | Service: 188 | - ssm.amazonaws.com 189 | - events.amazonaws.com 190 | - ec2.amazonaws.com 191 | Action: 192 | - 'sts:AssumeRole' 193 | Path: / 194 | ManagedPolicyArns: 195 | - !Sub "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" 196 | 197 | 198 | SSMPermissionToCallSnowflakeLambda: 199 | Type: 'AWS::Lambda::Permission' 200 | Properties: 201 | FunctionName: !GetAtt 202 | - SnowflakeIntegrationLambda 203 | - Arn 204 | Action: 'lambda:InvokeFunction' 205 | Principal: !GetAtt SnowflakeSSMAutomationAssumeRole.Arn 206 | 207 | 208 | # [Snowflake storage integration SSM] 209 | 
SnowflakeStorageIntegrationSSM: 210 | Type: AWS::SSM::Document 211 | DependsOn: SSMPermissionToCallSnowflakeLambda 212 | Properties: 213 | DocumentType: Automation 214 | Name: Custom-Snowflakestorageintegration 215 | Content: 216 | schemaVersion: '0.3' 217 | assumeRole: !GetAtt SnowflakeSSMAutomationAssumeRole.Arn 218 | parameters: 219 | S3BUCKET: 220 | type: String 221 | default: 'mybucket1' 222 | AutomationAssumeRole: 223 | type: String 224 | default: !GetAtt SnowflakeSSMAutomationAssumeRole.Arn 225 | mainSteps: 226 | - name: createstorageintegration 227 | action: 'aws:invokeLambdaFunction' 228 | maxAttempts: 3 229 | timeoutSeconds: 180 230 | inputs: 231 | FunctionName: !GetAtt SnowflakeIntegrationLambda.Arn 232 | InvocationType: RequestResponse 233 | Payload: '{"parameterName":"S3BUCKET", "parameterValue":"{{S3BUCKET}}"}' 234 | 235 | -------------------------------------------------------------------------------- /aws-systemsmanager/cft/s3bucketpolicy.json: -------------------------------------------------------------------------------- 1 | { 2 | "Version": "2012-10-17", 3 | "Statement": [ 4 | { 5 | "Effect": "Allow", 6 | "Principal": "*", 7 | "Action": "s3:GetObject", 8 | "Resource": "arn:aws:s3:::s3-snowflakeintegration--/*", 9 | "Condition": { 10 | "StringEquals": { 11 | "aws:PrincipalOrgID": "" 12 | } 13 | } 14 | } 15 | ] 16 | } -------------------------------------------------------------------------------- /aws-systemsmanager/images/snowflake-controltower-arch-diagram.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-systemsmanager/images/snowflake-controltower-arch-diagram.PNG -------------------------------------------------------------------------------- /aws-systemsmanager/images/snowflake-systemsmanager-arch-diagram.PNG: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-systemsmanager/images/snowflake-systemsmanager-arch-diagram.PNG -------------------------------------------------------------------------------- /aws-systemsmanager/lambda/SnowflakeIntegration_Lambda_SSM.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # Lambda that creates Snowflake integration object in Snowflake and the corresponding IAM role in AWS 5 | # - Uses Snowflake Python Connector 6 | # 7 | # @kmmahaj 8 | 9 | import json 10 | import urllib 11 | import boto3 12 | import os 13 | import string 14 | import random 15 | import snowflake.connector 16 | import logging 17 | import urllib3 18 | from snowflake.connector import DictCursor 19 | 20 | AWS_EXTERNAL_ID = "" 21 | AWS_IAM_USER_ARN = "" 22 | 23 | logger = logging.getLogger(__name__) 24 | logging.getLogger().setLevel(logging.INFO) 25 | http = urllib3.PoolManager() 26 | 27 | session = boto3.session.Session() 28 | 29 | sf_config_name = '' 30 | allowed_sf_config = ('snowaccount', 'snowuser', 'snowpass', 'snowdb', 'snowschema') 31 | 32 | def get_secret_value(secret_name): 33 | """ 34 | get secret value from AWS Secrets Manager 35 | :param secret_name: name of the secret passed 36 | :return secret_value: value of the secret passed 37 | """ 38 | client = session.client(service_name='secretsmanager') 39 | secret_value = '' 40 | try: 41 | get_secret_value_response = client.get_secret_value(SecretId=secret_name) 42 | except ClientError as e: 43 | logger.error(f"error while executing get_secret_value, {e}") 44 | raise Exception() 45 | else: 46 | secret_value = get_secret_value_response['SecretString'] if 'SecretString' in get_secret_value_response else '' 47 | finally: 
48 | return secret_value 49 | 50 | 51 | def get_snowflake_config(sf_config_name): 52 | """ 53 | get snowflake config, throws exception if invalid 54 | :return config: snowflake config dict 55 | """ 56 | config = json.loads(get_secret_value(sf_config_name)) 57 | for key in config: 58 | if not config.get(key, ''): 59 | logger.error(f"either key {key} do not exist, or non empty value found") 60 | raise Exception() 61 | return config 62 | 63 | 64 | def create_iam_policy(externalid, iamrolearn,SNOW_S3_BUCKET,SNOW_INT,SNOW_ROLE): 65 | iam = boto3.client('iam') 66 | s3fullresourcearn = "arn:aws:s3:::" + SNOW_S3_BUCKET + '/*' 67 | s3bucketresourcearn = "arn:aws:s3:::" + SNOW_S3_BUCKET 68 | s3prefix = SNOW_S3_BUCKET + '/*' 69 | s3_access_policy = { 70 | "Version": "2012-10-17", 71 | "Statement": [ 72 | { 73 | "Effect": "Allow", 74 | "Action": [ 75 | "s3:PutObject", 76 | "s3:GetObject", 77 | "s3:GetObjectVersion", 78 | "s3:DeleteObject", 79 | "s3:DeleteObjectVersion" 80 | ], 81 | "Resource": s3fullresourcearn 82 | }, 83 | { 84 | "Effect": "Allow", 85 | "Action": "s3:ListBucket", 86 | "Resource": s3bucketresourcearn 87 | } 88 | ] 89 | } 90 | snowpolicy = "SnowflakeS3AccessPolicy-" + SNOW_S3_BUCKET + SNOW_INT 91 | response_policy = iam.create_policy( 92 | PolicyName=snowpolicy, 93 | PolicyDocument=json.dumps(s3_access_policy) 94 | ) 95 | 96 | policyArn = response_policy['Policy']['Arn'] 97 | 98 | trust_relationship_policy = { 99 | "Version": "2012-10-17", 100 | "Statement": [ 101 | { 102 | "Effect": "Allow", 103 | "Principal": { 104 | "AWS": iamrolearn 105 | }, 106 | "Action": "sts:AssumeRole", 107 | "Condition": { 108 | "StringEquals": { 109 | "sts:ExternalId": externalid 110 | } 111 | } 112 | } 113 | ] 114 | } 115 | 116 | AssumeRolePolicyDocument = json.dumps(trust_relationship_policy) 117 | print(AssumeRolePolicyDocument) 118 | 119 | snowrole = SNOW_ROLE 120 | response_role = iam.create_role( 121 | RoleName=snowrole, 122 | AssumeRolePolicyDocument=AssumeRolePolicyDocument 
123 | ) 124 | print(response_role) 125 | 126 | response = iam.attach_role_policy( 127 | RoleName=snowrole, 128 | PolicyArn=policyArn 129 | ) 130 | 131 | print(response) 132 | 133 | 134 | def lambda_handler(event, context): 135 | 136 | logger.info('EVENT Received: {}'.format(event)) 137 | 138 | CURRENT_AWS_ACCOUNT = os.environ['AWSACCOUNT'] 139 | sf_config_name = os.environ['SNOW_SECRET'] 140 | sf_config = get_snowflake_config(sf_config_name) 141 | logger.info(f'snowflake config successfully retrieved from secrets') 142 | 143 | assert isinstance(sf_config, dict), 'sf_config config must be of type dict' 144 | 145 | ctx = snowflake.connector.connect( 146 | user=sf_config['snowuser'], 147 | password=sf_config['snowpass'], 148 | role='ACCOUNTADMIN', 149 | account=sf_config['snowaccount'], 150 | database=sf_config['snowdb'], 151 | schema=sf_config['snowschema'], 152 | ocsp_response_cache_filename="/tmp/ocsp_response_cache" 153 | ) 154 | cs = ctx.cursor() 155 | 156 | SNOW_S3_BUCKET = event['parameterValue'] 157 | 158 | letters = string.ascii_lowercase 159 | randomstr = ''.join(random.choice(letters) for i in range(3)) 160 | randomnum = str(random.randrange(2,100)) 161 | SNOW_INT = "S3INT" + randomstr + randomnum 162 | SNOW_ROLE = "SFAccessRole-" + SNOW_INT 163 | 164 | 165 | SNOW_S3_LOCATION = 's3://' + SNOW_S3_BUCKET +'/' 166 | try: 167 | 168 | sql_1 = 'create storage integration ' + SNOW_INT + ' type = external_stage storage_provider = s3 enabled = true' \ 169 | + ' storage_aws_role_arn = ' + "'" + "arn:aws:iam::" + CURRENT_AWS_ACCOUNT + ":role/" + SNOW_ROLE + "'" + ' storage_allowed_locations = (' + "'" + SNOW_S3_LOCATION + "'" +')' 170 | print(sql_1) 171 | cs.execute(sql_1) 172 | 173 | sql_2 = 'desc integration ' + SNOW_INT 174 | print(sql_2) 175 | cs.execute(sql_2) 176 | 177 | query_id_desc = cs.sfqid 178 | 179 | sql_3 = 'select "property", "property_value" from table(result_scan(' + "'" + query_id_desc + "'" + '))' + ' where "property" = ' + "'" + 
"STORAGE_AWS_EXTERNAL_ID" + "'" 180 | print(sql_3) 181 | cs.execute(sql_3) 182 | for (property, property_value) in cs: 183 | AWS_EXTERNAL_ID = property_value 184 | print('{0}, {1}'.format(property, AWS_EXTERNAL_ID)) 185 | 186 | 187 | sql_4 = 'select "property", "property_value" from table(result_scan(' + "'" + query_id_desc + "'" + '))' + ' where "property" = ' + "'" + "STORAGE_AWS_IAM_USER_ARN" + "'" 188 | print(sql_4) 189 | cs.execute(sql_4) 190 | for (property, property_value) in cs: 191 | AWS_IAM_USER_ARN = property_value 192 | print('{0}, {1}'.format(property, AWS_IAM_USER_ARN)) 193 | 194 | create_iam_policy(AWS_EXTERNAL_ID,AWS_IAM_USER_ARN,SNOW_S3_BUCKET,SNOW_INT,SNOW_ROLE) 195 | 196 | sql_5 = 'create stage ' + "S3STAGE" + SNOW_INT + ' storage_integration = ' + SNOW_INT + ' url = (' + "'" + SNOW_S3_LOCATION + "'" +')' 197 | print(sql_5) 198 | cs.execute(sql_5) 199 | 200 | finally: 201 | cs.close() 202 | ctx.close() 203 | 204 | return 'SUCCESS' 205 | 206 | -------------------------------------------------------------------------------- /aws-systemsmanager/lambda/SnowflakeIntegration_Lambda_SSM.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-systemsmanager/lambda/SnowflakeIntegration_Lambda_SSM.zip -------------------------------------------------------------------------------- /aws-systemsmanager/layer/snowflakelayer.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/aws-systemsmanager/layer/snowflakelayer.zip -------------------------------------------------------------------------------- /cft/aws-snowflake-integrationobject.yml: -------------------------------------------------------------------------------- 1 | ## TODO: 2 | # Validate path for S3 data 
# NOTE(review): this chunk is a concatenated repo dump; the "NN | " markers are artifacts of the
# dump tool, not part of the YAML.
# cft/aws-snowflake-integrationobject.yml (front half): CloudFormation template that provisions the
# AWS side of a Snowflake storage integration — an optional Secrets Manager secret holding the
# Snowflake credentials, a KMS key, the external-stage IAM role (S3 read/write on the data bucket,
# read-only on the assets bucket), a Lambda layer with the Snowflake Python connector, and the
# execution role for the custom-resource Lambda (KMS decrypt, GetSecretValue on exactly the chosen
# secret, and iam:UpdateAssumeRolePolicy on the external-stage role only).
# NOTE(review): SnowflakeAssetsBucket default "s3-snowflakeintegration-acountId-region" misspells
# "accountId"; the sibling template uses "accountid" — confirm the intended placeholder spelling.
# NOTE(review): SnowflakeAssetsBucket uses AllowedPattern ".*" (permits empty string) while every
# other validated parameter uses ".+" — presumably ".+" was intended; verify.
location 3 | # Check that Snowflake account exists and it can connect with provided credentials 4 | 5 | AWSTemplateFormatVersion: "2010-09-09" 6 | Parameters: 7 | dataS3BucketName: 8 | Type: String 9 | Description: "Name of the S3 bucket in which data is stored" 10 | AllowedPattern: ".+" 11 | SnowflakeAssetsBucket: 12 | Type: String 13 | Description: "S3 bucket that contains Snowflake driver and assets" 14 | AllowedPattern: ".*" 15 | Default: "s3-snowflakeintegration-acountId-region" 16 | snowflakeAccount: 17 | Type: String 18 | Description: "Your Snowflake Account ID" 19 | snowflakeUsername: 20 | Type: String 21 | Description: "Snowflake Username" 22 | snowflakePassword: 23 | NoEcho: true 24 | Type: String 25 | Description: "Snowflake Password" 26 | snowflakeRole: 27 | Type: String 28 | Description: "Snowflake Role that will be used to create Integrations" 29 | AllowedPattern: ".+" 30 | Default: "STORE_RL" 31 | snowflakeStorageIntegrationName: 32 | Type: String 33 | Description: "Name of the Storage Integration to be created" 34 | AllowedPattern: ".+" 35 | Default: "SINT" 36 | SnowflakeSecretArn: 37 | Description: >- 38 | Enter ARN of the Secrets Manager Secret that contains Snowflake connection information 39 | Type: String 40 | Default: '' 41 | 42 | Conditions: 43 | CreateNewSnowflakeSecret: !Equals 44 | - !Ref SnowflakeSecretArn 45 | - '' 46 | 47 | Resources: 48 | # Secrets Manager Secret for Snowflake credentials 49 | SnowflakeSecret: 50 | Type: AWS::SecretsManager::Secret 51 | Condition: CreateNewSnowflakeSecret 52 | Properties: 53 | Description: "Secret to store Snowflake credential" 54 | SecretString: 55 | Fn::Join: 56 | - '' 57 | - - '{"username": "' 58 | - Ref: snowflakeUsername 59 | - '","password": "' 60 | - Ref: snowflakePassword 61 | - '","accountid": "' 62 | - Ref: snowflakeAccount 63 | - '","snowflake_role": "' 64 | - Ref: snowflakeRole 65 | - '"}' 66 | # KMS Key 67 | KMSKey: 68 | Type: AWS::KMS::Key 69 | Properties: 70 | KeyPolicy: 71 | Version: 
'2012-10-17' 72 | Id: !Join 73 | - "-" 74 | - - "snowflake-store-key" 75 | - !Select 76 | - 0 77 | - !Split 78 | - "-" 79 | - !Select 80 | - 2 81 | - !Split 82 | - "/" 83 | - !Ref "AWS::StackId" 84 | Statement: 85 | - Sid: Enable IAM User Permissions 86 | Effect: Allow 87 | Principal: 88 | AWS: 89 | Fn::Join: 90 | - '' 91 | - - 'arn:aws:iam::' 92 | - Ref: AWS::AccountId 93 | - :root 94 | Action: kms:* 95 | Resource: '*' 96 | # ExternalStage role 97 | SnowflakeExternalStageRole: 98 | Type: AWS::IAM::Role 99 | Properties: 100 | RoleName: !Join 101 | - "-" 102 | - - "Snowflake-Store-ExternalStage-Role" 103 | - !Select 104 | - 0 105 | - !Split 106 | - "-" 107 | - !Select 108 | - 2 109 | - !Split 110 | - "/" 111 | - !Ref "AWS::StackId" 112 | Description: IAM Role used for Snowflake external stage 113 | Path: / 114 | AssumeRolePolicyDocument: 115 | Version: "2012-10-17" 116 | Statement: 117 | - Effect: Allow 118 | Principal: 119 | AWS: 120 | - !Ref AWS::AccountId 121 | Action: 122 | - 'sts:AssumeRole' 123 | Policies: 124 | - PolicyName: !Join 125 | - "-" 126 | - - "Snowflake-Store-ExternalStage-S3-Policy" 127 | - !Select 128 | - 0 129 | - !Split 130 | - "-" 131 | - !Select 132 | - 2 133 | - !Split 134 | - "/" 135 | - !Ref "AWS::StackId" 136 | PolicyDocument: 137 | Version: 2012-10-17 138 | Statement: 139 | - Effect: Allow 140 | Action: 141 | - "s3:GetObject" 142 | - "s3:GetObjectVersion" 143 | - "s3:PutObject" 144 | - "s3:DeleteObject" 145 | - "s3:DeleteObjectVersion" 146 | Resource: !Sub 147 | - 'arn:aws:s3:::${dataS3BucketName}/*' 148 | - { dataS3BucketName: !Ref dataS3BucketName } 149 | - Effect: Allow 150 | Action: 151 | - "s3:ListBucket" 152 | - "s3:GetBucketLocation" 153 | Resource: !Sub 154 | - 'arn:aws:s3:::${dataS3BucketName}' 155 | - { dataS3BucketName: !Ref dataS3BucketName } 156 | - Effect: Allow 157 | Action: 158 | - "s3:GetObject" 159 | - "s3:GetObjectVersion" 160 | Resource: !Sub 161 | - 'arn:aws:s3:::${SnowflakeAssetsBucket}/*' 162 | - { 
SnowflakeAssetsBucket: !Ref SnowflakeAssetsBucket } 163 | 164 | # Lambda Layer 165 | CreateSnowflakeResourcesLambdaLayer: 166 | Type: AWS::Lambda::LayerVersion 167 | Properties: 168 | CompatibleRuntimes: 169 | - python3.7 170 | - python3.8 171 | Content: 172 | S3Bucket: !Ref SnowflakeAssetsBucket 173 | S3Key: snowflakelayer.zip 174 | Description: 'Lambda layer for Snowflake Python Connector' 175 | LayerName: snowflakelayer 176 | # IAM Role and Policy for SF Resource Deployment Lambda 177 | CreateSnowflakeResourcesExecutionRole: 178 | Type: AWS::IAM::Role 179 | Properties: 180 | Description: IAM Role used to create Snowflake resources from the CloudFormation template 181 | AssumeRolePolicyDocument: 182 | Version: '2012-10-17' 183 | Statement: 184 | - Effect: Allow 185 | Principal: 186 | Service: 187 | - lambda.amazonaws.com 188 | Action: 189 | - sts:AssumeRole 190 | Path: '/' 191 | ManagedPolicyArns: 192 | - !Sub 'arn:${AWS::Partition}:iam::aws:policy/CloudWatchLogsFullAccess' 193 | Policies: 194 | - PolicyName: kms-permissions 195 | PolicyDocument: 196 | Version: 2012-10-17 197 | Statement: 198 | - Effect: Allow 199 | Action: 200 | - 'kms:CreateGrant' 201 | - "kms:Decrypt" 202 | - "kms:DescribeKey" 203 | - "kms:Encrypt" 204 | - "kms:GenerateDataKey*" 205 | - "kms:ReEncrypt*" 206 | Resource: 207 | - !Join 208 | - ":" 209 | - - "arn" 210 | - !Ref AWS::Partition 211 | - "kms" 212 | - !Ref AWS::Region 213 | - !Ref AWS::AccountId 214 | - "alias/aws/secretsmanager" 215 | - !GetAtt KMSKey.Arn 216 | - PolicyName: secrets-permissions 217 | PolicyDocument: 218 | Version: 2012-10-17 219 | Statement: 220 | - Effect: Allow 221 | Action: 222 | - 'secretsmanager:GetSecretValue' 223 | Resource: !If [CreateNewSnowflakeSecret, !Ref SnowflakeSecret, !Ref SnowflakeSecretArn] 224 | - PolicyName: update-iam-role 225 | PolicyDocument: 226 | Version: 2012-10-17 227 | Statement: 228 | - Effect: Allow 229 | Action: 230 | - 'iam:UpdateAssumeRolePolicy' 231 | Resource: !GetAtt 
SnowflakeExternalStageRole.Arn 232 | # Lambda def for creating the external stage 233 | CreateSnowflakeResourcesLambda: 234 | Type: AWS::Lambda::Function 235 | Properties: 236 | Code: 237 | ZipFile: | 238 | import json 239 | import boto3 240 | import os 241 | import logging 242 | from botocore.exceptions import ClientError 243 | import requests 244 | 245 | import snowflake.connector 246 | 247 | SUCCESS = "SUCCESS" 248 | FAILED = "FAILED" 249 | EMPTY_RESPONSE_DATA = {} 250 | 251 | logger = logging.getLogger(__name__) 252 | logger.setLevel(logging.INFO) 253 | 254 | 255 | def get_secret_information(region_name, secret_name): 256 | logger.info("Getting secret information") 257 | try: 258 | secretsmanager = boto3.client("secretsmanager") 259 | 260 | return secretsmanager.get_secret_value(SecretId=secret_name) 261 | except ClientError as e: 262 | if e.response["Error"]["Code"] == "ResourceNotFoundException": 263 | logger.exception( 264 | "The requested secret " + secret_name + " was not found" 265 | ) 266 | else: 267 | logger.exception(e) 268 | raise e 269 | 270 | 271 | def connect_to_snowflake(get_secret_value_response): 272 | """Creates an instance of a Snowflake connection.""" 273 | secret_string = get_secret_value_response["SecretString"] 274 | 275 | secret = json.loads(secret_string) 276 | snowflake_account = secret["accountid"] 277 | snowflake_password = secret["password"] 278 | snowflake_userName = secret["username"] 279 | snowflake_role_name = secret["snowflake_role"] 280 | 281 | # Connect to Snowflake 282 | logger.info("Connecting to Snowflake") 283 | snowflake_connection = snowflake.connector.connect( 284 | user=snowflake_userName, 285 | password=snowflake_password, 286 | account=snowflake_account, 287 | role=snowflake_role_name, 288 | ) 289 | 290 | return snowflake_connection 291 | 292 | 293 | def create_storage_integration( 294 | snowflake_cursor, storage_integration_name, role_arn, s3_bucket_name 295 | ): 296 | """Creates a Snowflake storage integration.""" 
297 | logger.info("Creating Storage Integration") 298 | 299 | storage_integration_str = ( 300 | ( 301 | "create or replace storage integration \"%s\" \ 302 | type = external_stage \ 303 | storage_provider = s3 \ 304 | enabled = true \ 305 | storage_aws_role_arn = '%s' \ 306 | storage_allowed_locations = ('s3://%s')" 307 | ) 308 | % (storage_integration_name, role_arn, s3_bucket_name) 309 | ) 310 | 311 | snowflake_cursor.execute(storage_integration_str) 312 | 313 | 314 | def get_storage_integration_info_for_policy( 315 | snowflake_cursor, storage_integration_name 316 | ): 317 | """Retrieves the IAM role ARN and External ID for the Snowflake role that will assume the IAM role created by Cloudformation.""" 318 | logger.info("Describing Storage Integration") 319 | storage_user_arn = "" 320 | storage_external_id = "" 321 | 322 | snowflake_cursor.execute( 323 | ('describe integration "%s"') % (storage_integration_name) 324 | ) 325 | rows = snowflake_cursor.fetchall() 326 | for row in rows: 327 | value = list(row) 328 | if value[0] == "STORAGE_AWS_IAM_USER_ARN": 329 | storage_user_arn = value[2] 330 | if value[0] == "STORAGE_AWS_EXTERNAL_ID": 331 | storage_external_id = value[2] 332 | return { 333 | "storage_user_arn": storage_user_arn, 334 | "storage_external_id": storage_external_id, 335 | } 336 | 337 | 338 | def create_policy_string(storage_integration_info): 339 | """Generate the Trust Policy to allow a Snowflake owned role to assume the IAM role.""" 340 | policy_json = { 341 | "Version": "2012-10-17", 342 | "Statement": [ 343 | { 344 | "Effect": "Allow", 345 | "Principal": { 346 | "AWS": [storage_integration_info["storage_user_arn"]] 347 | }, 348 | "Action": "sts:AssumeRole", 349 | "Condition": { 350 | "StringEquals": { 351 | "sts:ExternalId": storage_integration_info[ 352 | "storage_external_id" 353 | ] 354 | } 355 | }, 356 | }, 357 | ], 358 | } 359 | return json.dumps(policy_json) 360 | 361 | 362 | def sendResponse(event, context, responseStatus, responseData): 363 
| responseBody = { 364 | "Status": responseStatus, 365 | "Reason": "See the details in CloudWatch Log Stream: " 366 | + context.log_stream_name, 367 | "PhysicalResourceId": context.log_stream_name, 368 | "StackId": event["StackId"], 369 | "RequestId": event["RequestId"], 370 | "LogicalResourceId": event["LogicalResourceId"], 371 | "Data": responseData, 372 | } 373 | req = requests.put(event["ResponseURL"], data=json.dumps(responseBody)) 374 | if req.status_code != 200: 375 | raise Exception( 376 | "Received a non-200 HTTP response while sending response to CloudFormation." 377 | ) 378 | return 379 | 380 | 381 | def lambda_handler(event, context): 382 | """Entrypoint for the lambda.""" 383 | # Get variables from os 384 | region_name = os.environ["Region"] 385 | role_arn = os.environ["RoleARN"] 386 | role_name = os.environ["RoleName"] 387 | s3_bucket_name = os.environ["S3BucketName"] 388 | storage_integration = os.environ["StorageIntegrationName"] 389 | secret_name = os.environ["SecretArn"] 390 | 391 | logger.info("role_arn: " + role_arn) 392 | logger.info("role_name: " + role_name) 393 | logger.info("s3_bucket_name: " + s3_bucket_name) 394 | logger.info("region_name: " + region_name) 395 | logger.info("secret_name: " + secret_name) 396 | 397 | # Initialize integration related variables 398 | storage_integration_info = {} 399 | 400 | # Delete 401 | if event["RequestType"] == "Delete": 402 | sendResponse(event, context, SUCCESS, EMPTY_RESPONSE_DATA) 403 | return 404 | 405 | # Get the information connection from Secrets Manager 406 | try: 407 | get_secret_value_response = get_secret_information( 408 | region_name, secret_name 409 | ) 410 | except: 411 | sendResponse(event, context, FAILED, EMPTY_RESPONSE_DATA) 412 | return 413 | 414 | # Decrypted secret using the associated KMS CMK 415 | # Ensure the Secret is in String mode 416 | if "SecretString" not in get_secret_value_response: 417 | logger.error("The Secret is not in String mode") 418 | sendResponse(event, 
context, FAILED, EMPTY_RESPONSE_DATA) 419 | return 420 | 421 | # Create Snowflake resource 422 | try: 423 | snowflake_connection = connect_to_snowflake( 424 | get_secret_value_response 425 | ) 426 | snowflake_cursor = snowflake_connection.cursor() 427 | 428 | storage_integration_name = storage_integration + "_STORAGE_INTEGRATION" 429 | 430 | # Create Snowflake Integrations 431 | create_storage_integration( 432 | snowflake_cursor, 433 | storage_integration_name, 434 | role_arn, 435 | s3_bucket_name, 436 | ) 437 | 438 | # Describe Snowflake integrations 439 | storage_integration_info = get_storage_integration_info_for_policy( 440 | snowflake_cursor, storage_integration_name 441 | ) 442 | except Exception as e: 443 | logger.exception("Problem running SQL statements: " + str(e)) 444 | responseData = { 445 | "Failed": "Unable to execute SQL statements in Snowflake" 446 | } 447 | sendResponse(event, context, FAILED, responseData) 448 | return 449 | finally: 450 | if "snowflake_cursor" in vars(): 451 | snowflake_cursor.close() 452 | if "snowflake_connection" in vars(): 453 | snowflake_connection.close() 454 | 455 | # Update IAM role to add Snowflake information 456 | logger.info("Updating IAM Role") 457 | policy_str = create_policy_string(storage_integration_info) 458 | 459 | try: 460 | iam = boto3.client("iam") 461 | iam.update_assume_role_policy( 462 | PolicyDocument=policy_str, RoleName=role_name 463 | ) 464 | except Exception as e: 465 | logger.exception("Problem updating assume role policy: " + str(e)) 466 | logger.exception("Policy used: " + policy_str) 467 | responseData = { 468 | "Failed": "The assume role policy could not be updated" 469 | } 470 | sendResponse(event, context, FAILED, responseData) 471 | return 472 | 473 | responseData = {"Success": "Snowflake resources created."} 474 | sendResponse(event, context, SUCCESS, responseData) 475 | logger.info("Success") 476 | Layers: 477 | - Ref: CreateSnowflakeResourcesLambdaLayer 478 | Handler: index.lambda_handler 
# cft/aws-snowflake-integrationobject.yml (tail): Lambda role/runtime/environment wiring, the
# Custom::SnowflakeResources trigger that invokes the Lambda, and the stack Outputs — followed by
# the dump header of cft/aws-snowflakeintobj-servicecatalog.yml.
# FIX: Output SnowflakeStorageIntegrationName previously rendered
# "${snowflakeStorageIntegrationName}_STORAGE_INGRATION", but the Lambda creates the integration as
# "<name>_STORAGE_INTEGRATION" (see `storage_integration_name = storage_integration +
# "_STORAGE_INTEGRATION"` in the handler), so the output misreported the integration's actual name.
# Corrected the suffix to _STORAGE_INTEGRATION.
479 | Role: !GetAtt CreateSnowflakeResourcesExecutionRole.Arn 480 | Runtime: python3.7 481 | Timeout: 600 482 | Environment: 483 | Variables: 484 | Region: !Sub "${AWS::Region}" 485 | RoleARN: !GetAtt SnowflakeExternalStageRole.Arn 486 | RoleName: !Ref SnowflakeExternalStageRole 487 | S3BucketName: !Ref dataS3BucketName 488 | StorageIntegrationName: !Ref snowflakeStorageIntegrationName 489 | SecretArn: !If [CreateNewSnowflakeSecret, !Ref SnowflakeSecret, !Ref SnowflakeSecretArn] 490 | StackName: !Sub "${AWS::StackName}" 491 | # Type: Custom - execute create storage integration update ExternalStage TrustPolicy 492 | SnowflakeResources: 493 | Type: Custom::SnowflakeResources 494 | Properties: 495 | ServiceToken: !Sub 496 | - "${lambdaArn}" 497 | - lambdaArn: !GetAtt CreateSnowflakeResourcesLambda.Arn 498 | 499 | Outputs: 500 | DataS3BucketName: 501 | Value: !Ref dataS3BucketName 502 | SnowflakeStorageIntegrationName: 503 | Value: !Sub 504 | - "${snowflakeStorageIntegrationName}_STORAGE_INTEGRATION" 505 | - snowflakeStorageIntegrationName: !Ref snowflakeStorageIntegrationName 506 | SnowflakeAccountId: 507 | Value: !Ref snowflakeAccount 508 | SnowflakeStorageIntegrationRoleArn: 509 | Value: !GetAtt SnowflakeExternalStageRole.Arn 510 | SnowflakeSecret: 511 | Value: !If [CreateNewSnowflakeSecret, !Ref SnowflakeSecret, !Ref SnowflakeSecretArn] 512 | KMSKey: 513 | Value: !Ref KMSKey 514 | -------------------------------------------------------------------------------- /cft/aws-snowflakeintobj-servicecatalog.yml: -------------------------------------------------------------------------------- 1 | # ------------------------------------------------------------------------------------------------- 2 | # 3 | # Service Catalog Product that launches Snowflake Storage Integration Objects to access Amazon S3 4 | # 5 | # @kmmahaj 6 | # --------------------------------------------------------------------------------------------------- 7 | 8 | 9 | Parameters: 10 | S3StagingBucketURL: 11 
# cft/aws-snowflakeintobj-servicecatalog.yml (middle): a Service Catalog portfolio and product
# wrapping the storage-integration template. The product's single provisioning artifact loads
# "${S3StagingBucketURL}template/aws-snowflake-integrationobject.yml", the product is associated to
# the portfolio, and an IAM group (SnowflakeEnduserGroup) with AWSServiceCatalogEndUserFullAccess is
# created for end users.
# NOTE(review): SupportEmail "email@mycompany.com" and SupportUrl "https://www.mycompany.com" are
# placeholders to be replaced before real deployment.
| Type: String 12 | Description: S3 Staging Bucket Prefix that contains the Snowflake Storage Integration for S3 template 13 | Default: 'https://s3-snowflakeintegration-accountid-region.s3.amazonaws.com/' 14 | 15 | Resources: 16 | 17 | #-------------------------------------------------------------------------------------------- 18 | # Service Catalog Portfolio that contains the Snowflake Storage Integration for Amazon S3 Product 19 | #-------------------------------------------------------------------------------------------- 20 | 21 | SnowflakeStorageIntegrationPortfolio: 22 | Type: 'AWS::ServiceCatalog::Portfolio' 23 | Properties: 24 | AcceptLanguage: en 25 | Description: Snowflake Storage Integration Portfolio 26 | DisplayName: Snowflake Storage Integration Portfolio 27 | ProviderName: AWS 28 | 29 | SnowflakeStorageIntegrationProduct: 30 | Type: 'AWS::ServiceCatalog::CloudFormationProduct' 31 | Properties: 32 | AcceptLanguage: en 33 | Description: This Service Catalog product deploys the Snowflake Storage Integration Product 34 | Distributor: AWS 35 | Name: SnowflakeStorageIntegrationProduct 36 | Owner: AWS 37 | SupportEmail: email@mycompany.com 38 | SupportUrl: 'https://www.mycompany.com' 39 | SupportDescription: >- 40 | SnowflakeStorageIntegrationProduct 41 | ProvisioningArtifactParameters: 42 | - Description: This is version 1.0 of the Snowflake Storage Integration Product 43 | Name: Version - 1.0 44 | Info: 45 | LoadTemplateFromURL: !Sub "${S3StagingBucketURL}template/aws-snowflake-integrationobject.yml" 46 | 47 | SnowflakeStorageIntegrationPortfolioAssociation: 48 | Type: 'AWS::ServiceCatalog::PortfolioProductAssociation' 49 | Properties: 50 | PortfolioId: !Ref SnowflakeStorageIntegrationPortfolio 51 | ProductId: !Ref SnowflakeStorageIntegrationProduct 52 | 53 | SnowflakeEnduserGroup: 54 | Type: AWS::IAM::Group 55 | Properties: 56 | GroupName: SnowflakeEnduserGroup 57 | ManagedPolicyArns: 58 | - arn:aws:iam::aws:policy/AWSServiceCatalogEndUserFullAccess 59 
# Tail of the Service Catalog template (SnowflakeEnduserGroupPortfolioAssociation grants the IAM
# group access to the portfolio), dump separators/links, then the head of
# lambda/SnowflakeIntegration_Lambda.py: module imports, module-level globals
# (AWS_EXTERNAL_ID / AWS_IAM_USER_ARN, logger, urllib3 pool, boto3 session, allowed secret keys)
# and the opening of get_secret_value().
# NOTE(review): this import block has no `from botocore.exceptions import ClientError`, yet
# get_secret_value() (next chunk) has `except ClientError as e:` — a ClientError at runtime would
# surface as a NameError instead of being handled. The import must be added.
# NOTE(review): `urllib` and `DictCursor` are imported but unused in the visible code — presumably
# leftovers; `allowed_sf_config` is likewise defined but never referenced here.
| 60 | 61 | SnowflakeEnduserGroupPortfolioAssociation: 62 | Type: 'AWS::ServiceCatalog::PortfolioPrincipalAssociation' 63 | Properties: 64 | PrincipalARN: !Sub 'arn:aws:iam::${AWS::AccountId}:group/SnowflakeEnduserGroup' 65 | PortfolioId: !Ref SnowflakeStorageIntegrationPortfolio 66 | PrincipalType: IAM 67 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /images/snowflake-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/images/snowflake-arch.png -------------------------------------------------------------------------------- /lambda/SnowflakeIntegration_Lambda.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: MIT-0 3 | 4 | # Lambda that creates Snowflake integration object in Snowflake and the corresponding IAM role in AWS 5 | # - Uses Snowflake Python Connector 6 | # 7 | # @kmmahaj 8 | 9 | import json 10 | import urllib 11 | import boto3 12 | import os 13 | import string 14 | import random 15 | import snowflake.connector 16 | import logging 17 | import urllib3 18 | from snowflake.connector import DictCursor 19 | 20 | AWS_EXTERNAL_ID = "" 21 | AWS_IAM_USER_ARN = "" 22 | 23 | logger = logging.getLogger(__name__) 24 | logging.getLogger().setLevel(logging.INFO) 25 | http = urllib3.PoolManager() 26 | 27 | session = boto3.session.Session() 28 | 29 | sf_config_name = '' 30 | allowed_sf_config = ('snowaccount', 'snowuser', 'snowpass', 'snowdb', 'snowschema') 31 | 32 | def get_secret_value(secret_name): 33 | """ 34 | get secret value from AWS Secrets Manager 35 | :param secret_name: name of the secret passed 36 | :return secret_value: value of the secret passed 37 | :param secret_name: name of the secret passed 38 | client = session.client(service_name='secretsmanager') 39 
secret_value = '' 40 | try: 41 | get_secret_value_response = client.get_secret_value(SecretId=secret_name) 42 | except ClientError as e: 43 | logger.error(f"error while executing get_secret_value, {e}") 44 | raise Exception() 45 | else: 46 | secret_value = get_secret_value_response['SecretString'] if 'SecretString' in get_secret_value_response else '' 47 | finally: 48 | return secret_value 49 | 50 | 51 | def get_snowflake_config(sf_config_name): 52 | """ 53 | get snowflake config, throws exception if invalid 54 | :return config: snowflake config dict 55 | """ 56 | config = json.loads(get_secret_value(sf_config_name)) 57 | for key in config: 58 | if not config.get(key, ''): 59 | logger.error(f"either key {key} do not exist, or non empty value found") 60 | raise Exception() 61 | return config 62 | 63 | 64 | def create_iam_policy(externalid, iamrolearn,SNOW_S3_BUCKETNAME,SNOW_S3_BUCKETPREFIX,SNOW_INT): 65 | iam = boto3.client('iam') 66 | s3fullresourcearn = "arn:aws:s3:::" + SNOW_S3_BUCKETNAME +"/"+ SNOW_S3_BUCKETPREFIX + '/*' 67 | s3bucketresourcearn = "arn:aws:s3:::" + SNOW_S3_BUCKETNAME 68 | s3prefix = SNOW_S3_BUCKETPREFIX + '/*' 69 | s3_access_policy = { 70 | "Version": "2012-10-17", 71 | "Statement": [ 72 | { 73 | "Effect": "Allow", 74 | "Action": [ 75 | "s3:PutObject", 76 | "s3:GetObject", 77 | "s3:GetObjectVersion", 78 | "s3:DeleteObject", 79 | "s3:DeleteObjectVersion" 80 | ], 81 | "Resource": s3fullresourcearn 82 | }, 83 | { 84 | "Effect": "Allow", 85 | "Action": "s3:ListBucket", 86 | "Resource": s3bucketresourcearn, 87 | "Condition": { 88 | "StringLike": { 89 | "s3:prefix": [ 90 | s3prefix 91 | ] 92 | } 93 | } 94 | } 95 | ] 96 | } 97 | snowpolicy = "SnowflakeS3AccessPolicy-" + SNOW_S3_BUCKETNAME + SNOW_INT 98 | response_policy = iam.create_policy( 99 | PolicyName=snowpolicy, 100 | PolicyDocument=json.dumps(s3_access_policy) 101 | ) 102 | 103 | policyArn = response_policy['Policy']['Arn'] 104 | 105 | trust_relationship_policy = { 106 | "Version": "2012-10-17", 
# lambda/SnowflakeIntegration_Lambda.py (continued): the trust-policy statement — sts:AssumeRole
# allowed only to Snowflake's IAM user (`iamrolearn` = STORAGE_AWS_IAM_USER_ARN from DESC
# INTEGRATION) and only with the integration's ExternalId — followed by IAM role creation, policy
# attachment, and most of cfnsend(), the CloudFormation custom-resource response helper.
# NOTE: the empty 'content-type' header in cfnsend is the documented cfn-response convention for
# the pre-signed S3 ResponseURL.
# NOTE(review): iam.create_role / create_policy will fail on a re-run with EntityAlreadyExists;
# presumably acceptable because the role/policy names embed a random integration suffix — confirm.
107 | "Statement": [ 108 | { 109 | "Effect": "Allow", 110 | "Principal": { 111 | "AWS": iamrolearn 112 | }, 113 | "Action": "sts:AssumeRole", 114 | "Condition": { 115 | "StringEquals": { 116 | "sts:ExternalId": externalid 117 | } 118 | } 119 | } 120 | ] 121 | } 122 | 123 | AssumeRolePolicyDocument = json.dumps(trust_relationship_policy) 124 | print(AssumeRolePolicyDocument) 125 | 126 | snowrole = "SnowflakeS3AccessRole-" + SNOW_S3_BUCKETNAME + SNOW_INT 127 | response_role = iam.create_role( 128 | RoleName=snowrole, 129 | AssumeRolePolicyDocument=AssumeRolePolicyDocument 130 | ) 131 | print(response_role) 132 | 133 | response = iam.attach_role_policy( 134 | RoleName=snowrole, 135 | PolicyArn=policyArn 136 | ) 137 | 138 | print(response) 139 | 140 | def cfnsend(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False, reason=None): 141 | 142 | responseUrl = '' 143 | StackId ='' 144 | RequestId ='' 145 | LogicalResourceId ='' 146 | 147 | if 'ResponseURL' in event: 148 | responseUrl = event['ResponseURL'] 149 | 150 | if 'StackId' in event: 151 | StackId = event['StackId'] 152 | 153 | if 'RequestId' in event: 154 | RequestId = event['RequestId'] 155 | 156 | if 'LogicalResourceId' in event: 157 | LogicalResourceId = event['LogicalResourceId'] 158 | 159 | responseBody = { 160 | 'Status' : responseStatus, 161 | 'Reason' : reason or "See the details in CloudWatch Log Stream: {}".format(context.log_stream_name), 162 | 'PhysicalResourceId' : physicalResourceId or context.log_stream_name, 163 | 'StackId' : StackId, 164 | 'RequestId' : RequestId, 165 | 'LogicalResourceId' : LogicalResourceId, 166 | 'NoEcho' : noEcho, 167 | 'Data' : responseData 168 | } 169 | 170 | json_responseBody = json.dumps(responseBody) 171 | 172 | print("Response body:") 173 | print(json_responseBody) 174 | 175 | headers = { 176 | 'content-type' : '', 177 | 'content-length' : str(len(json_responseBody)) 178 | } 179 | 180 | try: 181 | response = http.request('PUT', responseUrl, 
# lambda/SnowflakeIntegration_Lambda.py (continued): end of cfnsend() — the PUT to the pre-signed
# ResponseURL; failures are only printed, so a failed signal can leave the stack waiting until
# timeout — and the first half of lambda_handler(): Delete requests are acknowledged with SUCCESS
# immediately (no teardown of Snowflake/IAM resources), the secret name and bucket/prefix come from
# the custom resource's ResourceProperties, and the Snowflake connection is opened with the
# hard-coded role ACCOUNTADMIN (broad privilege — flagged) and an OCSP cache in /tmp (Lambda's only
# writable path). A random integration name S3INT<3 letters><num> is generated per invocation.
# NOTE(review): exceptions raised before the try/finally (e.g. from get_snowflake_config or
# snowflake.connector.connect) are not caught, so no FAILED signal is sent to CloudFormation in
# that path — confirm intended.
headers=headers, body=json_responseBody) 182 | print("Status code:", response.status) 183 | 184 | 185 | except Exception as e: 186 | 187 | print("send(..) failed executing http.request(..):", e) 188 | 189 | def lambda_handler(event, context): 190 | 191 | logger.info('EVENT Received: {}'.format(event)) 192 | responseData = {} 193 | 194 | #Handle cfnsend delete event 195 | eventType = event['RequestType'] 196 | if eventType == 'Delete': 197 | logger.info(f'Request Type is Delete; unsupported') 198 | cfnsend(event, context, 'SUCCESS', responseData) 199 | return 'SUCCESS' 200 | 201 | #sf_config_name = os.environ['SNOW_SECRET'] 202 | sf_config_name = event['ResourceProperties']['SNOW_SECRET'] 203 | sf_config = get_snowflake_config(sf_config_name) 204 | logger.info(f'snowflake config successfully retrieved from secrets') 205 | 206 | assert isinstance(sf_config, dict), 'sf_config config must be of type dict' 207 | 208 | ctx = snowflake.connector.connect( 209 | user=sf_config['snowuser'], 210 | password=sf_config['snowpass'], 211 | role='ACCOUNTADMIN', 212 | account=sf_config['snowaccount'], 213 | database=sf_config['snowdb'], 214 | schema=sf_config['snowschema'], 215 | ocsp_response_cache_filename="/tmp/ocsp_response_cache" 216 | ) 217 | cs = ctx.cursor() 218 | #SNOW_S3_BUCKETNAME = os.environ['SNOW_S3_BUCKETNAME'] 219 | SNOW_S3_BUCKETNAME = event['ResourceProperties']['SNOW_S3_BUCKETNAME'] 220 | #SNOW_S3_BUCKETPREFIX = os.environ['SNOW_S3_BUCKETPREFIX'] 221 | SNOW_S3_BUCKETPREFIX = event['ResourceProperties']['SNOW_S3_BUCKETPREFIX'] 222 | 223 | letters = string.ascii_lowercase 224 | randomstr = ''.join(random.choice(letters) for i in range(3)) 225 | randomnum = str(random.randrange(2,100)) 226 | SNOW_INT = "S3INT" + randomstr + randomnum 227 | 228 | SNOW_TABLE = os.environ['SNOW_TABLE'] 229 | CURRENT_AWS_ACCOUNT = os.environ['CURRENT_AWS_ACCOUNT'] 230 | 231 | SNOW_S3_LOCATION = 's3://' + SNOW_S3_BUCKETNAME +'/' + SNOW_S3_BUCKETPREFIX +'/' 232 | try: 233 | 234 | sql_1 = 
'create storage integration ' + SNOW_INT + ' type = external_stage storage_provider = s3 enabled = true' \ 235 | + ' storage_aws_role_arn = ' + "'" + "arn:aws:iam::" + CURRENT_AWS_ACCOUNT + ":role/myrole" + "'" + ' storage_allowed_locations = (' + "'" + SNOW_S3_LOCATION + "'" +')' 236 | print(sql_1) 237 | cs.execute(sql_1) 238 | 239 | sql_2 = 'desc integration ' + SNOW_INT 240 | print(sql_2) 241 | cs.execute(sql_2) 242 | 243 | query_id_desc = cs.sfqid 244 | 245 | sql_3 = 'select "property", "property_value" from table(result_scan(' + "'" + query_id_desc + "'" + '))' + ' where "property" = ' + "'" + "STORAGE_AWS_EXTERNAL_ID" + "'" 246 | print(sql_3) 247 | cs.execute(sql_3) 248 | for (property, property_value) in cs: 249 | AWS_EXTERNAL_ID = property_value 250 | print('{0}, {1}'.format(property, AWS_EXTERNAL_ID)) 251 | 252 | 253 | sql_4 = 'select "property", "property_value" from table(result_scan(' + "'" + query_id_desc + "'" + '))' + ' where "property" = ' + "'" + "STORAGE_AWS_IAM_USER_ARN" + "'" 254 | print(sql_4) 255 | cs.execute(sql_4) 256 | for (property, property_value) in cs: 257 | AWS_IAM_USER_ARN = property_value 258 | print('{0}, {1}'.format(property, AWS_IAM_USER_ARN)) 259 | 260 | create_iam_policy(AWS_EXTERNAL_ID,AWS_IAM_USER_ARN,SNOW_S3_BUCKETNAME,SNOW_S3_BUCKETPREFIX,SNOW_INT) 261 | 262 | sql_5 = 'create stage ' + "S3STAGE" + SNOW_INT + ' storage_integration = ' + SNOW_INT + ' url = (' + "'" + SNOW_S3_LOCATION + "'" +')' 263 | print(sql_5) 264 | cs.execute(sql_5) 265 | 266 | finally: 267 | cs.close() 268 | ctx.close() 269 | 270 | cfnsend(event, context, 'SUCCESS', responseData) 271 | return 'SUCCESS' 272 | 273 | -------------------------------------------------------------------------------- /lambda/SnowflakeIntegration_Lambda.zip: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/lambda/SnowflakeIntegration_Lambda.zip -------------------------------------------------------------------------------- /layer/snowflakelayer.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/aws-integrations-cloudops/4577e34d3496763465665711ae661fd7a83455c4/layer/snowflakelayer.zip --------------------------------------------------------------------------------