├── LICENSE ├── README.md ├── __init__.py ├── hooks ├── __init__.py └── salesforce_hook.py └── operators ├── __init__.py ├── salesforce_schema_to_redshift_operator.py └── salesforce_to_s3_operator.py /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Airflow Plugin - Salesforce
This plugin moves data from the Salesforce API to S3 based on the specified object.

## Hooks
### Salesforce Hook
This hook handles authentication and requests to Salesforce. It extends the BaseHook and allows you to create a new connection to Salesforce, pull data out, and save it to a file.

### S3Hook
Core Airflow S3Hook with the standard boto dependency.

## Operators
### SalesforceToS3Operator
This operator composes the logic for this plugin. It queries the Salesforce Bulk API using a SOQL string and then drops the results in an S3 bucket.
It accepts the following parameters:

    :param sf_conn_id: Salesforce Connection Id
    :param soql: Salesforce SOQL Query String used to query the Bulk API
    :param object_type: Salesforce Object Type (lead, contact, etc.)
    :param s3_conn_id: S3 Connection Id
    :param s3_bucket: S3 Bucket where query results will be put
    :param s3_key: S3 Key that will be assigned to the uploaded Salesforce
                   query results

### SalesforceSchemaToRedshiftOperator
Reconciles schema between Salesforce API objects and Redshift, leveraging the Salesforce API's describe function to push the object attributes and datatypes to Redshift tables. Ignores compound fields as they are already broken out into their components elsewhere.
It accepts the following parameters:
```
:param sf_conn_id: The conn_id for your Salesforce Instance
:param s3_conn_id: The conn_id for your S3
:param rs_conn_id: The conn_id for your Redshift Instance
:param sf_object: The Salesforce object you wish to reconcile
                  the schema for.
                  Examples include Lead, Contact, etc.
:param rs_schema: The schema where you want to put the reconciled
                  schema
:param rs_table: The table inside of the schema where you want to
                 put the reconciled schema
:param s3_bucket: The S3 bucket name that will be used to store the
                  JSONPath file that maps the source schema to Redshift columns
:param s3_key: The S3 key that will be given to the JSONPath file
```
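
## Example Usage
The snippet below is a minimal sketch of wiring `SalesforceToS3Operator` into a DAG; the connection IDs, bucket, key, and schedule are placeholders.

```python
from datetime import datetime

from airflow import DAG
from salesforce_plugin.operators.salesforce_to_s3_operator import SalesforceToS3Operator

dag = DAG(
    dag_id='salesforce_to_s3_example',
    start_date=datetime(2018, 1, 1),
    schedule_interval='@daily',
)

# Pull all Lead fields from Salesforce and land them in S3 as CSV.
leads_to_s3 = SalesforceToS3Operator(
    task_id='leads_to_s3',
    sf_conn_id='salesforce_default',        # HTTP connection with security_token in Extras
    sf_obj='Lead',
    s3_conn_id='s3_default',
    s3_bucket='my-data-bucket',
    s3_key='salesforce/lead/{{ ds }}.csv',  # s3_key is a templated field
    fmt='csv',
    dag=dag,
)
```

`SalesforceBulkQueryToS3Operator` is wired up the same way but takes a `soql` string and an `object_type` instead of `sf_obj`, and `SalesforceSchemaToRedshiftOperator` can be run beforehand to keep the target Redshift table's columns in sync with the Salesforce object.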
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
from airflow.plugins_manager import AirflowPlugin
from salesforce_plugin.hooks.salesforce_hook import SalesforceHook
from salesforce_plugin.operators.salesforce_schema_to_redshift_operator import SalesforceSchemaToRedshiftOperator
from salesforce_plugin.operators.salesforce_to_s3_operator import SalesforceBulkQueryToS3Operator
from salesforce_plugin.operators.salesforce_to_s3_operator import SalesforceToS3Operator


class SalesforceToRedshiftPlugin(AirflowPlugin):
    name = "salesforce_to_redshift_plugin"
    hooks = [SalesforceHook]
    operators = [SalesforceToS3Operator,
                 SalesforceSchemaToRedshiftOperator,
                 SalesforceBulkQueryToS3Operator]
    executors = []
    macros = []
    admin_views = []
    flask_blueprints = []
    menu_links = []
--------------------------------------------------------------------------------
/hooks/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/airflow-plugins/salesforce_plugin/7c21346dd0d49f541ca419d766bf41ac4a20e6cd/hooks/__init__.py
--------------------------------------------------------------------------------
/hooks/salesforce_hook.py:
--------------------------------------------------------------------------------
from airflow.hooks.base_hook import BaseHook
from simple_salesforce import Salesforce


class SalesforceHook(BaseHook):
    def __init__(
        self,
        conn_id,
        *args,
        **kwargs
    ):
        """
        Borrowed from airflow.contrib

        Creates a new connection to Salesforce
        and allows you to pull data out of SFDC and save it to a file.
        You can then use that file with other
        Airflow operators to move the data into another data source.

        :param conn_id: the name of the connection that has the parameters
                        we need to connect to Salesforce.
                        The connection should be type `http` and include a
                        user's security token in the `Extras` field.

        .. note::
            For the HTTP connection type, you can include a
            JSON structure in the `Extras` field.
            We need a user's security token to connect to Salesforce.
            So we define it in the `Extras` field as:
            `{"security_token":"YOUR_SECURITY_TOKEN"}`
        """

        self.sf = None
        self.conn_id = conn_id
        self._args = args
        self._kwargs = kwargs

        # get the connection parameters
        self.connection = self.get_connection(conn_id)
        self.extras = self.connection.extra_dejson

    def get_conn(self):
        """
        Sign into Salesforce.
        If we have already signed in, this will just return the original object.
        """
        if self.sf:
            return self.sf

        auth_type = self.extras.get('auth_type', 'password')

        if auth_type == 'direct':
            auth_kwargs = {
                'instance_url': self.connection.host,
                'session_id': self.connection.password
            }
        else:
            auth_kwargs = {
                'username': self.connection.login,
                'password': self.connection.password,
                'security_token': self.extras.get('security_token'),
                'instance_url': self.connection.host
            }

        # connect to Salesforce
        self.sf = Salesforce(**auth_kwargs)

        return self.sf
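
# Usage sketch (not part of the hook itself): 'salesforce_default' is a
# placeholder conn_id. get_conn() returns a simple_salesforce.Salesforce
# client, so any simple_salesforce call is available, e.g.:
#
#   sf = SalesforceHook('salesforce_default').get_conn()
#   leads = sf.query("SELECT Id, Email FROM Lead LIMIT 10")
#   lead_fields = sf.Lead.describe()['fields']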
--------------------------------------------------------------------------------
/operators/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/airflow-plugins/salesforce_plugin/7c21346dd0d49f541ca419d766bf41ac4a20e6cd/operators/__init__.py
--------------------------------------------------------------------------------
/operators/salesforce_schema_to_redshift_operator.py:
--------------------------------------------------------------------------------
from airflow.models import BaseOperator
from airflow.hooks.postgres_hook import PostgresHook
from airflow.hooks.S3_hook import S3Hook

from salesforce_plugin.hooks.salesforce_hook import SalesforceHook


class SalesforceSchemaToRedshiftOperator(BaseOperator):
    """
    Reconcile schema between Salesforce API objects and Redshift.

    Leverages the Salesforce API function to describe source objects
    and push the object attributes and datatypes to Redshift tables,
    1 table per object.

    Ignores compound fields as they are already broken out into their
    components elsewhere.

    :param sf_conn_id: The conn_id for your Salesforce Instance

    :param s3_conn_id: The conn_id for your S3

    :param rs_conn_id: The conn_id for your Redshift Instance

    :param sf_object: The Salesforce object you wish to reconcile
                      the schema for.
                      Examples include Lead, Contact, etc.

    :param rs_schema: The schema where you want to put the reconciled
                      schema

    :param rs_table: The table inside of the schema where you want to
                     put the reconciled schema

    :param s3_bucket: The S3 bucket name that will be used to store the
                      JSONPath file that maps the source schema to Redshift columns

    :param s3_key: The S3 key that will be given to the JSONPath file

    .. note::
        Be aware that JSONPath files are used for the column mapping of source
        objects to destination tables.

        Datatype conversions happen via the dt_conv dictionary.
    """

    dt_conv = {
        'boolean': lambda x: 'boolean',
        'date': lambda x: 'date',
        'dateTime': lambda x: 'TIMESTAMP',
        'double': lambda x: 'float8',
        'email': lambda x: 'varchar(80)',
        'id': lambda x: 'varchar(100)',
        'ID': lambda x: 'varchar(100)',
        'int': lambda x: 'int',
        'picklist': lambda x: 'varchar({})'.format(x if x <= 65535 else 'MAX'),
        'phone': lambda x: 'varchar(40)',
        'string': lambda x: 'varchar({})'.format(x if x <= 65535 else 'MAX'),
        'textarea': lambda x: 'varchar({})'.format(x if x <= 65535 else 'MAX'),
        'url': lambda x: 'varchar(256)'
    }

    template_fields = ('s3_key',)

    def __init__(self,
                 sf_conn_id, s3_conn_id, rs_conn_id,  # Connection Ids
                 sf_object,                           # SF Configs
                 rs_schema, rs_table,                 # RS Configs
                 s3_bucket, s3_key,                   # S3 Configs
                 *args, **kwargs):

        super().__init__(*args, **kwargs)

        self.sf_conn_id = sf_conn_id
        self.s3_conn_id = s3_conn_id
        self.rs_conn_id = rs_conn_id
        self.sf_object = sf_object
        self.rs_schema = rs_schema
        self.rs_table = rs_table
        self.s3_bucket = s3_bucket
        self.s3_key = s3_key

    def fetch_sf_columns(self, sf_conn_id, sf_object):
        """
        Uses the Salesforce describe() method to fetch columns from the
        Salesforce instance. Compound columns are filtered out.
        """
        sf_conn = SalesforceHook(sf_conn_id).get_conn()

        # Dynamically fetch the simple_salesforce object endpoint,
        # i.e. sf_conn.Lead.describe() | sf_conn.Contact.describe()
        sf_fields = sf_conn.__getattr__(sf_object).describe()['fields']

        # Get compound fields
        k1 = 'compoundFieldName'
        compound_fields = [f[k1] for f in sf_fields]  # Get all compound fields across all fields
        compound_fields = set(compound_fields)
        compound_fields.discard(None)

        def build_dict(x): return {
            'rs_name': x['name'].lower(),
            'sf_name': x['name'],
            'path': [x['name']],
            'type': x['soapType'].split(':')[-1],
            'length': x['length'],
            'precision': x['precision']
        }

        # Loop through fields and grab columns we want
        return [build_dict(field) for field in sf_fields if field['name'] not in compound_fields]

    def create_tbl_ddl(self, rs_table, rs_schema, sf_cols):
        """
        Creates the CREATE TABLE DDL to be executed on the Redshift
        instance. Only run if the table does not exist at time of first run.
        """
        ddl = """
            CREATE TABLE
            IF NOT EXISTS {table_schema}.{table_name}
            ({cols});
        """

        def make_col_ddl(sf_col):
            sf_type = sf_col['type']
            type_transform = self.dt_conv[sf_type]      # Grab lambda type converter
            rs_type = type_transform(sf_col['length'])  # Execute type converter

            return "\t{} {}".format(sf_col['rs_name'], rs_type)

        cols_ddl = [make_col_ddl(col) for col in sf_cols]
        cols_ddl = ', \n'.join(cols_ddl)

        return [ddl.format(table_name=rs_table, table_schema=rs_schema, cols=cols_ddl)]
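
    # Illustration only (hypothetical Lead fields): given sf_cols containing an
    # Id (soapType ID), a FirstName string of length 40, and a CreatedDate
    # dateTime, create_tbl_ddl() above would emit DDL along the lines of:
    #
    #   CREATE TABLE
    #   IF NOT EXISTS salesforce.lead
    #   (    id varchar(100),
    #        firstname varchar(40),
    #        createddate TIMESTAMP);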

    def alter_tbl_ddl(self, rs_schema, rs_table, missing_sf_cols):
        """
        Creates the ALTER TABLE DDL that will be executed on the Redshift
        instance. Only run if there is an additional column to be added.
        """
        alter_ddls = []
        for col in missing_sf_cols:
            sf_type = col['type']
            rs_type = self.dt_conv[sf_type](col['length'])

            alter_ddl = """ALTER TABLE {}.{} ADD COLUMN {} {}"""

            alter_ddls.append(
                alter_ddl.format(
                    rs_schema,
                    rs_table,
                    col['rs_name'].lower(),
                    rs_type
                )
            )

        return alter_ddls

    def fetch_rs_ddl(self, rs_conn_id, rs_table, rs_schema, sf_cols):
        """
        Used to decide whether we need to run an ALTER or CREATE
        table command. Leverages alter_tbl_ddl() and create_tbl_ddl()
        to create the DDL that will be run.
        """
        rs_info = PostgresHook().get_connection(rs_conn_id)
        rs_conn = PostgresHook(rs_conn_id).get_cursor()

        q = """
            SELECT column_name
            FROM information_schema.columns c
            WHERE table_name = '{rs_table}'
                and table_schema = '{rs_schema}'
                and table_catalog = '{rs_database}'
            ORDER BY ordinal_position ASC
        """

        rs_conn.execute(q.format(
            rs_table=rs_table,
            rs_schema=rs_schema,
            rs_database=rs_info.schema
        ))

        rs_cols = rs_conn.fetchall()
        rs_cols = [col[0] for col in rs_cols]
        tbl_missing = not rs_cols

        if tbl_missing:
            ddl = self.create_tbl_ddl(rs_table, rs_schema, sf_cols)
        else:
            # All columns that exist in sf_cols but not in rs_cols
            # missing_cols = {x['name'] for x in sf_cols} - {x[0] for x in rs_cols}
            missing_cols = [x for x in sf_cols if x['rs_name'] not in rs_cols]
            ddl = self.alter_tbl_ddl(rs_schema, rs_table, missing_cols) if missing_cols else None

        rs_conn.close()

        return ddl

    def fetch_rs_columns(self, rs_conn_id, rs_table, rs_schema):
        """
        Fetches the current Redshift columns while maintaining the order
        in which they are returned. This allows the JSONPath column mapping
        to maintain the same order.
        """
        rs_info = PostgresHook().get_connection(rs_conn_id)
        rs_conn = PostgresHook(rs_conn_id).get_cursor()

        q = """
            SELECT column_name, ordinal_position
            FROM information_schema.columns c
            WHERE table_name = '{rs_table}'
                and table_schema = '{rs_schema}'
                and table_catalog = '{rs_database}'
            ORDER BY ordinal_position ASC
        """.format(rs_table=rs_table, rs_schema=rs_schema, rs_database=rs_info.schema)

        rs_conn.execute(q)
        recs = rs_conn.fetchall()
        rs_conn.close()
        return [rec[0] for rec in recs]
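
    # Illustration only: for Redshift columns (id, firstname) the JSONPath
    # document produced by create_paths() below (and uploaded to S3 by
    # execute()) looks roughly like:
    #
    #   {
    #   "jsonpaths": [
    #       "$['Id']",
    #       "$['FirstName']"
    #     ]
    #   }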

    def create_paths(self, paths):
        """
        Creates the JSONPath column mapping string.

        :param paths: Each element of the list represents a specific path to
                      a dictionary attribute, e.g.
                      [
                          ['first_lvl_attr'],
                          ['first_lvl_attr', 'second_lvl_attr']
                      ]
        :type paths: list of lists
        """
        def create_path(field_path):
            """
            Builds a single JSONPath expression (e.g. "$['Id']") from a list
            of attribute names and/or indexes.
            """
            base = "\"${}\""
            # Create a template for each value in the array;
            # we can reasonably assume either str or int will be passed
            tmplts = ["['{}']" if isinstance(path, str) else "[{}]" for path in field_path]
            paths = [tmplts[i].format(field_path[i]) for i in range(len(field_path))]
            full_path = ''.join(paths)
            return base.format(full_path)

        base = """"jsonpaths": [ \n\t{}\t]"""
        l = len(paths)
        # List of template strings ready to be formatted
        tmplts = ["\t{},\n" for i in range(l)]
        # JSONPaths that will be passed into the template strings
        paths = [tmplts[i].format(create_path(paths[i])) for i in range(l)]
        paths[-1] = paths[-1].replace(',', '')  # remove the trailing comma from the last path
        paths_str = ''.join(paths)  # finally merge into a single str

        return "{\n" + base.format(paths_str) + "\n}"

    def generate_path_file(self, rs_cols, sf_cols):
        """
        Takes a list of __ordered__ Redshift columns that exist in the dst
        and a list of Salesforce columns that exist in the source. Using the
        ordered Redshift columns it creates a list of paths to the Salesforce
        attributes matching the order of the Redshift columns. Returns a
        JSONPath column mapping string in that same order.

        :param rs_cols: ordered list of Redshift column names, as returned
                        by fetch_rs_columns()
        :type rs_cols: list of strings representing Redshift cols
        :param sf_cols: list of dictionaries representing Salesforce cols;
                        'path' and 'rs_name' are required attrs of each dict
        :type sf_cols: list of dictionaries
        """
        sf_cols = {sf_col['rs_name']: sf_col['path'] for sf_col in sf_cols}
        ordered_paths = [sf_cols.get(col_key, None) for col_key in rs_cols if sf_cols.get(col_key) is not None]

        return self.create_paths(ordered_paths)

    def build_copy_cmd_template(self, schema, tbl, columns, path_key, path_bucket):
        """
        Builds a partial COPY command that can be templated again later,
        to avoid storing credentials in XCom.
        Notice bucket, key, and creds being double-wrapped (escaped) below.
        """
        base_copy = """
            COPY {schema}.{tbl} ({columns})
            FROM 's3://{{bucket}}/{{key}}'
            CREDENTIALS '{{creds}}'
            JSON 's3://{path_bucket}/{path}'
            REGION as 'us-east-1'
            TIMEFORMAT 'epochmillisecs'
            TRUNCATECOLUMNS
            COMPUPDATE OFF
            STATUPDATE OFF;
        """

        return base_copy.format(
            schema=schema,
            tbl=tbl,
            columns=columns,
            path=path_key,
            path_bucket=path_bucket
        )
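
    # Illustration only (hypothetical downstream task): the partial COPY command
    # pushed to XCom by execute() below is completed at load time, so credentials
    # never have to be stored in XCom. For example, in a downstream task with
    # access to the task instance `ti`:
    #
    #   copy_cmd = ti.xcom_pull(task_ids='reconcile_sf_schema', key='copy_cmd')
    #   copy_cmd = copy_cmd.format(
    #       bucket='my-data-bucket',
    #       key='salesforce/lead/2018-01-01.json',
    #       creds='aws_access_key_id=...;aws_secret_access_key=...'
    #   )
    #   PostgresHook('redshift_default').run(copy_cmd)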

    def execute(self, context):
        """
        See class definition.
        """
        # Get columns from Salesforce
        sf_cols = self.fetch_sf_columns(self.sf_conn_id, self.sf_object)
        # Check if we need DDL changes
        rs_ddl = self.fetch_rs_ddl(self.rs_conn_id, self.rs_table, self.rs_schema, sf_cols)

        # Run RS DDL changes
        if rs_ddl is not None:
            rs = PostgresHook(self.rs_conn_id)
            for ddl in rs_ddl:
                rs.run(ddl)

        # Get columns from Redshift
        rs_cols = self.fetch_rs_columns(self.rs_conn_id, self.rs_table, self.rs_schema)

        # Generate JSONPath string w/ same ordering as RS table columns
        jsonPath = self.generate_path_file(rs_cols, sf_cols)

        # Push JSONPath to S3
        s3 = S3Hook(self.s3_conn_id)
        s3.load_string(jsonPath, self.s3_key, bucket_name=self.s3_bucket, replace=True)

        # Limit columns in the COPY cmd to only columns present in SF
        filter_list = [col['rs_name'] for col in sf_cols]
        filtered_cols = [col for col in rs_cols if col in filter_list]

        rs_col_str = ', '.join(filtered_cols)

        copy_cmd = self.build_copy_cmd_template(
            schema=self.rs_schema,
            tbl=self.rs_table,
            columns=rs_col_str,
            path_key=self.s3_key,
            path_bucket=self.s3_bucket
        )

        self.xcom_push(context, key='copy_cmd', value=copy_cmd)

        # Push SF columns to XCom
        self.xcom_push(context, key='sf_cols', value=[col['sf_name'] for col in sf_cols])
--------------------------------------------------------------------------------
/operators/salesforce_to_s3_operator.py:
--------------------------------------------------------------------------------
from tempfile import NamedTemporaryFile
import logging
import json

from airflow.utils.decorators import apply_defaults
from airflow.models import BaseOperator
from airflow.hooks.S3_hook import S3Hook

from airflow.contrib.hooks.salesforce_hook import SalesforceHook


class SalesforceBulkQueryToS3Operator(BaseOperator):
    """
    Queries the Salesforce Bulk API using a SOQL string. Results are then
    put into an S3 Bucket.

    :param sf_conn_id: Salesforce Connection Id
    :param soql: Salesforce SOQL Query String used to query the Bulk API
    :param object_type: Salesforce Object Type (lead, contact, etc.)
    :param s3_conn_id: S3 Connection Id
    :param s3_bucket: S3 Bucket where query results will be put
    :param s3_key: S3 Key that will be assigned to the uploaded Salesforce
                   query results
    """
    template_fields = ('soql', 's3_key')

    def __init__(self,
                 sf_conn_id,
                 soql,
                 object_type,
                 s3_conn_id,
                 s3_bucket,
                 s3_key,
                 *args,
                 **kwargs):

        super().__init__(*args, **kwargs)

        self.sf_conn_id = sf_conn_id
        self.soql = soql
        self.s3_conn_id = s3_conn_id
        self.s3_bucket = s3_bucket
        self.s3_key = s3_key
        self.object = object_type[0].upper() + object_type[1:].lower()

    def execute(self, context):
        sf_conn = SalesforceHook(self.sf_conn_id).get_conn()

        logging.info(self.soql)
        query_results = sf_conn.bulk.__getattr__(self.object).query(self.soql)

        s3 = S3Hook(self.s3_conn_id)
        # One JSON object per line
        query_results = [json.dumps(result, ensure_ascii=False) for result in query_results]
        query_results = '\n'.join(query_results)

        s3.load_string(query_results, self.s3_key, bucket_name=self.s3_bucket, replace=True)


class SalesforceToS3Operator(BaseOperator):
    """
    Salesforce to S3 Operator

    Makes a query against Salesforce and writes the resulting data to a file.

    :param sf_conn_id: Name of the Airflow connection that has
                       the following information:
                           - username
                           - password
                           - security_token
    :type sf_conn_id: string
    :param sf_obj: Name of the relevant Salesforce object
    :param s3_conn_id: The destination s3 connection id.
    :type s3_conn_id: string
    :param s3_bucket: The destination s3 bucket.
    :type s3_bucket: string
    :param s3_key: The destination s3 key.
    :type s3_key: string
    :param sf_fields: *(optional)* list of fields that you want
                      to get from the object.
                      If *None*, then this will get all fields
                      for the object
    :type sf_fields: list
    :param fmt: *(optional)* format that the data written to the s3_key
                should be in. Possible values include:
                    - csv
                    - json
                    - ndjson
                *Default: csv*
    :type fmt: string
    :param query: *(optional)* A specific query to run for
                  the given object. This will override
                  default query creation.
                  *Default: None*
    :type query: string
    :param relationship_object: *(optional)* Some queries require
                                relationship objects to work, and
                                these are not the same names as
                                the SF object. Specify that
                                relationship object here.
                                *Default: None*
    :type relationship_object: string
    :param record_time_added: *(optional)* True if you want to add a
                              Unix timestamp field to the resulting data
                              that marks when the data was
                              fetched from Salesforce.
                              *Default: False*.
    :type record_time_added: boolean
    :param coerce_to_timestamp: *(optional)* True if you want to convert
                                all fields with dates and datetimes
                                into Unix timestamp (UTC).
                                *Default: False*.
    :type coerce_to_timestamp: boolean
    """
    template_fields = ("s3_key",
                       "query")

    @apply_defaults
    def __init__(self,
                 sf_conn_id,
                 sf_obj,
                 s3_conn_id,
                 s3_bucket,
                 s3_key,
                 sf_fields=None,
                 fmt="csv",
                 query=None,
                 relationship_object=None,
                 record_time_added=False,
                 coerce_to_timestamp=False,
                 *args,
                 **kwargs):

        super(SalesforceToS3Operator, self).__init__(*args, **kwargs)

        self.sf_conn_id = sf_conn_id
        self.object = sf_obj
        self.fields = sf_fields
        self.s3_conn_id = s3_conn_id
        self.s3_bucket = s3_bucket
        self.s3_key = s3_key
        self.fmt = fmt.lower()
        self.query = query
        self.relationship_object = relationship_object
        self.record_time_added = record_time_added
        self.coerce_to_timestamp = coerce_to_timestamp

    def special_query(self, query, sf_hook, relationship_object=None):
        if not query:
            raise ValueError("Query is None. Cannot query nothing")

        sf_hook.sign_in()

        results = sf_hook.make_query(query)
        if relationship_object:
            records = []
            for r in results['records']:
                if r.get(relationship_object, None):
                    records.extend(r[relationship_object]['records'])
            results['records'] = records

        return results

    def execute(self, context):
        """
        Execute the operator.
        This will get all the data for a particular Salesforce model
        and write it to a file.
        """
        logging.info("Prepping to gather data from Salesforce")

        # Open a named temporary file to store the output until the S3 upload
        with NamedTemporaryFile("w") as tmp:

            # Load the SalesforceHook
            hook = SalesforceHook(conn_id=self.sf_conn_id, output=tmp.name)

            # Attempt to log in to Salesforce.
            # If this process fails, it will raise an error and die.
            try:
                hook.sign_in()
            except Exception:
                logging.error('Unable to login to Salesforce.')
                raise

            # Get object from Salesforce
            # If fields were not defined, all fields are pulled.
            if not self.fields:
                self.fields = hook.get_available_fields(self.object)

            logging.info(
                "Making request for "
                "{0} fields from {1}".format(len(self.fields), self.object)
            )

            if self.query:
                query = self.special_query(self.query,
                                           hook,
                                           relationship_object=self.relationship_object
                                           )
            else:
                query = hook.get_object_from_salesforce(self.object,
                                                        self.fields)

            # Output the records from the query to a file;
            # the list of records is stored under the "records" key.
            logging.info("Writing query results to: {0}".format(tmp.name))

            hook.write_object_to_file(query['records'],
                                      filename=tmp.name,
                                      fmt=self.fmt,
                                      coerce_to_timestamp=self.coerce_to_timestamp,
                                      record_time_added=self.record_time_added)

            # Flush the temp file and upload the temp file to S3
            tmp.flush()

            dest_s3 = S3Hook(self.s3_conn_id)

            dest_s3.load_file(
                filename=tmp.name,
                key=self.s3_key,
                bucket_name=self.s3_bucket,
                replace=True
            )

            dest_s3.connection.close()

            tmp.close()

            logging.info("Query finished!")
--------------------------------------------------------------------------------