├── .gitignore ├── LICENSE ├── README.md ├── airflow-setup ├── airflow-scheduler.service ├── airflow-scheduler.sh ├── airflow-server.sh ├── airflow-webserver.service ├── env-setup.sh ├── server-setup.sh └── start-airflow-venv.sh ├── images ├── airflow-dag-process.png ├── airflow-dag-trigger-ui.png ├── airflow-fivetran-dbt-arch.png ├── dbt-lineage-graph.png ├── gcp-associate-db-instance-with-network.png ├── gcp-create-firewall-rules.png ├── gcp-create-http-rule-example.png ├── gcp-create-project.png ├── gcp-enable-compute-engine-api.png ├── gcp-place-instance-in-network.png ├── gcp-ssh-to-vm.png └── git-repo-ssh-keys.png └── src ├── dbt_cloud.py ├── example_fivetran_dbt.py └── fivetran.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # airflow-fivetran-dbt 2 | The purpose of this github repo is to provide an example of what an orchestration pipeline for Fivetran + dbt managed by Airflow would look like. If you have any questions about this, feel free to ask in our [dbt Slack community](https://community.getdbt.com/). 
3 | 4 | # Introduction 5 | This is one way to orchestrate dbt in coordination with other tools, such as Fivetran for data loading. In this example, our focus is on coordinating a Fivetran sync for loading data to a warehouse, and then triggering a dbt run in an event-driven pipeline. We use the Fivetran and dbt Cloud APIs to accomplish this, with Airflow managing the scheduling / orchestration of the job flow. The final step extracts the `manifest.json` from the dbt run results to capture relevant metadata for downstream logging, alerting and analysis. The code provided in this repository is intended as a demonstration to build upon and should not be used as a production-ready solution. 6 | 7 | # Table of Contents 8 | 1. [Highlights](#Highlights) 9 | 2. [Solution Architecture](#Solution-Architecture) 10 | 3. [Airflow DAG](#Airflow-DAG) 11 | 4. [dbt Job DAG](#dbt-Job-DAG) 12 | 5. [How to Guide](#How-to-Guide) 13 | * [What you'll need](#What-youll-need) 14 | * [User permissions](#User-permissions) 15 | * [Airflow Installation](#Airflow-Installation) 16 | * [SSH key configuration in Github](#SSH-key-configuration-in-Github) 17 | * [Airflow environment setup](#Airflow-environment-setup) 18 | * [Setting up the Postgres Database](#Setting-up-the-Postgres-Database) 19 | * [Start the Airflow server](#Start-the-Airflow-server) 20 | * [Running the code](#Running-the-code) 21 | 22 | 23 | ## Highlights 24 | - logical isolation of data load (Fivetran), data transform (dbt) and orchestration (Airflow) functions 25 | - Airflow code can be run from a managed service like [Astronomer](https://www.astronomer.io/) 26 | - avoids complexity of re-creating the dbt DAG in Airflow, which we've seen implemented at a few clients 27 | - demonstrates orchestrating Fivetran and dbt in an event-driven pipeline 28 | - configurable approach which can be extended to handle additional Fivetran connectors and dbt job definitions 29 | - captures relevant data from a job run which could be shipped to downstream logging & analytics services. It would also be feasible to log interim job status data using this setup, though we did not build it into the current python codebase 30 | 31 | ## Solution Architecture 32 | Below is a system diagram with a brief description of each step in the process. 33 | 34 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/airflow-fivetran-dbt-arch.png "Solution Architecture Diagram") 35 | 36 | ## Airflow DAG 37 | If you are already using Airflow, you may want to skip the implementation guide below and focus on the key parts of the python code which enable this workflow. 38 | 39 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/airflow-dag-process.png "Airflow DAG") 40 | 41 | This is a simplified workflow meant to illustrate the coordination role Airflow can play between a data loading system like Fivetran and dbt. Airflow [XComs](https://airflow.apache.org/docs/apache-airflow/stable/concepts.html?highlight=xcom#concepts-xcom) are used to share state among the tasks defined in the job.
An example XCom reference in the code is: 42 | 43 | Add XCom value in `dbt_job` task 44 | ```python 45 | run_id = trigger_resp['id'] 46 | kwargs['ti'].xcom_push(key='dbt_run_id', value=str(run_id)) 47 | ``` 48 | 49 | Retrieve XCom value associated with `dbt_job` task in downstream `get_dbt_job_status` task 50 | ```python 51 | ti = kwargs['ti'] 52 | run_id = ti.xcom_pull(key='dbt_run_id', task_ids='dbt_job') 53 | ``` 54 | 55 | 56 | Additionally, the DAG takes a mapping for runtime input: 57 | ``` 58 | { 59 | "connector_id": "warn_enormously", 60 | "dbt_job_name": "pokemon_aggregation_job" 61 | } 62 | ``` 63 | 64 | ## dbt Job DAG 65 | The dbt job run against this data is defined in [this repository](https://github.com/fishtown-analytics/airflow-fivetran-dbt--dbt-jobs). It runs a simple aggregation of the input source data to summarize the average HP per pokemon catch_number. It looks like this: 66 | 67 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/dbt-lineage-graph.png "dbt Lineage Graph") 68 | 69 | # How to Guide 70 | 71 | ### What you'll need 72 | 1) Snowflake account with database, warehouse etc. configured 73 | 2) Fivetran account with permission to upload data to Snowflake 74 | 3) Source data configured in Fivetran - this guide uses Google Sheets as the source 75 | 4) Google Cloud Platform account. Google offers $300 in credits for new accounts, which should be more than enough to get this up and running. 76 | 5) dbt Cloud account 77 | 6) Git repository for dbt code. Here is a [link to ours](https://github.com/fishtown-analytics/airflow-fivetran-dbt--dbt-jobs) 78 | 79 | 80 | ### User permissions 81 | 1) User with access to run database operations in Snowflake. dbt operates under a user account alias 82 | 2) User account in Fivetran with permissions to create new connectors. In this example, we use Google Sheets as the connector source data. You will also need sufficient permissions (or a friend who has them :) ) to obtain an API token and secret from the Fivetran Admin console as described [here](https://fivetran.com/docs/rest-api/getting-started) 83 | 3) User account in dbt Cloud with sufficient permissions to create database connections, repositories, and API keys. 84 | 4) User account in Github/Gitlab/Bitbucket etc with permissions to create repositories and associate ssh deploy keys with them. You can read more about this setup [here](https://docs.github.com/en/github/authenticating-to-github/connecting-to-github-with-ssh) 85 | 86 | ### Airflow Installation 87 | 88 | Key notes not covered in Jostein Leira's post (introduced below): 89 | - Make sure to create the instance in the desired project (whether an existing one or a new one) 90 | - You will need to enable the Compute Engine API 91 | - When you create the subnet, make sure to select a region that makes sense for your infrastructure. 92 | - For your VM machine type, use the E2 series. 93 | - You do not need to set up a load balancer for this flow. 94 | - When you go to set up your Postgres database, do not click on Storage. The interface has updated and you should see 95 | `SQL` in the GCP console. 96 | - Whitelist only the [Google IP Ranges](https://support.google.com/a/answer/60764?hl=en) and any developer IP addresses. You will be asked for this when you set up the VPC. 97 | - Install apache-airflow v2.0.0 instead of v1.10.10. Note that airflow command syntax changed slightly across major versions. The Airflow v2.0.0 CLI command syntax is documented [here](https://airflow.apache.org/docs/apache-airflow/stable/cli-and-env-variables-ref.html); see the example after this list.
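For instance, triggering a DAG moved from the flat `trigger_dag` subcommand in v1.10 to the grouped `dags trigger` form in v2.0 (shown here with this repository's DAG id):

```shell
# Airflow 1.10.x
airflow trigger_dag example_fivetran_dbt_operator

# Airflow 2.0+
airflow dags trigger example_fivetran_dbt_operator
```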
98 | 99 | We configured a VM in Google Cloud Platform to host the Airflow server. There are many options for hosting, including managed services like [Astronomer](https://www.astronomer.io/). Your preferred installation method will likely depend on your company's security posture and your desire to customize the implementation. 100 | 101 | We began by following the process described in Jostein Leira's [Medium Post](https://medium.com/grensesnittet/airflow-on-gcp-may-2020-cdcdfe594019)[1]. During the installation, we implemented several changes and additions to the architecture, described below. 102 | 103 | The elements of the final GCP setup are: 104 | 105 | - GCP Project 106 | - Networking - VPC 107 | - Networking - Firewall rules 108 | - The VM running the Airflow application (we used Ubuntu v...) 109 | - Instance group 110 | - Static IP Address 111 | - Cloud SQL database 112 | - Load Balancer 113 | 114 | The specific steps to set up and test each of these components are described below. 115 | 116 | ### Create the GCP Project 117 | 118 | The project will contain all the resources you create in the following steps. Click the project dropdown at the top left of the console and create a new project. We named ours `airflow-server`. 119 | 120 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/gcp-create-project.png "Create new GCP project") 121 | 122 | ### Create the VPC Network 123 | 124 | First we'll set up the VPC in which the Airflow VM will run. When first accessing the VPC Network pane within GCP, you may need to enable the Compute Engine API. 125 | 126 | 1) Navigate to the hamburger menu at the top left of the GCP console, then click "VPC Networks" 127 | 2) Click "Create VPC Network" 128 | 3) Give the network a name (e.g. `fishtown-airflow-network`) 129 | 4) Create a subnet for the network (e.g. `fishtown-airflow-network-subnet1`) 130 | 5) Choose a region 131 | 6) Choose a CIDR range for the subnet (10.0.0.0/8 works) 132 | -- Leave the default settings on the rest of the page 133 | 134 | ### Set up the Firewall Rules 135 | 136 | 1) From the VPC Networks page, click the name of the network you created in the previous step. 137 | 2) Click Firewall rules > Add firewall rule 138 | 3) We need to allow http (temporarily), https, and ssh access to the network from an external IP. Google "what is my ip address" and use the returned value in your firewall settings, plus "/32". For example, if your IP is 203.0.113.44, you would add `203.0.113.44/32` 139 | 4) Set Targets to "All instances in the network" 140 | 5) Set your IP in the list of allowed IP addresses 141 | 6) Open ports 80 and 8080 142 | 7) Click "Create" 143 | 8) Add additional rules for HTTPS (port 443), SSH (port 22), and load balancer (IP ranges 130.211.0.0/22 and 35.191.0.0/16) traffic. The load balancer traffic IPs are internal to Google. 144 | 145 | When you're done, the firewall rules should look as shown in the screenshot below. 146 | 147 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/gcp-create-firewall-rules.png "VPC firewall rules")
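The same rules can also be created from the CLI. A sketch of the HTTP rule above (the rule and network names are illustrative, and `<your-ip>` is the address you looked up in step 3):

```shell
gcloud compute firewall-rules create fishtown-airflow-allow-http \
  --network=fishtown-airflow-network \
  --direction=INGRESS \
  --allow=tcp:80,tcp:8080 \
  --source-ranges=<your-ip>/32
```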
148 | 149 | ### Create the Virtual Machine 150 | 151 | 1) Click the hamburger icon at the top left of the console > Compute Engine > Virtual Machines > VM Instances > Create 152 | 2) Give the instance a name, such as fishtown-airflow-vm1 153 | 3) Be sure to place the VM in the same region you selected for your subnet in the VPC configuration step 154 | 4) Select the instance type. We used an `e2-standard-2` instance for this 155 | 5) Change the operating system to Ubuntu version 20.04 LTS Minimal 156 | 6) Check the box to allow HTTP traffic to the instance 157 | 7) Place the instance in the VPC Network you created in the VPC setup step 158 | -- Leave the rest of the defaults when adding the instance to your VPC 159 | -- Optional -- you can use the `env-setup.sh` script in the `airflow-setup` folder of this repository to bootstrap the instance when it starts. 160 | 161 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/gcp-place-instance-in-network.png "Add the instance to your network") 162 | 163 | ### Create the Instance Group 164 | 165 | 1) Click the hamburger icon at the top left of the console > Compute Engine > Instance Groups > Click Create Instance Group > New Unmanaged Instance Group 166 | 2) Choose the same region you've been working from 167 | 3) Select the name of the network you created in the VPC configuration step 168 | 4) Add the VM instance you created in the Create Virtual Machine step 169 | 5) Click Create 170 | 171 | ### Test SSH access to the VM 172 | 173 | GCP offers multiple options for ssh access to instances, including via the browser. There is some extra configuration necessary for browser-based connections. You can also ssh to your machine from a local terminal. 174 | 175 | #### SSH from a local terminal 176 | 177 | 1) Open a terminal on your machine 178 | 2) Run `gcloud auth login` 179 | 3) This will open a browser window. Use it to sign in to your account. 180 | 4) Ensure you have an ssh key set up and added to your project. Follow the instructions [here](https://cloud.google.com/compute/docs/instances/adding-removing-ssh-keys) for this 181 | 5) To ssh to your machine, run the following in your terminal: `gcloud beta compute ssh --zone "<zone>" "<instance-name>" --project "<project-id>"` 182 | 6) If you configured a passphrase on your ssh key, enter it when prompted 183 | -- Upon successful login, your terminal should look similar to the image below 184 | 185 | #### SSH from the browser 186 | 187 | 1) Ensure that you've set up [OS Login](https://cloud.google.com/compute/docs/instances/managing-instance-access#enable_oslogin). You can set this on the project level or instance level. Here is an example of setting this at the instance level: 188 | 189 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/gcp-ssh-to-vm.png "SSH to the VM") 190 | 191 | 2) Whitelist the Google IP ranges [listed here](https://www.gstatic.com/ipranges/goog.json) for port 22 in your custom network. Connections from the browser are initiated from a Google IP, not your network's IP.
192 | 3) Navigate back to the VM instance screen 193 | 4) Click the "SSH" button to the right of your VM's listing 194 | 195 | ### Cloning this Git Repository to the VM 196 | 197 | #### SSH key configuration in Github 198 | We use ssh keys to manage both this git repository with the Airflow code and the one containing dbt code. You will need access to manage ssh keys for your repository (in Settings > Deploy Keys > Add Key). Below is an example of creating an ssh key and granting access in Github: 199 | 200 | 1) Generate ssh key: `$ ssh-keygen -t ed25519 -C "your_email@example.com"` 201 | 2) Choose where to save the key, e.g. $HOME/.ssh/ 202 | 3) Start the ssh agent in the background: `eval "$(ssh-agent -s)"` 203 | 4) If the configuration file doesn't exist, create it: `vim ~/.ssh/config` 204 | 5) Open the config file and replace the key name as necessary: 205 | 206 | ``` 207 | Host github.com-airflow-fivetran-dbt 208 | AddKeysToAgent yes 209 | IdentityFile ~/.ssh/<key-name> 210 | ``` 211 | 212 | 6) Add the ssh key to the agent: `ssh-add ~/.ssh/<key-name>` 213 | Note: It's useful to add a line to your `.bashrc` or `.zshrc` file to automatically start the agent and add your ssh keys each time you open a terminal. 214 | 7) run `cd ~/.ssh/` 215 | 8) run `cat <key-name>.pub` 216 | 9) Copy the output on the screen 217 | 10) In Github, add the public key to the repository. This is in Settings > Deploy Keys > Add New Key. The screenshot below shows what this looks like. 218 | 219 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/git-repo-ssh-keys.png "Adding Deploy Keys to a Repository") 220 | 221 | 11) Run `cd /srv/airflow` 222 | 12) Run `git clone git@github.com:fishtown-analytics/airflow-fivetran-dbt.git` to clone the repository 223 | 224 | ### Cloning the dbt Code repository in dbt Cloud 225 | 226 | Note, this repository is related to [another](https://github.com/fishtown-analytics/airflow-fivetran-dbt--dbt-jobs) which contains the dbt code run in the job triggered from Airflow. You'll need to set up a similar repository to run the dbt jobs for your setup. Instructions for cloning git repositories in dbt Cloud can be found [here](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url) 227 | 228 | Once you've set up ssh keys for both the airflow and dbt code repositories, clone the respective codebases on the airflow server and in dbt Cloud. Instructions for configuring Github repositories in dbt Cloud are [here](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application) 229 | 230 | ### Airflow environment setup 231 | Make sure you have the Fivetran API Key, dbt Cloud API Key, and dbt Cloud Account ID handy before going further. These are stored as environment variables for Airflow. 232 | 233 | 1) Run `source airflow-fivetran-dbt/airflow-setup/start-airflow-venv.sh` 234 | 2) Set the environment variables for the Fivetran API Key, dbt Cloud API Key and dbt Cloud Account ID 235 | 3) Feel free to manage the virtual environment and environment variables as suits you 236 | 237 | Below is a description of each environment variable set by the script. 238 | 239 | * `FIVETRAN_API_KEY`: This is a base64 encoded value of your account's `<api-key>:<api-secret>` pair (see the encoding example after this list). [This link from Fivetran](https://fivetran.com/docs/rest-api/getting-started) documents how to generate this value. For example, an API key of `d9c4511349dd4b86` and API secret of `1f6f2d161365888a1943160ccdb8d968` encode to `ZDljNDUxMTM0OWRkNGI4NjoxZjZmMmQxNjEzNjU4ODhhMTk0MzE2MGNjZGI4ZDk2OA==`. The specific values will be different on your system. 240 | * `FIVETRAN_DATETIME_FORMAT` set to `%Y-%m-%dT%H:%M:%S.%fZ` for a datetime like `2018-12-01T15:43:29.013729Z` 241 | * `AIRFLOW_DATETIME_FORMAT` set to `%Y-%m-%dT%H:%M:%S.%fZ` for a datetime like `2018-12-01T15:43:29.013729Z` 242 | * `DBT_ACCOUNT_ID` which can be obtained from the URL when logged in to dbt Cloud. For example, in the URL cloud.getdbt.com/#/accounts/`<account-id>`/projects/`<project-id>`/dashboard/ 243 | * `DBT_API_KEY` which can be obtained by navigating to Profile > API Access in dbt Cloud. 244 | * `DBT_DATETIME_FORMAT` set to `%Y-%m-%dT%H:%M:%S.%fZ` for a datetime like `2018-12-01T15:43:29.013729Z`
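The encoding step can be done from a shell. A small sketch using the illustrative key/secret pair above (not real credentials):

```shell
printf '%s' 'd9c4511349dd4b86:1f6f2d161365888a1943160ccdb8d968' | base64
# ZDljNDUxMTM0OWRkNGI4NjoxZjZmMmQxNjEzNjU4ODhhMTk0MzE2MGNjZGI4ZDk2OA==
```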
245 | 246 | ### Setting up the Postgres Database 247 | 248 | Airflow persists its metadata in a database, commonly PostgreSQL. 249 | 250 | 1) Navigate to the hamburger icon at the top left > Databases > SQL 251 | 2) Click "Create Instance" 252 | 3) Give the instance a name and default user password 253 | 4) Click "Connectivity" 254 | 5) Check the option for "Private IP", associate the instance with your vpc network, and uncheck the "Public IP" option at the bottom of the Connectivity tab 255 | 6) Click "Allocate and Connect" 256 | 257 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/gcp-associate-db-instance-with-network.png "Add db instance to your network") 258 | 259 | 7) Under the SQL menu at the top left, click "Databases" 260 | 8) Click "Create database" and give your database a name 261 | 9) Under the SQL menu at the top left, click "Users" and add a new user 262 | 10) Be sure to add a user to the instance and not a Cloud IAM user 263 | 264 | ### Test database connectivity 265 | 266 | Note: make sure that the psql client is installed on your instance. This aspect is skipped in the guide linked from Medium above. If missing, you can install the client by running the following (e.g. `postgresql-client-12`): 267 | 268 | ``` 269 | sudo apt-get -y install postgresql-client-<version> 270 | ``` 271 | 272 | From the Airflow VM, test connectivity to the db instance: 273 | 274 | ``` 275 | psql -h <db-instance-ip> -U airflow-user -d airflow 276 | ``` 277 | 278 | Then enter the password you set when configuring the database. 279 | 280 | 281 | ### Start the Airflow server 282 | 283 | 1) Run the following sequence of commands: 284 | * `sudo su airflow` 285 | * `cd /srv/airflow` 286 | * `source bin/activate` 287 | * `export AIRFLOW_HOME=/srv/airflow` 288 | * `airflow db init` 289 | 290 | 2) Now you will update the `airflow.cfg` file to point airflow towards your sql database server instead of the default sqlite database. Update the following configurations in the file: 291 | 292 | `sql_alchemy_conn = postgresql+psycopg2://airflow-user:<password>@<db-instance-ip>/<db-name>` 293 | `default_impersonation = airflow` 294 | `enable_proxy_fix = True` 295 | 296 | 3) Run `airflow db init` again 297 | 4) Run 298 | ``` 299 | airflow users create \ 300 | --username <username> \ 301 | --password <password> \ 302 | --firstname <firstname> \ 303 | --lastname <lastname> \ 304 | --role Admin \ 305 | --email <email>@example.org 306 | ``` 307 | 5) Run `airflow webserver -p 8080` 308 | 6) Run `airflow scheduler` 309 | 310 | You now have a functioning system to which you can upload the [airflow code provided here](https://github.com/fishtown-analytics/airflow-fivetran-dbt). Add the load balancer configuration per instructions in the linked Medium post. Additionally, we provide service configuration files in this repository as well to run your airflow webserver and scheduler automatically upon starting up the VM; a sketch of both steps follows.
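A minimal sketch of deploying the DAG code and installing the provided systemd units (paths assume the repository was cloned to `/srv/airflow` as above; check the `ExecStart` paths inside the unit files against your layout first):

```shell
# make the example DAG and its helper modules visible to the scheduler (default dags_folder)
mkdir -p $AIRFLOW_HOME/dags
cp /srv/airflow/airflow-fivetran-dbt/src/*.py $AIRFLOW_HOME/dags/

# install and enable the provided systemd units
sudo cp /srv/airflow/airflow-fivetran-dbt/airflow-setup/*.service /etc/systemd/system/
sudo systemctl daemon-reload
sudo systemctl enable --now airflow-webserver airflow-scheduler
```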
311 | 312 | ### Running the code 313 | 314 | -- From the Airflow UI -- 315 | 316 | 1) From the DAGs list, click on the run button for the `example_fivetran_dbt_operator` DAG 317 | 318 | 2) Add the optional configuration JSON to the DAG. These inputs are accessed in the `dag_run` configuration variables within the python code, as follows: 319 | 320 | ```python 321 | connector_id = kwargs['dag_run'].conf['connector_id'] 322 | ``` 323 | 324 | ![alt text](https://github.com/fishtown-analytics/airflow-fivetran-dbt/blob/main/images/airflow-dag-trigger-ui.png "Adding configurations for an Airflow DAG run") 325 | 326 | -- From the command line -- 327 | With your virtual environment activated, run: 328 | ```shell 329 | airflow dags trigger --conf '{"connector_id": "warn_enormously", "dbt_job_name": "pokemon_aggregation_job"}' example_fivetran_dbt_operator 330 | ``` 331 | 332 | Sources 333 | ====== 334 | [1] GCP Setup Guide created by Jostein Leira: https://medium.com/grensesnittet/airflow-on-gcp-may-2020-cdcdfe594019 335 | 336 | -------------------------------------------------------------------------------- /airflow-setup/airflow-scheduler.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Airflow scheduler daemon 3 | After=network.target postgresql.service mysql.service 4 | Wants=postgresql.service mysql.service 5 | 6 | [Service] 7 | User=airflow 8 | Group=airflow 9 | Type=simple 10 | ExecStart=/srv/airflow-2.0/airflow-scheduler.sh 11 | Restart=on-failure 12 | RestartSec=5s 13 | PrivateTmp=true 14 | 15 | [Install] 16 | WantedBy=multi-user.target -------------------------------------------------------------------------------- /airflow-setup/airflow-scheduler.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Activates the virtualenv and sets AIRFLOW_HOME relative to the Airflow install directory, then starts the scheduler 3 | cd /srv/airflow-2.0 4 | 5 | source bin/activate 6 | 7 | export AIRFLOW_HOME=$(pwd) 8 | export PYTHONPATH=$(pwd) 9 | 10 | airflow scheduler -------------------------------------------------------------------------------- /airflow-setup/airflow-server.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Resolves the repository root relative to this script's location, then activates the virtualenv and sets AIRFLOW_HOME 4 | cd $(dirname "$0")/../..
5 | 6 | source venv/bin/activate 7 | export AIRFLOW_HOME=$(pwd) 8 | export PYTHONPATH=$(pwd) 9 | 10 | airflow webserver -p 8080 -------------------------------------------------------------------------------- /airflow-setup/airflow-webserver.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Airflow webserver daemon 3 | After=network.target postgresql.service mysql.service 4 | Wants=postgresql.service mysql.service 5 | 6 | [Service] 7 | User=airflow 8 | Group=airflow 9 | Type=simple 10 | ExecStart=/srv/airflow-2.0/airflow-server.sh 11 | Restart=on-failure 12 | RestartSec=5s 13 | PrivateTmp=true 14 | 15 | [Install] 16 | WantedBy=multi-user.target -------------------------------------------------------------------------------- /airflow-setup/env-setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo -n "Enter dbt API Key:"; 4 | read -s dbt_api_key 5 | echo -n "Enter Fivetran API Key:"; 6 | read -s fivetran_api_key 7 | echo -n "Enter dbt Account Id:"; 8 | read dbt_account_id 9 | 10 | # run this script as root (e.g. `sudo bash env-setup.sh`); an inline `sudo su` would pause the script 11 | apt update 12 | apt upgrade 13 | apt install software-properties-common 14 | add-apt-repository ppa:deadsnakes/ppa 15 | apt install python3.7 python3.7-venv python3.7-dev 16 | 17 | adduser airflow --disabled-login --disabled-password --gecos "Airflow system user" 18 | 19 | cd /srv 20 | 21 | # creates a virtual environment called "airflow" 22 | python3.7 -m venv airflow 23 | cd airflow 24 | source bin/activate 25 | 26 | # With an activated virtual environment 27 | pip install --upgrade pip 28 | pip install wheel 29 | pip install apache-airflow[postgres,crypto]==2.0.0 30 | chown -R airflow:airflow . 31 | chmod -R g+rwx . 32 | 33 | export AIRFLOW_HOME=/srv/airflow 34 | export FIVETRAN_API_KEY=$fivetran_api_key 35 | export DBT_API_KEY=$dbt_api_key 36 | export DBT_ACCOUNT_ID=$dbt_account_id 37 | export FIVETRAN_DATETIME_FORMAT=%Y-%m-%dT%H:%M:%S.%fZ 38 | export DBT_DATETIME_FORMAT=%Y-%m-%dT%H:%M:%S.%fZ 39 | export AIRFLOW_DATETIME_FORMAT=%Y-%m-%dT%H:%M:%S.%fZ -------------------------------------------------------------------------------- /airflow-setup/server-setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ## sourced from https://medium.com/grensesnittet/airflow-on-gcp-may-2020-cdcdfe594019 4 | 5 | echo -n "Enter dbt API Key:"; 6 | read -s dbt_api_key 7 | echo -n "Enter Fivetran API Key:"; 8 | read -s fivetran_api_key 9 | echo -n "Enter dbt Account Id:"; 10 | read dbt_account_id 11 | 12 | # run this script as root (e.g. `sudo bash server-setup.sh`); an inline `sudo su` would pause the script 13 | apt update -y 14 | apt upgrade -y 15 | apt install software-properties-common -y 16 | add-apt-repository ppa:deadsnakes/ppa -y 17 | apt install python3.7 python3.7-venv python3.7-dev -y 18 | apt install vim -y 19 | apt install postgresql-client-12 -y 20 | apt-get install git -y 21 | 22 | adduser airflow --disabled-login --disabled-password --gecos "Airflow system user" 23 | 24 | cd /srv 25 | 26 | # creates a virtual environment called "airflow" 27 | python3.7 -m venv airflow 28 | 29 | cd airflow 30 | source bin/activate 31 | 32 | # With an activated virtual environment 33 | pip install --upgrade pip 34 | pip install wheel 35 | pip install apache-airflow[postgres,crypto]==2.0.0 36 | chown -R airflow:airflow . 37 | chmod -R g+rwx .
-------------------------------------------------------------------------------- /airflow-setup/start-airflow-venv.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ## sourced from https://medium.com/grensesnittet/airflow-on-gcp-may-2020-cdcdfe594019 4 | 5 | echo -n "Enter dbt API Key:"; 6 | read -s dbt_api_key 7 | echo -n "Enter Fivetran API Key:"; 8 | read -s fivetran_api_key 9 | echo -n "Enter dbt Account Id:"; 10 | read dbt_account_id 11 | 12 | cd /srv/airflow 13 | source bin/activate 14 | 15 | export AIRFLOW_HOME=/srv/airflow 16 | export FIVETRAN_API_KEY=$fivetran_api_key 17 | export DBT_API_KEY=$dbt_api_key 18 | export DBT_ACCOUNT_ID=$dbt_account_id 19 | export FIVETRAN_DATETIME_FORMAT=%Y-%m-%dT%H:%M:%S.%fZ 20 | export DBT_DATETIME_FORMAT=%Y-%m-%dT%H:%M:%S.%fZ 21 | export AIRFLOW_DATETIME_FORMAT=%Y-%m-%dT%H:%M:%S.%fZ -------------------------------------------------------------------------------- /images/airflow-dag-process.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/airflow-dag-process.png -------------------------------------------------------------------------------- /images/airflow-dag-trigger-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/airflow-dag-trigger-ui.png -------------------------------------------------------------------------------- /images/airflow-fivetran-dbt-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/airflow-fivetran-dbt-arch.png -------------------------------------------------------------------------------- /images/dbt-lineage-graph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/dbt-lineage-graph.png -------------------------------------------------------------------------------- /images/gcp-associate-db-instance-with-network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/gcp-associate-db-instance-with-network.png -------------------------------------------------------------------------------- /images/gcp-create-firewall-rules.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/gcp-create-firewall-rules.png -------------------------------------------------------------------------------- /images/gcp-create-http-rule-example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/gcp-create-http-rule-example.png -------------------------------------------------------------------------------- /images/gcp-create-project.png: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/gcp-create-project.png -------------------------------------------------------------------------------- /images/gcp-enable-compute-engine-api.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/gcp-enable-compute-engine-api.png -------------------------------------------------------------------------------- /images/gcp-place-instance-in-network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/gcp-place-instance-in-network.png -------------------------------------------------------------------------------- /images/gcp-ssh-to-vm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/gcp-ssh-to-vm.png -------------------------------------------------------------------------------- /images/git-repo-ssh-keys.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/airflow-fivetran-dbt/5ad4a87848bded2186a41a601d8a438659025519/images/git-repo-ssh-keys.png -------------------------------------------------------------------------------- /src/dbt_cloud.py: -------------------------------------------------------------------------------- 1 | ## Sourced from: https://github.com/dwallace0723/dbt-cloud-plugin/ 2 | 3 | # -*- coding: utf-8 -*- 4 | import json 5 | import requests 6 | import time 7 | from datetime import datetime 8 | 9 | class DbtCloudApi(object): 10 | """ 11 | Class for interacting with the dbt Cloud API 12 | * :py:meth:`list_jobs` - list all Jobs for the specified Account ID 13 | * :py:meth:`get_run` - Get information about a specified Run ID 14 | * :py:meth:`trigger_job_run` - Trigger a Run for a specified Job ID 15 | * :py:meth:`get_dbt_job_run_status` - Polls a triggered Run until it finishes or the polling timeout elapses 16 | * :py:meth:`run_job` - Triggers a run for a job using the job name 17 | * :py:meth:`get_job_run_manifest` - Returns the run manifest from the target job for downstream logging 18 | """ 19 | 20 | def __init__(self, account_id, api_token, airflow_datetime_format, dbt_datetime_format): 21 | self.account_id = account_id 22 | self.api_token = api_token 23 | self.api_base = 'https://cloud.getdbt.com/api/v2' 24 | self.airflow_datetime_format = airflow_datetime_format 25 | self.dbt_datetime_format = dbt_datetime_format 26 | self.polling_timeout = 300 # timeout in seconds on polling loop 27 | 28 | def _get(self, url_suffix): 29 | url = self.api_base + url_suffix 30 | headers = {'Authorization': 'Token %s' % self.api_token} 31 | response = requests.get(url, headers=headers) 32 | if response.status_code == 200: 33 | return json.loads(response.content) 34 | else: 35 | raise RuntimeError(response.content) 36 | 37 | def _post(self, url_suffix, data=None): 38 | url = self.api_base + url_suffix 39 | print('request url: ', url) 40 | print('showing request body: ', json.dumps(data)) 41 | headers = {'Content-Type': 'application/json', 'Authorization': 'Token %s' % self.api_token} 42 | 43 | response = requests.post(url, data=json.dumps(data), headers=headers) 44 | 
45 | if response.status_code == 200: 46 | return json.loads(response.content) 47 | else: 48 | raise RuntimeError(response.text) 49 | 50 | def list_jobs(self, **kwargs): 51 | return self._get('/accounts/%s/jobs/' % self.account_id).get('data') 52 | 53 | def get_run(self, run_id, **kwargs): 54 | return self._get('/accounts/%s/runs/%s/' % (self.account_id, run_id)).get('data') 55 | 56 | def trigger_job_run(self, job_id, data=None): 57 | 58 | return self._post(url_suffix='/accounts/%s/jobs/%s/run/' % (self.account_id, job_id), data=data).get('data') 59 | 60 | def get_dbt_job_run_status(self, **kwargs): 61 | job_name = kwargs['dag_run'].conf['dbt_job_name'] 62 | 63 | ti = kwargs['ti'] 64 | run_id = ti.xcom_pull(key='dbt_run_id', task_ids='dbt_job') 65 | dbt_job_run_start_time = ti.xcom_pull(key='dbt_run_start_time', task_ids='dbt_job') 66 | dbt_job_run_start_time = datetime.strptime(dbt_job_run_start_time, self.airflow_datetime_format) 67 | 68 | tracker = 0 69 | # initialize this to None, then poll for updates 70 | # when the run_finished_at variable populates, we are done polling 71 | run_finished_at = None 72 | run_response = None 73 | run_status = None 74 | while not run_finished_at: 75 | # wait a bit between polls 76 | time.sleep(5) 77 | 78 | run_response = self.get_run(run_id=run_id) 79 | run_finished_at = run_response['finished_at'] 80 | run_status = run_response['status'] 81 | 82 | tracker += 5 83 | if tracker > self.polling_timeout: 84 | raise Exception(f'Error: dbt job run {run_id} for {job_name} failed to complete within {self.polling_timeout} seconds') 85 | 86 | if run_status == 10: # 10 == Success in the dbt Cloud v2 API 87 | return { 88 | 'message': f'job {job_name} ran successfully, finishing at {run_finished_at}', 89 | 'response': run_response 90 | } 91 | 92 | else: 93 | return { 94 | 'message': f'job {job_name} failed, finishing at {run_finished_at}', 95 | 'response': run_response 96 | } 97 | 98 | def run_job(self, **kwargs): 99 | job_name = kwargs['dag_run'].conf['dbt_job_name'] 100 | 101 | jobs = self.list_jobs() 102 | 103 | job_matches = [j for j in jobs if j['name'] == job_name] 104 | 105 | if len(job_matches) != 1: 106 | raise Exception("{} jobs found for {}".format(len(job_matches), job_name)) 107 | 108 | job_def = job_matches[0] 109 | 110 | data = { 111 | "cause": "triggered from Airflow" 112 | } 113 | 114 | run_start_time = datetime.utcnow() 115 | trigger_resp = self.trigger_job_run(job_id=job_def['id'], data=data) 116 | 117 | run_id = trigger_resp['id'] 118 | kwargs['ti'].xcom_push(key='dbt_run_id', value=str(run_id)) 119 | kwargs['ti'].xcom_push(key='dbt_run_start_time', value=run_start_time.strftime(self.airflow_datetime_format)) # format so the downstream strptime call can parse it 120 | 121 | return { 122 | 'message': f'successfully triggered job {job_name}', 123 | 'response': trigger_resp 124 | } 125 | 126 | def get_job_run_manifest(self, **kwargs): 127 | """Returns the job run artifacts from a given job run""" 128 | ti = kwargs['ti'] 129 | run_id = ti.xcom_pull(key='dbt_run_id', task_ids='dbt_job') 130 | artifact_path = 'manifest.json' 131 | return self._get(url_suffix=f'/accounts/{self.account_id}/runs/{run_id}/artifacts/{artifact_path}') 132 | 133 | def create_job(self, data=None, **kwargs): 134 | return self._post(url_suffix='/accounts/%s/jobs/' % (self.account_id), data=data) 135 | 136 | def update_job(self, job_id, data=None, **kwargs): 137 | return self._post(url_suffix='/accounts/%s/jobs/%s/' % (self.account_id, job_id), data=data)
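For reference, the REST call that `trigger_job_run` wraps is equivalent to the following sketch (`<job-id>` stands in for the id that `run_job` looks up by job name):

```shell
curl -s -X POST \
  -H "Authorization: Token $DBT_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"cause": "triggered from Airflow"}' \
  "https://cloud.getdbt.com/api/v2/accounts/$DBT_ACCOUNT_ID/jobs/<job-id>/run/"
```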
-------------------------------------------------------------------------------- /src/example_fivetran_dbt.py: -------------------------------------------------------------------------------- 1 | 2 | import os 3 | 4 | # The DAG object; we'll need this to instantiate a DAG 5 | from airflow import DAG 6 | 7 | # Operators; we need this to operate! 8 | from airflow.operators.python import PythonOperator 9 | from airflow.utils.dates import days_ago 10 | 11 | from fivetran import FivetranApi 12 | from dbt_cloud import DbtCloudApi 13 | 14 | # these are environment variables stored on the virtual environment where airflow is running 15 | FIVETRAN_API_KEY = os.getenv('FIVETRAN_API_KEY', '') 16 | FIVETRAN_DATETIME_FORMAT = os.getenv('FIVETRAN_DATETIME_FORMAT', '') 17 | 18 | AIRFLOW_DATETIME_FORMAT = os.getenv('AIRFLOW_DATETIME_FORMAT', '') 19 | 20 | DBT_ACCOUNT_ID = os.getenv('DBT_ACCOUNT_ID', '') 21 | DBT_API_KEY = os.getenv('DBT_API_KEY', '') 22 | DBT_DATETIME_FORMAT = os.getenv('DBT_DATETIME_FORMAT', '') 23 | 24 | # initialize Fivetran API module 25 | ft = FivetranApi(api_token=FIVETRAN_API_KEY, 26 | fivetran_datetime_format=FIVETRAN_DATETIME_FORMAT, 27 | airflow_datetime_format=AIRFLOW_DATETIME_FORMAT) 28 | 29 | # initialize dbt Cloud module 30 | dbt = DbtCloudApi(account_id=DBT_ACCOUNT_ID, 31 | api_token=DBT_API_KEY, 32 | airflow_datetime_format=AIRFLOW_DATETIME_FORMAT, 33 | dbt_datetime_format=DBT_DATETIME_FORMAT) 34 | 35 | # the task context is always passed to python callables in Airflow 2, so provide_context is no longer needed 36 | args = { 37 | 'owner': 'airflow', 38 | } 39 | 40 | dag = DAG( 41 | dag_id='example_fivetran_dbt_operator', 42 | default_args=args, 43 | schedule_interval=None, 44 | start_date=days_ago(2), 45 | tags=['example'], 46 | ) 47 | 48 | run_fivetran_connector_sync = PythonOperator( 49 | task_id='fivetran_connector_sync', 50 | python_callable=ft.force_connector_sync, 51 | dag=dag, 52 | ) 53 | 54 | run_get_connector_sync_status = PythonOperator( 55 | task_id='get_connector_sync_status', 56 | python_callable=ft.get_connector_sync_status, 57 | dag=dag, 58 | ) 59 | 60 | run_dbt_job = PythonOperator( 61 | task_id='dbt_job', 62 | python_callable=dbt.run_job, 63 | dag=dag, 64 | ) 65 | 66 | run_get_dbt_job_status = PythonOperator( 67 | task_id='get_dbt_job_status', 68 | python_callable=dbt.get_dbt_job_run_status, 69 | dag=dag, 70 | ) 71 | 72 | run_extract_dbt_job_run_manifest = PythonOperator( 73 | task_id='extract_dbt_job_run_manifest', 74 | python_callable=dbt.get_job_run_manifest, 75 | dag=dag, 76 | ) 77 | 78 | # create the DAG pipeline (the `>>` operator sets the upstream / downstream relationships) 79 | run_fivetran_connector_sync >> \ 80 | run_get_connector_sync_status >> \ 81 | run_dbt_job >> \ 82 | run_get_dbt_job_status >> \ 83 | run_extract_dbt_job_run_manifest -------------------------------------------------------------------------------- /src/fivetran.py: -------------------------------------------------------------------------------- 1 | 2 | # -*- coding: utf-8 -*- 3 | import json 4 | import requests 5 | import time 6 | from datetime import datetime 7 | 8 | 9 | class FivetranApi(object): 10 | """ 11 | Class for interacting with the Fivetran API 12 | * :py:meth:`get_groups` - list all groups in the target account
13 | * :py:meth:`get_group_connectors` - list all connectors attached to a given group 14 | * :py:meth:`get_connector` - get connector information 15 | * :py:meth:`force_connector_sync` - Trigger a sync of the connector's target dataset 16 | * :py:meth:`get_connector_sync_status` - Return the status of connector sync process 17 | """ 18 | 19 | def __init__(self, api_token, fivetran_datetime_format, airflow_datetime_format): 20 | self.api_token = api_token 21 | self.fivetran_datetime_format = fivetran_datetime_format 22 | self.airflow_datetime_format = airflow_datetime_format 23 | self.api_base = 'https://api.fivetran.com/v1/' 24 | self.polling_timeout = 300 # timeout in seconds on a polling loop 25 | 26 | 27 | def _get(self, url_suffix): 28 | url = self.api_base + url_suffix 29 | headers = {'Content-Type': 'application/json', 'Authorization': 'Basic %s' % self.api_token} 30 | response = requests.get(url, headers=headers) 31 | if response.status_code == 200: 32 | return json.loads(response.content) 33 | else: 34 | raise RuntimeError(response.content) 35 | 36 | def _post(self, url_suffix, data=None): 37 | url = self.api_base + url_suffix 38 | print('request url: ', url) 39 | print('showing request body: ', json.dumps(data)) 40 | headers = {'Content-Type': 'application/json', 'Authorization': 'Basic %s' % self.api_token} 41 | 42 | response = requests.post(url, data=json.dumps(data), headers=headers) 43 | 44 | if response.status_code == 200: 45 | return json.loads(response.content) 46 | else: 47 | raise RuntimeError(response.text) 48 | 49 | def get_groups(self): 50 | """Returns group information from the fivetran account""" 51 | return self._get(url_suffix='groups/').get('data') 52 | 53 | def get_group_connectors(self, group_id): 54 | """Returns information about connectors attached to a group""" 55 | return self._get(url_suffix=f'groups/{group_id}/connectors').get('data') 56 | 57 | def get_connector(self, connector_id): 58 | """Returns information about the connector under connector_id""" 59 | return self._get(url_suffix=f'connectors/{connector_id}').get('data') 60 | 61 | def force_connector_sync(self, request_body=None, **kwargs): 62 | """Triggers a run of the target connector under connector_id""" 63 | connector_id = kwargs['dag_run'].conf['connector_id'] # passed in the DAG trigger conf (see "Running the code" in the README) 64 | response = self._post(url_suffix=f'connectors/{connector_id}/force', data=request_body or {}).get('data') 65 | start_time = datetime.utcnow() # Fivetran reports sync completion times in UTC 66 | kwargs['ti'].xcom_push(key='start_time', value=start_time.strftime(self.airflow_datetime_format)) # format so the downstream strptime call can parse it 67 | 68 | return { 69 | 'message': f'successfully ran connector sync for {connector_id}', 70 | 'response': response 71 | } 72 | 73 | def get_connector_sync_status(self, **kwargs): 74 | """Checks the execution status of connector""" 75 | connector_id = kwargs['dag_run'].conf['connector_id'] # passed in the DAG trigger conf 76 | 77 | ti = kwargs['ti'] 78 | connector_sync_start_time = ti.xcom_pull(key='start_time', task_ids='fivetran_connector_sync') 79 | connector_sync_start_time = datetime.strptime(connector_sync_start_time, self.airflow_datetime_format) 80 | 81 | # use this polling process with a timeout, because fivetran 82 | # returns the last time a given connector sync completed 83 | # this COULD BE from a prior run of the sync process 84 | tracker = 0 85 | poll_for_success = True 86 | while poll_for_success: 87 | # wait a bit between polling runs 88 | time.sleep(5) 89 | # check on the sync data 
90 | response = self._get(url_suffix=f'connectors/{connector_id}').get('data') 91 | # get the sync success timestamp from the response; it is None if the connector has never completed a sync 92 | succeeded_at = response['succeeded_at'] 93 | if succeeded_at is not None: 94 | # parse the UTC timestamp so it can be compared with the UTC start time recorded when the sync was triggered 95 | succeeded_at = datetime.strptime(succeeded_at, self.fivetran_datetime_format) 96 | 97 | if succeeded_at > connector_sync_start_time: 98 | poll_for_success = False 99 | return { 100 | 'message': f'successfully returned connector sync status for {connector_id}', 101 | 'response': response 102 | } 103 | 104 | tracker += 5 105 | if tracker > self.polling_timeout: 106 | raise Exception(f'Error, the data sync for the {connector_id} connector failed to complete within {self.polling_timeout} seconds') --------------------------------------------------------------------------------
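As a closing aside, the two wrapper classes can be exercised outside Airflow to sanity-check credentials — a minimal sketch, assuming the environment variables described in the README are set:

```python
import os

from fivetran import FivetranApi
from dbt_cloud import DbtCloudApi

# instantiate the wrappers exactly as example_fivetran_dbt.py does
ft = FivetranApi(api_token=os.environ['FIVETRAN_API_KEY'],
                 fivetran_datetime_format=os.environ['FIVETRAN_DATETIME_FORMAT'],
                 airflow_datetime_format=os.environ['AIRFLOW_DATETIME_FORMAT'])
print(ft.get_groups())  # should list the Fivetran groups visible to the key

dbt = DbtCloudApi(account_id=os.environ['DBT_ACCOUNT_ID'],
                  api_token=os.environ['DBT_API_KEY'],
                  airflow_datetime_format=os.environ['AIRFLOW_DATETIME_FORMAT'],
                  dbt_datetime_format=os.environ['DBT_DATETIME_FORMAT'])
print([j['name'] for j in dbt.list_jobs()])  # should list the dbt Cloud jobs on the account
```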