├── .editorconfig
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug.yml
│   │   ├── config.yml
│   │   └── feature.yml
│   ├── dependabot.yml
│   ├── pull_request_template.md
│   ├── setup-unit.sh
│   └── workflows
│       ├── main.yml
│       ├── packages-scheduled-update.yml
│       └── packages.yml
├── .gitignore
├── LICENSE
├── README.md
├── build.gradle
├── dockerfiles
│   ├── dbt-athena.Dockerfile
│   ├── dbt-bigquery.Dockerfile
│   ├── dbt-clickhouse.Dockerfile
│   ├── dbt-databricks.Dockerfile
│   ├── dbt-dremio.Dockerfile
│   ├── dbt-duckdb.Dockerfile
│   ├── dbt-fabric.Dockerfile
│   ├── dbt-postgres.Dockerfile
│   ├── dbt-redshift.Dockerfile
│   ├── dbt-snowflake.Dockerfile
│   ├── dbt-spark.Dockerfile
│   ├── dbt-synapse.Dockerfile
│   ├── dbt-trino.Dockerfile
│   └── dbt.Dockerfile
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── lombok.config
├── settings.gradle
└── src
    ├── main
    │   ├── java
    │   │   └── io
    │   │       └── kestra
    │   │           └── plugin
    │   │               └── dbt
    │   │                   ├── ResultParser.java
    │   │                   ├── cli
    │   │                   │   ├── AbstractDbt.java
    │   │                   │   ├── AbstractRun.java
    │   │                   │   ├── Build.java
    │   │                   │   ├── Compile.java
    │   │                   │   ├── DbtCLI.java
    │   │                   │   ├── Deps.java
    │   │                   │   ├── Freshness.java
    │   │                   │   ├── List.java
    │   │                   │   ├── LogService.java
    │   │                   │   ├── Run.java
    │   │                   │   ├── Seed.java
    │   │                   │   ├── Setup.java
    │   │                   │   ├── Snapshot.java
    │   │                   │   ├── Test.java
    │   │                   │   └── package-info.java
    │   │                   ├── cloud
    │   │                   │   ├── AbstractDbtCloud.java
    │   │                   │   ├── CheckStatus.java
    │   │                   │   ├── JobScheduleDate.java
    │   │                   │   ├── JobScheduleDateType.java
    │   │                   │   ├── JobScheduleTime.java
    │   │                   │   ├── JobScheduleTimeType.java
    │   │                   │   ├── TriggerRun.java
    │   │                   │   ├── models
    │   │                   │   │   ├── Environment.java
    │   │                   │   │   ├── Job.java
    │   │                   │   │   ├── JobSchedule.java
    │   │                   │   │   ├── JobSettings.java
    │   │                   │   │   ├── JobStatus.java
    │   │                   │   │   ├── JobStatusHumanizedEnum.java
    │   │                   │   │   ├── JobTriggers.java
    │   │                   │   │   ├── LogArchiveType.java
    │   │                   │   │   ├── LogLocation.java
    │   │                   │   │   ├── ManifestArtifact.java
    │   │                   │   │   ├── Run.java
    │   │                   │   │   ├── RunResponse.java
    │   │                   │   │   ├── Status.java
    │   │                   │   │   ├── Step.java
    │   │                   │   │   └── Trigger.java
    │   │                   │   └── package-info.java
    │   │                   └── models
    │   │                       ├── Manifest.java
    │   │                       └── RunResult.java
    │   └── resources
    │       ├── META-INF
    │       │   └── services
    │       │       ├── com.fasterxml.jackson.databind.Module
    │       │       ├── io.micronaut.http.client.HttpClientFactory
    │       │       ├── io.micronaut.http.client.StreamingHttpClientFactory
    │       │       └── io.micronaut.json.JsonMapperSupplier
    │       └── icons
    │           ├── io.kestra.plugin.dbt.cli.svg
    │           ├── io.kestra.plugin.dbt.cloud.svg
    │           └── plugin-icon.svg
    └── test
        ├── java
        │   └── io
        │       └── kestra
        │           └── plugin
        │               └── dbt
        │                   ├── cli
        │                   │   ├── BuildTest.java
        │                   │   └── DbtCLITest.java
        │                   └── cloud
        │                       ├── CheckStatusTest.java
        │                       ├── MockTriggerRunTest.java
        │                       ├── SerializationTest.java
        │                       └── TriggerRunTest.java
        └── resources
            ├── allure.properties
            ├── application.yml
            ├── flows
            │   └── cloud.yaml
            ├── logback.xml
            ├── project
            │   ├── README.md
            │   ├── analyses
            │   │   └── .gitkeep
            │   ├── data
            │   │   └── .gitkeep
            │   ├── dbt_project.yml
            │   ├── macros
            │   │   └── .gitkeep
            │   ├── models
            │   │   ├── requests.sql
            │   │   ├── schema.yml
            │   │   └── stations.sql
            │   ├── seeds
            │   │   ├── schema.yml
            │   │   └── zipcode.csv
            │   ├── snapshots
            │   │   ├── .gitkeep
            │   │   └── requests.sql
            │   └── tests
            │       └── .gitkeep
            ├── responses
            │   └── run.json
            └── sanity-checks
                └── all_dbt.yaml
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset=utf-8
5 | end_of_line=lf
6 | insert_final_newline=false
7 | trim_trailing_whitespace=true
8 | indent_style=space
9 | indent_size=4
10 | continuation_indent_size=4
11 |
12 | [*.yml]
13 | indent_size=2
14 |
15 | [*.md]
16 | indent_size=2
17 |
18 | [*.yaml]
19 | indent_size=2
20 |
21 | [*.json]
22 | indent_size=2
23 |
24 | [*.css]
25 | indent_size=2
26 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug.yml:
--------------------------------------------------------------------------------
1 | name: Bug report
2 | description: File a bug report
3 | body:
4 |   - type: markdown
5 |     attributes:
6 |       value: |
7 |         Thanks for reporting an issue! Please provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs. For quick questions, you can contact us directly on [Slack](https://kestra.io/slack).
8 |   - type: textarea
9 |     attributes:
10 |       label: Describe the issue
11 |       description: A concise description of the issue and how we can reproduce it.
12 |       placeholder: Describe the issue step by step
13 |     validations:
14 |       required: true
15 |   - type: textarea
16 |     attributes:
17 |       label: Environment
18 |       description: Environment information where the problem occurs.
19 |       value: |
20 |         - Kestra Version: develop
21 |     validations:
22 |       required: false
23 | labels:
24 |   - bug
25 |   - area/plugin
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | contact_links:
2 |   - name: Chat
3 |     url: https://kestra.io/slack
4 |     about: Chat with us on Slack.
5 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature.yml:
--------------------------------------------------------------------------------
1 | name: Feature request
2 | description: Create a new feature request
3 | body:
4 |   - type: textarea
5 |     attributes:
6 |       label: Feature description
7 |       placeholder: Tell us more about your feature request
8 |     validations:
9 |       required: true
10 | labels:
11 |   - enhancement
12 |   - area/plugin
13 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # See GitHub's docs for more information on this file:
2 | # https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/configuration-options-for-dependency-updates
3 | version: 2
4 | updates:
5 |   # Maintain dependencies for GitHub Actions
6 |   - package-ecosystem: "github-actions"
7 |     directory: "/"
8 |     schedule:
9 |       # Check for updates to GitHub Actions every week
10 |       interval: "weekly"
11 |     labels:
12 |       - "dependency-upgrade"
13 |     open-pull-requests-limit: 50
14 |
15 |   # Maintain dependencies for Gradle modules
16 |   - package-ecosystem: "gradle"
17 |     directory: "/"
18 |     schedule:
19 |       # Check for updates to Gradle modules every week
20 |       interval: "weekly"
21 |     labels:
22 |       - "dependency-upgrade"
23 |     open-pull-requests-limit: 50
24 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 |
8 |
9 | ### What changes are being made and why?
10 |
11 |
12 | ---
13 |
14 | ### How the changes have been QAed?
15 |
16 |
25 |
26 | ---
27 |
28 | ### Setup Instructions
29 |
30 |
38 |
--------------------------------------------------------------------------------
/.github/setup-unit.sh:
--------------------------------------------------------------------------------
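# Decodes base64-encoded CI secrets into the credential files expected by the unit tests.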
1 | echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
2 | echo "GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json" > $GITHUB_ENV
3 |
4 | echo $DBT_APPLICATION | base64 -d > src/test/resources/application-test.yml
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Main
2 |
3 | on:
4 |   schedule:
5 |     - cron: '0 4 * * 1,2,3,4,5'
6 |   push:
7 |     branches:
8 |       - master
9 |       - main
10 |       - releases/*
11 |     tags:
12 |       - v*
13 |
14 |   pull_request:
15 |     branches:
16 |       - master
17 |       - main
18 |       - releases/*
19 |
20 |   workflow_dispatch:
21 |     inputs:
22 |       skip-test:
23 |         description: 'Skip test'
24 |         type: choice
25 |         required: true
26 |         default: 'false'
27 |         options:
28 |           - "true"
29 |           - "false"
30 |
31 | jobs:
32 |   check:
33 |     uses: kestra-io/actions/.github/workflows/plugins.yml@main
34 |     with:
35 |       skip-test: ${{ github.event.inputs.skip-test == 'true' }}
36 |     secrets: inherit
37 |
--------------------------------------------------------------------------------
/.github/workflows/packages-scheduled-update.yml:
--------------------------------------------------------------------------------
1 | name: Scheduled Packages Update
2 |
3 | on:
4 |   schedule:
5 |     - cron: '0 9 * * MON'
6 |   workflow_dispatch:
7 |
8 | jobs:
9 |   list-dockerfiles:
10 |     runs-on: ubuntu-latest
11 |     outputs:
12 |       matrix: ${{ steps.dockerfiles.outputs.matrix }}
13 |     steps:
14 |       - uses: actions/checkout@v4
15 |       - id: dockerfiles
16 |         run: echo "matrix=$(ls dockerfiles/ | jq -R -s -c 'split("\n")[:-1]')" >> $GITHUB_OUTPUT
17 |
18 |   ghcr:
19 |     runs-on: ubuntu-latest
20 |     needs: list-dockerfiles
21 |     permissions:
22 |       contents: read
23 |       packages: write
24 |     strategy:
25 |       matrix:
26 |         image: ${{ fromJson(needs.list-dockerfiles.outputs.matrix) }}
27 |     steps:
28 |       - name: checkout
29 |         uses: actions/checkout@v4
30 |
31 |       - name: Set up QEMU
32 |         uses: docker/setup-qemu-action@v3
33 |
34 |       - name: Set up Docker Buildx
35 |         uses: docker/setup-buildx-action@v3
36 |
37 |       - name: GHCR Login
38 |         uses: docker/login-action@v3
39 |         with:
40 |           registry: ghcr.io
41 |           username: ${{ github.actor }}
42 |           password: ${{ secrets.GITHUB_TOKEN }}
43 |
44 |       - id: image-tag # example output: "ghcr.io/kestra-io/dbt-snowflake:latest"
45 |         run: |
46 |           export IMAGE=$(basename ${{ matrix.image }} .Dockerfile)
47 |           echo "image_url=ghcr.io/kestra-io/$IMAGE:latest" >> $GITHUB_OUTPUT
48 |           echo "file=dockerfiles/${{ matrix.image }}" >> $GITHUB_OUTPUT
49 |
50 |       - name: Build and push Docker image
51 |         uses: docker/build-push-action@v6
52 |         continue-on-error: true # avoid blocking other images
53 |         with:
54 |           context: .
55 |           push: true
56 |           tags: ${{ steps.image-tag.outputs.image_url }}
57 |           file: ${{ steps.image-tag.outputs.file }}
58 |           platforms: linux/amd64,linux/arm64
59 |
--------------------------------------------------------------------------------
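Note: in the scheduled workflow above, the `list-dockerfiles` job turns the output of `ls dockerfiles/` into a compact JSON array (with the Dockerfiles in this repository, roughly `["dbt-athena.Dockerfile", "dbt-bigquery.Dockerfile", ..., "dbt.Dockerfile"]`), and the `ghcr` job expands that array via `fromJson` into one build-and-push per adapter, each tagged `ghcr.io/kestra-io/<adapter>:latest`.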
/.github/workflows/packages.yml:
--------------------------------------------------------------------------------
1 | name: Container Image Packages
2 |
3 | on:
4 |   push:
5 |     paths:
6 |       - dockerfiles/*
7 |     branches:
8 |       - master
9 |
10 | jobs:
11 |   changes:
12 |     name: Dockerfile changes
13 |     runs-on: ubuntu-latest
14 |     outputs:
15 |       dockerfiles: ${{ steps.filter.outputs.dockerfiles_files }}
16 |       dockerfiles_changed: ${{ steps.filter.outputs.dockerfiles }}
17 |     steps:
18 |       - name: Checkout
19 |         uses: actions/checkout@v4
20 |       - uses: dorny/paths-filter@v3
21 |         id: filter
22 |         with:
23 |           list-files: json
24 |           filters: |
25 |             dockerfiles:
26 |               - added|modified: 'dockerfiles/*.Dockerfile'
27 |       - name: Generate Markdown Summary
28 |         run: |
29 |           echo New/modified Dockerfiles: ${{ steps.filter.outputs.dockerfiles_files }} >> $GITHUB_STEP_SUMMARY
30 |
31 |   ghcr:
32 |     needs: [changes]
33 |     if: ${{ needs.changes.outputs.dockerfiles_changed == 'true' }}
34 |     runs-on: ubuntu-latest
35 |     permissions:
36 |       contents: read
37 |       packages: write
38 |     strategy:
39 |       matrix:
40 |         image: ${{ fromJson(needs.changes.outputs.dockerfiles) }}
41 |     steps:
42 |       - name: checkout
43 |         uses: actions/checkout@v4
44 |
45 |       - name: Set up QEMU
46 |         uses: docker/setup-qemu-action@v3
47 |
48 |       - name: Set up Docker Buildx
49 |         uses: docker/setup-buildx-action@v3
50 |
51 |       - name: GHCR Login
52 |         uses: docker/login-action@v3
53 |         with:
54 |           registry: ghcr.io
55 |           username: ${{ github.actor }}
56 |           password: ${{ secrets.GITHUB_TOKEN }}
57 |
58 |       - id: image-tag # example output: "ghcr.io/kestra-io/pydata:latest"
59 |         run: |
60 |           export IMAGE=$(basename ${{ matrix.image }} .Dockerfile)
61 |           echo "image_url=ghcr.io/kestra-io/$IMAGE:latest" >> $GITHUB_OUTPUT
62 |           echo "file=${{ matrix.image }}" >> $GITHUB_OUTPUT
63 |
64 |       - name: Build and push Docker image
65 |         uses: docker/build-push-action@v6
66 |         with:
67 |           context: .
68 |           push: true
69 |           tags: ${{ steps.image-tag.outputs.image_url }}
70 |           file: ${{ steps.image-tag.outputs.file }}
71 |           platforms: linux/amd64,linux/arm64
72 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | Thumbs.db
2 | .DS_Store
3 | .gradle
4 | build/
5 | target/
6 | out/
7 | .idea
8 | .vscode
9 | *.iml
10 | *.ipr
11 | *.iws
12 | .project
13 | .settings
14 | .classpath
15 | .attach*
16 | src/test/resources/application-test.yml
17 | src/test/resources/project/logs/
18 | src/test/resources/project/state/
19 | src/test/resources/project/.profile
20 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | https://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | Copyright 2019 Nigh Tech.
180 |
181 | Licensed under the Apache License, Version 2.0 (the "License");
182 | you may not use this file except in compliance with the License.
183 | You may obtain a copy of the License at
184 |
185 | https://www.apache.org/licenses/LICENSE-2.0
186 |
187 | Unless required by applicable law or agreed to in writing, software
188 | distributed under the License is distributed on an "AS IS" BASIS,
189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
190 | See the License for the specific language governing permissions and
191 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
8 | Event-Driven Declarative Orchestrator
33 | Get started with Kestra in 4 minutes.
36 | # Kestra dbt plugin
37 |
38 | Plugin to orchestrate dbt models and tests using [dbt core](https://github.com/dbt-labs/dbt-core) or [dbt Cloud](https://www.getdbt.com/).
39 |
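Below is a minimal, illustrative flow showing how the CLI task from this plugin can be used. The task type `io.kestra.plugin.dbt.cli.DbtCLI` and the `ghcr.io/kestra-io/dbt-duckdb` image come from this repository; the remaining property names and values are assumptions to verify against the published plugin documentation.

```yaml
# Illustrative sketch only; check property names against the plugin docs.
id: dbt_build
namespace: company.team  # assumed namespace

tasks:
  - id: dbt
    type: io.kestra.plugin.dbt.cli.DbtCLI
    # Image built from dockerfiles/dbt-duckdb.Dockerfile in this repository (assumed tag).
    containerImage: ghcr.io/kestra-io/dbt-duckdb:latest
    commands:
      - dbt build
```

For dbt Cloud, the `io.kestra.plugin.dbt.cloud.TriggerRun` task plays the equivalent role.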
40 | 
41 |
42 |
43 | ## Documentation
44 | * Full documentation can be found under [kestra.io/docs](https://kestra.io/docs)
45 | * Documentation for developing a plugin is included in the [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/).
46 |
47 |
48 | ## License
49 | Apache 2.0 © [Kestra Technologies](https://kestra.io)
50 |
51 |
52 | ## Stay up to date
53 |
54 | We release new versions every month. Give the [main repository](https://github.com/kestra-io/kestra) a star to stay up to date with the latest releases and get notified about future updates.
55 |
56 | 
57 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'java-library'
3 | id "idea"
4 | id 'jacoco'
5 | id "com.adarshr.test-logger" version "4.0.0"
6 | id "com.github.johnrengelman.shadow" version "8.1.1"
7 | id 'ru.vyarus.java-lib' version '3.0.0'
8 | id 'ru.vyarus.github-info' version '2.0.0'
9 | id 'signing'
10 | id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
11 | id "com.github.ben-manes.versions" version "0.52.0"
12 | id 'net.researchgate.release' version '3.1.0'
13 | }
14 |
15 | def isBuildSnapshot = version.toString().endsWith("-SNAPSHOT")
16 |
17 | repositories {
18 | mavenLocal()
19 | mavenCentral()
20 | if (isBuildSnapshot) {
21 | maven { url "https://s01.oss.sonatype.org/content/repositories/snapshots/" }
22 | }
23 | }
24 |
25 | sourceCompatibility = 21
26 | targetCompatibility = 21
27 |
28 | group "io.kestra.plugin"
29 | description 'Integrate dbt data transformations into Kestra orchestration pipelines.'
30 |
31 | tasks.withType(JavaCompile) {
32 | options.encoding = "UTF-8"
33 | options.compilerArgs.add("-parameters")
34 | }
35 |
36 | dependencies {
37 | // Platform
38 | annotationProcessor enforcedPlatform("io.kestra:platform:$kestraVersion")
39 | implementation enforcedPlatform("io.kestra:platform:$kestraVersion")
40 | api enforcedPlatform("io.kestra:platform:$kestraVersion")
41 |
42 | // lombok
43 | annotationProcessor "org.projectlombok:lombok"
44 | compileOnly "org.projectlombok:lombok"
45 |
46 | // Micronaut
47 | compileOnly "io.micronaut:micronaut-http-client"
48 | compileOnly "io.micronaut.reactor:micronaut-reactor"
49 | compileOnly "io.micronaut:micronaut-jackson-databind"
50 |
51 | compileOnly group: 'com.fasterxml.jackson.module', name: 'jackson-module-parameter-names'
52 | compileOnly group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-guava'
53 | compileOnly group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-jsr310'
54 |
55 | // kestra
56 | annotationProcessor group: "io.kestra", name: "processor", version: kestraVersion
57 | compileOnly group: "io.kestra", name: "core", version: kestraVersion
58 | compileOnly group: "io.kestra", name: "script", version: kestraVersion
59 | }
60 |
61 |
62 | /**********************************************************************************************************************\
63 | * Test
64 | **********************************************************************************************************************/
65 | test {
66 | useJUnitPlatform()
67 | }
68 |
69 | testlogger {
70 | theme "mocha-parallel"
71 | showExceptions true
72 | showFullStackTraces true
73 | showStandardStreams true
74 | showPassedStandardStreams false
75 | showSkippedStandardStreams true
76 | }
77 |
78 | dependencies {
79 | // Platform
80 | testAnnotationProcessor enforcedPlatform("io.kestra:platform:$kestraVersion")
81 | testImplementation enforcedPlatform("io.kestra:platform:$kestraVersion")
82 |
83 | // lombok
84 | testAnnotationProcessor "org.projectlombok:lombok"
85 | testCompileOnly 'org.projectlombok:lombok'
86 |
87 | // micronaut
88 | testAnnotationProcessor "io.micronaut:micronaut-inject-java"
89 | testAnnotationProcessor "io.micronaut.validation:micronaut-validation-processor"
90 | testImplementation "io.micronaut.test:micronaut-test-junit5"
91 | testImplementation "io.micronaut:micronaut-jackson-databind"
92 |
93 | // test deps needed only to have a runner
94 | testAnnotationProcessor group: "io.kestra", name: "processor", version: kestraVersion
95 |
96 | testImplementation group: "io.kestra", name: "core", version: kestraVersion
97 | testImplementation group: "io.kestra", name: "tests", version: kestraVersion
98 | testImplementation group: "io.kestra", name: "script", version: kestraVersion
99 | testImplementation group: "io.kestra", name: "repository-memory", version: kestraVersion
100 | testImplementation group: "io.kestra", name: "runner-memory", version: kestraVersion
101 | testImplementation group: "io.kestra", name: "storage-local", version: kestraVersion
102 |
103 | // test
104 | testImplementation "org.junit.jupiter:junit-jupiter-engine"
105 | testImplementation "org.junit.jupiter:junit-jupiter-params"
106 | testImplementation "org.hamcrest:hamcrest"
107 | testImplementation "org.hamcrest:hamcrest-library"
108 | testImplementation "org.wiremock:wiremock-jetty12"
109 | }
110 |
111 | /**********************************************************************************************************************\
112 | * Allure Reports
113 | **********************************************************************************************************************/
114 | dependencies {
115 | // Platform
116 | testImplementation enforcedPlatform("io.kestra:platform:$kestraVersion")
117 | testImplementation "io.qameta.allure:allure-junit5"
118 | }
119 |
120 | configurations {
121 | agent {
122 | canBeResolved = true
123 | canBeConsumed = true
124 | }
125 | }
126 |
127 | dependencies {
128 | agent "org.aspectj:aspectjweaver:1.9.24"
129 | }
130 |
131 | test {
132 | jvmArgs = [ "-javaagent:${configurations.agent.singleFile}" ]
133 | }
134 |
135 | /**********************************************************************************************************************\
136 | * Jacoco
137 | **********************************************************************************************************************/
138 | test {
139 | finalizedBy jacocoTestReport
140 | }
141 |
142 | jacocoTestReport {
143 | dependsOn test
144 | }
145 |
146 | /**********************************************************************************************************************\
147 | * Publish
148 | **********************************************************************************************************************/
149 | nexusPublishing {
150 | repositoryDescription = "${project.group}:${rootProject.name}:${project.version}"
151 | useStaging = !isBuildSnapshot
152 | repositories {
153 | sonatype {
154 | nexusUrl.set(uri("https://s01.oss.sonatype.org/service/local/"))
155 | snapshotRepositoryUrl.set(uri("https://s01.oss.sonatype.org/content/repositories/snapshots/"))
156 | }
157 | }
158 | }
159 |
160 | tasks.withType(GenerateModuleMetadata).configureEach {
161 | // Suppress this validation error as we want to enforce the Kestra platform
162 | suppressedValidationErrors.add('enforced-platform')
163 | }
164 |
165 | jar {
166 | manifest {
167 | attributes(
168 | "X-Kestra-Name": project.name,
169 | "X-Kestra-Title": "dbt",
170 | "X-Kestra-Group": project.group + ".dbt",
171 | "X-Kestra-Description": project.description,
172 | "X-Kestra-Version": project.version
173 | )
174 | }
175 | }
176 |
177 | maven.pom {
178 | developers {
179 | developer {
180 | id = "tchiotludo"
181 | name = "Ludovic Dehon"
182 | }
183 | }
184 | }
185 |
186 | shadowJar {
187 | archiveClassifier.set(null)
188 | mergeServiceFiles()
189 | }
190 |
191 | github {
192 | user 'kestra-io'
193 | license 'Apache'
194 | }
195 |
196 | /**********************************************************************************************************************\
197 | * Version
198 | **********************************************************************************************************************/
199 | release {
200 | preCommitText = 'chore(version):'
201 | preTagCommitMessage = 'update to version'
202 | tagCommitMessage = 'tag version'
203 | newVersionCommitMessage = 'update snapshot version'
204 | tagTemplate = 'v${version}'
205 | buildTasks = ['classes']
206 | git {
207 | requireBranch.set('master')
208 | }
209 |
210 | // Dynamically set properties with default values
211 | failOnSnapshotDependencies = (project.hasProperty('release.failOnSnapshotDependencies')
212 | ? project.property('release.failOnSnapshotDependencies').toBoolean()
213 | : true)
214 | pushReleaseVersionBranch = (project.hasProperty('release.pushReleaseVersionBranch')
215 | ? project.property('release.pushReleaseVersionBranch').toString()
216 | : null)
217 | }
218 |
219 | /**********************************************************************************************************************\
220 | * Dev
221 | **********************************************************************************************************************/
222 | idea {
223 | module {
224 | downloadJavadoc = true
225 | downloadSources = true
226 | }
227 | }
228 |
--------------------------------------------------------------------------------
/dockerfiles/dbt-athena.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-athena-community Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-athena-community
--------------------------------------------------------------------------------
/dockerfiles/dbt-bigquery.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-bigquery Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-bigquery
--------------------------------------------------------------------------------
/dockerfiles/dbt-clickhouse.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-clickhouse Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-clickhouse
--------------------------------------------------------------------------------
/dockerfiles/dbt-databricks.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-databricks Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-databricks
--------------------------------------------------------------------------------
/dockerfiles/dbt-dremio.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-dremio Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-dremio
--------------------------------------------------------------------------------
/dockerfiles/dbt-duckdb.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-duckdb Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra requests dbt-duckdb
6 |
--------------------------------------------------------------------------------
/dockerfiles/dbt-fabric.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-fabric Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-fabric
--------------------------------------------------------------------------------
/dockerfiles/dbt-postgres.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-postgres Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-postgres
--------------------------------------------------------------------------------
/dockerfiles/dbt-redshift.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-redshift Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-redshift
--------------------------------------------------------------------------------
/dockerfiles/dbt-snowflake.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-snowflake Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-snowflake
--------------------------------------------------------------------------------
/dockerfiles/dbt-spark.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-spark Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-spark
--------------------------------------------------------------------------------
/dockerfiles/dbt-synapse.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-synapse Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-synapse
--------------------------------------------------------------------------------
/dockerfiles/dbt-trino.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-trino Python package"
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-trino
--------------------------------------------------------------------------------
/dockerfiles/dbt.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 | LABEL org.opencontainers.image.source=https://github.com/kestra-io/plugin-dbt
3 | LABEL org.opencontainers.image.description="Image with the latest dbt-core Python package including the DuckDB adapter."
4 | RUN apt-get update && apt-get install -y git && apt-get clean
5 | RUN pip install --no-cache-dir kestra dbt-bigquery dbt-snowflake dbt-duckdb
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | version=0.24.0-SNAPSHOT
2 | kestraVersion=[0.23,)
3 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kestra-io/plugin-dbt/542963181c6d0f3ae9b84c2d0941629804d119cf/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-bin.zip
4 | networkTimeout=10000
5 | validateDistributionUrl=true
6 | zipStoreBase=GRADLE_USER_HOME
7 | zipStorePath=wrapper/dists
8 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | #
4 | # Copyright © 2015-2021 the original authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 | # SPDX-License-Identifier: Apache-2.0
19 | #
20 |
21 | ##############################################################################
22 | #
23 | # Gradle start up script for POSIX generated by Gradle.
24 | #
25 | # Important for running:
26 | #
27 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
28 | # noncompliant, but you have some other compliant shell such as ksh or
29 | # bash, then to run this script, type that shell name before the whole
30 | # command line, like:
31 | #
32 | # ksh Gradle
33 | #
34 | # Busybox and similar reduced shells will NOT work, because this script
35 | # requires all of these POSIX shell features:
36 | # * functions;
37 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
38 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»;
39 | # * compound commands having a testable exit status, especially «case»;
40 | # * various built-in commands including «command», «set», and «ulimit».
41 | #
42 | # Important for patching:
43 | #
44 | # (2) This script targets any POSIX shell, so it avoids extensions provided
45 | # by Bash, Ksh, etc; in particular arrays are avoided.
46 | #
47 | # The "traditional" practice of packing multiple parameters into a
48 | # space-separated string is a well documented source of bugs and security
49 | # problems, so this is (mostly) avoided, by progressively accumulating
50 | # options in "$@", and eventually passing that to Java.
51 | #
52 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
53 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
54 | # see the in-line comments for details.
55 | #
56 | # There are tweaks for specific operating systems such as AIX, CygWin,
57 | # Darwin, MinGW, and NonStop.
58 | #
59 | # (3) This script is generated from the Groovy template
60 | # https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
61 | # within the Gradle project.
62 | #
63 | # You can find Gradle at https://github.com/gradle/gradle/.
64 | #
65 | ##############################################################################
66 |
67 | # Attempt to set APP_HOME
68 |
69 | # Resolve links: $0 may be a link
70 | app_path=$0
71 |
72 | # Need this for daisy-chained symlinks.
73 | while
74 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
75 | [ -h "$app_path" ]
76 | do
77 | ls=$( ls -ld "$app_path" )
78 | link=${ls#*' -> '}
79 | case $link in #(
80 | /*) app_path=$link ;; #(
81 | *) app_path=$APP_HOME$link ;;
82 | esac
83 | done
84 |
85 | # This is normally unused
86 | # shellcheck disable=SC2034
87 | APP_BASE_NAME=${0##*/}
88 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
89 | APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
90 |
91 | # Use the maximum available, or set MAX_FD != -1 to use that value.
92 | MAX_FD=maximum
93 |
94 | warn () {
95 | echo "$*"
96 | } >&2
97 |
98 | die () {
99 | echo
100 | echo "$*"
101 | echo
102 | exit 1
103 | } >&2
104 |
105 | # OS specific support (must be 'true' or 'false').
106 | cygwin=false
107 | msys=false
108 | darwin=false
109 | nonstop=false
110 | case "$( uname )" in #(
111 | CYGWIN* ) cygwin=true ;; #(
112 | Darwin* ) darwin=true ;; #(
113 | MSYS* | MINGW* ) msys=true ;; #(
114 | NONSTOP* ) nonstop=true ;;
115 | esac
116 |
117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
118 |
119 |
120 | # Determine the Java command to use to start the JVM.
121 | if [ -n "$JAVA_HOME" ] ; then
122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
123 | # IBM's JDK on AIX uses strange locations for the executables
124 | JAVACMD=$JAVA_HOME/jre/sh/java
125 | else
126 | JAVACMD=$JAVA_HOME/bin/java
127 | fi
128 | if [ ! -x "$JAVACMD" ] ; then
129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
130 |
131 | Please set the JAVA_HOME variable in your environment to match the
132 | location of your Java installation."
133 | fi
134 | else
135 | JAVACMD=java
136 | if ! command -v java >/dev/null 2>&1
137 | then
138 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
139 |
140 | Please set the JAVA_HOME variable in your environment to match the
141 | location of your Java installation."
142 | fi
143 | fi
144 |
145 | # Increase the maximum file descriptors if we can.
146 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
147 | case $MAX_FD in #(
148 | max*)
149 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
150 | # shellcheck disable=SC2039,SC3045
151 | MAX_FD=$( ulimit -H -n ) ||
152 | warn "Could not query maximum file descriptor limit"
153 | esac
154 | case $MAX_FD in #(
155 | '' | soft) :;; #(
156 | *)
157 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
158 | # shellcheck disable=SC2039,SC3045
159 | ulimit -n "$MAX_FD" ||
160 | warn "Could not set maximum file descriptor limit to $MAX_FD"
161 | esac
162 | fi
163 |
164 | # Collect all arguments for the java command, stacking in reverse order:
165 | # * args from the command line
166 | # * the main class name
167 | # * -classpath
168 | # * -D...appname settings
169 | # * --module-path (only if needed)
170 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
171 |
172 | # For Cygwin or MSYS, switch paths to Windows format before running java
173 | if "$cygwin" || "$msys" ; then
174 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
175 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
176 |
177 | JAVACMD=$( cygpath --unix "$JAVACMD" )
178 |
179 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
180 | for arg do
181 | if
182 | case $arg in #(
183 | -*) false ;; # don't mess with options #(
184 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
185 | [ -e "$t" ] ;; #(
186 | *) false ;;
187 | esac
188 | then
189 | arg=$( cygpath --path --ignore --mixed "$arg" )
190 | fi
191 | # Roll the args list around exactly as many times as the number of
192 | # args, so each arg winds up back in the position where it started, but
193 | # possibly modified.
194 | #
195 | # NB: a `for` loop captures its iteration list before it begins, so
196 | # changing the positional parameters here affects neither the number of
197 | # iterations, nor the values presented in `arg`.
198 | shift # remove old arg
199 | set -- "$@" "$arg" # push replacement arg
200 | done
201 | fi
202 |
203 |
204 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
205 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
206 |
207 | # Collect all arguments for the java command:
208 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
209 | # and any embedded shellness will be escaped.
210 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
211 | # treated as '${Hostname}' itself on the command line.
212 |
213 | set -- \
214 | "-Dorg.gradle.appname=$APP_BASE_NAME" \
215 | -classpath "$CLASSPATH" \
216 | org.gradle.wrapper.GradleWrapperMain \
217 | "$@"
218 |
219 | # Stop when "xargs" is not available.
220 | if ! command -v xargs >/dev/null 2>&1
221 | then
222 | die "xargs is not available"
223 | fi
224 |
225 | # Use "xargs" to parse quoted args.
226 | #
227 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed.
228 | #
229 | # In Bash we could simply go:
230 | #
231 | # readarray ARGS < <( xargs -n1 <<<"$var" ) &&
232 | # set -- "${ARGS[@]}" "$@"
233 | #
234 | # but POSIX shell has neither arrays nor command substitution, so instead we
235 | # post-process each arg (as a line of input to sed) to backslash-escape any
236 | # character that might be a shell metacharacter, then use eval to reverse
237 | # that process (while maintaining the separation between arguments), and wrap
238 | # the whole thing up as a single "set" statement.
239 | #
240 | # This will of course break if any of these variables contains a newline or
241 | # an unmatched quote.
242 | #
243 |
244 | eval "set -- $(
245 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
246 | xargs -n1 |
247 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
248 | tr '\n' ' '
249 | )" '"$@"'
250 |
251 | exec "$JAVACMD" "$@"
252 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 | @rem SPDX-License-Identifier: Apache-2.0
17 | @rem
18 |
19 | @if "%DEBUG%"=="" @echo off
20 | @rem ##########################################################################
21 | @rem
22 | @rem Gradle startup script for Windows
23 | @rem
24 | @rem ##########################################################################
25 |
26 | @rem Set local scope for the variables with windows NT shell
27 | if "%OS%"=="Windows_NT" setlocal
28 |
29 | set DIRNAME=%~dp0
30 | if "%DIRNAME%"=="" set DIRNAME=.
31 | @rem This is normally unused
32 | set APP_BASE_NAME=%~n0
33 | set APP_HOME=%DIRNAME%
34 |
35 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
36 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
37 |
38 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
39 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
40 |
41 | @rem Find java.exe
42 | if defined JAVA_HOME goto findJavaFromJavaHome
43 |
44 | set JAVA_EXE=java.exe
45 | %JAVA_EXE% -version >NUL 2>&1
46 | if %ERRORLEVEL% equ 0 goto execute
47 |
48 | echo. 1>&2
49 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
50 | echo. 1>&2
51 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2
52 | echo location of your Java installation. 1>&2
53 |
54 | goto fail
55 |
56 | :findJavaFromJavaHome
57 | set JAVA_HOME=%JAVA_HOME:"=%
58 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
59 |
60 | if exist "%JAVA_EXE%" goto execute
61 |
62 | echo. 1>&2
63 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
64 | echo. 1>&2
65 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2
66 | echo location of your Java installation. 1>&2
67 |
68 | goto fail
69 |
70 | :execute
71 | @rem Setup the command line
72 |
73 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
74 |
75 |
76 | @rem Execute Gradle
77 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
78 |
79 | :end
80 | @rem End local scope for the variables with windows NT shell
81 | if %ERRORLEVEL% equ 0 goto mainEnd
82 |
83 | :fail
84 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
85 | rem the _cmd.exe /c_ return code!
86 | set EXIT_CODE=%ERRORLEVEL%
87 | if %EXIT_CODE% equ 0 set EXIT_CODE=1
88 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
89 | exit /b %EXIT_CODE%
90 |
91 | :mainEnd
92 | if "%OS%"=="Windows_NT" endlocal
93 |
94 | :omega
95 |
--------------------------------------------------------------------------------
/lombok.config:
--------------------------------------------------------------------------------
1 | config.stopBubbling = true
2 | lombok.addLombokGeneratedAnnotation = true
3 | lombok.anyConstructor.addConstructorProperties = true
4 | lombok.equalsAndHashCode.callSuper = call
5 | lombok.tostring.callsuper = call
6 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'plugin-dbt'
2 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/ResultParser.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt;
2 |
3 | import com.fasterxml.jackson.annotation.JsonInclude;
4 | import com.fasterxml.jackson.databind.ObjectMapper;
5 | import io.kestra.core.exceptions.IllegalVariableEvaluationException;
6 | import io.kestra.core.models.executions.TaskRun;
7 | import io.kestra.core.models.executions.TaskRunAttempt;
8 | import io.kestra.core.models.executions.metrics.Counter;
9 | import io.kestra.core.models.flows.State;
10 | import io.kestra.core.runners.RunContext;
11 | import io.kestra.core.runners.WorkerTaskResult;
12 | import io.kestra.core.serializers.JacksonMapper;
13 | import io.kestra.core.utils.IdUtils;
14 | import io.kestra.plugin.dbt.models.RunResult;
15 |
16 | import java.io.File;
17 | import java.io.IOException;
18 | import java.net.URI;
19 | import java.time.Instant;
20 | import java.util.ArrayList;
21 | import java.util.List;
22 | import java.util.Objects;
23 |
24 | import static io.kestra.core.utils.Rethrow.throwFunction;
25 |
26 | public abstract class ResultParser {
27 | static final protected ObjectMapper MAPPER = JacksonMapper.ofJson(false)
28 | .setSerializationInclusion(JsonInclude.Include.NON_NULL);
29 |
30 | public static URI parseManifest(RunContext runContext, File file) throws IOException {
31 | return runContext.storage().putFile(file);
32 | }
33 |
34 | public static URI parseRunResult(RunContext runContext, File file) throws IOException, IllegalVariableEvaluationException {
35 | RunResult result = MAPPER.readValue(
36 | file,
37 | RunResult.class
38 | );
39 |
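// Each dbt node result is converted below into a dynamic WorkerTaskResult so that per-node durations can be displayed in Kestra's Gantt view.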
40 | java.util.List<WorkerTaskResult> workerTaskResults = result
41 | .getResults()
42 | .stream()
43 | .map(throwFunction(r -> {
44 | ArrayList<State.History> histories = new ArrayList<>();
45 |
46 | // Timing entries are not guaranteed to be present in API results, so handle them as optional
47 | r.getTiming()
48 | .stream()
49 | .mapToLong(timing -> timing.getStartedAt().toEpochMilli())
50 | .min()
51 | .ifPresent(value -> {
52 | histories.add(new State.History(
53 | State.Type.CREATED,
54 | Instant.ofEpochMilli(value)
55 | ));
56 | });
57 |
58 | r.getTiming()
59 | .stream()
60 | .filter(timing -> timing.getName().equals("execute"))
61 | .mapToLong(timing -> timing.getStartedAt().toEpochMilli())
62 | .min()
63 | .ifPresent(value -> {
64 | histories.add(new State.History(
65 | State.Type.RUNNING,
66 | Instant.ofEpochMilli(value)
67 | ));
68 | });
69 |
70 | r.getTiming()
71 | .stream()
72 | .mapToLong(timing -> timing.getCompletedAt().toEpochMilli())
73 | .max()
74 | .ifPresent(value -> {
75 | histories.add(new State.History(
76 | r.state(),
77 | Instant.ofEpochMilli(value)
78 | ));
79 | });
80 |
81 | State state = State.of(
82 | r.state(),
83 | histories
84 | );
85 |
86 | r.getAdapterResponse()
87 | .entrySet()
88 | .stream()
89 | .map(e -> {
90 | return switch (e.getKey()) {
91 | case "rows_affected" -> Counter.of("rows.affected", Double.valueOf(e.getValue()));
92 | case "bytes_processed" -> Counter.of("bytes.processed", Double.valueOf(e.getValue()));
93 | default -> null;
94 | };
95 | })
96 | .filter(Objects::nonNull)
97 | .forEach(runContext::metric);
98 |
99 | return WorkerTaskResult.builder()
100 | .taskRun(TaskRun.builder()
101 | .id(IdUtils.create())
102 | .namespace(runContext.render("{{ flow.namespace }}"))
103 | .flowId(runContext.render("{{ flow.id }}"))
104 | .taskId(r.getUniqueId())
105 | .value(runContext.render("{{ taskrun.id }}"))
106 | .executionId(runContext.render("{{ execution.id }}"))
107 | .parentTaskRunId(runContext.render("{{ taskrun.id }}"))
108 | .state(state)
109 | .attempts(List.of(TaskRunAttempt.builder()
110 | .state(state)
111 | .build()
112 | ))
113 | .build()
114 | )
115 | .build();
116 | }))
117 | .toList();
118 |
119 | runContext.dynamicWorkerResult(workerTaskResults);
120 |
121 | return runContext.storage().putFile(file);
122 | }
123 | }
124 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/AbstractDbt.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import com.fasterxml.jackson.annotation.JsonSetter;
4 | import io.kestra.core.exceptions.IllegalVariableEvaluationException;
5 | import io.kestra.core.models.annotations.PluginProperty;
6 | import io.kestra.core.models.property.Property;
7 | import io.kestra.core.models.tasks.*;
8 | import io.kestra.core.models.tasks.runners.AbstractLogConsumer;
9 | import io.kestra.core.models.tasks.runners.ScriptService;
10 | import io.kestra.core.models.tasks.runners.TaskRunner;
11 | import io.kestra.core.runners.RunContext;
12 | import io.kestra.core.serializers.JacksonMapper;
13 | import io.kestra.plugin.dbt.ResultParser;
14 | import io.kestra.plugin.scripts.exec.scripts.models.DockerOptions;
15 | import io.kestra.plugin.scripts.exec.scripts.models.RunnerType;
16 | import io.kestra.plugin.scripts.exec.scripts.models.ScriptOutput;
17 | import io.kestra.plugin.scripts.exec.scripts.runners.CommandsWrapper;
18 | import io.kestra.plugin.scripts.runner.docker.Docker;
19 | import io.swagger.v3.oas.annotations.media.Schema;
20 | import jakarta.validation.Valid;
21 | import lombok.*;
22 | import lombok.experimental.SuperBuilder;
23 | import org.apache.commons.io.FileUtils;
24 |
25 | import java.io.File;
26 | import java.io.IOException;
27 | import java.net.URI;
28 | import java.nio.charset.StandardCharsets;
29 | import java.nio.file.Files;
30 | import java.nio.file.Path;
31 | import java.time.Instant;
32 | import java.util.*;
33 | import java.util.List;
34 |
35 | @SuperBuilder
36 | @ToString
37 | @EqualsAndHashCode
38 | @Getter
39 | @NoArgsConstructor
40 | public abstract class AbstractDbt extends Task implements RunnableTask<ScriptOutput>, NamespaceFilesInterface, InputFilesInterface, OutputFilesInterface {
41 | private static final String DEFAULT_IMAGE = "ghcr.io/kestra-io/dbt";
42 |
43 | @Builder.Default
44 | @Schema(
45 | title = "Stop execution at the first failure."
46 | )
47 | Property<Boolean> failFast = Property.of(false);
48 |
49 | @Builder.Default
50 | @Schema(
51 | title = "When dbt would normally warn, raise an exception.",
52 | description = "Examples include --models that selects nothing, deprecations, configurations with no " +
53 | "associated models, invalid test configurations, and missing sources/refs in tests."
54 | )
55 | Property<Boolean> warnError = Property.of(false);
56 |
57 | @Builder.Default
58 | @Schema(
59 | title = "Display debug logging during dbt execution.",
60 | description = "Useful for debugging and making bug reports."
61 | )
62 | Property<Boolean> debug = Property.of(false);
63 |
64 | @Schema(
65 | title = "Which directory to look in for the dbt_project.yml file.",
66 | description = "Default is the current working directory and its parents."
67 | )
68 | Property<String> projectDir;
69 |
70 | @Builder.Default
71 | @Schema(
72 | title = "The path to the dbt CLI"
73 | )
74 | Property<String> dbtPath = Property.of("./bin/dbt");
75 |
76 | @Schema(
77 | title = "The `profiles.yml` file content",
78 | description = "If a `profile.yml` file already exist in the current working directory, it will be overridden."
79 | )
80 | Property<String> profiles;
81 |
82 | @Schema(
83 | title = "The task runner to use.",
84 | description = """
85 | Task runners are provided by plugins, each have their own properties.
86 | If you change from the default one, be careful to also configure the entrypoint to an empty list if needed."""
87 | )
88 | @Builder.Default
89 | @PluginProperty
90 | @Valid
91 | protected TaskRunner<?> taskRunner = Docker.builder()
92 | .type(Docker.class.getName())
93 | .entryPoint(new ArrayList<>())
94 | .build();
95 |
96 | @Schema(title = "The task runner container image, only used if the task runner is container-based.")
97 | @Builder.Default
98 | protected Property<String> containerImage = Property.of(DEFAULT_IMAGE);
99 |
100 | @Schema(
101 | title = "The runner type.",
102 | description = "Deprecated, use 'taskRunner' instead."
103 | )
104 | @Deprecated
105 | protected Property<RunnerType> runner;
106 |
107 | @Schema(
108 | title = "Deprecated, use 'taskRunner' instead"
109 | )
110 | @Deprecated
111 | private Property<DockerOptions> docker;
112 |
113 | @Schema(title = "Deprecated, use the `docker` property instead", deprecated = true)
114 | @Deprecated
115 | private Property<DockerOptions> dockerOptions;
116 |
117 | @JsonSetter
118 | public void setDockerOptions(Property<DockerOptions> dockerOptions) {
119 | this.dockerOptions = dockerOptions;
120 | this.docker = dockerOptions;
121 | }
122 |
123 | @Schema(
124 | title = "Additional environment variables for the current process."
125 | )
126 | protected Property<Map<String, String>> env;
127 |
128 | @Builder.Default
129 | @Schema(
130 | title = "Parse run result",
131 | description = "Parsing run result to display duration of each task inside dbt"
132 | )
133 | protected Property<Boolean> parseRunResults = Property.of(Boolean.TRUE);
134 |
135 | private NamespaceFiles namespaceFiles;
136 |
137 | private Object inputFiles;
138 |
139 | private Property<List<String>> outputFiles;
140 |
141 | protected abstract java.util.List<String> dbtCommands(RunContext runContext) throws IllegalVariableEvaluationException;
142 |
143 | @Override
144 | public ScriptOutput run(RunContext runContext) throws Exception {
145 | var renderedOutputFiles = runContext.render(this.outputFiles).asList(String.class);
146 | var renderedEnvMap = runContext.render(this.getEnv()).asMap(String.class, String.class);
147 |
148 | CommandsWrapper commandsWrapper = new CommandsWrapper(runContext)
149 | .withEnv(renderedEnvMap.isEmpty() ? new HashMap<>() : renderedEnvMap)
150 | .withNamespaceFiles(namespaceFiles)
151 | .withInputFiles(inputFiles)
152 | .withOutputFiles(renderedOutputFiles.isEmpty() ? null : renderedOutputFiles)
153 | .withRunnerType(runContext.render(this.getRunner()).as(RunnerType.class).orElse(null))
154 | .withDockerOptions(runContext.render(this.getDocker()).as(DockerOptions.class).orElse(null))
155 | .withContainerImage(runContext.render(this.getContainerImage()).as(String.class).orElseThrow())
156 | .withTaskRunner(this.taskRunner)
157 | .withLogConsumer(new AbstractLogConsumer() {
158 | @Override
159 | public void accept(String line, Boolean isStdErr, Instant instant) {
160 | LogService.parse(runContext, line);
161 | }
162 | @Override
163 | public void accept(String line, Boolean isStdErr) {
164 | LogService.parse(runContext, line);
165 | }
166 | })
167 | .withEnableOutputDirectory(true); //force output files on task runners
168 | Path workingDirectory = commandsWrapper.getWorkingDirectory();
169 |
170 | Optional<String> profileString = runContext.render(profiles).as(String.class);
171 | if (profileString.isPresent() && !profileString.get().isEmpty()) {
172 | if (Files.exists(Path.of(".profiles/profiles.yml"))) {
173 | runContext.logger().warn("A 'profiles.yml' file already exists in the task working directory; it will be overridden.");
174 | }
175 |
176 | FileUtils.writeStringToFile(
177 | new File(workingDirectory.resolve(".profile").toString(), "profiles.yml"),
178 | profileString.get(),
179 | StandardCharsets.UTF_8
180 | );
181 | }
182 |
183 | ScriptOutput run = commandsWrapper
184 | .addEnv(Map.of(
185 | "PYTHONUNBUFFERED", "true",
186 | "PIP_ROOT_USER_ACTION", "ignore"
187 | ))
188 | .withInterpreter(Property.of(List.of("/bin/sh", "-c")))
189 | .withCommands(new Property<>(JacksonMapper.ofJson().writeValueAsString(
190 | List.of(createDbtCommand(runContext)))
191 | ))
192 | .run();
193 |
194 | parseResults(runContext, workingDirectory, run);
195 |
196 | return run;
197 | }
198 |
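// Illustrative shape of the command assembled below, assuming the default dbtPath and a `run` subclass:
//   ./bin/dbt --log-format json run --profiles-dir {{ workingDir }}/.profile --project-dir {{ workingDir }}
// (the actual working-directory placeholder comes from ScriptService.VAR_WORKING_DIR)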
199 | private String createDbtCommand(RunContext runContext) throws IllegalVariableEvaluationException {
200 | List<String> commands = new ArrayList<>(List.of(
201 | runContext.render(this.dbtPath).as(String.class).orElseThrow(),
202 | "--log-format json"
203 | ));
204 |
205 | if (Boolean.TRUE.equals(runContext.render(this.debug).as(Boolean.class).orElse(false))) {
206 | commands.add("--debug");
207 | }
208 |
209 | if (Boolean.TRUE.equals(runContext.render(this.failFast).as(Boolean.class).orElse(false))) {
210 | commands.add("--fail-fast");
211 | }
212 |
213 | if (Boolean.TRUE.equals(runContext.render(this.warnError).as(Boolean.class).orElse(false))) {
214 | commands.add("--warn-error");
215 | }
216 |
217 | commands.addAll(dbtCommands(runContext));
218 |
219 | if (runContext.render(this.projectDir).as(String.class).isPresent()) {
220 | commands.add("--project-dir {{" + ScriptService.VAR_WORKING_DIR + "}}" + runContext.render(this.projectDir).as(String.class).get());
221 | } else {
222 | commands.add("--project-dir {{" + ScriptService.VAR_WORKING_DIR + "}}");
223 | }
224 |
225 | return String.join(" ", commands);
226 | }
227 |
228 | protected void parseResults(RunContext runContext, Path workingDirectory, ScriptOutput scriptOutput) throws IllegalVariableEvaluationException, IOException {
229 | String baseDir = runContext.render(this.projectDir).as(String.class).orElse("");
230 |
231 | File runResults = workingDirectory.resolve(baseDir + "target/run_results.json").toFile();
232 |
233 | if (runContext.render(this.parseRunResults).as(Boolean.class).orElse(true) && runResults.exists()) {
234 | URI results = ResultParser.parseRunResult(runContext, runResults);
235 | scriptOutput.getOutputFiles().put("run_results.json", results);
236 | }
237 |
238 | File manifestFile = workingDirectory.resolve(baseDir + "target/manifest.json").toFile();
239 |
240 | if (manifestFile.exists()) {
241 | URI manifest = ResultParser.parseManifest(runContext, manifestFile);
242 | scriptOutput.getOutputFiles().put("manifest.json", manifest);
243 | }
244 | }
245 | }
246 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/AbstractRun.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.exceptions.IllegalVariableEvaluationException;
4 | import io.kestra.core.models.property.Property;
5 | import io.kestra.core.models.tasks.runners.ScriptService;
6 | import io.kestra.core.runners.RunContext;
7 | import io.swagger.v3.oas.annotations.media.Schema;
8 | import lombok.*;
9 | import lombok.experimental.SuperBuilder;
10 |
11 | import java.util.ArrayList;
12 | import java.util.List;
13 |
14 | @SuperBuilder
15 | @ToString
16 | @EqualsAndHashCode
17 | @Getter
18 | @NoArgsConstructor
19 | public abstract class AbstractRun extends AbstractDbt {
20 | @Schema(
21 | title = "Specify the number of threads to use while executing models."
22 | )
23 | Property<Integer> thread;
24 |
25 | @Builder.Default
26 | @Schema(
27 | title = "Whether dbt will drop incremental models and fully-recalculate the incremental table " +
28 | "from the model definition."
29 | )
30 | Property<Boolean> fullRefresh = Property.of(Boolean.FALSE);
31 |
32 | @Schema(
33 | title = "Which target to load for the given profile"
34 | )
35 | Property<String> target;
36 |
37 | @Schema(
38 | title = "The selector name to use, as defined in selectors.yml"
39 | )
40 | Property<String> selector;
41 |
42 | @Schema(
43 | title = "List of nodes to include"
44 | )
45 | Property<List<String>> select;
46 |
47 | @Schema(
48 | title = "List of models to exclude"
49 | )
50 | Property<List<String>> exclude;
51 |
52 | abstract protected String dbtCommand();
53 |
54 | @Override
55 | protected java.util.List<String> dbtCommands(RunContext runContext) throws IllegalVariableEvaluationException {
56 | java.util.List<String> commands = new ArrayList<>(java.util.List.of(
57 | this.dbtCommand(),
58 | "--profiles-dir {{" + ScriptService.VAR_WORKING_DIR + "}}/.profile"));
59 |
60 | if (runContext.render(this.thread).as(Integer.class).isPresent()) {
61 | commands.add("--threads " + runContext.render(this.thread).as(Integer.class).get());
62 | }
63 |
64 | if (runContext.render(this.fullRefresh).as(Boolean.class).orElse(false)) {
65 | commands.add("--full-refresh");
66 | }
67 |
68 | if (runContext.render(this.target).as(String.class).isPresent()) {
69 | commands.add("--target " + runContext.render(this.target).as(String.class).get());
70 | }
71 |
72 | if (runContext.render(this.selector).as(String.class).isPresent()) {
73 | commands.add("--selector " + runContext.render(this.target).as(String.class).get());
74 | }
75 |
76 | if (!runContext.render(this.select).asList(String.class).isEmpty()) {
77 | commands.add("--select " + String.join(" ", runContext.render(this.select).asList(String.class)));
78 | }
79 |
80 | if (!runContext.render(this.exclude).asList(String.class).isEmpty()) {
81 | commands.add("--exclude " + String.join(" ", runContext.render(this.exclude).asList(String.class)));
82 | }
83 |
84 | return commands;
85 | }
86 | }
87 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/Build.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.annotations.Example;
4 | import io.kestra.core.models.annotations.Plugin;
5 | import io.swagger.v3.oas.annotations.media.Schema;
6 | import lombok.EqualsAndHashCode;
7 | import lombok.Getter;
8 | import lombok.NoArgsConstructor;
9 | import lombok.ToString;
10 | import lombok.experimental.SuperBuilder;
11 |
12 | @SuperBuilder
13 | @ToString
14 | @EqualsAndHashCode
15 | @Getter
16 | @NoArgsConstructor
17 | @Schema(
18 | title = "Invoke dbt build command."
19 | )
20 | @Plugin(
21 | examples = {
22 | @Example(
23 | full = true,
24 | title = "Invoke dbt `build` command.",
25 | code = """
26 | id: dbt_build
27 | namespace: company.team
28 |
29 | tasks:
30 | - id: working_directory
31 | type: io.kestra.plugin.core.flow.WorkingDirectory
32 | tasks:
33 | - id: clone_repository
34 | type: io.kestra.plugin.git.Clone
35 | url: https://github.com/kestra-io/dbt-demo
36 | branch: main
37 |
38 | - id: dbt_build
39 | type: io.kestra.plugin.dbt.cli.Build
40 | taskRunner:
41 | type: io.kestra.plugin.scripts.runner.docker.Docker
42 | dbtPath: /usr/local/bin/dbt
43 | containerImage: ghcr.io/kestra-io/dbt-duckdb
44 | profiles: |
45 | jaffle_shop:
46 | outputs:
47 | dev:
48 | type: duckdb
49 | path: ':memory:'
50 | extensions:
51 | - parquet
52 | target: dev
53 | """
54 | )
55 | }
56 | )
57 | public class Build extends AbstractRun {
58 | @Override
59 | protected String dbtCommand() {
60 | return "build";
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/Compile.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.annotations.Example;
4 | import io.kestra.core.models.annotations.Plugin;
5 | import io.swagger.v3.oas.annotations.media.Schema;
6 | import lombok.EqualsAndHashCode;
7 | import lombok.Getter;
8 | import lombok.NoArgsConstructor;
9 | import lombok.ToString;
10 | import lombok.experimental.SuperBuilder;
11 |
12 | @SuperBuilder
13 | @ToString
14 | @EqualsAndHashCode
15 | @Getter
16 | @NoArgsConstructor
17 | @Schema(
18 | title = "Invoke dbt compile command."
19 | )
20 | @Plugin(
21 | examples = {
22 | @Example(
23 | full = true,
24 | title = "Invoke dbt `compile` command.",
25 | code = """
26 | id: dbt_compile
27 | namespace: company.team
28 |
29 | tasks:
30 | - id: working_directory
31 | type: io.kestra.plugin.core.flow.WorkingDirectory
32 | tasks:
33 | - id: clone_repository
34 | type: io.kestra.plugin.git.Clone
35 | url: https://github.com/kestra-io/dbt-demo
36 | branch: main
37 |
38 | - id: dbt_compile
39 | type: io.kestra.plugin.dbt.cli.Compile
40 | taskRunner:
41 | type: io.kestra.plugin.scripts.runner.docker.Docker
42 | dbtPath: /usr/local/bin/dbt
43 | containerImage: ghcr.io/kestra-io/dbt-duckdb
44 | profiles: |
45 | jaffle_shop:
46 | outputs:
47 | dev:
48 | type: duckdb
49 | path: ':memory:'
50 | extensions:
51 | - parquet
52 | target: dev
53 | """
54 | )
55 | }
56 | )
57 | public class Compile extends AbstractRun {
58 | @Override
59 | protected String dbtCommand() {
60 | return "compile";
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/Deps.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.annotations.Example;
4 | import io.kestra.core.models.annotations.Plugin;
5 | import io.kestra.core.runners.RunContext;
6 | import io.kestra.plugin.scripts.exec.scripts.models.ScriptOutput;
7 | import io.swagger.v3.oas.annotations.media.Schema;
8 | import lombok.EqualsAndHashCode;
9 | import lombok.Getter;
10 | import lombok.NoArgsConstructor;
11 | import lombok.ToString;
12 | import lombok.experimental.SuperBuilder;
13 |
14 | import java.nio.file.Path;
15 |
16 |
17 | @SuperBuilder
18 | @ToString
19 | @EqualsAndHashCode
20 | @Getter
21 | @NoArgsConstructor
22 | @Schema(
23 | title = "Invoke dbt deps command."
24 | )
25 | @Plugin(
26 | examples = {
27 | @Example(
28 | full = true,
29 | title = "Invoke dbt `deps` command",
30 | code = """
31 | id: dbt_deps
32 | namespace: company.team
33 |
34 | tasks:
35 | - id: working_directory
36 | type: io.kestra.plugin.core.flow.WorkingDirectory
37 | tasks:
38 | - id: clone_repository
39 | type: io.kestra.plugin.git.Clone
40 | url: https://github.com/kestra-io/dbt-demo
41 | branch: main
42 |
43 | - id: dbt_deps
44 | type: io.kestra.plugin.dbt.cli.Deps
45 | taskRunner:
46 | type: io.kestra.plugin.scripts.runner.docker.Docker
47 | dbtPath: /usr/local/bin/dbt
48 | containerImage: ghcr.io/kestra-io/dbt-duckdb
49 | profiles: |
50 | jaffle_shop:
51 | outputs:
52 | dev:
53 | type: duckdb
54 | path: ':memory:'
55 | extensions:
56 | - parquet
57 | target: dev
58 | """
59 | )
60 | }
61 | )
62 | public class Deps extends AbstractRun {
63 | @Override
64 | protected String dbtCommand() {
65 | return "deps";
66 | }
67 |
68 | @Override
69 | protected void parseResults(RunContext runContext, Path workingDirectory, ScriptOutput scriptOutput) {
70 | // 'dbt deps' doesn't produce any result files.
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/Freshness.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.annotations.Example;
4 | import io.kestra.core.models.annotations.Plugin;
5 | import io.kestra.core.runners.RunContext;
6 | import io.kestra.plugin.scripts.exec.scripts.models.ScriptOutput;
7 | import io.swagger.v3.oas.annotations.media.Schema;
8 | import lombok.EqualsAndHashCode;
9 | import lombok.Getter;
10 | import lombok.NoArgsConstructor;
11 | import lombok.ToString;
12 | import lombok.experimental.SuperBuilder;
13 |
14 | import java.nio.file.Path;
15 |
16 | @SuperBuilder
17 | @ToString
18 | @EqualsAndHashCode
19 | @Getter
20 | @NoArgsConstructor
21 | @Schema(
22 | title = "Invoke dbt source freshness command."
23 | )
24 | @Plugin(
25 | examples = {
26 | @Example(
27 | full = true,
28 | title = "Invoke dbt `source freshness` command.",
29 | code = """
30 | id: dbt_freshness
31 | namespace: company.team
32 |
33 | tasks:
34 | - id: working_directory
35 | type: io.kestra.plugin.core.flow.WorkingDirectory
36 | tasks:
37 | - id: clone_repository
38 | type: io.kestra.plugin.git.Clone
39 | url: https://github.com/kestra-io/dbt-demo
40 | branch: main
41 |
42 | - id: dbt_freshness
43 | type: io.kestra.plugin.dbt.cli.Freshness
44 | taskRunner:
45 | type: io.kestra.plugin.scripts.runner.docker.Docker
46 | dbtPath: /usr/local/bin/dbt
47 | containerImage: ghcr.io/kestra-io/dbt-duckdb
48 | profiles: |
49 | jaffle_shop:
50 | outputs:
51 | dev:
52 | type: duckdb
53 | path: ':memory:'
54 | extensions:
55 | - parquet
56 | target: dev
57 | """
58 | )
59 | }
60 | )
61 | public class Freshness extends AbstractRun {
62 | @Override
63 | protected String dbtCommand() {
64 | return "source freshness";
65 | }
66 |
67 | @Override
68 | protected void parseResults(RunContext runContext, Path workingDirectory, ScriptOutput scriptOutput) {
69 | // 'dbt source freshness' doesn't produce any result files.
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/List.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.annotations.Example;
4 | import io.kestra.core.models.annotations.Plugin;
5 | import io.kestra.core.runners.RunContext;
6 | import io.kestra.plugin.scripts.exec.scripts.models.ScriptOutput;
7 | import io.swagger.v3.oas.annotations.media.Schema;
8 | import lombok.EqualsAndHashCode;
9 | import lombok.Getter;
10 | import lombok.NoArgsConstructor;
11 | import lombok.ToString;
12 | import lombok.experimental.SuperBuilder;
13 |
14 | import java.nio.file.Path;
15 |
16 | @SuperBuilder
17 | @ToString
18 | @EqualsAndHashCode
19 | @Getter
20 | @NoArgsConstructor
21 | @Schema(
22 | title = "Invoke dbt list command."
23 | )
24 | @Plugin(
25 | examples = {
26 | @Example(
27 | full = true,
28 | title = "Invoke dbt `list` command.",
29 | code = """
30 | id: dbt_list
31 | namespace: company.team
32 |
33 | tasks:
34 | - id: working_directory
35 | type: io.kestra.plugin.core.flow.WorkingDirectory
36 | tasks:
37 | - id: clone_repository
38 | type: io.kestra.plugin.git.Clone
39 | url: https://github.com/kestra-io/dbt-demo
40 | branch: main
41 |
42 | - id: dbt_list
43 | type: io.kestra.plugin.dbt.cli.List
44 | taskRunner:
45 | type: io.kestra.plugin.scripts.runner.docker.Docker
46 | dbtPath: /usr/local/bin/dbt
47 | containerImage: ghcr.io/kestra-io/dbt-duckdb
48 | profiles: |
49 | jaffle_shop:
50 | outputs:
51 | dev:
52 | type: duckdb
53 | path: ':memory:'
54 | extensions:
55 | - parquet
56 | target: dev
57 | """
58 | )
59 | }
60 | )
61 | public class List extends AbstractRun {
62 | @Override
63 | protected String dbtCommand() {
64 | return "list";
65 | }
66 |
67 | @Override
68 | protected void parseResults(RunContext runContext, Path workingDirectory, ScriptOutput scriptOutput) {
69 | // 'dbt list' doesn't produce any result files.
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/LogService.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import com.fasterxml.jackson.annotation.JsonInclude;
4 | import com.fasterxml.jackson.databind.ObjectMapper;
5 | import io.kestra.core.models.executions.metrics.Counter;
6 | import io.kestra.core.runners.RunContext;
7 | import io.kestra.core.serializers.JacksonMapper;
8 |
9 | import java.util.HashMap;
10 | import java.util.Map;
11 |
12 | class LogService {
13 | static final protected ObjectMapper MAPPER = JacksonMapper.ofJson()
14 | .setSerializationInclusion(JsonInclude.Include.NON_NULL);
15 |
16 | @SuppressWarnings("unchecked")
17 | protected static void parse(RunContext runContext, String line) {
18 | try {
19 | Map<String, Object> jsonLog = (Map<String, Object>) MAPPER.readValue(line, Object.class);
20 |
21 | String level;
22 | String ts;
23 | String thread;
24 | String type;
25 | String msg;
26 | HashMap<String, Object> additional = new HashMap<>();
27 |
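// dbt's structured JSON logs may nest the metadata (level, ts, msg, ...) under an "info" key depending on the dbt version; otherwise the fields sit at the top level.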
28 | if (jsonLog.containsKey("info")) {
29 | Map<String, Object> infoLog = (Map<String, Object>) jsonLog.get("info");
30 |
31 | level = (String) infoLog.get("level");
32 | ts = (String) infoLog.get("ts");
33 | thread = (String) infoLog.get("thread");
34 | type = (String) infoLog.get("name");
35 | msg = (String) infoLog.get("msg");
36 |
37 | additional.putAll(infoLog);
38 | } else {
39 | level = (String) jsonLog.get("level");
40 | ts = (String) jsonLog.get("ts");
41 | thread = (String) jsonLog.get("thread_name");
42 | type = (String) jsonLog.get("type");
43 | msg = (String) jsonLog.get("msg");
44 | }
45 |
46 | additional.remove("category");
47 | additional.remove("code");
48 | additional.remove("invocation_id");
49 | additional.remove("level");
50 | additional.remove("log_version");
51 | additional.remove("code");
52 | additional.remove("msg");
53 | additional.remove("thread");
54 | additional.remove("thread_name");
55 | additional.remove("type");
56 | additional.remove("name");
57 | additional.remove("ts");
58 | additional.remove("pid");
59 | additional.remove("extra");
60 |
61 | String format = "[Date: {}] [Thread: {}] [Type: {}] {}{}";
62 | String[] args = new String[]{
63 | ts,
64 | thread,
65 | type,
66 | msg != null ? msg + " " : "",
67 | !additional.isEmpty() ? additional.toString() : ""
68 | };
69 |
70 | if (jsonLog.containsKey("data")) {
71 | Map<String, Object> data = (Map<String, Object>) jsonLog.get("data");
72 |
73 | if (data.containsKey("stats")) {
74 | Map<String, Integer> stats = (Map<String, Integer>) data.get("stats");
75 |
76 | stats.forEach((s, integer) -> runContext.metric(Counter.of(s, integer)));
77 | }
78 | }
79 |
80 | switch (level) {
81 | case "debug":
82 | runContext.logger().debug(format, (Object[]) args);
83 | break;
84 | case "info":
85 | runContext.logger().info(format, (Object[]) args);
86 | break;
87 | case "warn":
88 | runContext.logger().warn(format, (Object[]) args);
89 | break;
90 | default:
91 | runContext.logger().error(format, (Object[]) args);
92 | }
93 | } catch (Throwable e) {
94 | runContext.logger().info(line.trim());
95 | }
96 | }
97 | }
98 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/Run.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.annotations.Example;
4 | import io.kestra.core.models.annotations.Plugin;
5 | import io.swagger.v3.oas.annotations.media.Schema;
6 | import lombok.EqualsAndHashCode;
7 | import lombok.Getter;
8 | import lombok.NoArgsConstructor;
9 | import lombok.ToString;
10 | import lombok.experimental.SuperBuilder;
11 |
12 | @SuperBuilder
13 | @ToString
14 | @EqualsAndHashCode
15 | @Getter
16 | @NoArgsConstructor
17 | @Schema(
18 | title = "Invoke dbt run command."
19 | )
20 | @Plugin(
21 | examples = {
22 | @Example(
23 | full = true,
24 | title = "Invoke dbt `run` command.",
25 | code = """
26 | id: dbt_run
27 | namespace: company.team
28 |
29 | tasks:
30 | - id: working_directory
31 | type: io.kestra.plugin.core.flow.WorkingDirectory
32 | tasks:
33 | - id: clone_repository
34 | type: io.kestra.plugin.git.Clone
35 | url: https://github.com/kestra-io/dbt-demo
36 | branch: main
37 |
38 | - id: dbt_run
39 | type: io.kestra.plugin.dbt.cli.Run
40 | taskRunner:
41 | type: io.kestra.plugin.scripts.runner.docker.Docker
42 | dbtPath: /usr/local/bin/dbt
43 | containerImage: ghcr.io/kestra-io/dbt-duckdb
44 | profiles: |
45 | jaffle_shop:
46 | outputs:
47 | dev:
48 | type: duckdb
49 | path: ':memory:'
50 | extensions:
51 | - parquet
52 | target: dev
53 | """
54 | )
55 | }
56 | )
57 | public class Run extends AbstractRun {
58 | @Override
59 | protected String dbtCommand() {
60 | return "run";
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/Seed.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.annotations.Example;
4 | import io.kestra.core.models.annotations.Plugin;
5 | import io.swagger.v3.oas.annotations.media.Schema;
6 | import lombok.EqualsAndHashCode;
7 | import lombok.Getter;
8 | import lombok.NoArgsConstructor;
9 | import lombok.ToString;
10 | import lombok.experimental.SuperBuilder;
11 |
12 | @SuperBuilder
13 | @ToString
14 | @EqualsAndHashCode
15 | @Getter
16 | @NoArgsConstructor
17 | @Schema(
18 | title = "Invoke dbt seed command."
19 | )
20 | @Plugin(
21 | examples = {
22 | @Example(
23 | full = true,
24 | title = "Invoke dbt `seed` command.",
25 | code = """
26 | id: dbt_seed
27 | namespace: company.team
28 |
29 | tasks:
30 | - id: working_directory
31 | type: io.kestra.plugin.core.flow.WorkingDirectory
32 | tasks:
33 | - id: clone_repository
34 | type: io.kestra.plugin.git.Clone
35 | url: https://github.com/kestra-io/dbt-demo
36 | branch: main
37 |
38 | - id: dbt_seed
39 | type: io.kestra.plugin.dbt.cli.Seed
40 | taskRunner:
41 | type: io.kestra.plugin.scripts.runner.docker.Docker
42 | dbtPath: /usr/local/bin/dbt
43 | containerImage: ghcr.io/kestra-io/dbt-duckdb
44 | profiles: |
45 | jaffle_shop:
46 | outputs:
47 | dev:
48 | type: duckdb
49 | path: ':memory:'
50 | extensions:
51 | - parquet
52 | target: dev
53 | """
54 | )
55 | }
56 | )
57 | public class Seed extends AbstractRun {
58 | @Override
59 | protected String dbtCommand() {
60 | return "seed";
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/Setup.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import com.fasterxml.jackson.annotation.JsonSetter;
4 | import com.fasterxml.jackson.core.JsonProcessingException;
5 | import com.fasterxml.jackson.databind.ObjectMapper;
6 | import io.kestra.core.exceptions.IllegalVariableEvaluationException;
7 | import io.kestra.core.models.annotations.Example;
8 | import io.kestra.core.models.annotations.Plugin;
9 | import io.kestra.core.models.annotations.PluginProperty;
10 | import io.kestra.core.models.property.Property;
11 | import io.kestra.core.models.tasks.RunnableTask;
12 | import io.kestra.core.models.tasks.runners.PluginUtilsService;
13 | import io.kestra.core.models.tasks.runners.ScriptService;
14 | import io.kestra.core.models.tasks.runners.TaskRunner;
15 | import io.kestra.core.runners.RunContext;
16 | import io.kestra.core.serializers.JacksonMapper;
17 | import io.kestra.plugin.scripts.exec.AbstractExecScript;
18 | import io.kestra.plugin.scripts.exec.scripts.models.DockerOptions;
19 | import io.kestra.plugin.scripts.exec.scripts.models.ScriptOutput;
20 | import io.kestra.plugin.scripts.exec.scripts.runners.CommandsWrapper;
21 | import io.kestra.plugin.scripts.runner.docker.Docker;
22 | import io.swagger.v3.oas.annotations.media.Schema;
23 | import jakarta.validation.Valid;
24 | import lombok.Builder;
25 | import lombok.EqualsAndHashCode;
26 | import lombok.Getter;
27 | import lombok.NoArgsConstructor;
28 | import lombok.ToString;
29 | import lombok.experimental.SuperBuilder;
30 | import org.apache.commons.io.FileUtils;
31 |
32 | import java.io.File;
33 | import java.io.IOException;
34 | import java.nio.charset.StandardCharsets;
35 | import java.nio.file.Path;
36 | import java.util.ArrayList;
37 | import java.util.Arrays;
38 | import java.util.HashMap;
39 | import java.util.List;
40 | import java.util.Map;
41 | import jakarta.validation.constraints.NotEmpty;
42 | import jakarta.validation.constraints.NotNull;
43 |
44 | @SuperBuilder
45 | @ToString
46 | @EqualsAndHashCode
47 | @Getter
48 | @NoArgsConstructor
49 | @Schema(
50 | title = "Setup dbt in a Python virtualenv.",
51 | description = """
52 | Use this task to install dbt requirements locally in a Python virtualenv if you don't want to use dbt via Docker.
53 | In this case, you need to use a `WorkingDirectory` task and this `Setup` task to setup dbt prior to using any of the dbt tasks."""
54 | )
55 | @Plugin(
56 | examples = {
57 | @Example(
58 | full = true,
59 | title = "Setup dbt by installing pip dependencies in a Python virtualenv and initializing the profile directory.",
60 | code = """
61 | id: dbt_setup
62 | namespace: company.team
63 |
64 | tasks:
65 | - id: working_directory
66 | type: io.kestra.plugin.core.flow.WorkingDirectory
67 | tasks:
68 | - id: clone_repository
69 | type: io.kestra.plugin.git.Clone
70 | url: https://github.com/kestra-io/dbt-demo
71 | branch: main
72 |
73 | - id: dbt_setup
74 | type: io.kestra.plugin.dbt.cli.Setup
75 | requirements:
76 | - dbt-duckdb
77 | profiles:
78 | jaffle_shop:
79 | outputs:
80 | dev:
81 | type: duckdb
82 | path: ':memory:'
83 | extensions:
84 | - parquet
85 | target: dev
86 |
87 | - id: dbt_build
88 | type: io.kestra.plugin.dbt.cli.Build
89 | """
90 | )
91 | }
92 | )
93 | public class Setup extends AbstractExecScript implements RunnableTask<ScriptOutput> {
94 | static final private ObjectMapper MAPPER = JacksonMapper.ofYaml();
95 |
96 | private static final String DEFAULT_IMAGE = "python";
97 |
98 | @Schema(
99 | title = "The `profiles.yml` file content. Can be an object (a map) or a string.",
100 | anyOf = { Map.class, String.class }
101 | )
102 | @NotNull
103 | Property<Object> profiles;
104 |
105 | @Builder.Default
106 | @Schema(
107 | title = "The python interpreter to use.",
108 | description = "Set the python interpreter path to use."
109 | )
110 | @NotNull
111 | @NotEmpty
112 | @PluginProperty(dynamic = true)
113 | private final String pythonPath = DEFAULT_IMAGE;
114 |
115 | @Schema(
116 | title = "List of python dependencies to add to the python execution process.",
117 | description = "Python dependencies list to setup in the virtualenv, in the same format than requirements.txt. It must at least provides dbt."
118 | )
119 | @NotNull
120 | protected Property<List<String>> requirements;
121 |
122 | @Builder.Default
123 | @Schema(
124 | title = "Exit if any non true return value.",
125 | description = "This tells bash that it should exit the script if any statement returns a non-true return value. \n" +
126 | "The benefit of using -e is that it prevents errors snowballing into serious issues when they could " +
127 | "have been caught earlier. This option is deprecated. Use `failFast` instead."
128 | )
129 | @NotNull
130 | @Deprecated(since = "0.16.0", forRemoval = true)
131 | protected Property<Boolean> exitOnFailed = Property.of(Boolean.TRUE);
132 |
133 | @Schema(
134 | title = "Input files are extra files that will be available in the dbt working directory.",
135 | description = "You can define the files as map or a JSON string. " +
136 | "Each file can be defined inlined or can reference a file from Kestra's internal storage."
137 | )
138 | private Property<Object> inputFiles;
139 |
140 | @Schema(
141 | title = "The task runner to use.",
142 | description = "Task runners are provided by plugins, each have their own properties."
143 | )
144 | @Builder.Default
145 | @PluginProperty
146 | @Valid
147 | protected TaskRunner> taskRunner = Docker.instance();
148 |
149 | @Builder.Default
150 | protected Property<String> containerImage = Property.of(DEFAULT_IMAGE);
151 |
152 | @Schema(title = "Deprecated, use the `docker` property instead", deprecated = true)
153 | @Deprecated
154 | private Property<DockerOptions> dockerOptions;
155 |
156 | @JsonSetter
157 | public void setDockerOptions(DockerOptions dockerOptions) {
158 | this.dockerOptions = Property.of(dockerOptions);
159 | this.docker = dockerOptions;
160 | }
161 |
162 | @Override
163 | protected DockerOptions injectDefaults(RunContext runContext, DockerOptions original) throws IllegalVariableEvaluationException {
164 | var builder = original.toBuilder();
165 | if (original.getImage() == null) {
166 | builder.image(runContext.render(this.getContainerImage()).as(String.class).orElse(DEFAULT_IMAGE));
167 | }
168 |
169 | return builder.build();
170 | }
171 |
172 | @Override
173 | public ScriptOutput run(RunContext runContext) throws Exception {
174 | CommandsWrapper commandsWrapper = this.commands(runContext);
175 | Path workingDirectory = commandsWrapper.getWorkingDirectory();
176 |
177 | List<String> commands = this.virtualEnvCommand(runContext, workingDirectory, runContext.render(this.requirements).asList(String.class));
178 |
179 | // write profile
180 | File profileDir = workingDirectory.resolve(".profile").toFile();
181 | // noinspection ResultOfMethodCallIgnored
182 | profileDir.mkdirs();
183 |
184 | String profilesContent = profilesContent(runContext, runContext.render(this.profiles).as(Object.class).orElseThrow());
185 | FileUtils.writeStringToFile(
186 | new File(profileDir, "profiles.yml"),
187 | profilesContent,
188 | StandardCharsets.UTF_8
189 | );
190 |
191 | PluginUtilsService.createInputFiles(
192 | runContext,
193 | workingDirectory,
194 | this.finalInputFiles(runContext),
195 | new HashMap<>()
196 | );
197 |
198 | return commandsWrapper
199 | .addEnv(Map.of(
200 | "PYTHONUNBUFFERED", "true",
201 | "PIP_ROOT_USER_ACTION", "ignore"
202 | ))
203 | .withInterpreter(this.interpreter)
204 | .withBeforeCommands(this.beforeCommands)
205 | .withBeforeCommandsWithOptions(true)
206 | .withCommands(Property.of(commands))
207 | .run();
208 | }
209 |
210 | private List<String> virtualEnvCommand(RunContext runContext, Path workingDirectory, List<String> requirements) throws IllegalVariableEvaluationException {
211 | List<String> renderer = new ArrayList<>();
212 |
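// Illustrative result of the first command, assuming the default pythonPath: "python -m venv --system-site-packages <workingDirectory> > /dev/null"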
213 | renderer.add(runContext.render(this.pythonPath) + " -m venv --system-site-packages " + workingDirectory + " > /dev/null");
214 |
215 | if (requirements != null) {
216 | renderer.addAll(Arrays.asList(
217 | "./bin/pip install pip --upgrade > /dev/null",
218 | "./bin/pip install " + runContext.render(String.join(" ", requirements) + " > /dev/null")));
219 | }
220 |
221 | return renderer;
222 | }
223 |
224 | private String profilesContent(RunContext runContext, Object profiles) throws IllegalVariableEvaluationException, JsonProcessingException {
225 | if (profiles instanceof String content) {
226 | return content;
227 | }
228 | if (profiles instanceof Map contentMap) {
229 | return MAPPER.writeValueAsString(runContext.render(contentMap));
230 | }
231 | throw new IllegalArgumentException("The `profiles` attribute must be a String or a Map");
232 | }
233 |
234 | private Map<String, String> finalInputFiles(RunContext runContext) throws IOException, IllegalVariableEvaluationException {
235 | return runContext.render(this.inputFiles).as(Object.class).isPresent() ?
236 | new HashMap<>(PluginUtilsService.transformInputFiles(runContext, runContext.render(this.inputFiles).as(Object.class).orElseThrow())) :
237 | new HashMap<>();
238 | }
239 | }
240 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/Snapshot.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.annotations.Example;
4 | import io.kestra.core.models.annotations.Plugin;
5 | import io.swagger.v3.oas.annotations.media.Schema;
6 | import lombok.EqualsAndHashCode;
7 | import lombok.Getter;
8 | import lombok.NoArgsConstructor;
9 | import lombok.ToString;
10 | import lombok.experimental.SuperBuilder;
11 |
12 | @SuperBuilder
13 | @ToString
14 | @EqualsAndHashCode
15 | @Getter
16 | @NoArgsConstructor
17 | @Schema(
18 | title = "Invoke dbt snapshot command."
19 | )
20 | @Plugin(
21 | examples = {
22 | @Example(
23 | full = true,
24 | title = "Invoke dbt `snapshot` command.",
25 | code = """
26 | id: dbt_snapshot
27 | namespace: company.team
28 |
29 | tasks:
30 | - id: working_directory
31 | type: io.kestra.plugin.core.flow.WorkingDirectory
32 | tasks:
33 | - id: clone_repository
34 | type: io.kestra.plugin.git.Clone
35 | url: https://github.com/kestra-io/dbt-demo
36 | branch: main
37 |
38 | - id: dbt_snapshot
39 | type: io.kestra.plugin.dbt.cli.Snapshot
40 | taskRunner:
41 | type: io.kestra.plugin.scripts.runner.docker.Docker
42 | dbtPath: /usr/local/bin/dbt
43 | containerImage: ghcr.io/kestra-io/dbt-duckdb
44 | profiles: |
45 | jaffle_shop:
46 | outputs:
47 | dev:
48 | type: duckdb
49 | path: ':memory:'
50 | extensions:
51 | - parquet
52 | target: dev
53 | """
54 | )
55 | }
56 | )
57 | public class Snapshot extends AbstractRun {
58 | @Override
59 | protected String dbtCommand() {
60 | return "snapshot";
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/Test.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.annotations.Example;
4 | import io.kestra.core.models.annotations.Plugin;
5 | import io.swagger.v3.oas.annotations.media.Schema;
6 | import lombok.EqualsAndHashCode;
7 | import lombok.Getter;
8 | import lombok.NoArgsConstructor;
9 | import lombok.ToString;
10 | import lombok.experimental.SuperBuilder;
11 |
12 | @SuperBuilder
13 | @ToString
14 | @EqualsAndHashCode
15 | @Getter
16 | @NoArgsConstructor
17 | @Schema(
18 | title = "Invoke dbt test command."
19 | )
20 | @Plugin(
21 | examples = {
22 | @Example(
23 | full = true,
24 | title = "Invoke dbt `test` command.",
25 | code = """
26 | id: dbt_test
27 | namespace: company.team
28 |
29 | tasks:
30 | - id: wdir
31 | type: io.kestra.plugin.core.flow.WorkingDirectory
32 | tasks:
33 | - id: clone_repository
34 | type: io.kestra.plugin.git.Clone
35 | url: https://github.com/kestra-io/dbt-example
36 | branch: main
37 |
38 | - id: dbt_test
39 | type: io.kestra.plugin.dbt.cli.Test
40 | taskRunner:
41 | type: io.kestra.plugin.scripts.runner.docker.Docker
42 | dbtPath: /usr/local/bin/dbt
43 | containerImage: ghcr.io/kestra-io/dbt-duckdb
44 | profiles: |
45 | my_dbt_project:
46 | outputs:
47 | dev:
48 | type: duckdb
49 | path: ':memory:'
50 | target: dev
51 | """
52 | )
53 | }
54 | )
55 | public class Test extends AbstractRun {
56 | @Override
57 | protected String dbtCommand() {
58 | return "test";
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cli/package-info.java:
--------------------------------------------------------------------------------
1 | @PluginSubGroup(
2 | title = "Dbt CLI",
3 | description = "This sub-group of plugins contains tasks for using dbt with its CLI.\n" +
4 | "dbt is a data transformation tool that enables data analysts and engineers to transform, test and document data in the cloud data warehouse.",
5 | categories = PluginSubGroup.PluginCategory.TOOL
6 | )
7 | package io.kestra.plugin.dbt.cli;
8 |
9 | import io.kestra.core.models.annotations.PluginSubGroup;
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/AbstractDbtCloud.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import com.fasterxml.jackson.databind.DeserializationFeature;
4 | import com.fasterxml.jackson.databind.ObjectMapper;
5 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
6 | import io.kestra.core.exceptions.IllegalVariableEvaluationException;
7 | import io.kestra.core.models.property.Property;
8 | import io.kestra.core.models.tasks.Task;
9 | import io.kestra.core.runners.RunContext;
10 | import io.kestra.core.http.HttpRequest;
11 | import io.kestra.core.http.HttpResponse;
12 | import io.kestra.core.http.client.HttpClient;
13 | import io.kestra.core.http.client.HttpClientException;
14 | import io.kestra.core.http.client.configurations.HttpConfiguration;
15 |
16 | import io.swagger.v3.oas.annotations.media.Schema;
17 | import lombok.*;
18 | import lombok.experimental.SuperBuilder;
19 |
20 | import java.io.IOException;
21 |
22 | import jakarta.validation.constraints.NotNull;
23 |
24 | @SuperBuilder
25 | @ToString
26 | @EqualsAndHashCode
27 | @Getter
28 | @NoArgsConstructor
29 | public abstract class AbstractDbtCloud extends Task {
30 | private static final ObjectMapper MAPPER = new ObjectMapper()
31 | .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
32 | .registerModule(new JavaTimeModule());
33 |
34 | @Schema(title = "Base URL to select the tenant.")
35 | @NotNull
36 | @Builder.Default
37 | Property<String> baseUrl = Property.of("https://cloud.getdbt.com");
38 |
39 | @Schema(title = "Numeric ID of the account.")
40 | @NotNull
41 | Property<String> accountId;
42 |
43 | @Schema(title = "API key.")
44 | @NotNull
45 | Property<String> token;
46 |
47 | @Schema(title = "The HTTP client configuration.")
48 | HttpConfiguration options;
49 |
50 | /**
51 | * Perform an HTTP request using Kestra HttpClient.
52 | *
53 | * @param requestBuilder The prepared HTTP request builder.
54 | * @param responseType The expected response type.
55 | * @param <RES> The response class.
56 | * @return HttpResponse of type RES.
57 | */
58 | protected <RES> HttpResponse<RES> request(RunContext runContext, HttpRequest.HttpRequestBuilder requestBuilder, Class<RES> responseType)
59 | throws HttpClientException, IllegalVariableEvaluationException {
60 |
61 | var request = requestBuilder
62 | .addHeader("Authorization", "Bearer " + runContext.render(this.token).as(String.class).orElseThrow())
63 | .addHeader("Content-Type", "application/json")
64 | .build();
65 |
66 | try (HttpClient client = new HttpClient(runContext, options)) {
67 | HttpResponse<String> response = client.request(request, String.class);
68 |
69 | RES parsedResponse = MAPPER.readValue(response.getBody(), responseType);
70 | return HttpResponse.<RES>builder()
71 | .request(request)
72 | .body(parsedResponse)
73 | .headers(response.getHeaders())
74 | .status(response.getStatus())
75 | .build();
76 |
77 | } catch (IOException e) {
78 | throw new RuntimeException("Error executing HTTP request", e);
79 | }
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/CheckStatus.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import io.kestra.core.exceptions.IllegalVariableEvaluationException;
4 | import io.kestra.core.http.HttpRequest;
5 | import io.kestra.core.http.client.HttpClientException;
6 | import io.kestra.core.models.annotations.Example;
7 | import io.kestra.core.models.annotations.Plugin;
8 | import io.kestra.core.models.property.Property;
9 | import io.kestra.core.models.tasks.RunnableTask;
10 | import io.kestra.core.runners.RunContext;
11 | import io.kestra.core.serializers.JacksonMapper;
12 | import io.kestra.core.utils.Await;
13 | import io.kestra.plugin.dbt.ResultParser;
14 | import io.kestra.plugin.dbt.cloud.models.ManifestArtifact;
15 | import io.kestra.plugin.dbt.cloud.models.JobStatusHumanizedEnum;
16 | import io.kestra.plugin.dbt.cloud.models.RunResponse;
17 | import io.kestra.plugin.dbt.cloud.models.Step;
18 |
19 | import io.kestra.plugin.dbt.models.RunResult;
20 | import io.swagger.v3.oas.annotations.media.Schema;
21 | import jakarta.validation.constraints.NotNull;
22 | import lombok.*;
23 | import lombok.experimental.SuperBuilder;
24 | import org.slf4j.Logger;
25 |
26 | import java.io.IOException;
27 | import java.net.URI;
28 | import java.net.URLEncoder;
29 | import java.nio.charset.StandardCharsets;
30 | import java.nio.file.Files;
31 | import java.nio.file.Path;
32 | import java.nio.file.StandardOpenOption;
33 | import java.time.Duration;
34 | import java.util.*;
35 |
36 | import static io.kestra.core.utils.Rethrow.throwSupplier;
37 | import static java.lang.Math.max;
38 |
39 | @SuperBuilder
40 | @ToString
41 | @EqualsAndHashCode
42 | @Getter
43 | @NoArgsConstructor
44 | @Schema(
45 | title = "Check the status of a dbt Cloud job."
46 | )
47 | @Plugin(
48 | examples = {
49 | @Example(
50 | full = true,
51 | code = """
52 | id: dbt_check_status
53 | namespace: company.team
54 |
55 | tasks:
56 | - id: check_status
57 | type: io.kestra.plugin.dbt.cloud.CheckStatus
58 | accountId: "dbt_account"
59 | token: "dbt_token"
60 | runId: "run_id"
61 | """
62 | )
63 | }
64 | )
65 | public class CheckStatus extends AbstractDbtCloud implements RunnableTask<CheckStatus.Output> {
66 | private static final List<JobStatusHumanizedEnum> ENDED_STATUS = List.of(
67 | JobStatusHumanizedEnum.ERROR,
68 | JobStatusHumanizedEnum.CANCELLED,
69 | JobStatusHumanizedEnum.SUCCESS
70 | );
71 |
72 | @Schema(
73 | title = "The job run ID to check the status for."
74 | )
75 | @NotNull
76 | Property<String> runId;
77 |
78 |
79 | @Schema(
80 | title = "Specify how often the task should poll for the job status."
81 | )
82 | @Builder.Default
83 | Property<Duration> pollFrequency = Property.of(Duration.ofSeconds(5));
84 |
85 | @Schema(
86 | title = "The maximum duration the task should poll for the job completion."
87 | )
88 | @Builder.Default
89 | Property<Duration> maxDuration = Property.of(Duration.ofMinutes(60));
90 |
91 | @Builder.Default
92 | @Schema(
93 | title = "Parse run result.",
94 | description = "Whether to parse the run result to display the duration of each dbt node in the Gantt view."
95 | )
96 | protected Property<Boolean> parseRunResults = Property.of(Boolean.TRUE);
97 |
98 | @Builder.Default
99 | @Getter(AccessLevel.NONE)
100 | private transient List<JobStatusHumanizedEnum> loggedStatus = new ArrayList<>();
101 |
102 | @Builder.Default
103 | @Getter(AccessLevel.NONE)
104 | private transient Map<Long, Long> loggedSteps = new HashMap<>();
105 |
106 | @Override
107 | public CheckStatus.Output run(RunContext runContext) throws Exception {
108 | Logger logger = runContext.logger();
109 |
110 | // Ensure the rendered runId is a valid numeric identifier
111 | Long runIdRendered = Long.parseLong(runContext.render(this.runId).as(String.class).orElseThrow());
112 |
113 | // wait for end
114 | RunResponse finalRunResponse = Await.until(
115 | throwSupplier(() -> {
116 | Optional<RunResponse> fetchRunResponse = fetchRunResponse(
117 | runContext,
118 | runIdRendered,
119 | false
120 | );
121 |
122 | if (fetchRunResponse.isPresent()) {
123 | logSteps(logger, fetchRunResponse.get());
124 |
125 | // rely on the truncated debug logs being present on every step to be sure the run has fully ended
126 | boolean allLogs = fetchRunResponse.get()
127 | .getData()
128 | .getRunSteps()
129 | .stream()
130 | .filter(step -> step.getTruncatedDebugLogs() != null)
131 | .count() ==
132 | fetchRunResponse.get()
133 | .getData()
134 | .getRunSteps().size();
135 |
136 | // ended
137 | if (ENDED_STATUS.contains(fetchRunResponse.get().getData().getStatusHumanized()) && allLogs) {
138 | return fetchRunResponse.get();
139 | }
140 | }
141 |
142 | return null;
143 | }),
144 | runContext.render(this.pollFrequency).as(Duration.class).orElseThrow(),
145 | runContext.render(this.maxDuration).as(Duration.class).orElseThrow()
146 | );
147 |
148 | // final response
149 | logSteps(logger, finalRunResponse);
150 |
151 | if (!finalRunResponse.getData().getStatusHumanized().equals(JobStatusHumanizedEnum.SUCCESS)) {
152 | throw new Exception("Failed run with status '" + finalRunResponse.getData().getStatusHumanized() +
153 | "' after " + finalRunResponse.getData().getDurationHumanized() + ": " + finalRunResponse
154 | );
155 | }
156 |
157 | Path runResultsArtifact = downloadArtifacts(runContext, runIdRendered, "run_results.json", RunResult.class);
158 | Path manifestArtifact = downloadArtifacts(runContext, runIdRendered, "manifest.json", ManifestArtifact.class);
159 |
160 |
161 | URI runResultsUri = null;
162 |
163 | if (Boolean.TRUE.equals(runContext.render(this.parseRunResults).as(Boolean.class).orElse(false))) {
164 | runResultsUri = ResultParser.parseRunResult(runContext, runResultsArtifact.toFile());
165 | } else {
166 | if (Files.exists(runResultsArtifact)) {
167 | runResultsUri = runContext.storage().putFile(runResultsArtifact.toFile());
168 | }
169 | }
170 |
171 | return Output.builder()
172 | .runResults(runResultsUri)
173 | .manifest(manifestArtifact.toFile().exists() ? runContext.storage().putFile(manifestArtifact.toFile()) : null)
174 | .build();
175 | }
176 |
177 | private void logSteps(Logger logger, RunResponse runResponse) {
178 | // status changed
179 | if (!loggedStatus.contains(runResponse.getData().getStatusHumanized())) {
180 | logger.debug("Status changed to '{}' after {}",
181 | runResponse.getData().getStatusHumanized(),
182 | runResponse.getData().getDurationHumanized()
183 | );
184 | loggedStatus.add(runResponse.getData().getStatusHumanized());
185 | }
186 |
187 | // log steps
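// only log content appended since the previous poll is emitted; the number of characters already logged is tracked per step id in loggedSteps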
188 | for (Step step : runResponse.getData().getRunSteps()) {
189 | if (!step.getLogs().isEmpty()){
190 | if (!loggedSteps.containsKey(step.getId())){
191 | loggedSteps.put(step.getId(), 0L);
192 | }
193 |
194 | if (step.getLogs().length() > loggedSteps.get(step.getId())) {
195 | for (String s : step.getLogs().substring((int) max(loggedSteps.get(step.getId()) -1L, 0L)).split("\n")) {
196 | logger.info("[Step {}]: {}", step.getName(), s);
197 | }
198 | loggedSteps.put(step.getId(), (long) step.getLogs().length());
199 | }
200 | }
201 | }
202 | }
203 |
204 | private Optional<RunResponse> fetchRunResponse(RunContext runContext, Long id, Boolean debug) throws IllegalVariableEvaluationException, HttpClientException {
205 | HttpRequest.HttpRequestBuilder requestBuilder = HttpRequest.builder()
206 | .uri(URI.create(runContext.render(this.baseUrl).as(String.class).orElseThrow() + "/api/v2/accounts/" + runContext.render(this.accountId).as(String.class).orElseThrow() + "/runs/" + id +
207 | "/?include_related=" + URLEncoder.encode("[\"trigger\",\"job\"," + (debug ? "\"debug_logs\"" : "") + ",\"run_steps\", \"environment\"]", StandardCharsets.UTF_8)))
208 | .method("GET");
209 |
210 | return Optional.ofNullable(this.request(runContext, requestBuilder, RunResponse.class).getBody());
211 | }
212 |
213 | private <T> Path downloadArtifacts(RunContext runContext, Long runId, String path, Class<T> responseType)
214 | throws IllegalVariableEvaluationException, IOException, HttpClientException {
215 | HttpRequest.HttpRequestBuilder requestBuilder = HttpRequest.builder()
216 | .uri(URI.create(runContext.render(this.baseUrl).as(String.class).orElseThrow()
217 | + "/api/v2/accounts/" + runContext.render(this.accountId).as(String.class).orElseThrow()
218 | + "/runs/" + runId + "/artifacts/" + path))
219 | .method("GET");
220 |
221 | T artifact = this.request(runContext, requestBuilder, responseType).getBody();
222 |
223 | String artifactJson = JacksonMapper.ofJson().writeValueAsString(artifact);
224 |
225 | Path tempFile = runContext.workingDir().createTempFile(".json");
226 | Files.writeString(tempFile, artifactJson, StandardOpenOption.TRUNCATE_EXISTING);
227 |
228 | return tempFile;
229 | }
230 |
231 | @Builder
232 | @Getter
233 | public static class Output implements io.kestra.core.models.tasks.Output {
234 | @Schema(
235 | title = "URI of the run result"
236 | )
237 | private URI runResults;
238 |
239 | @Schema(
240 | title = "URI of a manifest"
241 | )
242 | private URI manifest;
243 | }
244 | }
245 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/JobScheduleDate.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | import java.util.List;
9 | import jakarta.validation.Valid;
10 |
11 | @Value
12 | @Jacksonized
13 | @SuperBuilder
14 | public class JobScheduleDate {
15 | @JsonProperty("type")
16 | JobScheduleDateType type;
17 |
18 | @JsonProperty("days")
19 | @Valid
20 |     List<Integer> days;
21 |
22 | @JsonProperty("cron")
23 | String cron;
24 | }
25 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/JobScheduleDateType.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import com.fasterxml.jackson.annotation.JsonCreator;
4 | import com.fasterxml.jackson.annotation.JsonValue;
5 |
6 | public enum JobScheduleDateType {
7 | EVERY_DAY("every_day"),
8 |
9 | DAYS_OF_WEEK("days_of_week"),
10 |
11 | CUSTOM_CRON("custom_cron");
12 |
13 | private String value;
14 |
15 | JobScheduleDateType(String value) {
16 | this.value = value;
17 | }
18 |
19 | @Override
20 | @JsonValue
21 | public String toString() {
22 | return String.valueOf(value);
23 | }
24 |
25 | @JsonCreator
26 | public static JobScheduleDateType fromValue(String text) {
27 | for (JobScheduleDateType b : JobScheduleDateType.values()) {
28 | if (String.valueOf(b.value).equals(text)) {
29 | return b;
30 | }
31 | }
32 | return null;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/JobScheduleTime.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | import java.util.List;
9 | import jakarta.validation.Valid;
10 |
11 | @Value
12 | @Jacksonized
13 | @SuperBuilder
14 | public class JobScheduleTime {
15 | @JsonProperty("type")
16 | JobScheduleTimeType type;
17 |
18 | @JsonProperty("interval")
19 | Integer interval;
20 |
21 | @JsonProperty("hours")
22 | @Valid
23 |     List<Integer> hours;
24 | }
25 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/JobScheduleTimeType.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import com.fasterxml.jackson.annotation.JsonCreator;
4 | import com.fasterxml.jackson.annotation.JsonValue;
5 |
6 | public enum JobScheduleTimeType {
7 | EVERY_HOUR("every_hour"),
8 |
9 | AT_EXACT_HOURS("at_exact_hours");
10 |
11 | private String value;
12 |
13 | JobScheduleTimeType(String value) {
14 | this.value = value;
15 | }
16 |
17 | @Override
18 | @JsonValue
19 | public String toString() {
20 | return String.valueOf(value);
21 | }
22 |
23 | @JsonCreator
24 | public static JobScheduleTimeType fromValue(String text) {
25 | for (JobScheduleTimeType b : JobScheduleTimeType.values()) {
26 | if (String.valueOf(b.value).equals(text)) {
27 | return b;
28 | }
29 | }
30 | return null;
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/TriggerRun.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import io.kestra.core.http.HttpRequest;
4 | import io.kestra.core.http.HttpResponse;
5 | import io.kestra.core.models.annotations.Example;
6 | import io.kestra.core.models.annotations.Plugin;
7 | import io.kestra.core.models.property.Property;
8 | import io.kestra.core.models.tasks.RunnableTask;
9 | import io.kestra.core.runners.RunContext;
10 | import io.kestra.plugin.dbt.cloud.models.RunResponse;
11 | import io.swagger.v3.oas.annotations.media.Schema;
12 | import jakarta.validation.constraints.NotNull;
13 | import lombok.Builder;
14 | import lombok.EqualsAndHashCode;
15 | import lombok.Getter;
16 | import lombok.NoArgsConstructor;
17 | import lombok.ToString;
18 | import lombok.experimental.SuperBuilder;
19 | import org.slf4j.Logger;
20 |
21 | import java.net.URI;
22 | import java.time.Duration;
23 | import java.util.HashMap;
24 | import java.util.List;
25 | import java.util.Map;
26 |
27 | @SuperBuilder
28 | @ToString
29 | @EqualsAndHashCode
30 | @Getter
31 | @NoArgsConstructor
32 | @Schema(
33 | title = "Trigger a dbt cloud job to run.",
34 | description = "Use this task to kick off a run for a job. When this endpoint returns a successful response, a " +
35 | "new run will be enqueued for the account. If you activate the `wait` option, it will wait for the job to be ended " +
36 | "and will display all the log and dynamic tasks."
37 | )
38 | @Plugin(
39 | examples = {
40 | @Example(
41 | full = true,
42 | code = """
43 | id: dbt_trigger_job_run
44 | namespace: company.team
45 |
46 | tasks:
47 | - id: trigger_run
48 | type: io.kestra.plugin.dbt.cloud.TriggerRun
49 | accountId: "dbt_account"
50 | token: "dbt_token"
51 | jobId: "job_id"
52 | """
53 | )
54 | }
55 | )
56 | public class TriggerRun extends AbstractDbtCloud implements RunnableTask<TriggerRun.Output> {
57 |
58 | @Schema(
59 | title = "Numeric ID of the job."
60 | )
61 | @NotNull
62 |     Property<String> jobId;
63 |
64 | @Schema(
65 | title = "A text description of the reason for running this job."
66 | )
67 | @Builder.Default
68 | @NotNull
69 |     Property<String> cause = Property.of("Triggered by Kestra.");
70 |
71 | @Schema(
72 | title = "The git SHA to check out before running this job."
73 | )
74 |     Property<String> gitSha;
75 |
76 | @Schema(
77 | title = "The git branch to check out before running this job."
78 | )
79 |     Property<String> gitBranch;
80 |
81 | @Schema(
82 | title = "Override the destination schema in the configured target for this job."
83 | )
84 |     Property<String> schemaOverride;
85 |
86 | @Schema(
87 | title = "Override the version of dbt used to run this job."
88 | )
89 |     Property<String> dbtVersionOverride;
90 |
91 | @Schema(
92 | title = "Override the number of threads used to run this job."
93 | )
94 |     Property<String> threadsOverride;
95 |
96 | @Schema(
97 | title = "Override the target.name context variable used when running this job."
98 | )
99 |     Property<String> targetNameOverride;
100 |
101 | @Schema(
102 | title = "Override whether or not this job generates docs."
103 | )
104 |     Property<Boolean> generateDocsOverride;
105 |
106 | @Schema(
107 | title = "Override the timeout in seconds for this job."
108 | )
109 |     Property<Integer> timeoutSecondsOverride;
110 |
111 | @Schema(
112 | title = "Override the list of steps for this job."
113 | )
114 |     Property<List<String>> stepsOverride;
115 |
116 | @Schema(
117 | title = "Wait for the end of the run.",
118 | description = "Allowing to capture job status & logs."
119 | )
120 | @Builder.Default
121 |     Property<Boolean> wait = Property.of(Boolean.TRUE);
122 |
123 | @Schema(
124 | title = "Specify frequency for job state check API calls."
125 | )
126 | @Builder.Default
127 |     Property<Duration> pollFrequency = Property.of(Duration.ofSeconds(5));
128 |
129 | @Schema(
130 | title = "The maximum total wait duration."
131 | )
132 | @Builder.Default
133 |     Property<Duration> maxDuration = Property.of(Duration.ofMinutes(60));
134 |
135 | @Builder.Default
136 | @Schema(
137 | title = "Parse run result.",
138 | description = "Parsing run result to display duration of each task inside dbt."
139 | )
140 |     protected Property<Boolean> parseRunResults = Property.of(Boolean.TRUE);
141 |
142 | @Override
143 | public TriggerRun.Output run(RunContext runContext) throws Exception {
144 | Logger logger = runContext.logger();
145 |
146 | // trigger
147 |         Map<String, Object> body = new HashMap<>();
148 | body.put("cause", runContext.render(this.cause).as(String.class).orElseThrow());
149 |
150 | runContext.render(this.gitSha).as(String.class).ifPresent(sha -> body.put("git_sha", sha));
151 | runContext.render(this.gitBranch).as(String.class).ifPresent(branch -> body.put("git_branch", branch));
152 | runContext.render(this.schemaOverride).as(String.class).ifPresent(schema -> body.put("schema_override", schema));
153 | runContext.render(this.dbtVersionOverride).as(String.class).ifPresent(version -> body.put("dbt_version_override", version));
154 | runContext.render(this.threadsOverride).as(String.class).ifPresent(thread -> body.put("threads_override", thread));
155 | runContext.render(this.targetNameOverride).as(String.class).ifPresent(target -> body.put("target_name_override", target));
156 | runContext.render(this.generateDocsOverride).as(Boolean.class).ifPresent(doc -> body.put("generate_docs_override", doc));
157 | runContext.render(this.timeoutSecondsOverride).as(Integer.class).ifPresent(timeout -> body.put("timeout_seconds_override", timeout));
158 |
159 | if (!runContext.render(this.stepsOverride).asList(String.class).isEmpty()) {
160 | body.put("steps_override", runContext.render(this.stepsOverride).asList(String.class));
161 | }
162 |
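        // POST /api/v2/accounts/{accountId}/jobs/{jobId}/run/ enqueues a new run for the job.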
163 | HttpRequest.HttpRequestBuilder requestBuilder = HttpRequest.builder()
164 | .uri(URI.create(runContext.render(this.baseUrl).as(String.class).orElseThrow() + "/api/v2/accounts/" + runContext.render(this.accountId).as(String.class).orElseThrow() +
165 | "/jobs/" + runContext.render(this.jobId).as(String.class).orElseThrow() + "/run/"))
166 | .method("POST")
167 | .body(HttpRequest.JsonRequestBody.builder()
168 | .content(body)
169 | .build());
170 |
171 |         HttpResponse<RunResponse> triggerResponse = this.request(runContext, requestBuilder, RunResponse.class);
172 |
173 | RunResponse triggerRunResponse = triggerResponse.getBody();
174 | if (triggerRunResponse == null) {
175 | throw new IllegalStateException("Missing body on trigger");
176 | }
177 |
178 | logger.info("Job status {} with response: {}", triggerResponse.getStatus(), triggerRunResponse);
179 | Long runId = triggerRunResponse.getData().getId();
180 |
181 | if (Boolean.FALSE.equals(runContext.render(this.wait).as(Boolean.class).orElse(Boolean.TRUE))) {
182 | return Output.builder()
183 | .runId(runId)
184 | .build();
185 | }
186 |
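        // With `wait` enabled, delegate polling to the CheckStatus task so the run status, step logs,
        // and (optionally parsed) run results are captured using the same credentials and polling settings.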
187 | CheckStatus checkStatusJob = CheckStatus.builder()
188 | .runId(Property.of(runId.toString()))
189 | .baseUrl(getBaseUrl())
190 | .token(getToken())
191 | .accountId(getAccountId())
192 | .pollFrequency(getPollFrequency())
193 | .maxDuration(getMaxDuration())
194 | .parseRunResults(getParseRunResults())
195 | .build();
196 |
197 | CheckStatus.Output runOutput = checkStatusJob.run(runContext);
198 |
199 | return Output.builder()
200 | .runId(runId)
201 | .runResults(runOutput.getRunResults())
202 | .manifest(runOutput.getManifest())
203 | .build();
204 | }
205 |
206 | @Builder
207 | @Getter
208 | public static class Output implements io.kestra.core.models.tasks.Output {
209 | @Schema(
210 | title = "The run ID."
211 | )
212 | private Long runId;
213 |
214 | @Schema(
215 | title = "URI of a run result."
216 | )
217 | private URI runResults;
218 |
219 | @Schema(
220 | title = "URI of a manifest."
221 | )
222 | private URI manifest;
223 | }
224 | }
225 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/Environment.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | @Value
9 | @Jacksonized
10 | @SuperBuilder
11 | public class Environment {
12 | @JsonProperty("id")
13 | Long id;
14 |
15 | @JsonProperty("account_id")
16 | Long accountId;
17 |
18 | @JsonProperty("deploy_key_id")
19 | Long deployKeyId;
20 |
21 | @JsonProperty("created_by_id")
22 | Long createdById;
23 |
24 | @JsonProperty("repository_id")
25 | Long repositoryId;
26 |
27 | @JsonProperty("name")
28 | String name;
29 |
30 | @JsonProperty("dbt_version")
31 | String dbtVersion;
32 |
33 | @JsonProperty("use_custom_branch")
34 | Boolean useCustomBranch;
35 |
36 | @JsonProperty("custom_branch")
37 | String customBranch;
38 |
39 | @JsonProperty("supports_docs")
40 | Boolean supportsDocs;
41 |
42 | @JsonProperty("state")
43 | Integer state;
44 | }
45 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/Job.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | import java.util.ArrayList;
9 | import java.util.List;
10 | import jakarta.validation.Valid;
11 |
12 | @Value
13 | @Jacksonized
14 | @SuperBuilder
15 | public class Job {
16 | @JsonProperty("id")
17 | Long id;
18 |
19 | @JsonProperty("account_id")
20 | Long accountId;
21 |
22 | @JsonProperty("project_id")
23 | Long projectId;
24 |
25 | @JsonProperty("environment_id")
26 | Long environmentId;
27 |
28 | @JsonProperty("name")
29 | String name;
30 |
31 | @JsonProperty("dbt_version")
32 | String dbtVersion;
33 |
34 | @JsonProperty("triggers")
35 | JobTriggers triggers;
36 |
37 | @JsonProperty("execute_steps")
38 | @Valid
39 |     List<String> executeSteps = new ArrayList<>();
40 |
41 | @JsonProperty("settings")
42 | JobSettings settings;
43 |
44 | @JsonProperty("state")
45 | Integer state;
46 |
47 | @JsonProperty("generate_docs")
48 | Boolean generateDocs;
49 |
50 | @JsonProperty("schedule")
51 | JobSchedule schedule;
52 | }
53 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/JobSchedule.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | @Value
9 | @Jacksonized
10 | @SuperBuilder
11 | public class JobSchedule {
12 | @JsonProperty("cron")
13 | String cron;
14 |
15 | @JsonProperty("date")
16 | String date;
17 |
18 | @JsonProperty("time")
19 | String time;
20 | }
21 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/JobSettings.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | @Value
9 | @Jacksonized
10 | @SuperBuilder
11 | public class JobSettings {
12 | @JsonProperty("threads")
13 | Integer threads;
14 |
15 | @JsonProperty("target_name")
16 | String targetName;
17 | }
18 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/JobStatus.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonCreator;
4 | import com.fasterxml.jackson.annotation.JsonValue;
5 | import io.kestra.core.models.flows.State;
6 |
7 | public enum JobStatus {
8 | NUMBER_1(1), // Queued
9 |
10 | NUMBER_2(2), // Starting
11 |
12 | NUMBER_3(3), // Running
13 |
14 | NUMBER_10(10), // Success
15 |
16 | NUMBER_20(20), // Error
17 |
18 | NUMBER_30(30); // Cancelled
19 |
20 | private final Integer value;
21 |
22 | JobStatus(Integer value) {
23 | this.value = value;
24 | }
25 |
26 | @Override
27 | @JsonValue
28 | public String toString() {
29 | return String.valueOf(value);
30 | }
31 |
32 | @JsonCreator
33 | public static JobStatus fromValue(String text) {
34 | for (JobStatus b : JobStatus.values()) {
35 | if (String.valueOf(b.value).equals(text)) {
36 | return b;
37 | }
38 | }
39 | return null;
40 | }
41 |
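    // Maps dbt Cloud run status codes to Kestra execution states:
    // 1 (Queued) and 2 (Starting) -> CREATED, 3 (Running) -> RUNNING, 10 -> SUCCESS, 20 -> FAILED, 30 -> KILLED.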
42 | public State.Type state() {
43 | switch (this.value) {
44 | case 1:
45 | case 2:
46 | return State.Type.CREATED;
47 | case 3:
48 | return State.Type.RUNNING;
49 | case 10:
50 | return State.Type.SUCCESS;
51 | case 20:
52 | return State.Type.FAILED;
53 | case 30:
54 | return State.Type.KILLED;
55 | }
56 |
57 | throw new IllegalStateException("No suitable status for '" + this.value + "'");
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/JobStatusHumanizedEnum.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonCreator;
4 | import com.fasterxml.jackson.annotation.JsonValue;
5 |
6 | public enum JobStatusHumanizedEnum {
7 | QUEUED("Queued"),
8 |
9 | STARTING("Starting"),
10 |
11 | RUNNING("Running"),
12 |
13 | SUCCESS("Success"),
14 |
15 | ERROR("Error"),
16 |
17 | CANCELLED("Cancelled");
18 |
19 | private String value;
20 |
21 | JobStatusHumanizedEnum(String value) {
22 | this.value = value;
23 | }
24 |
25 | @Override
26 | @JsonValue
27 | public String toString() {
28 | return String.valueOf(value);
29 | }
30 |
31 | @JsonCreator
32 | public static JobStatusHumanizedEnum fromValue(String text) {
33 | for (JobStatusHumanizedEnum b : JobStatusHumanizedEnum.values()) {
34 | if (String.valueOf(b.value).equals(text)) {
35 | return b;
36 | }
37 | }
38 | return null;
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/JobTriggers.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | @Value
9 | @Jacksonized
10 | @SuperBuilder
11 | public class JobTriggers {
12 | @JsonProperty("github_webhook")
13 | Boolean githubWebhook;
14 |
15 | @JsonProperty("git_provider_webhook")
16 | Boolean gitProviderWebhook;
17 |
18 | @JsonProperty("schedule")
19 | Boolean schedule;
20 |
21 | @JsonProperty("custom_branch_only")
22 | Boolean customBranchOnly;
23 | }
24 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/LogArchiveType.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonCreator;
4 | import com.fasterxml.jackson.annotation.JsonValue;
5 |
6 | /**
7 | * Gets or Sets logArchiveType
8 | */
9 | public enum LogArchiveType {
10 | DB_FLUSHED("db_flushed"),
11 |
12 | SCRIBE("scribe");
13 |
14 | private String value;
15 |
16 | LogArchiveType(String value) {
17 | this.value = value;
18 | }
19 |
20 | @Override
21 | @JsonValue
22 | public String toString() {
23 | return String.valueOf(value);
24 | }
25 |
26 | @JsonCreator
27 | public static LogArchiveType fromValue(String text) {
28 | for (LogArchiveType b : LogArchiveType.values()) {
29 | if (String.valueOf(b.value).equals(text)) {
30 | return b;
31 | }
32 | }
33 | return null;
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/LogLocation.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonCreator;
4 | import com.fasterxml.jackson.annotation.JsonValue;
5 |
6 | /**
7 | * Gets or Sets logLocation
8 | */
9 | public enum LogLocation {
10 | LEGACY("legacy"),
11 |
12 | DB("db"),
13 |
14 | S3("s3"),
15 |
16 | EMPTY("empty");
17 |
18 | private String value;
19 |
20 | LogLocation(String value) {
21 | this.value = value;
22 | }
23 |
24 | @Override
25 | @JsonValue
26 | public String toString() {
27 | return String.valueOf(value);
28 | }
29 |
30 | @JsonCreator
31 | public static LogLocation fromValue(String text) {
32 | for (LogLocation b : LogLocation.values()) {
33 | if (String.valueOf(b.value).equals(text)) {
34 | return b;
35 | }
36 | }
37 | return null;
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/ManifestArtifact.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | import java.util.Map;
9 | import java.util.List;
10 |
11 | @Value
12 | @Jacksonized
13 | @SuperBuilder
14 | public class ManifestArtifact {
15 |     Map<String, Object> metadata;
16 |
17 |     Map<String, Map<String, Object>> nodes;
18 |
19 |     Map<String, Map<String, Object>> sources;
20 |
21 |     Map<String, Map<String, Object>> macros;
22 |
23 |     Map<String, Map<String, Object>> docs;
24 |
25 |     Map<String, Map<String, Object>> exposures;
26 |
27 |     Map<String, Map<String, Object>> metrics;
28 |
29 |     Map<String, Map<String, Object>> groups;
30 |
31 |     Map<String, Object> selectors;
32 |
33 |     @JsonProperty("disabled")
34 |     Map<String, List<Map<String, Object>>> disabled;
35 |
36 |     @JsonProperty("parent_map")
37 |     Map<String, List<String>> parentMap;
38 |
39 |     @JsonProperty("child_map")
40 |     Map<String, List<String>> childMap;
41 |
42 |     @JsonProperty("group_map")
43 |     Map<String, List<String>> groupMap;
44 |
45 |     @JsonProperty("saved_queries")
46 |     Map<String, Object> savedQueries;
47 |
48 |     @JsonProperty("semantic_models")
49 |     Map<String, Map<String, Object>> semanticModels;
50 |
51 |     @JsonProperty("unit_tests")
52 |     Map<String, Map<String, Object>> unitTests;
53 | }
54 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/Run.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonFormat;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import lombok.Value;
6 | import lombok.experimental.SuperBuilder;
7 | import lombok.extern.jackson.Jacksonized;
8 |
9 | import java.time.ZonedDateTime;
10 | import java.util.List;
11 |
12 | @Value
13 | @Jacksonized
14 | @SuperBuilder
15 | public class Run {
16 | @JsonProperty("id")
17 | Long id;
18 |
19 | @JsonProperty("trigger_id")
20 | Long triggerId;
21 |
22 | @JsonProperty("account_id")
23 | Long accountId;
24 |
25 | @JsonProperty("project_id")
26 | Long projectId;
27 |
28 | @JsonProperty("job_id")
29 | Long jobId;
30 |
31 | @JsonProperty("job_definition_id")
32 | Long jobDefinitionId;
33 |
34 | @JsonProperty("status")
35 | JobStatus status;
36 |
37 | @JsonProperty("git_branch")
38 | String gitBranch;
39 |
40 | @JsonProperty("git_sha")
41 | String gitSha;
42 |
43 | @JsonProperty("status_message")
44 | String statusMessage;
45 |
46 | @JsonProperty("dbt_version")
47 | String dbtVersion;
48 |
49 | @JsonProperty("created_at")
50 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
51 | ZonedDateTime createdAt;
52 |
53 | @JsonProperty("updated_at")
54 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
55 | ZonedDateTime updatedAt;
56 |
57 | @JsonProperty("dequeued_at")
58 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
59 | ZonedDateTime dequeuedAt;
60 |
61 | @JsonProperty("started_at")
62 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
63 | ZonedDateTime startedAt;
64 |
65 | @JsonProperty("finished_at")
66 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
67 | ZonedDateTime finishedAt;
68 |
69 | @JsonProperty("last_checked_at")
70 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
71 | ZonedDateTime lastCheckedAt;
72 |
73 | @JsonProperty("last_heartbeat_at")
74 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
75 | ZonedDateTime lastHeartbeatAt;
76 |
77 | @JsonProperty("should_start_at")
78 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
79 | ZonedDateTime shouldStartAt;
80 |
81 | @JsonProperty("owner_thread_id")
82 | String ownerThreadId;
83 |
84 | @JsonProperty("executed_by_thread_id")
85 | String executedByThreadId;
86 |
87 | @JsonProperty("deferring_run_id")
88 | String deferringRunId;
89 |
90 | @JsonProperty("artifacts_saved")
91 | Boolean artifactsSaved;
92 |
93 | @JsonProperty("artifact_s3_path")
94 | String artifactS3Path;
95 |
96 | @JsonProperty("has_docs_generated")
97 | Boolean hasDocsGenerated;
98 |
99 | @JsonProperty("has_sources_generated")
100 | Boolean hasSourcesGenerated;
101 |
102 | @JsonProperty("notifications_sent")
103 | Boolean notificationsSent;
104 |
105 | @JsonProperty("scribe_enabled")
106 | Boolean scribeEnabled;
107 |
108 | @JsonProperty("trigger")
109 | Trigger trigger;
110 |
111 | @JsonProperty("job")
112 | Job job;
113 |
114 | @JsonProperty("environment")
115 | Environment environment;
116 |
117 | @JsonProperty("run_steps")
118 |     List<Step> runSteps;
119 |
120 | @JsonProperty("duration")
121 | String duration;
122 |
123 | @JsonProperty("queued_duration")
124 | String queuedDuration;
125 |
126 | @JsonProperty("run_duration")
127 | String runDuration;
128 |
129 | @JsonProperty("duration_humanized")
130 | String durationHumanized;
131 |
132 | @JsonProperty("queued_duration_humanized")
133 | String queuedDurationHumanized;
134 |
135 | @JsonProperty("run_duration_humanized")
136 | String runDurationHumanized;
137 |
138 | @JsonProperty("finished_at_humanized")
139 | String finishedAtHumanized;
140 |
141 | @JsonProperty("status_humanized")
142 | JobStatusHumanizedEnum statusHumanized;
143 |
144 | @JsonProperty("created_at_humanized")
145 | String createdAtHumanized;
146 | }
147 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/RunResponse.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import lombok.Value;
4 | import lombok.experimental.SuperBuilder;
5 | import lombok.extern.jackson.Jacksonized;
6 |
7 | @Value
8 | @Jacksonized
9 | @SuperBuilder
10 | public class RunResponse {
11 | Run data;
12 |
13 | Status status;
14 | }
15 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/Status.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | @Value
9 | @Jacksonized
10 | @SuperBuilder
11 | public class Status {
12 | Integer code;
13 |
14 | @JsonProperty("is_success")
15 | Boolean isSuccess;
16 |
17 | @JsonProperty("user_message")
18 | String userMessage;
19 |
20 | @JsonProperty("developer_message")
21 | String developerMessage;
22 | }
23 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/Step.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonFormat;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import lombok.Value;
6 | import lombok.experimental.SuperBuilder;
7 | import lombok.extern.jackson.Jacksonized;
8 |
9 | import java.time.ZonedDateTime;
10 |
11 | @Value
12 | @Jacksonized
13 | @SuperBuilder
14 | public class Step {
15 | @JsonProperty("id")
16 | Long id;
17 |
18 | @JsonProperty("run_id")
19 | Long runId;
20 |
21 | @JsonProperty("account_id")
22 | Long accountId;
23 |
24 | @JsonProperty("name")
25 | String name;
26 |
27 | @JsonProperty("logs")
28 | String logs;
29 |
30 | @JsonProperty("debug_logs")
31 | String debugLogs;
32 |
33 | @JsonProperty("log_location")
34 | LogLocation logLocation;
35 |
36 | @JsonProperty("log_path")
37 | String logPath;
38 |
39 | @JsonProperty("debug_log_path")
40 | String debugLogPath;
41 |
42 | @JsonProperty("log_archive_type")
43 | LogArchiveType logArchiveType;
44 |
45 | @JsonProperty("truncated_debug_logs")
46 | String truncatedDebugLogs;
47 |
48 | @JsonProperty("created_at")
49 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
50 | ZonedDateTime createdAt;
51 |
52 | @JsonProperty("updated_at")
53 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
54 | ZonedDateTime updatedAt;
55 |
56 | @JsonProperty("started_at")
57 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
58 | ZonedDateTime startedAt;
59 |
60 | @JsonProperty("finished_at")
61 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
62 | ZonedDateTime finishedAt;
63 |
64 | @JsonProperty("status_color")
65 | String statusColor;
66 |
67 | @JsonProperty("status")
68 | JobStatus status;
69 |
70 | @JsonProperty("duration")
71 | String duration;
72 |
73 | @JsonProperty("duration_humanized")
74 | String durationHumanized;
75 | }
76 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/models/Trigger.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonFormat;
4 | import com.fasterxml.jackson.annotation.JsonProperty;
5 | import lombok.Value;
6 | import lombok.experimental.SuperBuilder;
7 | import lombok.extern.jackson.Jacksonized;
8 |
9 | import java.time.ZonedDateTime;
10 | import java.util.List;
11 | import jakarta.validation.Valid;
12 |
13 | @Value
14 | @Jacksonized
15 | @SuperBuilder
16 | public class Trigger {
17 | @JsonProperty("id")
18 | Long id;
19 |
20 | @JsonProperty("cause")
21 | String cause;
22 |
23 | @JsonProperty("job_definition_id")
24 | Long jobDefinitionId;
25 |
26 | @JsonProperty("git_branch")
27 | String gitBranch;
28 |
29 | @JsonProperty("git_sha")
30 | String gitSha;
31 |
32 | @JsonProperty("github_pull_request_id")
33 | Integer githubPullRequestId;
34 |
35 | @JsonProperty("schema_override")
36 | String schemaOverride;
37 |
38 | @JsonProperty("dbt_version_override")
39 | String dbtVersionOverride;
40 |
41 | @JsonProperty("threads_override")
42 | Integer threadsOverride;
43 |
44 | @JsonProperty("target_name_override")
45 | String targetNameOverride;
46 |
47 | @JsonProperty("generate_docs_override")
48 | Boolean generateDocsOverride;
49 |
50 | @JsonProperty("timeout_seconds_override")
51 | Integer timeoutSecondsOverride;
52 |
53 | @JsonProperty("steps_override")
54 | @Valid
55 |     List<String> stepsOverride;
56 |
57 | @JsonProperty("created_at")
58 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd' 'HH:mm:ss[.SSSSSS]XXX")
59 | ZonedDateTime createdAt;
60 | }
61 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/cloud/package-info.java:
--------------------------------------------------------------------------------
1 | @PluginSubGroup(
2 | title = "Dbt Cloud",
3 | description = "This sub-group of plugins contains tasks for using DBT Cloud.\n" +
4 | "dbt is a data transformation tool that enables data analysts and engineers to transform, test and document data in the cloud data warehouse.",
5 | categories = PluginSubGroup.PluginCategory.TOOL
6 | )
7 | package io.kestra.plugin.dbt.cloud;
8 |
9 | import io.kestra.core.models.annotations.PluginSubGroup;
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/models/Manifest.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import lombok.Value;
5 | import lombok.experimental.SuperBuilder;
6 | import lombok.extern.jackson.Jacksonized;
7 |
8 | import java.util.List;
9 | import java.util.Map;
10 |
11 | @Value
12 | @Jacksonized
13 | @SuperBuilder
14 | public class Manifest {
15 |     Map<String, Node> nodes;
16 |
17 | @Value
18 | @Jacksonized
19 | @SuperBuilder
20 | public static class Node {
21 | @JsonProperty("compiled_sql")
22 | String compiledSql;
23 |
24 | @JsonProperty("resource_type")
25 | String resourceType;
26 |
27 | @JsonProperty("depends_on")
28 |         Map<String, List<String>> dependsOn;
29 |
30 | @JsonProperty("unique_id")
31 | String uniqueId;
32 | }
33 | }
34 |
35 |
--------------------------------------------------------------------------------
/src/main/java/io/kestra/plugin/dbt/models/RunResult.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.models;
2 |
3 | import com.fasterxml.jackson.annotation.JsonProperty;
4 | import io.kestra.core.models.flows.State;
5 | import lombok.Value;
6 | import lombok.experimental.SuperBuilder;
7 | import lombok.extern.jackson.Jacksonized;
8 |
9 | import java.time.Instant;
10 | import java.util.List;
11 | import java.util.Map;
12 |
13 | @Value
14 | @Jacksonized
15 | @SuperBuilder
16 | public class RunResult {
17 |     List<Result> results;
18 |
19 | @JsonProperty("elapsed_time")
20 | Double elapsedTime;
21 |
22 |     Map<String, Object> args;
23 |
24 | @Value
25 | @Jacksonized
26 | @SuperBuilder
27 | public static class Result {
28 | String status;
29 |
30 |         List<Timing> timing;
31 |
32 | @JsonProperty("thread_id")
33 | String threadId;
34 |
35 | @JsonProperty("execution_time")
36 | Double executionTime;
37 |
38 | @JsonProperty("adapter_response")
39 |         Map<String, Object> adapterResponse;
40 |
41 | String message;
42 |
43 | Integer failures;
44 |
45 | @JsonProperty("unique_id")
46 | String uniqueId;
47 |
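        // Maps dbt node result statuses to Kestra states: error/fail/runtime_error -> FAILED,
        // warn -> WARNING, success/pass -> SUCCESS; any other status is rejected with an exception.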
48 | public State.Type state() {
49 | switch (this.status) {
50 | case "error":
51 | case "fail":
52 | case "runtime_error":
53 | return State.Type.FAILED;
54 | case "warn":
55 | return State.Type.WARNING;
56 | case "success":
57 | case "pass":
58 | return State.Type.SUCCESS;
59 | }
60 |
61 | throw new IllegalStateException("No suitable status for '" + this.status + "'");
62 | }
63 | }
64 |
65 | @Value
66 | @Jacksonized
67 | @SuperBuilder
68 | public static class Timing {
69 | String name;
70 |
71 | @JsonProperty("started_at")
72 | Instant startedAt;
73 |
74 | @JsonProperty("completed_at")
75 | Instant completedAt;
76 | }
77 | }
78 |
79 |
--------------------------------------------------------------------------------
/src/main/resources/META-INF/services/com.fasterxml.jackson.databind.Module:
--------------------------------------------------------------------------------
1 | com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
2 |
--------------------------------------------------------------------------------
/src/main/resources/META-INF/services/io.micronaut.http.client.HttpClientFactory:
--------------------------------------------------------------------------------
1 | io.micronaut.http.client.netty.NettyHttpClientFactory
--------------------------------------------------------------------------------
/src/main/resources/META-INF/services/io.micronaut.http.client.StreamingHttpClientFactory:
--------------------------------------------------------------------------------
1 | io.micronaut.http.client.netty.NettyHttpClientFactory
--------------------------------------------------------------------------------
/src/main/resources/META-INF/services/io.micronaut.json.JsonMapperSupplier:
--------------------------------------------------------------------------------
1 | io.micronaut.jackson.databind.JacksonDatabindMapperSupplier
--------------------------------------------------------------------------------
/src/main/resources/icons/io.kestra.plugin.dbt.cli.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/main/resources/icons/io.kestra.plugin.dbt.cloud.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/main/resources/icons/plugin-icon.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/test/java/io/kestra/plugin/dbt/cli/BuildTest.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.property.Property;
4 | import io.kestra.core.runners.RunContext;
5 | import io.kestra.core.runners.RunContextFactory;
6 | import io.kestra.core.utils.IdUtils;
7 | import io.kestra.core.utils.TestsUtils;
8 | import io.kestra.plugin.core.runner.Process;
9 | import io.kestra.plugin.scripts.exec.scripts.models.ScriptOutput;
10 | import io.kestra.core.junit.annotations.KestraTest;
11 | import jakarta.inject.Inject;
12 | import org.junit.jupiter.api.Test;
13 |
14 | import java.io.ByteArrayInputStream;
15 | import java.io.IOException;
16 | import java.nio.file.Files;
17 | import java.nio.file.Path;
18 | import java.util.*;
19 | import java.util.List;
20 | import java.util.stream.Stream;
21 |
22 | import static io.kestra.core.utils.Rethrow.throwConsumer;
23 | import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
24 | import static org.hamcrest.MatcherAssert.assertThat;
25 | import static org.hamcrest.Matchers.*;
26 | import static org.junit.jupiter.api.Assertions.assertTrue;
27 |
28 | @KestraTest
29 | class BuildTest {
30 | private static final String PROFILES = """
31 | unit-kestra:
32 | outputs:
33 | dev:
34 | dataset: kestra_unit_test_us
35 | fixed_retries: 1
36 | location: US
37 | method: service-account
38 | priority: interactive
39 | project: kestra-unit-test
40 | threads: 1
41 | timeout_seconds: 300
42 | type: bigquery
43 | keyfile: sa.json
44 | target: dev
45 | """;
46 | @Inject
47 | private RunContextFactory runContextFactory;
48 |
49 | public void copyFolder(Path src, Path dest) throws IOException {
50 |         try (Stream<Path> stream = Files.walk(src)) {
51 | stream
52 | .forEach(throwConsumer(source -> Files.copy(
53 | source,
54 | dest.resolve(src.relativize(source)),
55 | REPLACE_EXISTING
56 | )));
57 | }
58 | }
59 |
60 | @Test
61 | void run() throws Exception {
62 | Setup setup = Setup.builder()
63 | .id(IdUtils.create())
64 | .type(Setup.class.getName())
65 | .taskRunner(Process.instance())
66 | .profiles(Property.of(PROFILES))
67 | .requirements(Property.of(List.of("dbt-bigquery")))
68 | .build();
69 |
70 | RunContext runContext = TestsUtils.mockRunContext(runContextFactory, setup, Map.of());
71 |
72 | copyFolder(Path.of(Objects.requireNonNull(this.getClass().getClassLoader().getResource("project")).getPath()), runContext.workingDir().path(true));
73 |
74 | setup.run(runContext);
75 |
76 | try(var inputStream = new ByteArrayInputStream(Base64.getDecoder().decode(System.getenv("GOOGLE_SERVICE_ACCOUNT").getBytes()))) {
77 | Files.copy(inputStream, runContext.workingDir().resolve(Path.of("sa.json")));
78 | }
79 |         Map<String, String> env = new HashMap<>();
80 | env.put("GOOGLE_APPLICATION_CREDENTIALS", runContext.workingDir().resolve(Path.of("sa.json")).toString());
81 | Build task = Build.builder()
82 | .thread((Property.of(8)))
83 | .taskRunner(Process.instance())
84 | .env(Property.of(env))
85 | .build();
86 |
87 | ScriptOutput runOutput = task.run(runContext);
88 |
89 | assertThat(runOutput.getExitCode(), is(0));
90 | assertTrue(runOutput.getOutputFiles().containsKey("run_results.json"));
91 | assertTrue(runOutput.getOutputFiles().containsKey("manifest.json"));
92 | assertThat(runContext.dynamicWorkerResults(), hasSize(12));
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/src/test/java/io/kestra/plugin/dbt/cli/DbtCLITest.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cli;
2 |
3 | import io.kestra.core.models.property.Property;
4 | import io.kestra.core.runners.RunContext;
5 | import io.kestra.core.runners.RunContextFactory;
6 | import io.kestra.core.storages.StorageInterface;
7 | import io.kestra.core.storages.kv.KVStore;
8 | import io.kestra.core.storages.kv.KVValueAndMetadata;
9 | import io.kestra.core.utils.IdUtils;
10 | import io.kestra.core.utils.TestsUtils;
11 | import io.kestra.plugin.scripts.exec.scripts.models.ScriptOutput;
12 | import io.kestra.core.junit.annotations.KestraTest;
13 | import jakarta.inject.Inject;
14 | import org.junit.jupiter.api.Disabled;
15 | import org.junit.jupiter.api.Test;
16 | import org.junit.jupiter.params.ParameterizedTest;
17 | import org.junit.jupiter.params.provider.EnumSource;
18 |
19 | import java.io.IOException;
20 | import java.nio.charset.StandardCharsets;
21 | import java.nio.file.Files;
22 | import java.nio.file.Path;
23 | import java.util.List;
24 | import java.util.Map;
25 | import java.util.Objects;
26 | import java.util.stream.Stream;
27 |
28 | import static io.kestra.core.utils.Rethrow.throwConsumer;
29 | import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
30 | import static org.hamcrest.MatcherAssert.assertThat;
31 | import static org.hamcrest.Matchers.is;
32 |
33 | @KestraTest
34 | class DbtCLITest {
35 | @Inject
36 | StorageInterface storageInterface;
37 |
38 | @Inject
39 | private RunContextFactory runContextFactory;
40 |
41 | private static final String NAMESPACE_ID = "io.kestra.plugin.dbt.cli.dbtclitest";
42 |
43 | private static final String MANIFEST_KEY = "manifest.json";
44 |
45 | private static final String PROFILES = """
46 | unit-kestra:
47 | outputs:
48 | dev:
49 | dataset: kestra_unit_test_us
50 | fixed_retries: 1
51 | location: US
52 | method: service-account
53 | priority: interactive
54 | project: kestra-unit-test
55 | threads: 1
56 | timeout_seconds: 300
57 | type: bigquery
58 | keyfile: sa.json
59 | target: dev
60 | """;
61 |
62 | public void copyFolder(Path src, Path dest) throws IOException {
63 |         try (Stream<Path> stream = Files.walk(src)) {
64 | stream
65 | .forEach(throwConsumer(source -> Files.copy(
66 | source,
67 | dest.resolve(src.relativize(source)),
68 | REPLACE_EXISTING
69 | )));
70 | }
71 | }
72 |
73 | @ParameterizedTest
74 | @EnumSource(DbtCLI.LogFormat.class)
75 | void run(DbtCLI.LogFormat logFormat) throws Exception {
76 | DbtCLI execute = DbtCLI.builder()
77 | .id(IdUtils.create())
78 | .type(DbtCLI.class.getName())
79 | .profiles(Property.of(PROFILES)
80 | )
81 | .logFormat(Property.of(logFormat))
82 | .containerImage(new Property<>("ghcr.io/kestra-io/dbt-bigquery:latest"))
83 | .commands(Property.of(List.of("dbt build")))
84 | .build();
85 |
86 | RunContext runContext = TestsUtils.mockRunContext(runContextFactory, execute, Map.of());
87 |
88 | Path workingDir = runContext.workingDir().path(true);
89 | copyFolder(Path.of(Objects.requireNonNull(this.getClass().getClassLoader().getResource("project")).getPath()), workingDir);
90 | createSaFile(workingDir);
91 |
92 | ScriptOutput runOutput = execute.run(runContext);
93 |
94 | assertThat(runOutput.getExitCode(), is(0));
95 | }
96 |
97 | @Test
98 | void testDbtCliWithStoreManifest_manifestShouldBePresentInKvStore() throws Exception {
99 | DbtCLI execute = DbtCLI.builder()
100 | .id(IdUtils.create())
101 | .type(DbtCLI.class.getName())
102 | .profiles(Property.of(PROFILES)
103 | )
104 | .containerImage(new Property<>("ghcr.io/kestra-io/dbt-bigquery:latest"))
105 | .commands(Property.of(List.of("dbt build")))
106 | .storeManifest(
107 | DbtCLI.KvStoreManifest.builder()
108 | .key(Property.of(MANIFEST_KEY))
109 | .namespace(Property.of(NAMESPACE_ID))
110 | .build()
111 | )
112 | .build();
113 |
114 | RunContext runContext = TestsUtils.mockRunContext(runContextFactory, execute, Map.of());
115 |
116 | Path workingDir = runContext.workingDir().path(true);
117 | copyFolder(Path.of(Objects.requireNonNull(this.getClass().getClassLoader().getResource("project")).getPath()), workingDir);
118 | createSaFile(workingDir);
119 |
120 | ScriptOutput runOutput = execute.run(runContext);
121 |
122 | assertThat(runOutput.getExitCode(), is(0));
123 | KVStore kvStore = runContext.namespaceKv(NAMESPACE_ID);
124 | assertThat(kvStore.get(MANIFEST_KEY).isPresent(), is(true));
125 |         Map<String, Object> manifestValue = (Map<String, Object>) kvStore.getValue(MANIFEST_KEY).get().value();
126 |         assertThat(((Map<String, Object>) manifestValue.get("metadata")).get("project_name"), is("unit_kestra"));
127 | }
128 |
129 | @Disabled("To run put a manifest.json under src/test/resources/manifest/")
130 | @Test
131 | void testDbtWithLoadManifest_manifestShouldBeLoadedFromKvStore() throws Exception {
132 | DbtCLI loadManifest = DbtCLI.builder()
133 | .id(IdUtils.create())
134 | .type(DbtCLI.class.getName())
135 | .profiles(Property.of(PROFILES))
136 | .projectDir(Property.of("unit-kestra"))
137 | .containerImage(new Property<>("ghcr.io/kestra-io/dbt-bigquery:latest"))
138 | .commands(Property.of(List.of("dbt build --project-dir unit-kestra")))
139 | .loadManifest(
140 | DbtCLI.KvStoreManifest.builder()
141 | .key(Property.of(MANIFEST_KEY))
142 | .namespace(Property.of(NAMESPACE_ID))
143 | .build()
144 | )
145 | .build();
146 |
147 | RunContext runContextLoad = TestsUtils.mockRunContext(runContextFactory, loadManifest, Map.of());
148 |
149 | Path workingDir = runContextLoad.workingDir().path(true);
150 | copyFolder(Path.of(Objects.requireNonNull(this.getClass().getClassLoader().getResource("project")).getPath()),
151 | Path.of(runContextLoad.workingDir().path().toString(),"unit-kestra"));
152 | createSaFile(workingDir);
153 | String manifestValue = Files.readString(Path.of(
154 | Objects.requireNonNull(this.getClass().getClassLoader().getResource("manifest/manifest.json")).getPath())
155 | , StandardCharsets.UTF_8);
156 | runContextLoad.namespaceKv(NAMESPACE_ID).put(MANIFEST_KEY, new KVValueAndMetadata(null, manifestValue));
157 |
158 | ScriptOutput runOutputLoad = loadManifest.run(runContextLoad);
159 |
160 | assertThat(runOutputLoad.getExitCode(), is(0));
161 | }
162 |
163 | private void createSaFile(Path workingDir) throws IOException {
164 | Path existingSa = Path.of(System.getenv("GOOGLE_APPLICATION_CREDENTIALS"));
165 | Path workingDirSa = workingDir.resolve("sa.json");
166 | Files.copy(existingSa, workingDirSa);
167 | }
168 | }
169 |
--------------------------------------------------------------------------------
/src/test/java/io/kestra/plugin/dbt/cloud/CheckStatusTest.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import com.google.common.collect.ImmutableMap;
4 | import io.kestra.core.models.property.Property;
5 | import io.kestra.core.runners.RunContext;
6 | import io.kestra.core.runners.RunContextFactory;
7 | import io.kestra.core.utils.IdUtils;
8 | import io.micronaut.context.annotation.Value;
9 | import io.kestra.core.junit.annotations.KestraTest;
10 | import jakarta.inject.Inject;
11 | import org.junit.jupiter.api.Disabled;
12 | import org.junit.jupiter.api.Test;
13 |
14 | import java.time.Duration;
15 |
16 | import static org.hamcrest.MatcherAssert.assertThat;
17 | import static org.hamcrest.Matchers.is;
18 | import static org.hamcrest.Matchers.notNullValue;
19 |
20 | @KestraTest
21 | class CheckStatusTest {
22 | @Inject
23 | private RunContextFactory runContextFactory;
24 |
25 | @Value("${dbt.cloud.account-id}")
26 | private String accountId;
27 |
28 | @Value("${dbt.cloud.token}")
29 | private String token;
30 |
31 | @Value("${dbt.cloud.job-id}")
32 | private String jobId;
33 |
34 | @Test
35 | @Disabled("Trial account can't trigger run through api")
36 | void run() throws Exception {
37 |
38 | RunContext runContext = runContextFactory.of(ImmutableMap.of());
39 |
40 | TriggerRun task = TriggerRun.builder()
41 | .id(IdUtils.create())
42 | .type(TriggerRun.class.getName())
43 | .accountId(Property.of(this.accountId))
44 | .wait(Property.of(false))
45 | .token(Property.of(this.token))
46 | .jobId(Property.of(this.jobId))
47 | .build();
48 |
49 | TriggerRun.Output runOutput = task.run(runContext);
50 |
51 | CheckStatus checkStatus = CheckStatus.builder()
52 | .runId(Property.of(runOutput.getRunId().toString()))
53 | .token(Property.of(this.token))
54 | .accountId(Property.of(this.accountId))
55 | .maxDuration(Property.of(Duration.ofMinutes(60)))
56 | .parseRunResults(Property.of(false))
57 | .build();
58 |
59 | CheckStatus.Output checkStatusOutput = checkStatus.run(runContext);
60 |
61 | assertThat(checkStatusOutput, is(notNullValue()));
62 | assertThat(checkStatusOutput.getManifest(), is(notNullValue()));
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/src/test/java/io/kestra/plugin/dbt/cloud/MockTriggerRunTest.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import io.kestra.core.http.client.HttpClientResponseException;
4 | import io.kestra.core.models.property.Property;
5 | import io.kestra.core.runners.RunContextFactory;
6 | import io.kestra.core.junit.annotations.KestraTest;
7 | import io.kestra.core.runners.RunContext;
8 | import io.kestra.core.utils.IdUtils;
9 |
10 | import jakarta.inject.Inject;
11 | import org.junit.jupiter.api.Test;
12 |
13 | import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
14 | import static org.hamcrest.MatcherAssert.assertThat;
15 |
16 | import com.github.tomakehurst.wiremock.junit5.WireMockTest;
17 |
18 | import java.util.Map;
19 |
20 | import static com.github.tomakehurst.wiremock.client.WireMock.*;
21 | import static org.hamcrest.Matchers.*;
22 |
23 | @KestraTest
24 | @WireMockTest(httpPort = 28181)
25 | class MockTriggerRunTest {
26 |
27 | @Inject
28 | RunContextFactory runContextFactory;
29 |
30 | @Test
31 | void testTriggerRun() throws Exception {
32 |
33 | stubFor(post(urlEqualTo("/api/v2/accounts/123/jobs/456/run/"))
34 | .willReturn(aResponse()
35 | .withStatus(200)
36 | .withHeader("Content-Type", "application/json")
37 | .withBody("{\"data\":{\"id\":789}}")));
38 |
39 | TriggerRun task = TriggerRun.builder()
40 | .id(IdUtils.create())
41 | .type(TriggerRun.class.getName())
42 | .accountId(Property.of("123"))
43 | .jobId(Property.of("456"))
44 | .token(Property.of("my-token"))
45 | .baseUrl(Property.of("http://localhost:28181"))
46 | .wait(Property.of(false))
47 | .build();
48 |
49 | RunContext runContext = runContextFactory.of(Map.of());
50 | TriggerRun.Output output = task.run(runContext);
51 |
52 | assertThat(output, is(notNullValue()));
53 | assertThat(output.getRunId(), is(789L));
54 | }
55 |
56 | @Test
57 | void testTriggerRunWithWait() throws Exception {
58 |
59 | stubFor(post(urlEqualTo("/api/v2/accounts/123/jobs/456/run/"))
60 | .willReturn(okJson("{\"data\":{\"id\":789}}")));
61 |
62 | stubFor(get(urlPathEqualTo("/api/v2/accounts/123/runs/789/"))
63 | .withQueryParam("include_related", matching(".*run_steps.*"))
64 | .willReturn(okJson("""
65 | {
66 | "data": {
67 | "id": 789,
68 | "status_humanized": "Success",
69 | "duration_humanized": "1m",
70 | "run_steps": [{
71 | "id": 1,
72 | "name": "step1",
73 | "logs": "log line 1",
74 | "truncated_debug_logs": "truncated"
75 | }]
76 | }
77 | }
78 | """)));
79 |
80 | // stub for run result artifacts
81 | stubFor(get(urlEqualTo("/api/v2/accounts/123/runs/789/artifacts/run_results.json"))
82 | .willReturn(okJson("""
83 | {
84 | "metadata": {},
85 | "results": [
86 | {
87 | "status": "success",
88 | "unique_id": "model.my_model",
89 | "execution_time": 1.23,
90 | "adapter_response": {},
91 | "message": "Success",
92 | "failures": 0,
93 | "thread_id": "Thread-1",
94 | "timing": []
95 | }
96 | ]
97 | }
98 | """)));
99 |
100 | // stub for run manifest artifacts
101 | stubFor(get(urlEqualTo("/api/v2/accounts/123/runs/789/artifacts/manifest.json"))
102 | .willReturn(okJson("{\"nodes\": {}}")));
103 |
104 | TriggerRun task = TriggerRun.builder()
105 | .id(IdUtils.create())
106 | .type(TriggerRun.class.getName())
107 | .accountId(Property.of("123"))
108 | .jobId(Property.of("456"))
109 | .token(Property.of("demo"))
110 | .parseRunResults(Property.of(true))
111 | .baseUrl(Property.of("http://localhost:28181"))
112 | .wait(Property.of(true))
113 | .build();
114 |
115 | RunContext runContext = runContextFactory.of(Map.of(
116 | "flow", Map.of(
117 | "id", "my-flow",
118 | "namespace", "my.namespace"
119 | ),
120 | "execution", Map.of(
121 | "id", "exec-123"
122 | ),
123 | "taskrun", Map.of(
124 | "id", "taskrun-123"
125 | )
126 | ));
127 | TriggerRun.Output output = task.run(runContext);
128 |
129 | assertThat(output, is(notNullValue()));
130 | assertThat(output.getRunId(), is(789L));
131 | assertThat(output.getRunResults().toString(), containsString("kestra://"));
132 | assertThat(output.getManifest(), is(notNullValue()));
133 | }
134 |
135 | @Test
136 | void shouldThrowOnNon200Response() {
137 | stubFor(post(urlEqualTo("/api/v2/accounts/123/jobs/456/run/"))
138 | .willReturn(aResponse()
139 | .withStatus(500)
140 | .withHeader("Content-Type", "application/json")));
141 |
142 | RunContext runContext = runContextFactory.of(Map.of());
143 |
144 | TriggerRun task = TriggerRun.builder()
145 | .accountId(Property.of("123"))
146 | .jobId(Property.of("456"))
147 | .token(Property.of("demo"))
148 | .baseUrl(Property.of("http://localhost:28181"))
149 | .wait(Property.of(false))
150 | .build();
151 |
152 | assertThatThrownBy(() -> task.run(runContext))
153 | .isInstanceOf(HttpClientResponseException.class)
154 | .hasMessageContaining("Failed http request with response code '500'");
155 | }
156 | }
157 |
--------------------------------------------------------------------------------
/src/test/java/io/kestra/plugin/dbt/cloud/SerializationTest.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import com.google.common.collect.ImmutableMap;
4 | import io.kestra.core.junit.annotations.KestraTest;
5 | import io.kestra.core.models.executions.Execution;
6 | import io.kestra.core.models.flows.State;
7 | import io.kestra.core.queues.QueueException;
8 | import io.kestra.core.repositories.LocalFlowRepositoryLoader;
9 | import io.kestra.core.runners.RunContext;
10 | import io.kestra.core.runners.RunContextFactory;
11 | import io.kestra.core.runners.RunnerUtils;
12 | import io.kestra.core.runners.StandAloneRunner;
13 | import io.kestra.core.tenant.TenantService;
14 | import io.kestra.core.utils.IdUtils;
15 | import io.micronaut.context.ApplicationContext;
16 | import io.micronaut.context.annotation.Value;
17 | import io.micronaut.http.HttpResponse;
18 | import io.micronaut.http.annotation.Body;
19 | import io.micronaut.http.annotation.Controller;
20 | import io.micronaut.http.annotation.Post;
21 | import io.micronaut.runtime.server.EmbeddedServer;
22 | import jakarta.inject.Inject;
23 | import org.apache.commons.io.IOUtils;
24 | import org.junit.jupiter.api.BeforeEach;
25 | import org.junit.jupiter.api.Disabled;
26 | import org.junit.jupiter.api.Test;
27 |
28 | import java.io.IOException;
29 | import java.net.URISyntaxException;
30 | import java.nio.charset.StandardCharsets;
31 | import java.time.Duration;
32 | import java.util.Objects;
33 | import java.util.concurrent.TimeoutException;
34 |
35 | import static org.hamcrest.MatcherAssert.assertThat;
36 | import static org.hamcrest.Matchers.*;
37 | import static org.hamcrest.Matchers.containsString;
38 |
39 | @KestraTest
40 | class SerializationTest {
41 | @Inject
42 | private ApplicationContext applicationContext;
43 |
44 | @Inject
45 | protected StandAloneRunner runner;
46 |
47 | @Inject
48 | protected RunnerUtils runnerUtils;
49 |
50 | @Inject
51 | protected LocalFlowRepositoryLoader repositoryLoader;
52 |
53 | @BeforeEach
54 | protected void init() throws IOException, URISyntaxException {
55 | repositoryLoader.load(Objects.requireNonNull(SerializationTest.class.getClassLoader().getResource("flows")));
56 | this.runner.run();
57 | }
58 |
59 | @Test
60 | void flow() throws TimeoutException, QueueException {
61 | EmbeddedServer embeddedServer = applicationContext.getBean(EmbeddedServer.class);
62 | embeddedServer.start();
63 |
64 | Execution execution = runnerUtils.runOne(
65 | TenantService.MAIN_TENANT,
66 | "io.kestra.tests",
67 | "cloud",
68 | null,
69 | (f, e) -> ImmutableMap.of("url", embeddedServer.getURI().toString()),
70 | Duration.ofMinutes(10)
71 | );
72 |
73 | assertThat(execution.getState().getCurrent(), is(State.Type.SUCCESS));
74 | }
75 |
76 | @Controller()
77 | public static class FakeDbtCloudController {
78 | @Post("/api/v2/accounts/{accountId}/jobs/{jobId}/run")
79 | public HttpResponse get(String jobId) throws IOException {
80 | return HttpResponse.ok(IOUtils.toString(Objects.requireNonNull(SerializationTest.class.getClassLoader().getResourceAsStream("responses/run.json")), StandardCharsets.UTF_8));
81 | }
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/src/test/java/io/kestra/plugin/dbt/cloud/TriggerRunTest.java:
--------------------------------------------------------------------------------
1 | package io.kestra.plugin.dbt.cloud;
2 |
3 | import io.kestra.core.models.property.Property;
4 | import io.kestra.core.runners.RunContext;
5 | import io.kestra.core.runners.RunContextFactory;
6 | import io.kestra.core.utils.IdUtils;
7 | import io.kestra.core.utils.TestsUtils;
8 | import io.micronaut.context.annotation.Value;
9 | import io.kestra.core.junit.annotations.KestraTest;
10 | import jakarta.inject.Inject;
11 | import org.junit.jupiter.api.Disabled;
12 | import org.junit.jupiter.api.Test;
13 |
14 | import java.util.Map;
15 |
16 | import static org.hamcrest.MatcherAssert.assertThat;
17 | import static org.hamcrest.Matchers.is;
18 | import static org.hamcrest.Matchers.notNullValue;
19 |
20 | @KestraTest
21 | class TriggerRunTest {
22 | @Inject
23 | private RunContextFactory runContextFactory;
24 |
25 | @Value("${dbt.cloud.account-id}")
26 | private String accountId;
27 |
28 | @Value("${dbt.cloud.token}")
29 | private String token;
30 |
31 | @Value("${dbt.cloud.job-id}")
32 | private String jobId;
33 |
34 | @Test
35 | @Disabled("Trial account can't trigger run through api")
36 | void run() throws Exception {
37 | TriggerRun task = TriggerRun.builder()
38 | .id(IdUtils.create())
39 | .type(TriggerRun.class.getName())
40 | .accountId(Property.of(this.accountId))
41 | .token(Property.of(this.token))
42 | .jobId(Property.of(this.jobId))
43 | .build();
44 |
45 | RunContext runContext = TestsUtils.mockRunContext(runContextFactory, task, Map.of());
46 |
47 | TriggerRun.Output run = task.run(runContext);
48 |
49 | assertThat(run.getRunId(), is(notNullValue()));
50 | assertThat(runContext.dynamicWorkerResults().size(), is(13));
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
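The disabled test above targets the real dbt Cloud API. For reference, the call it exercises boils down to a single authenticated POST against the v2 jobs endpoint — the same path the mocked tests stub. Below is a minimal standalone sketch using Java's built-in `HttpClient`; the account id, job id and token values are placeholders, and the default `cloud.getdbt.com` base URL is an assumption (the plugin lets you override it via `baseUrl`).

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

class TriggerDbtCloudRunSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder identifiers; a real call needs your own account id, job id and API token.
        String accountId = "123";
        String jobId = "456";
        String token = System.getenv("DBT_CLOUD_TOKEN");

        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("https://cloud.getdbt.com/api/v2/accounts/" + accountId + "/jobs/" + jobId + "/run/"))
            .header("Authorization", "Token " + token)
            .header("Content-Type", "application/json")
            // dbt Cloud expects a "cause" describing why the run was triggered.
            .POST(HttpRequest.BodyPublishers.ofString("{\"cause\": \"Triggered manually\"}"))
            .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());

        // On success the body has the same shape as src/test/resources/responses/run.json.
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}
```
--------------------------------------------------------------------------------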
/src/test/resources/allure.properties:
--------------------------------------------------------------------------------
1 | allure.results.directory=build/allure-results
2 |
--------------------------------------------------------------------------------
/src/test/resources/application.yml:
--------------------------------------------------------------------------------
1 | kestra:
2 | repository:
3 | type: memory
4 | queue:
5 | type: memory
6 | storage:
7 | type: local
8 | local:
9 | base-path: /tmp/unittest
10 |
--------------------------------------------------------------------------------
/src/test/resources/flows/cloud.yaml:
--------------------------------------------------------------------------------
1 | id: cloud
2 | namespace: io.kestra.tests
3 |
4 | inputs:
5 | - id: url
6 | type: STRING
7 |
8 | tasks:
9 | - id: run
10 | type: io.kestra.plugin.dbt.cloud.TriggerRun
11 | baseUrl: "{{ inputs.url }}"
12 | wait: false
13 | accountId: fakeAccountId
14 | token: "fakeToken"
15 | jobId: "fakeJobId"
16 |
--------------------------------------------------------------------------------
/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/src/test/resources/project/README.md:
--------------------------------------------------------------------------------
1 | Welcome to your new dbt project!
2 |
3 | ### Using the starter project
4 |
5 | Try running the following commands:
6 | - dbt run
7 | - dbt test
8 |
9 |
10 | ### Resources:
11 | - Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction)
12 | - Check out [Discourse](https://discourse.getdbt.com/) for commonly asked questions and answers
13 | - Join the [chat](https://community.getdbt.com/) on Slack for live discussions and support
14 | - Find [dbt events](https://events.getdbt.com) near you
15 | - Check out [the blog](https://blog.getdbt.com/) for the latest news on dbt's development and best practices
16 |
--------------------------------------------------------------------------------
/src/test/resources/project/analyses/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kestra-io/plugin-dbt/542963181c6d0f3ae9b84c2d0941629804d119cf/src/test/resources/project/analyses/.gitkeep
--------------------------------------------------------------------------------
/src/test/resources/project/data/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kestra-io/plugin-dbt/542963181c6d0f3ae9b84c2d0941629804d119cf/src/test/resources/project/data/.gitkeep
--------------------------------------------------------------------------------
/src/test/resources/project/dbt_project.yml:
--------------------------------------------------------------------------------
1 |
2 | # Name your project! Project names should contain only lowercase characters
3 | # and underscores. A good package name should reflect your organization's
4 | # name or the intended use of these models
5 | name: 'unit_kestra'
6 | version: '1.0.0'
7 | config-version: 2
8 |
9 | # This setting configures which "profile" dbt uses for this project.
10 | profile: 'unit-kestra'
11 |
12 | # These configurations specify where dbt should look for different types of files.
13 | # The `model-paths` config, for example, states that models in this project can be
14 | # found in the "models/" directory. You probably won't need to change these!
15 | model-paths: ["models"]
16 | analysis-paths: ["analyses"]
17 | test-paths: ["tests"]
18 | seed-paths: ["seeds"]
19 | macro-paths: ["macros"]
20 | snapshot-paths: ["snapshots"]
21 |
22 | target-path: "target" # directory which will store compiled SQL files
23 | clean-targets: # directories to be removed by `dbt clean`
24 | - "target"
25 | - "dbt_packages"
26 |
27 |
28 | # Configuring models
29 | # Full documentation: https://docs.getdbt.com/docs/configuring-models
30 |
31 | # In this example config, we tell dbt to build all models in this project
32 | # (unit_kestra) as tables. These settings can be overridden in the individual
33 | # model files using the `{{ config(...) }}` macro, as models/requests.sql does.
34 | models:
35 | unit_kestra:
36 | +materialized: table
37 |
38 | #seeds:
39 | # +schema: zipcode
40 |
--------------------------------------------------------------------------------
/src/test/resources/project/macros/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kestra-io/plugin-dbt/542963181c6d0f3ae9b84c2d0941629804d119cf/src/test/resources/project/macros/.gitkeep
--------------------------------------------------------------------------------
/src/test/resources/project/models/requests.sql:
--------------------------------------------------------------------------------
1 | {{
2 | config(
3 | materialized='incremental',
4 | unique_key='unique_key'
5 | )
6 | }}
7 |
8 | SELECT
9 | unique_key,
10 | source,
11 | status,
12 | status_change_date
13 | FROM `bigquery-public-data.austin_311.311_service_requests`
14 | LEFT JOIN {{ ref('zipcode') }} ON zipcode_id = CAST(incident_zip as INTEGER)
15 | WHERE city IS NOT NULL
16 | -- this filter will only be applied on an incremental run
17 | {% if is_incremental() %}
18 | AND status_change_date > (SELECT MAX(status_change_date) FROM {{ this }})
19 | {% endif %}
20 |
21 |
22 |
--------------------------------------------------------------------------------
/src/test/resources/project/models/schema.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | models:
4 | - name: requests
5 | columns:
6 | - name: unique_key
7 | tests:
8 | - not_null
9 | - name: source
10 | tests:
11 | - not_null
12 | - name: status
13 | tests:
14 | - not_null
15 | - name: map_name
16 | - name: status_change_date
17 | tests:
18 | - not_null
19 | - name: stations
20 | columns:
21 | - name: station_id
22 | tests:
23 | - unique
24 | - not_null
25 | - name: name
26 | tests:
27 | - not_null
28 | - name: status
29 | tests:
30 | - not_null
31 |
--------------------------------------------------------------------------------
/src/test/resources/project/models/stations.sql:
--------------------------------------------------------------------------------
1 | SELECT
2 | station_id,
3 | name,
4 | status
5 | FROM `bigquery-public-data.austin_bikeshare.bikeshare_stations`
6 |
--------------------------------------------------------------------------------
/src/test/resources/project/seeds/schema.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | seeds:
4 | - name: zipcode
5 | description: "a seed table"
6 | docs:
7 | show: true
8 | columns:
9 | - name: zipcode_id
10 | - name: zipcode_name
11 |
--------------------------------------------------------------------------------
/src/test/resources/project/seeds/zipcode.csv:
--------------------------------------------------------------------------------
1 | zipcode_id,zipcode_name
2 | 78664,Madrid
3 | 78728,New York
4 | 78660,Paris
5 |
--------------------------------------------------------------------------------
/src/test/resources/project/snapshots/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kestra-io/plugin-dbt/542963181c6d0f3ae9b84c2d0941629804d119cf/src/test/resources/project/snapshots/.gitkeep
--------------------------------------------------------------------------------
/src/test/resources/project/snapshots/requests.sql:
--------------------------------------------------------------------------------
1 | {% snapshot request_snapshot %}
2 |
3 | {{
4 | config(
5 | target_schema='kestra_unit_test_us',
6 | unique_key='unique_key',
7 | strategy='timestamp',
8 | updated_at='status_change_date',
9 | )
10 | }}
11 |
12 | select * from {{ ref('requests') }}
13 |
14 | {% endsnapshot %}
15 |
--------------------------------------------------------------------------------
/src/test/resources/project/tests/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kestra-io/plugin-dbt/542963181c6d0f3ae9b84c2d0941629804d119cf/src/test/resources/project/tests/.gitkeep
--------------------------------------------------------------------------------
/src/test/resources/responses/run.json:
--------------------------------------------------------------------------------
1 | {
2 | "status": {
3 | "code": 200,
4 | "is_success": true,
5 | "user_message": "Success!",
6 | "developer_message": ""
7 | },
8 | "data": {
9 | "id": 300580980,
10 | "trigger_id": 301400550,
11 | "account_id": 22219,
12 | "environment_id": 46412,
13 | "project_id": 52901,
14 | "job_definition_id": 559062,
15 | "status": 1,
16 | "dbt_version": "versionless",
17 | "git_branch": null,
18 | "git_sha": null,
19 | "status_message": null,
20 | "owner_thread_id": null,
21 | "executed_by_thread_id": null,
22 | "deferring_run_id": null,
23 | "artifacts_saved": false,
24 | "artifact_s3_path": null,
25 | "has_docs_generated": false,
26 | "has_sources_generated": false,
27 | "notifications_sent": false,
28 | "blocked_by": [],
29 | "created_at": "2024-07-03 13:59:00.653171+00:00",
30 | "updated_at": "2024-07-03 13:59:00.653185+00:00",
31 | "dequeued_at": null,
32 | "started_at": null,
33 | "finished_at": null,
34 | "last_checked_at": null,
35 | "last_heartbeat_at": null,
36 | "should_start_at": null,
37 | "trigger": {
38 | "id": 301400550,
39 | "cause": "Triggered by Kestra.",
40 | "job_definition_id": 559062,
41 | "git_branch": "main",
42 | "git_sha": null,
43 | "azure_pull_request_id": null,
44 | "github_pull_request_id": null,
45 | "gitlab_merge_request_id": null,
46 | "non_native_pull_request_id": null,
47 | "schema_override": null,
48 | "dbt_version_override": null,
49 | "threads_override": null,
50 | "target_name_override": null,
51 | "generate_docs_override": null,
52 | "timeout_seconds_override": 7200,
53 | "steps_override": null,
54 | "created_at": "2024-07-03 13:59:00.634637+00:00",
55 | "cause_humanized": "Triggered by Kestra.",
56 | "job": null,
57 | "cause_category": "api"
58 | },
59 | "job": {
60 | "execution": {
61 | "timeout_seconds": 0
62 | },
63 | "generate_docs": false,
64 | "run_generate_sources": false,
65 | "run_compare_changes": false,
66 | "id": 559062,
67 | "account_id": 22219,
68 | "project_id": 52901,
69 | "environment_id": 46412,
70 | "name": "[Core] CI build on merge",
71 | "description": "",
72 | "dbt_version": null,
73 | "raw_dbt_version": null,
74 | "created_at": "2024-03-25T13:47:20.129690Z",
75 | "updated_at": "2024-04-12T10:53:05.333419Z",
76 | "execute_steps": [
77 | "dbt build --select state:modified+"
78 | ],
79 | "state": 1,
80 | "deactivated": false,
81 | "run_failure_count": 0,
82 | "deferring_job_definition_id": null,
83 | "deferring_environment_id": 46412,
84 | "lifecycle_webhooks": false,
85 | "lifecycle_webhooks_url": null,
86 | "triggers": {
87 | "github_webhook": false,
88 | "git_provider_webhook": false,
89 | "schedule": false,
90 | "on_merge": null
91 | },
92 | "settings": {
93 | "threads": 8,
94 | "target_name": "prod"
95 | },
96 | "schedule": {
97 | "cron": "0 * * * *",
98 | "date": "every_day",
99 | "time": "every_hour"
100 | },
101 | "is_deferrable": false,
102 | "job_type": "other",
103 | "triggers_on_draft_pr": false,
104 | "job_completion_trigger_condition": null
105 | },
106 | "environment": null,
107 | "run_steps": [],
108 | "status_humanized": "Queued",
109 | "in_progress": true,
110 | "is_complete": false,
111 | "is_success": false,
112 | "is_error": false,
113 | "is_cancelled": false,
114 | "duration": "00:00:00",
115 | "queued_duration": "00:00:00",
116 | "run_duration": "00:00:00",
117 | "duration_humanized": "0 minutes",
118 | "queued_duration_humanized": "0 minutes",
119 | "run_duration_humanized": "0 minutes",
120 | "created_at_humanized": "0 minutes ago",
121 | "finished_at_humanized": "0 minutes from now",
122 | "retrying_run_id": null,
123 | "can_retry": false,
124 | "retry_not_supported_reason": "RETRY_NOT_FAILED_RUN",
125 | "job_id": 559062,
126 | "is_running": null,
127 | "href": "https://cloud.getdbt.com/deploy/22219/projects/52901/runs/300580980/",
128 | "used_repo_cache": null
129 | }
130 | }
131 |
--------------------------------------------------------------------------------
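The fixture above is the canned trigger-run response served to the mocked tests. Independently of the plugin's own `RunResponse`/`Run` models, a minimal Jackson sketch that pulls out the fields the tests ultimately care about — the run id, its humanized status, the completion flag and the deep link — could look like this; the fixture path is assumed to be resolved relative to the repository root.

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.nio.file.Files;
import java.nio.file.Path;

class RunResponsePeek {
    public static void main(String[] args) throws Exception {
        String json = Files.readString(Path.of("src/test/resources/responses/run.json"));

        JsonNode data = new ObjectMapper().readTree(json).get("data");

        long runId = data.get("id").asLong();                    // 300580980 in the fixture
        String status = data.get("status_humanized").asText();   // "Queued"
        boolean complete = data.get("is_complete").asBoolean();  // false: the run was only enqueued
        String href = data.get("href").asText();                 // link back to the run in dbt Cloud

        System.out.printf("run %d is %s (complete=%s) -> %s%n", runId, status, complete, href);
    }
}
```
--------------------------------------------------------------------------------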
/src/test/resources/sanity-checks/all_dbt.yaml:
--------------------------------------------------------------------------------
1 | id: all_dbt
2 | namespace: sanitychecks.plugin-dbt
3 |
4 | tasks:
5 | - id: dbt
6 | type: io.kestra.plugin.core.flow.WorkingDirectory
7 | tasks:
8 | - id: clone_repository
9 | type: io.kestra.plugin.git.Clone
10 | url: https://github.com/kestra-io/dbt-demo
11 | branch: main
12 |
13 | - id: dbt_build
14 | type: io.kestra.plugin.dbt.cli.DbtCLI
15 | taskRunner:
16 | type: io.kestra.plugin.scripts.runner.docker.Docker
17 | containerImage: ghcr.io/kestra-io/dbt-duckdb:latest
18 | commands:
19 | - dbt deps
20 | - dbt build
21 | profiles: |
22 | jaffle_shop:
23 | outputs:
24 | dev:
25 | type: duckdb
26 | path: dbt.duckdb
27 | extensions:
28 | - parquet
29 | fixed_retries: 1
30 | threads: 16
31 | timeout_seconds: 300
32 | target: dev
33 |
34 | - id: python
35 | type: io.kestra.plugin.scripts.python.Script
36 | outputFiles:
37 | - "*.csv"
38 | taskRunner:
39 | type: io.kestra.plugin.scripts.runner.docker.Docker
40 | containerImage: ghcr.io/kestra-io/duckdb:latest
41 | script: |
42 | import duckdb
43 | import pandas as pd
44 |
45 | conn = duckdb.connect(database='dbt.duckdb', read_only=False)
46 |
47 | tables_query = "SELECT table_name FROM information_schema.tables WHERE table_schema = 'main';"
48 |
49 | tables = conn.execute(tables_query).fetchall()
50 |
51 | # Export each table to CSV, excluding tables that start with 'raw' or 'stg'
52 |
53 |
54 | for table_name in tables:
55 | table_name = table_name[0]
56 | # Skip tables with names starting with 'raw' or 'stg'
57 | if not table_name.startswith('raw') and not table_name.startswith('stg'):
58 | query = f"SELECT * FROM {table_name}"
59 | df = conn.execute(query).fetchdf()
60 | df.to_csv(f"{table_name}.csv", index=False)
61 |
62 | conn.close()
63 |
--------------------------------------------------------------------------------