├── src
│ ├── test
│ │ ├── resources
│ │ │ ├── gradle.properties
│ │ │ ├── maven
│ │ │ │ └── com
│ │ │ │ │ └── redpillanalytics
│ │ │ │ │ │ ├── simple-build
│ │ │ │ │ │ │ ├── maven-metadata.xml.md5
│ │ │ │ │ │ │ ├── 1.0.0
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.jar.md5
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.pom.md5
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.module.md5
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.jar.sha1
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.pom.sha1
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.module.sha1
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.jar.sha256
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.pom.sha256
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.module.sha256
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.jar.sha512
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.pom.sha512
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.module.sha512
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.jar
│ │ │ │ │ │ │ │ ├── simple-build-1.0.0.pom
│ │ │ │ │ │ │ │ └── simple-build-1.0.0.module
│ │ │ │ │ │ │ ├── maven-metadata.xml.sha1
│ │ │ │ │ │ │ ├── maven-metadata.xml.sha256
│ │ │ │ │ │ │ ├── maven-metadata.xml.sha512
│ │ │ │ │ │ │ └── maven-metadata.xml
│ │ │ │ │ │ └── simple-build-pipeline
│ │ │ │ │ │ │ ├── maven-metadata.xml.md5
│ │ │ │ │ │ │ ├── maven-metadata.xml.sha1
│ │ │ │ │ │ │ ├── 1.0.0
│ │ │ │ │ │ │ │ ├── simple-build-pipeline-1.0.0.pom.md5
│ │ │ │ │ │ │ │ ├── simple-build-pipeline-1.0.0.zip.md5
│ │ │ │ │ │ │ │ ├── simple-build-pipeline-1.0.0.pom.sha1
│ │ │ │ │ │ │ │ ├── simple-build-pipeline-1.0.0.zip.sha1
│ │ │ │ │ │ │ │ ├── simple-build-pipeline-1.0.0.pom.sha256
│ │ │ │ │ │ │ │ ├── simple-build-pipeline-1.0.0.zip.sha256
│ │ │ │ │ │ │ │ ├── simple-build-pipeline-1.0.0.pom.sha512
│ │ │ │ │ │ │ │ ├── simple-build-pipeline-1.0.0.zip.sha512
│ │ │ │ │ │ │ │ ├── simple-build-pipeline-1.0.0.zip
│ │ │ │ │ │ │ │ └── simple-build-pipeline-1.0.0.pom
│ │ │ │ │ │ │ ├── maven-metadata.xml.sha256
│ │ │ │ │ │ │ ├── maven-metadata.xml.sha512
│ │ │ │ │ │ │ └── maven-metadata.xml
│ │ │ └── src
│ │ │ │ └── main
│ │ │ │ │ ├── resources
│ │ │ │ │ │ └── streams.properties
│ │ │ │ │ └── pipeline
│ │ │ │ │ │ ├── 04-test-quoted-names
│ │ │ │ │ │ │ └── 01-create.sql
│ │ │ │ │ │ ├── 02-clickstream-users
│ │ │ │ │ │ │ ├── 01-create.sql
│ │ │ │ │ │ │ ├── 03-deliver.sql
│ │ │ │ │ │ │ └── 02-integrate.sql
│ │ │ │ │ │ ├── 01-clickstream
│ │ │ │ │ │ │ ├── 01-create.sql
│ │ │ │ │ │ │ ├── 03-deliver.sql
│ │ │ │ │ │ │ └── 02-integrate.sql
│ │ │ │ │ │ └── 03-test-earliest
│ │ │ │ │ │ │ └── 01-create.sql
│ │ └── groovy
│ │ │ ├── TasksTest.groovy
│ │ │ └── LoadConfigTest.groovy
│ ├── main
│ │ └── groovy
│ │ │ └── com
│ │ │ │ └── redpillanalytics
│ │ │ │ │ ├── gradle
│ │ │ │ │ │ ├── tasks
│ │ │ │ │ │ │ ├── ListTopicsTask.groovy
│ │ │ │ │ │ │ ├── PipelineScriptTask.groovy
│ │ │ │ │ │ │ ├── PipelineEndpointTask.groovy
│ │ │ │ │ │ │ ├── LoadConfigTask.groovy
│ │ │ │ │ │ │ ├── PipelineTask.groovy
│ │ │ │ │ │ │ └── PipelineExecuteTask.groovy
│ │ │ │ │ │ ├── containers
│ │ │ │ │ │ │ └── TaskGroupContainer.groovy
│ │ │ │ │ │ ├── ConfluentExtension.groovy
│ │ │ │ │ │ └── ConfluentPlugin.groovy
│ │ │ │ │ └── KsqlRest.groovy
│ ├── ksqlServerTest
│ │ └── groovy
│ │ │ ├── KsqlRestTest.groovy
│ │ │ └── KsqlRestAuthTest.groovy
│ ├── buildTest
│ │ └── groovy
│ │ │ └── BuildTest.groovy
│ ├── deployTest
│ │ └── groovy
│ │ │ ├── PropertiesTest.groovy
│ │ │ └── DeployTest.groovy
│ └── ksqlPipelinesTest
│ │ └── groovy
│ │ │ └── ExecuteTest.groovy
├── gradle.properties
├── gradle
│ └── wrapper
│ │ ├── gradle-wrapper.jar
│ │ └── gradle-wrapper.properties
├── settings.gradle
├── .gitignore
├── .github
│ └── ISSUE_TEMPLATE
│ │ ├── feature_request.md
│ │ └── bug_report.md
├── gradlew.bat
├── cloudbuild.yaml
├── docker-compose.yml
├── gradlew
├── LICENSE
└── README.md
/src/test/resources/gradle.properties:
--------------------------------------------------------------------------------
1 | org.gradle.caching=true
2 |
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/maven-metadata.xml.md5:
--------------------------------------------------------------------------------
1 | 3c8fa7c845511b38f173797b93e1339e
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | githubToken = hidden
2 | analyticsVersion = 1.4.6
3 | version = 1.0.0
4 | org.gradle.caching=true
5 |
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/maven-metadata.xml.md5:
--------------------------------------------------------------------------------
1 | 6fb3b9392f4b5dd5b1c85b3c1036b0ca
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.jar.md5:
--------------------------------------------------------------------------------
1 | e47a7afc22469f6cc2c2a3b501ced100
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.pom.md5:
--------------------------------------------------------------------------------
1 | 211b7ccd8a952ce304cdf72e7ac92701
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/maven-metadata.xml.sha1:
--------------------------------------------------------------------------------
1 | e0e35760b6647ede6a2e4b7ea2a77dc1c699e006
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.module.md5:
--------------------------------------------------------------------------------
1 | 2fc3618743c91256dd9f6ffe03559a3a
--------------------------------------------------------------------------------
/src/test/resources/src/main/resources/streams.properties:
--------------------------------------------------------------------------------
1 | APPLICATION_ID = ${APPLICATION_ID}
2 | TOPIC_PREFIX = ${TOPIC_PREFIX}
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/maven-metadata.xml.sha1:
--------------------------------------------------------------------------------
1 | ff8725c28017c7db09456ed65e5af01d0e394873
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.jar.sha1:
--------------------------------------------------------------------------------
1 | c8fc6712dd8161474ca457563f82d26e3af60887
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.pom.sha1:
--------------------------------------------------------------------------------
1 | 8ae47b5f2289aa53c0d8454b5bdfd5e67a647c49
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RedPillAnalytics/gradle-confluent/HEAD/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.module.sha1:
--------------------------------------------------------------------------------
1 | 7f7027e117b879b587abf00dff8f387ac34c4727
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.pom.md5:
--------------------------------------------------------------------------------
1 | e4b78a25950f5f304e9e8bb4f08698c9
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.zip.md5:
--------------------------------------------------------------------------------
1 | 0ef1300fe626365dcfbf45bb53876391
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/maven-metadata.xml.sha256:
--------------------------------------------------------------------------------
1 | ed653ccc095c8c60d10fb318de1edc356152efa3cff156a22540413d1cb09179
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.pom.sha1:
--------------------------------------------------------------------------------
1 | 1d084a2b49dd6183927969e46329d849b9fd06b2
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.zip.sha1:
--------------------------------------------------------------------------------
1 | 1d5d0d673d55488f9766588b83ece00a3c0f524a
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/maven-metadata.xml.sha256:
--------------------------------------------------------------------------------
1 | 10a0360e1e474eb2c853dc7502faa7181fda0216d7ac84a4ecc45d61701fa90c
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.jar.sha256:
--------------------------------------------------------------------------------
1 | 004d138c33198b2746d3088247b4c89ec2db0091bd8be24af27a650fe7c1ebad
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.pom.sha256:
--------------------------------------------------------------------------------
1 | 350e76306d9b0a4fb2df668b09a9f261107dfa0dd3d6964bf58c11cdc84f4f6f
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.module.sha256:
--------------------------------------------------------------------------------
1 | 164124f40c33592babc0e6900699597b492bd94a3e37597fff0c8b7d6671bb3d
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.pom.sha256:
--------------------------------------------------------------------------------
1 | 4e1f00beffaaf32da057b8bbd35fdd669b70abd698545293e0182feacf7d4dc6
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.zip.sha256:
--------------------------------------------------------------------------------
1 | 87a88702dbb3a41030e88d5a41341edf764ed2b41d179130d34a7d966c06ba06
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/maven-metadata.xml.sha512:
--------------------------------------------------------------------------------
1 | b268c1a91efb0019b489709c1f98f9f6242f683a972ad7c87f9a4f7a6bc3b1b5746ce64499509636ba9e097896228926694fa12c0ab1bf792d4e5a566f31e42b
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/maven-metadata.xml.sha512:
--------------------------------------------------------------------------------
1 | 4764e301b97ef80489a3e44a9dbf53ec33ad3c98285669484a07c5cac224b73f81ab6bab811f3405d3b98538a92d982c99bcc31f7ceeb782e1434eff8aebbbd7
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.jar.sha512:
--------------------------------------------------------------------------------
1 | 469c9d60d70c340ecc013c1508151f811c542d7f3ff5b66da18a78aa6a40e55fa8b8a55a09ca57b918c1c296ac3c90fea0e59103c1b6bbfd4a1b6e3bce8aa059
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.pom.sha512:
--------------------------------------------------------------------------------
1 | e2e887d12ac41c87ffe9dab0afb4d134afa4740e2255d97561601a0cf22221be9ae1817ef91e648dc04f4f869fba144e96c9fdee564d164d04f23e007f95c5c2
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.module.sha512:
--------------------------------------------------------------------------------
1 | ab3a394c5d15d78d7cc4f0575a45458370959b390556d6af563bf5d0864149f73f2c0094c1d1cbbe0b546b38113ed603ef684feb2c3ac9ddc69151a7760579aa
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.pom.sha512:
--------------------------------------------------------------------------------
1 | ec406131764e88d5658761f0d844d9ab9485cdbd3d03196950599d0189ec5db71c4115e586a182882179c5361bb490f46e805749b6284ec5874a27969fa0deaa
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.zip.sha512:
--------------------------------------------------------------------------------
1 | 399d7615eb6f1e949d8a302c0c5d9aebba513a287b9f4a87ef8e7e42aed244514de1ee9a218abe8789ef59b5860e3c401e27d658b52d6e69aa518af7f45e626e
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RedPillAnalytics/gradle-confluent/HEAD/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.jar
--------------------------------------------------------------------------------
/src/test/resources/src/main/pipeline/04-test-quoted-names/01-create.sql:
--------------------------------------------------------------------------------
1 | --@DeleteTopic
2 | CREATE STREAM "testStream" (
3 | user_id int
4 | ) WITH (
5 | KAFKA_TOPIC = 'testStream',
6 | VALUE_FORMAT = 'JSON',
7 | PARTITIONS = 1
8 | );
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 |
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RedPillAnalytics/gradle-confluent/HEAD/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.zip
--------------------------------------------------------------------------------
/src/test/resources/src/main/pipeline/02-clickstream-users/01-create.sql:
--------------------------------------------------------------------------------
1 | -- users lookup table
2 | CREATE TABLE WEB_USERS (user_id int primary key, registered_At bigint, username varchar, first_name varchar, last_name varchar, city varchar, level varchar)
3 | with (kafka_topic = 'clickstream_users', value_format = 'json');
4 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id "com.ridedott.gradle-gcs-build-cache" version "1.0.1"
3 | }
4 |
5 | boolean isCiServer = System.getenv().containsKey("CI")
6 |
7 | buildCache {
8 | remote(com.ridedott.gradle.buildcache.GCSBuildCache) {
9 | bucket = "rpa-gradle-build-cache"
10 | push = isCiServer
11 | }
12 | }
13 | rootProject.name = 'gradle-confluent'
14 |
--------------------------------------------------------------------------------
/src/test/resources/src/main/pipeline/01-clickstream/01-create.sql:
--------------------------------------------------------------------------------
1 | --SOURCE of ClickStream
2 | CREATE STREAM clickstream (_time bigint,time varchar, ip varchar, request varchar, status int, userid int, bytes bigint, agent varchar)
3 | with (kafka_topic = 'clickstream', value_format = 'json');
4 |
5 | --BUILD STATUS_CODES
6 | CREATE TABLE clickstream_codes (code int primary key, definition varchar)
7 | with (kafka_topic = 'clickstream_codes', value_format = 'json');
8 |
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build/maven-metadata.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <metadata>
3 |   <groupId>com.redpillanalytics</groupId>
4 |   <artifactId>simple-build</artifactId>
5 |   <versioning>
6 |     <latest>1.0.0</latest>
7 |     <release>1.0.0</release>
8 |     <versions>
9 |       <version>1.0.0</version>
10 |     </versions>
11 |     <lastUpdated>20201231162351</lastUpdated>
12 |   </versioning>
13 | </metadata>
14 |
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/maven-metadata.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <metadata>
3 |   <groupId>com.redpillanalytics</groupId>
4 |   <artifactId>simple-build-pipeline</artifactId>
5 |   <versioning>
6 |     <latest>1.0.0</latest>
7 |     <release>1.0.0</release>
8 |     <versions>
9 |       <version>1.0.0</version>
10 |     </versions>
11 |     <lastUpdated>20201231162351</lastUpdated>
12 |   </versioning>
13 | </metadata>
14 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # No log files
2 | *.log
3 |
4 | #IntelliJ stuff
5 | *.iml
6 |
7 | # Gradle Build Directories
8 | build/
9 | gradle_cache/
10 |
11 | # Ignore Gradle GUI config
12 | gradle-app.setting
13 |
14 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
15 | !gradle-wrapper.jar
16 |
17 | # Ignore all hidden files, except this one
18 | .*
19 | !.gitignore
20 | !.github/
21 | !.env
22 |
23 | # Visual Studio Code build directory
24 | bin
25 | out
26 |
27 | # Misc
28 | *.hprof
29 | version.txt
30 |
--------------------------------------------------------------------------------
/src/test/resources/maven/com/redpillanalytics/simple-build-pipeline/1.0.0/simple-build-pipeline-1.0.0.pom:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |   <modelVersion>4.0.0</modelVersion>
5 |   <groupId>com.redpillanalytics</groupId>
6 |   <artifactId>simple-build-pipeline</artifactId>
7 |   <version>1.0.0</version>
8 |   <packaging>zip</packaging>
9 | </project>
10 |
--------------------------------------------------------------------------------
/src/test/resources/src/main/pipeline/02-clickstream-users/03-deliver.sql:
--------------------------------------------------------------------------------
1 | -- Aggregate (count&groupBy) using a TABLE-Window
2 | CREATE TABLE USER_IP_ACTIVITY \
3 | WITH (KEY_FORMAT='JSON') \
4 | AS \
5 | SELECT username, ip, city, COUNT(*) AS count \
6 | FROM USER_CLICKSTREAM WINDOW TUMBLING (size 60 second) \
7 | GROUP BY username, ip, city \
8 | HAVING COUNT(*) > 1;
9 |
10 | -- Sessionisation using IP addresses - 300 seconds of inactivity expires the session
11 | CREATE TABLE CLICK_USER_SESSIONS AS \
12 | SELECT username, count(*) AS events \
13 | FROM USER_CLICKSTREAM window SESSION (300 second) \
14 | GROUP BY username;
15 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 |
5 | ---
6 |
7 | **Is your feature request related to a problem? Please describe.**
8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
9 |
10 | **Describe the solution you'd like**
11 | A clear and concise description of what you want to happen.
12 |
13 | **Describe alternatives you've considered**
14 | A clear and concise description of any alternative solutions or features you've considered.
15 |
16 | **Additional context**
17 | Add any other context or screenshots about the feature request here.
18 |
--------------------------------------------------------------------------------
/src/test/resources/src/main/pipeline/03-test-earliest/01-create.sql:
--------------------------------------------------------------------------------
1 | SET 'auto.offset.reset'='earliest';
2 |
3 | CREATE TABLE streama (id BIGINT primary key, userid varchar, name varchar)
4 | with (kafka_topic = 'streama', value_format = 'json', PARTITIONS=1, REPLICAS=1);
5 |
6 | SET 'auto.offset.reset'='latest';
7 |
8 | CREATE TABLE streamb (id BIGINT primary key, userid varchar, name varchar)
9 | with (kafka_topic = 'streamb', value_format = 'json', PARTITIONS=1, REPLICAS=1);
10 |
11 | UNSET 'auto.offset.reset';
12 |
13 | CREATE TABLE streamc (id BIGINT primary key, userid varchar, name varchar)
14 | with (kafka_topic = 'streamc', value_format = 'json', PARTITIONS=1, REPLICAS=1);
15 |
--------------------------------------------------------------------------------
/src/test/resources/src/main/pipeline/01-clickstream/03-deliver.sql:
--------------------------------------------------------------------------------
1 |
2 | ----------------------------------------------------------------------------------------------------------------------------
3 | -- A series of basic clickstream-analytics
4 | --
5 | -- Min, Max, UDFs etc
6 | ----------------------------------------------------------------------------------------------------------------------------
7 | -- Aggregate (count&groupBy) using a TABLE-Window
8 | CREATE TABLE ENRICHED_ERROR_CODES_COUNT
9 | WITH (KEY_FORMAT='JSON')
10 | AS
11 | SELECT code, definition, COUNT(*) AS count
12 | FROM ENRICHED_ERROR_CODES WINDOW TUMBLING (size 30 second)
13 | GROUP BY code, definition
14 | HAVING COUNT(*) > 1
15 | emit changes;
16 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/gradle/tasks/ListTopicsTask.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics.gradle.tasks
2 |
3 | import groovy.util.logging.Slf4j
4 | import org.gradle.api.tasks.TaskAction
5 |
6 | /**
7 | * List all topics available to KSQL
8 | */
9 | @Slf4j
10 | class ListTopicsTask extends PipelineEndpointTask {
11 |
12 | ListTopicsTask() {
13 | description = "List all topics."
14 | group = project.extensions.confluent.taskGroup
15 | }
16 |
17 | @TaskAction
18 | def listTopics(){
19 |
20 | ksqlRest.getTopics().each { topic ->
21 | println "Name: $topic.name, Registered: $topic.registered, Partitions: ${topic.replicaInfo.size()}, Consumers: $topic.consumerCount, Consumer Groups: $topic.consumerGroupCount"
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 |
5 | ---
6 |
7 | **Describe the bug**
8 | A clear and concise description of what the bug is.
9 |
10 | **To Reproduce**
11 | Steps to reproduce the behavior:
12 | 1. Go to '...'
13 | 2. Click on '....'
14 | 3. Scroll down to '....'
15 | 4. See error
16 |
17 | **Expected behavior**
18 | A clear and concise description of what you expected to happen.
19 |
20 | **Screenshots**
21 | If applicable, add screenshots to help explain your problem.
22 |
23 | **Desktop (please complete the following information):**
24 | - OS: [e.g. iOS]
25 | - Browser [e.g. chrome, safari]
26 | - Version [e.g. 22]
27 |
28 | **Smartphone (please complete the following information):**
29 | - Device: [e.g. iPhone6]
30 | - OS: [e.g. iOS8.1]
31 | - Browser [e.g. stock browser, safari]
32 | - Version [e.g. 22]
33 |
34 | **Additional context**
35 | Add any other context about the problem here.
36 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/gradle/tasks/PipelineScriptTask.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics.gradle.tasks
2 |
3 | import groovy.util.logging.Slf4j
4 | import org.gradle.api.tasks.*
5 |
6 |
7 | /**
8 | * Generate CREATE and DROP scripts used for deployment to KSQL Servers.
9 | */
10 | @Slf4j
11 | class PipelineScriptTask extends PipelineTask {
12 |
13 | PipelineScriptTask() {
14 | group = project.confluent.taskGroup
15 | description = 'Build a single KSQL deployment script with individual pipeline processes ordered and normalized. Primarily used for building a KSQL queries file used for KSQL Server startup.'
16 | }
17 |
18 | /**
19 | * Builds the KSQL script for the directory or directories.
20 | */
21 | File createScript() {
22 |
23 | createScript.delete()
24 | pipelineSql.each { sql ->
25 | createScript.append(sql + ";\n")
26 | }
27 | return createScript
28 | }
29 |
30 | @TaskAction
31 | def pipelineScript() {
32 | createScript()
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
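For orientation (not repository content): createScript() above deletes any previous output and appends each statement from pipelineSql, a property of the parent PipelineTask that is not shown in this dump. A minimal Groovy sketch of the same behavior, using made-up statements and the output path exercised by BuildTest (build/pipeline/ksql-script.sql):

// Sketch only: mimics PipelineScriptTask.createScript() with hypothetical input.
// pipelineSql would normally be gathered, ordered and normalized from the numbered
// directories under src/main/pipeline by the parent PipelineTask.
def pipelineSql = [
    "CREATE STREAM clickstream (_time bigint, time varchar) WITH (kafka_topic = 'clickstream', value_format = 'json')",
    "CREATE TABLE events_per_min AS SELECT userid, count(*) AS events FROM clickstream WINDOW TUMBLING (size 60 second) GROUP BY userid EMIT CHANGES"
]
def createScript = new File('build/pipeline/ksql-script.sql')
createScript.parentFile.mkdirs()
createScript.delete()
pipelineSql.each { sql -> createScript.append(sql + ";\n") }
// build/pipeline/ksql-script.sql now holds one ';'-terminated statement per entry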
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.pom:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 |     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
4 |
5 |
6 |
7 |
8 |
9 |   <modelVersion>4.0.0</modelVersion>
10 |   <groupId>com.redpillanalytics</groupId>
11 |   <artifactId>simple-build</artifactId>
12 |   <version>1.0.0</version>
13 |   <dependencies>
14 |     <dependency>
15 |       <groupId>org.slf4j</groupId>
16 |       <artifactId>slf4j-simple</artifactId>
17 |       <version>+</version>
18 |       <scope>compile</scope>
19 |     </dependency>
20 |   </dependencies>
21 | </project>
22 |
--------------------------------------------------------------------------------
/src/test/resources/src/main/pipeline/01-clickstream/02-integrate.sql:
--------------------------------------------------------------------------------
1 |
2 | ----------------------------------------------------------------------------------------------------------------------------
3 | -- A series of basic clickstream-analytics
4 | --
5 | -- Min, Max, UDFs etc
6 | ----------------------------------------------------------------------------------------------------------------------------
7 |
8 | -- number of events per minute - think about key-for-distribution-purpose - shuffling etc - shouldnt use 'userid'
9 | --@DeleteTopic
10 | CREATE table events_per_min AS
11 | SELECT userid, count(*) AS events
12 | FROM clickstream window TUMBLING (size 60 second)
13 | GROUP BY userid
14 | emit changes;
15 |
16 | -- BUILD PAGE_VIEWS
17 | CREATE TABLE pages_per_min AS
18 | SELECT userid, count(*) AS pages
19 | FROM clickstream WINDOW HOPPING (size 60 second, advance by 5 second)
20 | WHERE request like '%html%'
21 | GROUP BY userid
22 | emit changes;
23 |
24 | --Join using a STREAM
25 | CREATE STREAM ENRICHED_ERROR_CODES AS SELECT code, definition
26 | FROM clickstream
27 | LEFT JOIN clickstream_codes
28 | ON clickstream.status = clickstream_codes.code
29 | emit changes;
30 |
31 |
--------------------------------------------------------------------------------
/src/test/resources/src/main/pipeline/02-clickstream-users/02-integrate.sql:
--------------------------------------------------------------------------------
1 | -- Use 'HAVING' Filter to show ERROR codes > 400 where count > 5
2 | CREATE TABLE ERRORS_PER_MIN_ALERT AS
3 | SELECT status, count(*) AS errors
4 | FROM clickstream window HOPPING ( size 30 second, advance by 20 second)
5 | WHERE status > 400
6 | GROUP BY status HAVING count(*) > 5
7 | AND count(*) is not NULL
8 | emit changes;
9 |
10 | CREATE table ERRORS_PER_MIN AS
11 | SELECT status, count(*) AS errors
12 | FROM clickstream window HOPPING ( size 60 second, advance by 5 second)
13 | WHERE status > 400 GROUP BY status
14 | emit changes;
15 |
16 | -- Clickstream enriched with user account data
17 | CREATE STREAM customer_clickstream WITH (PARTITIONS=2) AS
18 | SELECT userid, u.first_name, u.last_name, u.level, time, ip, request, status, agent
19 | FROM clickstream c
20 | LEFT JOIN web_users u
21 | ON c.userid = u.user_id
22 | emit changes;
23 |
24 | -- View IP, username and City Versus web-site-activity (hits)
25 | CREATE STREAM USER_CLICKSTREAM AS
26 | SELECT userid, u.username, ip, u.city, request, status, bytes, regexp_replace(ip,'\\.','') regexp_test
27 | FROM clickstream c
28 | LEFT JOIN web_users u
29 | ON c.userid = u.user_id
30 | emit changes;
31 |
--------------------------------------------------------------------------------
/src/ksqlServerTest/groovy/KsqlRestTest.groovy:
--------------------------------------------------------------------------------
1 | import com.redpillanalytics.KsqlRest
2 | import groovy.util.logging.Slf4j
3 | import org.testcontainers.containers.DockerComposeContainer
4 | import org.testcontainers.containers.wait.strategy.Wait
5 | import org.testcontainers.spock.Testcontainers
6 | import spock.lang.Shared
7 | import spock.lang.Specification
8 |
9 | import java.time.Duration
10 |
11 | @Slf4j
12 | @Testcontainers
13 | class KsqlRestTest extends Specification {
14 | @Shared
15 | def ksqlRest
16 |
17 | @Shared
18 | DockerComposeContainer environment =
19 | new DockerComposeContainer<>(new File('docker-compose.yml'))
20 | .withServices("zookeeper", "kafka", "ksqldb-server")
21 | .withExposedService("ksqldb-server", 8088, Wait.forHealthcheck().withStartupTimeout(Duration.ofMinutes(5)))
22 | .withLocalCompose(true)
23 |
24 | def setupSpec() {
25 | ksqlRest = new KsqlRest(restUrl: ("http://${environment.getServiceHost('ksqldb-server', 8088)}:${environment.getServicePort('ksqldb-server', 8088)}".toString()))
26 | }
27 |
28 | def "KSQL Server properties fetched"() {
29 |
30 | when:
31 | def result = ksqlRest.getProperties()
32 |
33 | then:
34 | log.warn "result: ${result.toString()}"
35 | result
36 | }
37 |
38 | def "KSQL extension directory path is returned"() {
39 |
40 | when:
41 | def path = ksqlRest.getExtensionPath()
42 |
43 | then:
44 | path
45 | }
46 |
47 | def "KSQL extension directory file is returned"() {
48 |
49 | when:
50 | def dir = ksqlRest.getExtensionDir()
51 |
52 | then:
53 | dir
54 | }
55 |
56 | def "KSQL REST URL is returned"() {
57 |
58 | when:
59 | def url = ksqlRest.getSchemaRegistry()
60 |
61 | then:
62 | url
63 | }
64 |
65 | def "List of topics returned"() {
66 |
67 | when:
68 | def topics = ksqlRest.getTopics()
69 |
70 | then:
71 | topics
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/gradle/containers/TaskGroupContainer.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics.gradle.containers
2 |
3 | import groovy.util.logging.Slf4j
4 |
5 | @Slf4j
6 | class TaskGroupContainer {
7 |
8 | TaskGroupContainer(String name) {
9 | this.name = name
10 | }
11 |
12 | // Build Group defaults
13 | private static final String DEFAULT_GROUP = 'default'
14 |
15 | /**
16 | * The name of the container entity.
17 | */
18 | String name
19 |
20 | /**
21 | * Turn on/off certain features that are only enabled for task groups used to deploy code.
22 | */
23 | Boolean isDeployEnv = true
24 |
25 | /**
26 | * Turn on/off certain features that are only enabled for task groups used to build code.
27 | */
28 | Boolean isBuildEnv = true
29 |
30 | /**
31 | * Capture the debug status from the Gradle logging framework. Not currently used.
32 | */
33 | Boolean isDebugEnabled = log.isDebugEnabled()
34 |
35 | def getDomainName() {
36 |
37 | return ((getClass() =~ /\w+$/)[0] - "Container")
38 | }
39 |
40 | /**
41 | * Convenience method for logging the computed task name while configuring Gradle tasks.
42 | */
43 | def logTaskName(String task) {
44 |
45 | log.debug "${getDomainName()}: $name, TaskName: $task"
46 |
47 | }
48 |
49 | /**
50 | * This plugin has a default set of tasks that are configured with a single task group called 'default'. This method is used during configuration when special handling is needed for those tasks.
51 | */
52 | def isDefaultTask(String buildName) {
53 |
54 | return (buildName == DEFAULT_GROUP) ? true : false
55 |
56 | }
57 |
58 | /**
59 | * A method that makes it easy for naming 'default' tasks versus non-'default' tasks.
60 | */
61 | def getTaskName(String baseTaskName) {
62 |
63 | // return either the baseTaskName or prepend with a name
64 | String taskName = isDefaultTask(getName()) ? baseTaskName : getName() + baseTaskName.capitalize()
65 |
66 | logTaskName(taskName)
67 |
68 | return taskName
69 |
70 |
71 | }
72 |
73 | }
74 |
--------------------------------------------------------------------------------
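As a quick illustration of getTaskName() above (group names here are made up): the 'default' task group keeps the base task name unchanged, while any other group name is prepended and the base name capitalized.

// Assumes TaskGroupContainer is on the classpath; 'staging' is a hypothetical group name.
import com.redpillanalytics.gradle.containers.TaskGroupContainer

def defaultGroup = new TaskGroupContainer('default')
assert defaultGroup.getTaskName('pipelineScript') == 'pipelineScript'        // default group: unchanged

def stagingGroup = new TaskGroupContainer('staging')
assert stagingGroup.getTaskName('pipelineScript') == 'stagingPipelineScript' // named group: prefixed and capitalized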
/src/ksqlServerTest/groovy/KsqlRestAuthTest.groovy:
--------------------------------------------------------------------------------
1 | import com.redpillanalytics.KsqlRest
2 | import groovy.util.logging.Slf4j
3 | import org.testcontainers.containers.DockerComposeContainer
4 | import org.testcontainers.containers.wait.strategy.Wait
5 | import org.testcontainers.spock.Testcontainers
6 | import spock.lang.Shared
7 | import spock.lang.Specification
8 |
9 | import java.time.Duration
10 |
11 | @Slf4j
12 | @Testcontainers
13 | class KsqlRestAuthTest extends Specification {
14 | @Shared
15 | def ksqlRest
16 |
17 | @Shared
18 | DockerComposeContainer environment =
19 | new DockerComposeContainer<>(new File('docker-compose.yml'))
20 | .withServices("zookeeper", "kafka", "ksqldb-server")
21 | .withExposedService("ksqldb-server", 8088, Wait.forHealthcheck().withStartupTimeout(Duration.ofMinutes(5)))
22 | .withLocalCompose(true)
23 |
24 | @Shared
25 | String username = System.getProperty("pipelineUsername") ?: 'test'
26 | @Shared
27 | String password = System.getProperty("pipelinePassword") ?: 'test'
28 |
29 | def setupSpec() {
30 | ksqlRest = new KsqlRest(restUrl: ("http://${environment.getServiceHost('ksqldb-server', 8088)}:${environment.getServicePort('ksqldb-server', 8088)}".toString()), username: username, password: password)
31 | }
32 |
33 | def "KSQL Server properties fetched"() {
34 |
35 | when:
36 | def result = ksqlRest.getProperties()
37 |
38 | then:
39 | log.warn "result: ${result.toString()}"
40 | result
41 | }
42 |
43 | def "KSQL extension directory path is returned"() {
44 |
45 | when:
46 | def path = ksqlRest.getExtensionPath()
47 |
48 | then:
49 | path
50 | }
51 |
52 | def "KSQL extension directory file is returned"() {
53 |
54 | when:
55 | def dir = ksqlRest.getExtensionDir()
56 |
57 | then:
58 | dir
59 | }
60 |
61 | def "KSQL REST URL is returned"() {
62 |
63 | when:
64 | def url = ksqlRest.getSchemaRegistry()
65 |
66 | then:
67 | url
68 | }
69 |
70 | def "List of topics returned"() {
71 |
72 | when:
73 | def topics = ksqlRest.getTopics()
74 |
75 | then:
76 | topics
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/gradle/tasks/PipelineEndpointTask.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics.gradle.tasks
2 |
3 | import com.redpillanalytics.KsqlRest
4 | import groovy.util.logging.Slf4j
5 | import org.gradle.api.tasks.Input
6 | import org.gradle.api.tasks.Internal
7 | import org.gradle.api.tasks.Optional
8 | import org.gradle.api.tasks.options.Option
9 |
10 | /**
11 | * Generate CREATE and DROP scripts used for deployment to KSQL Servers.
12 | */
13 | @Slf4j
14 | class PipelineEndpointTask extends PipelineTask {
15 |
16 | /**
17 | * The REST API URL for the KSQL Server. Default: the extension property {@link com.redpillanalytics.gradle.ConfluentExtension#pipelineEndpoint}.
18 | */
19 | @Input
20 | @Option(option = "rest-url",
21 | description = "The REST API URL for the KSQL Server. Default: value of 'confluent.pipelineEndpoint' or 'http://localhost:8088'."
22 | )
23 | String restUrl = project.extensions.confluent.pipelineEndpoint
24 |
25 | /**
26 | * The Username for Basic Authentication with the REST API URL for the KSQL Server. Default: the extension property {@link com.redpillanalytics.gradle.ConfluentExtension#pipelineUsername}.
27 | */
28 | @Input
29 | @Optional
30 | @Option(option = "basic-username",
31 | description = "The Username for Basic Authentication with the REST API URL for the KSQL Server. Default: value of 'confluent.pipelineUsername' or ''."
32 | )
33 | String username = project.extensions.confluent.pipelineUsername
34 |
35 | /**
36 | * The Password for Basic Authentication with the REST API URL for the KSQL Server. Default: the extension property {@link com.redpillanalytics.gradle.ConfluentExtension#pipelinePassword}.
37 | */
38 | @Input
39 | @Optional
40 | @Option(option = "basic-password",
41 | description = "The Password for Basic Authentication with the REST API URL for the KSQL Server. Default: value of 'confluent.pipelinePassword' or ''."
42 | )
43 | String password = project.extensions.confluent.pipelinePassword
44 |
45 | /**
46 | * Instantiates a KsqlRest Class, which is used for interacting with the KSQL RESTful API.
47 | *
48 | * @return {@link com.redpillanalytics.KsqlRest}
49 | */
50 | @Internal
51 | def getKsqlRest() {
52 | return new KsqlRest(restUrl: restUrl, username: username, password: password)
53 | }
54 |
55 | }
56 |
--------------------------------------------------------------------------------
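The three @Option properties above mean the KSQL endpoint and Basic Auth credentials can come either from the confluent extension or from the command line at invocation time. A minimal sketch (the URL and credentials are placeholders, not values from this repository):

// build.gradle -- extension defaults picked up by PipelineEndpointTask subclasses such as listTopics
confluent {
    pipelineEndpoint = 'http://ksql.example.com:8088'   // placeholder endpoint
    pipelineUsername = 'ksql-user'                      // placeholder credentials
    pipelinePassword = 'ksql-password'
}

// Or override per invocation:
//   ./gradlew listTopics --rest-url http://localhost:8088 --basic-username ksql-user --basic-password ksql-password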
/src/test/resources/maven/com/redpillanalytics/simple-build/1.0.0/simple-build-1.0.0.module:
--------------------------------------------------------------------------------
1 | {
2 | "formatVersion": "1.1",
3 | "component": {
4 | "group": "com.redpillanalytics",
5 | "module": "simple-build",
6 | "version": "1.0.0",
7 | "attributes": {
8 | "org.gradle.status": "release"
9 | }
10 | },
11 | "createdBy": {
12 | "gradle": {
13 | "version": "6.7.1",
14 | "buildId": "eeskrxbcpna2xk6hte6fw3quw4"
15 | }
16 | },
17 | "variants": [
18 | {
19 | "name": "apiElements",
20 | "attributes": {
21 | "org.gradle.category": "library",
22 | "org.gradle.dependency.bundling": "external",
23 | "org.gradle.jvm.version": 13,
24 | "org.gradle.libraryelements": "jar",
25 | "org.gradle.usage": "java-api"
26 | },
27 | "dependencies": [
28 | {
29 | "group": "org.slf4j",
30 | "module": "slf4j-simple",
31 | "version": {
32 | "requires": "+"
33 | }
34 | }
35 | ],
36 | "files": [
37 | {
38 | "name": "simple-build-1.0.0.jar",
39 | "url": "simple-build-1.0.0.jar",
40 | "size": 2013,
41 | "sha512": "469c9d60d70c340ecc013c1508151f811c542d7f3ff5b66da18a78aa6a40e55fa8b8a55a09ca57b918c1c296ac3c90fea0e59103c1b6bbfd4a1b6e3bce8aa059",
42 | "sha256": "004d138c33198b2746d3088247b4c89ec2db0091bd8be24af27a650fe7c1ebad",
43 | "sha1": "c8fc6712dd8161474ca457563f82d26e3af60887",
44 | "md5": "e47a7afc22469f6cc2c2a3b501ced100"
45 | }
46 | ]
47 | },
48 | {
49 | "name": "runtimeElements",
50 | "attributes": {
51 | "org.gradle.category": "library",
52 | "org.gradle.dependency.bundling": "external",
53 | "org.gradle.jvm.version": 13,
54 | "org.gradle.libraryelements": "jar",
55 | "org.gradle.usage": "java-runtime"
56 | },
57 | "dependencies": [
58 | {
59 | "group": "org.slf4j",
60 | "module": "slf4j-simple",
61 | "version": {
62 | "requires": "+"
63 | }
64 | }
65 | ],
66 | "files": [
67 | {
68 | "name": "simple-build-1.0.0.jar",
69 | "url": "simple-build-1.0.0.jar",
70 | "size": 2013,
71 | "sha512": "469c9d60d70c340ecc013c1508151f811c542d7f3ff5b66da18a78aa6a40e55fa8b8a55a09ca57b918c1c296ac3c90fea0e59103c1b6bbfd4a1b6e3bce8aa059",
72 | "sha256": "004d138c33198b2746d3088247b4c89ec2db0091bd8be24af27a650fe7c1ebad",
73 | "sha1": "c8fc6712dd8161474ca457563f82d26e3af60887",
74 | "md5": "e47a7afc22469f6cc2c2a3b501ced100"
75 | }
76 | ]
77 | }
78 | ]
79 | }
80 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%"=="" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%"=="" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if %ERRORLEVEL% equ 0 goto execute
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto execute
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :execute
68 | @rem Setup the command line
69 |
70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
71 |
72 |
73 | @rem Execute Gradle
74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
75 |
76 | :end
77 | @rem End local scope for the variables with windows NT shell
78 | if %ERRORLEVEL% equ 0 goto mainEnd
79 |
80 | :fail
81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
82 | rem the _cmd.exe /c_ return code!
83 | set EXIT_CODE=%ERRORLEVEL%
84 | if %EXIT_CODE% equ 0 set EXIT_CODE=1
85 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
86 | exit /b %EXIT_CODE%
87 |
88 | :mainEnd
89 | if "%OS%"=="Windows_NT" endlocal
90 |
91 | :omega
92 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/gradle/tasks/LoadConfigTask.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics.gradle.tasks
2 |
3 | import groovy.util.logging.Slf4j
4 | import org.gradle.api.DefaultTask
5 | import org.gradle.api.tasks.*
6 | import org.gradle.api.tasks.options.Option
7 |
8 | @Slf4j
9 | class LoadConfigTask extends DefaultTask {
10 |
11 | LoadConfigTask() {
12 | group = project.extensions.confluent.taskGroup
13 | description = "Load a config file using ConfigSlurper."
14 | }
15 |
16 | /**
17 | * "The path of the Streams configuration file. A relative path will be resolved in the project directory, while absolute paths are resolved absolutely.
18 | */
19 | @Optional
20 | @Input
21 | @Option(option = "config-path",
22 | description = "The path of the Streams configuration file. A relative path will be resolved in the project directory, while absolute paths are resolved absolutely."
23 | )
24 | String filePath
25 |
26 | /**
27 | * The environment to pass when configuring 'configPath'. This uses ConfigSlurper, which allows for an environment attribute.
28 | */
29 | @Optional
30 | @Input
31 | @Option(option = "config-env",
32 | description = "The environment to pass when configuring 'configPath'. This uses ConfigSlurper, which allows for an environment attribute."
33 | )
34 | String environment
35 |
36 | /**
37 | * Get the configuration File object for managing Streams applications.
38 | *
39 | * @return The configuration File object for managing Streams applications.
40 | */
41 | @InputFile
42 | def getConfigFile() {
43 | log.debug "filePath: ${filePath}"
44 | return project.file(filePath)
45 | }
46 |
47 | /**
48 | * Get the ConfigSlurper representation of the Configuration.
49 | *
50 | * @return The ConfigSlurper representation of the Configuration.
51 | */
52 | @Internal
53 | def getConfig() {
54 |
55 | log.debug "configPath: $configFile.canonicalPath"
56 | log.debug "configPath text:$configFile.text"
57 | log.debug "environment: ${environment}"
58 | return new ConfigSlurper(environment).parse(configFile.text)
59 | }
60 |
61 | /**
62 | * Execute the Gradle task action.
63 | *
64 | */
65 | @TaskAction
66 | def loadProperties() {
67 |
68 | def properties = new Properties()
69 |
70 | getConfig().each { k, v ->
71 |
72 | log.debug "property: $k: $v"
73 | // create a properties object for use in expand()
74 | properties.put(k, v)
75 |
76 | // if we are specifically asking for the application defaults
77 | if (k == 'applicationDefaultJvmArgs') {
78 |
79 | // replace the text of the startup scripts
80 | project.startScripts {
81 | doLast {
82 | unixScript.text = unixScript.text
83 | .replaceAll(/(DEFAULT_JVM_OPTS=)(")(")/, /$1$2$v$3/)
84 | windowsScript.text = windowsScript.text
85 | .replaceAll(/(DEFAULT_JVM_OPTS)(=)/, /$1$2"$v"/)
86 | }
87 | }
88 | }
89 | }
90 |
91 | project.processResources.configure {
92 |
93 | expand(properties)
94 | }
95 | }
96 | }
97 |
--------------------------------------------------------------------------------
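For context, the file passed via --config-path is parsed with ConfigSlurper, so it is a Groovy config script rather than a java.util.Properties file; the parsed keys are handed to processResources.expand(), which is what fills the ${APPLICATION_ID} and ${TOPIC_PREFIX} placeholders in src/test/resources/src/main/resources/streams.properties. A hypothetical example (the file name, values, and the loadConfig task name are assumptions, since the task registration in ConfluentPlugin is not shown in this section):

// streams-config.groovy -- hypothetical ConfigSlurper script
APPLICATION_ID = 'clickstream-app'
TOPIC_PREFIX = 'dev_'

environments {
    production {
        TOPIC_PREFIX = 'prod_'   // overrides the default when --config-env production is passed
    }
}

// ./gradlew loadConfig --config-path streams-config.groovy --config-env production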
/src/buildTest/groovy/BuildTest.groovy:
--------------------------------------------------------------------------------
1 | import groovy.util.logging.Slf4j
2 | import org.gradle.testkit.runner.GradleRunner
3 | import spock.lang.Shared
4 | import spock.lang.Specification
5 | import spock.lang.Title
6 | import spock.lang.Unroll
7 |
8 | @Slf4j
9 | @Title("Test that :build functions successfully")
10 | class BuildTest extends Specification {
11 |
12 | @Shared
13 | File projectDir, buildDir, resourcesDir, buildFile, settingsFile, pipelineArtifact, script
14 |
15 | @Shared
16 | String projectName = 'simple-build'
17 |
18 | @Shared
19 | def result
20 |
21 | def setupSpec() {
22 |
23 | projectDir = new File("${System.getProperty("projectDir")}/$projectName")
24 | buildDir = new File(projectDir, 'build')
25 | pipelineArtifact = new File(buildDir, 'distributions/simple-build-pipeline-1.0.0.zip')
26 | script = new File(buildDir, 'pipeline/ksql-script.sql')
27 |
28 | resourcesDir = new File('src/test/resources')
29 |
30 | new AntBuilder().copy(todir: projectDir) {
31 | fileset(dir: resourcesDir)
32 | }
33 |
34 | settingsFile = new File(projectDir, 'settings.gradle').write("""
35 | |rootProject.name = '$projectName'
36 | """.stripMargin())
37 |
38 | buildFile = new File(projectDir, 'build.gradle').write("""
39 | plugins {
40 | id 'com.redpillanalytics.gradle-confluent'
41 | id 'maven-publish'
42 | id 'application'
43 | id 'groovy'
44 | }
45 | dependencies {
46 | implementation localGroovy()
47 | }
48 |
49 | publishing {
50 | publications {
51 | groovy(MavenPublication) {
52 | from components.java
53 | }
54 | }
55 | repositories {
56 | mavenLocal()
57 | maven {
58 | name 'test'
59 | url '${resourcesDir}/maven'
60 | }
61 | }
62 | }
63 | group = 'com.redpillanalytics'
64 | version = '1.0.0'
65 |
66 | repositories {
67 | jcenter()
68 | mavenLocal()
69 | maven {
70 | name 'test'
71 | url '${resourcesDir}/maven'
72 | }
73 | }
74 | mainClassName = "streams.TestClass"
75 | """)
76 |
77 | result = GradleRunner.create()
78 | .withProjectDir(projectDir)
79 | .withArguments('-Si', 'build', 'publish')
80 | .withPluginClasspath()
81 | .build()
82 |
83 | log.warn result.getOutput()
84 | }
85 |
86 | def "Verify the correctness of artifacts"() {
87 |
88 | when: 1==1
89 |
90 | then:
91 | pipelineArtifact.exists()
92 | script.exists()
93 | script.readLines().size() == 20
94 | }
95 |
96 | @Unroll
97 | def "Verify the following result: #task"() {
98 |
99 | when: 1==1
100 |
101 | then:
102 | task.outcome.toString() != 'FAILED'
103 |
104 | where:
105 | task << result.getTasks()
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/cloudbuild.yaml:
--------------------------------------------------------------------------------
1 | substitutions:
2 | _ARTIFACT_BUCKET: gs://rpa-devops-artifacts
3 | _DOC_BUCKET: documentation.redpillanalytics.com
4 | _DOC_PATH: ${_DOC_BUCKET}/${REPO_NAME}
5 | _DOC_REMOTE: s3://${_DOC_PATH}
6 | _DOC_LOCAL: build/docs/groovydoc/
7 | _DOC_BASE_URL: https://s3.amazonaws.com/${_DOC_PATH}
8 | _ARTIFACT_LOCATION: ${_ARTIFACT_BUCKET}/${REPO_NAME}
9 | _STATUS_FILE: build-status.txt
10 | # Get repository URL
11 | _REPO_URL: $(body.repository.html_url)
12 | steps:
13 | - name: gcr.io/$PROJECT_ID/project-version
14 | id: version
15 | args:
16 | - $_REPO_URL
17 | - $BRANCH_NAME
18 | waitFor: [ '-' ]
19 | - name: gcr.io/$PROJECT_ID/docker-compose
20 | id: test
21 | entrypoint: bash
22 | args:
23 | - '-cx'
24 | - |
25 | ./gradlew cleanLibs build groovydoc runAllTests buildDashboard validatePlugins
26 | env:
27 | - 'PROJECT_ID=$PROJECT_ID'
28 | waitFor:
29 | - version
30 | - name: gcr.io/google.com/cloudsdktool/cloud-sdk
31 | id: publish
32 | entrypoint: bash
33 | args:
34 | - '-cx'
35 | - |
36 | if [ $BRANCH_NAME == "master" ]
37 | then
38 | ./gradlew publish -Pgradle.publish.key=$${GRADLE_KEY} -Pgradle.publish.secret=$${GRADLE_SECRET}
39 | fi
40 | waitFor:
41 | - test
42 | # Need a rewrite of gradle-analytics that doesn't rely on a local git repo
43 | # - name: gcr.io/google.com/cloudsdktool/cloud-sdk
44 | # id: analytics
45 | # entrypoint: bash
46 | # args:
47 | # - '-c'
48 | # - |
49 | # ./gradlew producer
50 | # waitFor:
51 | # - publish
52 | - name: amazon/aws-cli
53 | id: publish-docs
54 | entrypoint: bash
55 | args:
56 | - '-cx'
57 | - |
58 | if [ $BRANCH_NAME == "master" ]
59 | then
60 | aws s3 sync ${_DOC_LOCAL} ${_DOC_REMOTE}/
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
61 | #  docker-compose exec ksqldb-cli ksql http://primary-ksqldb-server:8088
62 | ksqldb-cli:
63 | image: confluentinc/ksqldb-cli:0.19.0
64 | #container_name: ksqldb-cli
65 | depends_on:
66 | - ksqldb-server
67 | entrypoint: /bin/sh
68 | tty: true
69 |
70 | # Runs the Kafka KSQL data generator
71 | datagen:
72 | #container_name: datagen
73 | image: confluentinc/ksql-examples:5.4.2
74 | depends_on:
75 | - kafka
76 | command: "bash -c 'ksql-datagen \
77 | bootstrap-server=kafka:9092 \
78 | quickstart=clickstream_codes \
79 | format=json \
80 | topic=clickstream_codes \
81 | maxInterval=1 \
82 | iterations=100 && \
83 | ksql-datagen \
84 | bootstrap-server=kafka:9092 \
85 | quickstart=clickstream_users \
86 | format=json \
87 | topic=clickstream_users \
88 | maxInterval=1 \
89 | iterations=1000 && \
90 | ksql-datagen \
91 | bootstrap-server=kafka:9092 \
92 | quickstart=clickstream \
93 | format=json \
94 | topic=clickstream \
95 | maxInterval=1 \
96 | iterations=100'"
97 | # This network is defined for Google Cloud Build jobs
98 | # For local testing, just run: docker network create cloudbuild
99 | #networks:
100 | # default:
101 | # external:
102 | # name: cloudbuild
103 |
--------------------------------------------------------------------------------
/src/deployTest/groovy/PropertiesTest.groovy:
--------------------------------------------------------------------------------
1 | import groovy.util.logging.Slf4j
2 | import org.gradle.testkit.runner.BuildResult
3 | import org.gradle.testkit.runner.GradleRunner
4 | import spock.lang.Shared
5 | import spock.lang.Specification
6 | import spock.lang.Title
7 | import spock.lang.Unroll
8 |
9 | @Slf4j
10 | @Title("Execute :properties task")
11 | class PropertiesTest extends Specification {
12 | @Shared
13 | File projectDir, buildDir, buildFile, resourcesDir, settingsFile
14 |
15 | @Shared
16 | String taskName
17 |
18 | @Shared
19 | List tasks
20 |
21 | @Shared
22 | BuildResult result
23 |
24 | @Shared
25 | String projectName = 'run-properties'
26 |
27 | @Shared
28 | AntBuilder ant = new AntBuilder()
29 |
30 | @Shared
31 | String pipelineEndpoint = System.getProperty("pipelineEndpoint") ?: 'http://localhost:8088'
32 |
33 | @Shared
34 | String kafkaServers = System.getProperty("kafkaServers") ?: 'localhost:9092'
35 |
36 | @Shared
37 | String analyticsVersion = System.getProperty("analyticsVersion")
38 |
39 | def setupSpec() {
40 |
41 | projectDir = new File("${System.getProperty("projectDir")}/$projectName")
42 | buildDir = new File(projectDir, 'build')
43 | buildFile = new File(projectDir, 'build.gradle')
44 |
45 | resourcesDir = new File('src/test/resources')
46 |
47 | ant.copy(todir: projectDir) {
48 | fileset(dir: resourcesDir)
49 | }
50 |
51 | buildFile.write("""
52 | |plugins {
53 | | id 'com.redpillanalytics.gradle-confluent'
54 | | id "com.redpillanalytics.gradle-analytics" version "$analyticsVersion"
55 | | id 'maven-publish'
56 | |}
57 | |
58 | |publishing {
59 | | repositories {
60 | | mavenLocal()
61 | | }
62 | |}
63 | |archivesBaseName = 'test'
64 | |group = 'com.redpillanalytics'
65 | |version = '1.0.0'
66 | |
67 | |repositories {
68 | | jcenter()
69 | | mavenLocal()
70 | | maven {
71 | | name 'test'
72 | | url '${resourcesDir}/maven'
73 | | }
74 | |}
75 | |
76 | |dependencies {
77 | | archives group: 'com.redpillanalytics', name: 'simple-build', version: '+'
78 | | archives group: 'com.redpillanalytics', name: 'simple-build-pipeline', version: '+'
79 | |}
80 | |
81 | |analytics {
82 | | kafka {
83 | | test {
84 | | bootstrapServers = '$kafkaServers'
85 | | }
86 | | }
87 | |}
88 | |
89 | |""".stripMargin())
90 |
91 | settingsFile = new File(projectDir, 'settings.gradle').write("""rootProject.name = '$projectName'""")
92 |
93 | }
94 |
95 | def executeSingleTask(String taskName, List otherArgs) {
96 |
97 | otherArgs.add(0, taskName)
98 |
99 | log.warn "runner arguments: ${otherArgs.toString()}"
100 |
101 | // execute the Gradle test build
102 | result = GradleRunner.create()
103 | .withProjectDir(projectDir)
104 | .withArguments(otherArgs)
105 | .withPluginClasspath()
106 | .forwardOutput()
107 | .build()
108 | }
109 |
110 | def "Execute :properties with enableStreams = true"() {
111 |
112 | given:
113 | taskName = 'properties'
114 | result = executeSingleTask(taskName, ['-Si','-Pconfluent.enableStreams=true',"-PmainClassName=streams.TestClass"])
115 |
116 | expect:
117 | !result.tasks.collect { it.outcome }.contains('FAILED')
118 |
119 | }
120 |
121 | def "Execute :properties with enableFunctions = true"() {
122 |
123 | given:
124 | taskName = 'properties'
125 | result = executeSingleTask(taskName, ['-Si','-Pconfluent.enableFunctions=true','-Pconfluent.functionPattern = simple-build'])
126 |
127 | expect:
128 | !result.tasks.collect { it.outcome }.contains('FAILED')
129 |
130 | }
131 | }
132 |
--------------------------------------------------------------------------------
/src/test/groovy/TasksTest.groovy:
--------------------------------------------------------------------------------
1 | import groovy.util.logging.Slf4j
2 | import org.gradle.testkit.runner.GradleRunner
3 | import spock.lang.Shared
4 | import spock.lang.Specification
5 | import spock.lang.Title
6 | import spock.lang.Unroll
7 |
8 | @Slf4j
9 | @Title("Check basic configuration")
10 | class TasksTest extends Specification {
11 |
12 | @Shared
13 | File projectDir, buildDir, settingsFile, resourcesDir, buildFile, artifact
14 |
15 | @Shared
16 | def result, tasks, taskList
17 |
18 | @Shared
19 | String projectName = 'run-tasks'
20 |
21 | @Shared
22 | String pipelineEndpoint = System.getProperty("pipelineEndpoint") ?: 'http://localhost:8088'
23 |
24 | @Shared
25 | String kafkaServers = System.getProperty("kafkaServers") ?: 'localhost:9092'
26 |
27 | @Shared
28 | String analyticsVersion = System.getProperty("analyticsVersion")
29 |
30 | def setupSpec() {
31 |
32 | projectDir = new File("${System.getProperty("projectDir")}/$projectName")
33 | buildDir = new File(projectDir, 'build')
34 | artifact = new File(buildDir, 'distributions/build-test-pipeline.zip')
35 | taskList = ['clean', 'assemble', 'check', 'pipelineScript', 'pipelineZip', 'build']
36 |
37 | resourcesDir = new File('src/test/resources')
38 |
39 | new AntBuilder().copy(todir: projectDir) {
40 | fileset(dir: resourcesDir)
41 | }
42 |
43 | settingsFile = new File(projectDir, 'settings.gradle').write("""rootProject.name = '$projectName'""")
44 |
45 | buildFile = new File(projectDir, 'build.gradle').write("""
46 | |plugins {
47 | | id 'com.redpillanalytics.gradle-confluent'
48 | | id 'com.redpillanalytics.gradle-analytics' version "$analyticsVersion"
49 | | id 'maven-publish'
50 | | id 'application'
51 | |}
52 | |
53 | |publishing {
54 | | repositories {
55 | | mavenLocal()
56 | | }
57 | |}
58 | |archivesBaseName = 'test'
59 | |group = 'com.redpillanalytics'
60 | |version = '1.0.0'
61 | |
62 | |repositories {
63 | | jcenter()
64 | | mavenLocal()
65 | | maven {
66 | | name 'test'
67 | | url 'maven'
68 | | }
69 | |}
70 | |
71 | |dependencies {
72 | | archives group: 'com.redpillanalytics', name: 'simple-build', version: '+'
73 | | archives group: 'com.redpillanalytics', name: 'simple-build-pipeline', version: '+'
74 | |}
75 | |
76 | |confluent {
77 | | pipelineEndpoint = '$pipelineEndpoint'
78 | | functionPattern = 'simple-build'
79 | |}
80 | |analytics {
81 | | kafka {
82 | | test {
83 | | bootstrapServers = '$kafkaServers'
84 | | }
85 | | }
86 | |}
87 | |mainClassName = "streams.TestClass"
88 | |
89 | |""".stripMargin())
90 |
91 | result = GradleRunner.create()
92 | .withProjectDir(projectDir)
93 |             .withArguments('-Si', 'tasks', '--all', 'showConfigurations')
94 | .withPluginClasspath()
95 | .build()
96 |
97 | tasks = result.output.readLines().grep(~/(> Task :)(.+)/).collect {
98 | it.replaceAll(/(> Task :)(\w+)( UP-TO-DATE)*/, '$2')
99 | }
100 |
101 | log.warn result.getOutput()
102 | }
103 |
104 | def "All tasks run and in the correct order"() {
105 |
106 | given:
107 | ":tasks execution is successful"
108 |
109 | expect:
110 | ['SUCCESS', 'UP_TO_DATE'].contains(result.task(":tasks").outcome.toString())
111 | }
112 |
113 | @Unroll
114 | def "Executing :tasks contains :#task"() {
115 |
116 | when:
117 | "Gradle build runs"
118 |
119 | then:
120 | result.output.contains(task)
121 |
122 | where:
123 | task << ['build', 'pipelineScript', 'pipelineZip', 'publish', 'listTopics']
124 | }
125 | }
126 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/gradle/ConfluentExtension.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics.gradle
2 |
3 | import groovy.util.logging.Slf4j
4 |
5 | @Slf4j
6 | class ConfluentExtension {
7 |
8 | /**
9 | * The group name to use for all tasks. Default: 'confluent'.
10 | */
11 | String taskGroup = 'confluent'
12 |
13 | /**
14 | * Enable KSQL pipeline support. Default: true.
15 | */
16 | Boolean enablePipelines = true
17 |
18 | /**
19 | * Enable KSQL UD(A)F support. Default: true.
20 | */
21 | Boolean enableFunctions = true
22 |
23 | /**
24 | * Enable Kafka Streams support. Default: true.
25 | */
26 | Boolean enableStreams = true
27 |
28 | /**
29 | * Base source directory for the Confluent plugin. Default: 'src/main'.
30 | */
31 | String sourceBase = 'src/main'
32 |
33 | /**
34 | * Name of the Pipeline source directory that resides in the {@link #sourceBase} directory. Default: 'pipeline'.
35 | */
36 | String pipelineSourceName = 'pipeline'
37 |
38 | /**
39 | * Full path of the Pipeline source directory. When set, this overrides the values of {@link #sourceBase} and {@link #pipelineSourceName}. Default: null.
40 | */
41 | String pipelineSourcePath
42 |
43 | /**
44 | * The name of the Pipeline build directory in the project build directory. Default: 'pipeline'.
45 | */
46 | String pipelineBuildName = 'pipeline'
47 |
48 | /**
49 | * The name of the Pipeline deploy directory in the project build directory. Default: 'pipeline'.
50 | */
51 | String pipelineDeployName = 'pipeline'
52 |
53 | /**
54 | * The name of the Function deploy directory in the project build directory. Default: 'function'.
55 | */
56 | String functionDeployName = 'function'
57 |
58 | /**
59 | * If populated, the KSQL Function JAR file will be renamed to this value during the copy. This makes it easy to hand-off to downstream deployment mechanisms. Default: null.
60 | */
61 | String functionArtifactName
62 |
63 | /**
64 | * The name of the Pipeline deployment 'create' script, which contains all the persistent statements that need to be executed. Default: 'ksql-script.sql'.
65 | */
66 | String pipelineCreateName = 'ksql-script.sql'
67 |
68 | /**
69 | * RESTful endpoint for the KSQL Server. Default: 'http://localhost:8088'.
70 | */
71 | String pipelineEndpoint = 'http://localhost:8088'
72 |
73 | /**
74 |     * Username for Basic Authentication with the RESTful endpoint. Default: null.
75 | */
76 | String pipelineUsername
77 |
78 | /**
79 |     * Password for Basic Authentication with the RESTful endpoint. Default: null.
80 | */
81 | String pipelinePassword
82 |
83 | /**
84 | * The pattern used for matching the pipeline deployment artifact. Default: 'pipeline'.
85 | */
86 | String pipelinePattern = 'pipeline'
87 |
88 | /**
89 | * The pattern used for matching the function deployment artifact. Default: 'function'.
90 | */
91 | String functionPattern = 'function'
92 |
93 | /**
94 | * The path of the Streams configuration file. A relative path will be resolved in the project directory, while absolute paths are resolved absolutely. Default: 'streams.config'.
95 | */
96 | String configPath = 'streams.config'
97 |
98 | /**
99 |     * The environment used when parsing the file at {@link #configPath}. This uses the ConfigSlurper concept of default values with environment-specific overrides. Default: 'development'.
100 | */
101 | String configEnv = 'development'
102 |
103 | /**
104 | * The number of seconds to pause execution after a create statement. Default: 0
105 | */
106 | Integer statementPause = 0
107 |
108 | /**
109 | * The number of seconds to pause execution before retrying a drop statement. Default: 10
110 | */
111 | Integer dropRetryPause = 10
112 |
113 | /**
114 | * The maximum number of times drop statements are to be retried. Default: 10
115 | */
116 | Integer dropMaxRetries = 10
117 |
118 | /**
119 | * Provides the path for Pipeline source files.
120 | *
121 | * @return The full path of the Pipeline source files. Uses {@link #pipelineSourcePath} first if it exists, and if it doesn't (the default), then it uses {@link #sourceBase} and {@link #pipelineSourceName}.
122 | */
123 | String getPipelinePath() {
124 | return (pipelineSourcePath ?: "${sourceBase}/${pipelineSourceName}")
125 | }
126 | }
127 |
--------------------------------------------------------------------------------
/src/deployTest/groovy/DeployTest.groovy:
--------------------------------------------------------------------------------
1 | import groovy.util.logging.Slf4j
2 | import org.gradle.testkit.runner.GradleRunner
3 | import org.testcontainers.containers.DockerComposeContainer
4 | import org.testcontainers.containers.KafkaContainer
5 | import org.testcontainers.containers.wait.strategy.Wait
6 | import org.testcontainers.spock.Testcontainers
7 | import org.testcontainers.utility.DockerImageName
8 | import spock.lang.Shared
9 | import spock.lang.Specification
10 | import spock.lang.Stepwise
11 |
12 | import java.time.Duration
13 |
14 | @Slf4j
15 | @Stepwise
16 | @Testcontainers
17 | class DeployTest extends Specification {
18 |
19 | @Shared
20 | DockerComposeContainer environment =
21 | new DockerComposeContainer<>(new File('docker-compose.yml'))
22 | .withExposedService("ksqldb-server", 8088, Wait.forHealthcheck().withStartupTimeout(Duration.ofMinutes(5)))
23 | .withLocalCompose(true)
24 |
25 | @Shared
26 | KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.0"))
27 |
28 | @Shared
29 | File projectDir, buildDir, resourcesDir, settingsFile, artifact, buildFile
30 |
31 | @Shared
32 | def result
33 |
34 | @Shared
35 | String projectName = 'simple-deploy', taskName, endpoint
36 |
37 | @Shared
38 | String analyticsVersion = System.getProperty("analyticsVersion")
39 |
40 | def copyResources() {
41 | new AntBuilder().copy(todir: projectDir) {
42 | fileset(dir: resourcesDir)
43 | }
44 | }
45 |
46 | def setupSpec() {
47 |
48 | projectDir = new File("${System.getProperty("projectDir")}/$projectName")
49 | buildDir = new File(projectDir, 'build')
50 | artifact = new File(buildDir, 'distributions/simple-deploy-pipeline.zip')
51 | resourcesDir = new File('src/test/resources')
52 | buildFile = new File(projectDir, 'build.gradle')
53 | endpoint = "http://${environment.getServiceHost('ksqldb-server', 8088)}:${environment.getServicePort('ksqldb-server', 8088)}".toString()
54 |
55 | copyResources()
56 |
57 | settingsFile = new File(projectDir, 'settings.gradle').write("""rootProject.name = '$projectName'""")
58 | buildFile.write("""
59 | |plugins {
60 | | id 'com.redpillanalytics.gradle-confluent'
61 | | id "com.redpillanalytics.gradle-analytics" version "$analyticsVersion"
62 | | id 'maven-publish'
63 | |}
64 | |
65 | |publishing {
66 | | repositories {
67 | | mavenLocal()
68 | | }
69 | |}
70 | |group = 'com.redpillanalytics'
71 | |version = '1.0.0'
72 | |
73 | |repositories {
74 | | jcenter()
75 | | mavenLocal()
76 | | maven {
77 | | name 'test'
78 | | url 'maven'
79 | | }
80 | |}
81 | |
82 | |dependencies {
83 | | archives group: 'com.redpillanalytics', name: 'simple-build', version: '+'
84 | | archives group: 'com.redpillanalytics', name: 'simple-build-pipeline', version: '+'
85 | |}
86 | |
87 | |confluent {
88 | | functionPattern = 'simple-build'
89 | | pipelineEndpoint = '$endpoint'
90 | |}
91 | |analytics {
92 | | kafka {
93 | | test {
94 | | bootstrapServers = '${kafka.getBootstrapServers()}'
95 | | }
96 | | }
97 | |}
98 | |""".stripMargin())
99 | }
100 |
101 | def setup() {
102 | copyResources()
103 | }
104 |
105 | // helper method
106 | def executeSingleTask(String taskName, List otherArgs = []) {
107 | otherArgs.add(0, taskName)
108 | log.warn "runner arguments: ${otherArgs.toString()}"
109 |
110 | // execute the Gradle test build
111 | result = GradleRunner.create()
112 | .withProjectDir(projectDir)
113 | .withArguments(otherArgs)
114 | .withPluginClasspath()
115 | .forwardOutput()
116 | .build()
117 | }
118 |
119 | def "Execute :tasks task"() {
120 | given:
121 | taskName = 'tasks'
122 | result = executeSingleTask(taskName, ['-Si'])
123 |
124 | expect:
125 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
126 | }
127 |
128 | def "Deploy test from Resources"() {
129 | given:
130 | taskName = 'deploy'
131 | result = executeSingleTask(taskName, ['-Si'])
132 |
133 | expect:
134 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
135 | result.tasks.collect { it.path - ":" } == ["functionCopy", "pipelineExtract", "pipelineDeploy", "deploy"]
136 | }
137 |
138 | def "Producer test to Kafka"() {
139 | given:
140 | taskName = 'producer'
141 | result = executeSingleTask(taskName, ['-Si'])
142 | log.warn "Kafka: $kafka"
143 |
144 | expect:
145 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
146 | result.tasks.collect { it.path - ":" } == ['kafkaTestSink', 'producer']
147 | }
148 | }
149 |
--------------------------------------------------------------------------------
/src/ksqlPipelinesTest/groovy/ExecuteTest.groovy:
--------------------------------------------------------------------------------
1 | import groovy.util.logging.Slf4j
2 | import org.gradle.testkit.runner.GradleRunner
3 | import org.testcontainers.containers.DockerComposeContainer
4 | import org.testcontainers.containers.KafkaContainer
5 | import org.testcontainers.containers.wait.strategy.Wait
6 | import org.testcontainers.spock.Testcontainers
7 | import org.testcontainers.utility.DockerImageName
8 | import spock.lang.Shared
9 | import spock.lang.Specification
10 | import spock.lang.Stepwise
11 |
12 | import java.time.Duration
13 |
14 | @Slf4j
15 | @Stepwise
16 | @Testcontainers
17 | class ExecuteTest extends Specification {
18 |
19 | @Shared
20 | DockerComposeContainer environment =
21 | new DockerComposeContainer<>(new File('docker-compose.yml'))
22 | .withExposedService("ksqldb-server", 8088, Wait.forHealthcheck().withStartupTimeout(Duration.ofMinutes(5)))
23 | .withLocalCompose(true)
24 | @Shared
25 | KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.0"))
26 |
27 | @Shared
28 | File projectDir, buildDir, buildFile, settingsFile, resourcesDir
29 |
30 | @Shared
31 | String projectName = 'execute-test'
32 |
33 | @Shared
34 | String taskName, endpoint
35 |
36 | @Shared
37 | def result, taskList
38 |
39 | @Shared
40 | String analyticsVersion = System.getProperty("analyticsVersion")
41 |
42 | def copyResources() {
43 | new groovy.util.AntBuilder().copy(todir: projectDir) {
44 | fileset(dir: resourcesDir)
45 | }
46 | }
47 |
48 | def setupSpec() {
49 | projectDir = new File("${System.getProperty("projectDir")}/$projectName")
50 | buildDir = new File(projectDir, 'build')
51 | taskList = ['pipelineExecute']
52 | resourcesDir = new File('src/test/resources')
53 | buildFile = new File(projectDir, 'build.gradle')
54 | endpoint = "http://${environment.getServiceHost('ksqldb-server', 8088)}:${environment.getServicePort('ksqldb-server', 8088)}".toString()
55 |
56 | copyResources()
57 |
58 | settingsFile = new File(projectDir, 'settings.gradle').write("""rootProject.name = '$projectName'""")
59 | buildFile.write("""
60 | |plugins {
61 | | id 'com.redpillanalytics.gradle-confluent'
62 | | id "com.redpillanalytics.gradle-analytics" version "$analyticsVersion"
63 | |}
64 | |
65 | |confluent {
66 | | pipelineEndpoint '$endpoint'
67 | |}
68 | |
69 | |analytics {
70 | | kafka {
71 | | test {
72 | | bootstrapServers = '${kafka.getBootstrapServers()}'
73 | | }
74 | | }
75 | |}
76 | |""".stripMargin())
77 | }
78 |
79 | def setup() {
80 | copyResources()
81 | }
82 |
83 | // helper method
84 | def executeSingleTask(String taskName, List otherArgs, Boolean logOutput = true) {
85 |
86 | otherArgs.add(0, taskName)
87 | log.warn "runner arguments: ${otherArgs.toString()}"
88 |
89 | // execute the Gradle test build
90 | result = GradleRunner.create()
91 | .withProjectDir(projectDir)
92 | .withArguments(otherArgs)
93 | .withPluginClasspath()
94 | .build()
95 |
96 | // log the results
97 | if (logOutput) log.warn result.getOutput()
98 |
99 | return result
100 | }
101 |
102 | def "Execute :tasks task"() {
103 | given:
104 | taskName = 'tasks'
105 | result = executeSingleTask(taskName, ['-Si'])
106 |
107 | expect:
108 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
109 | }
110 |
111 |
112 | def "Execute :listTopics task"() {
113 | given:
114 | taskName = 'listTopics'
115 | result = executeSingleTask(taskName, ['-Si'])
116 |
117 | expect:
118 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
119 | }
120 |
121 | def "Execute :pipelineExecute task with default values"() {
122 | given:
123 | taskName = 'pipelineExecute'
124 | result = executeSingleTask(taskName, ['-Si'])
125 |
126 | expect:
127 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
128 | }
129 |
130 | def "Execute :pipelineExecute task with --drop-only first"() {
131 |
132 | given:
133 | taskName = 'pipelineExecute'
134 | result = executeSingleTask(taskName, ['--drop-only', '-Si'])
135 |
136 | expect:
137 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
138 | !result.output.toLowerCase().contains('create table')
139 | !result.output.toLowerCase().contains('insert into')
140 | }
141 |
142 | def "Execute :pipelineExecute task with custom directory"() {
143 | given:
144 | taskName = 'pipelineExecute'
145 | result = executeSingleTask(taskName, ['--pipeline-dir=src/main/pipeline/01-clickstream', '-Si'])
146 |
147 | expect:
148 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
149 | }
150 |
151 | def "Execute :pipelineExecute task with --drop-only second"() {
152 | given:
153 | taskName = 'pipelineExecute'
154 | result = executeSingleTask(taskName, ['--drop-only', '-Si'])
155 |
156 | expect:
157 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
158 | !result.output.toLowerCase().contains('create table')
159 | !result.output.toLowerCase().contains('insert into')
160 | }
161 |
162 | def "Execute :pipelineExecute task with --no-drop"() {
163 | given:
164 | taskName = 'pipelineExecute'
165 | result = executeSingleTask(taskName, ['--no-drop', '-Si'])
166 |
167 | expect:
168 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
169 | !result.output.toLowerCase().contains('drop table')
170 | }
171 |
172 | def "Execute :pipelineExecute task with --drop-only third"() {
173 | given:
174 | taskName = 'pipelineExecute'
175 | result = executeSingleTask(taskName, ['--drop-only', '-Si'])
176 |
177 | expect:
178 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
179 | !result.output.toLowerCase().contains('create table')
180 | !result.output.toLowerCase().contains('insert into')
181 | }
182 |
183 | def "Execute :pipelineExecute task with --no-terminate"() {
184 | given:
185 | taskName = 'pipelineExecute'
186 | result = executeSingleTask(taskName, ['--no-terminate', '-Si'])
187 |
188 | expect:
189 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
190 | !result.output.toLowerCase().contains('terminating query')
191 | }
192 |
193 | def "Execute :pipelineExecute task with --from-beginning"() {
194 | given:
195 | taskName = 'pipelineExecute'
196 | result = executeSingleTask(taskName, ['--from-beginning', '-Si'])
197 |
198 | expect:
199 |       !result.tasks.collect { it.outcome.toString() }.contains('FAILED')
200 | }
201 |
202 | def "Execute :pipelineExecute and test for --@DeleteTopic directive"() {
203 | given:
204 | taskName = 'pipelineExecute'
205 | result = executeSingleTask(taskName, ['--drop-only', '-Si'])
206 |
207 | expect:
208 | result.task(":${taskName}").outcome.name() != 'FAILED'
209 | result.output.toLowerCase().contains('drop table if exists events_per_min delete topic')
210 | result.output.toLowerCase().contains('drop stream if exists "teststream" delete topic')
211 | }
212 |
213 | def "Execute :pipelineExecute task with custom REST endpoint"() {
214 | given:
215 | taskName = 'pipelineExecute'
216 | result = executeSingleTask(taskName, ["--rest-url", endpoint, '-Si'])
217 |
218 | expect:
219 | result.task(":${taskName}").outcome.name() != 'FAILED'
220 | }
221 |
222 | def "Execute :producer task"() {
223 | given:
224 | taskName = 'producer'
225 | result = executeSingleTask(taskName, ['-Si'])
226 |
227 | expect:
228 | result.task(":${taskName}").outcome.name() != 'FAILED'
229 | result.tasks.collect { it.path - ":" } == ['kafkaTestSink', 'producer']
230 | }
231 | }
232 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/gradle/tasks/PipelineTask.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics.gradle.tasks
2 |
3 | import groovy.util.logging.Slf4j
4 | import org.gradle.api.DefaultTask
5 | import org.gradle.api.tasks.*
6 | import org.gradle.api.tasks.options.Option
7 |
8 |
9 | /**
10 | * This class is meant to be inherited, which is why it doesn't have a @TaskAction-annotated method.
11 | */
12 | @Slf4j
13 | class PipelineTask extends DefaultTask {
14 | /**
15 | * The base directory containing SQL scripts to execute, including recursive subdirectories. Default: {@getDir}.
16 | */
17 | @Input
18 | @Option(option = "pipeline-dir",
19 | description = "The base directory containing SQL scripts to execute, including recursive subdirectories. Default: value of 'confluent.pipelineSourcePath' or 'src/main/pipeline'."
20 | )
21 | String pipelinePath
22 |
23 | @Input
24 | @Option(option = "pipeline-file",
25 | description="File containing the SQL scripts to execute.")
26 | @Optional
27 | String pipelineFilePath = null
28 |
29 | /**
30 | * When defined, DROP statements are not processed in reverse order of the CREATE statements, which is the default.
31 | */
32 | @Input
33 | @Option(option = 'no-reverse-drops',
34 | description = 'When defined, DROP statements are not processed in reverse order of the CREATE statements, which is the default.'
35 | )
36 | boolean noReverseDrops
37 |
38 | /**
39 | * Returns a File object representation of the {@project.extensions.confluent.pipelineBuildName} parameter.
40 | *
41 | * @return The File object representation of the {@project.extensions.confluent.pipelineBuildName} parameter.
42 | */
43 | @Internal
44 | File getPipelineBuildDir() {
45 |
46 | return project.file("${project.buildDir}/${project.extensions.confluent.pipelineBuildName}")
47 | }
48 |
49 | /**
50 | * Returns a File object representation of the {@pipelinePath} parameter.
51 | *
52 | * @return The File object representation of the {@pipelinePath} parameter.
53 | */
54 | @InputDirectory
55 | File getDir() {
56 |
57 | // first let's look for the existence in src/main/pipeline
58 | File dir = new File(pipelineBuildDir, pipelinePath)
59 | //File dir = project.file("${project.extensions.confluent.sourceBase}/${project.extensions.confluent.pipelineSourceName}/${pipelinePath}")
60 |
61 | return dir.exists() ? dir : project.file(pipelinePath)
62 |
63 | }
64 |
65 | /**
66 | * Returns a File object representation of the {@pipelinePath}/{@pipelineFilePath} parameter.
67 | *
68 | * @return The File object representation of the {@pipelinePath}/{@pipelineFilePath} parameter.
69 | */
70 | @InputFile
71 | @Optional
72 | File getFile() {
73 | return pipelineFilePath != null ? new File(dir, pipelineFilePath) : null
74 | }
75 |
76 | /**
77 | * Returns a File object representation of the KSQL create script.
78 | *
79 | * @return The File object representation of the KSQL create script.
80 | */
81 | @OutputFile
82 | File getCreateScript() {
83 | return project.file("${dir}/${project.extensions.confluent.pipelineCreateName}")
84 | }
85 |
86 | /**
87 | * Gets the hierarchical collection of pipeline files, sorted using folder structure and alphanumeric logic.
88 | * If a particular pipeline file is specified, returns a list with only that file.
89 | *
90 | * @return The List of pipeline KSQL files.
91 | */
92 | @Internal
93 | List getPipelineFiles() {
94 | if(file != null)
95 | return Collections.singletonList(file)
96 |
97 | def tree = project.fileTree(dir: dir, includes: ['**/*.sql', '**/*.SQL', '**/*.ksql', '**/*.KSQL'], exclude: project.extensions.confluent.pipelineCreateName)
98 | return tree.sort()
99 | }
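   // Ordering note: File.compareTo() sorts by path, so scripts come back in directory order and then
   // alphanumerically within each directory (e.g. an '01-...' script is returned before an '02-...' one).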
100 |
101 | /**
102 | * Gets tokenized (based on ';') pipeline KSQL statements using {@link #getPipelineFiles}.
103 | *
104 | * @return The List of tokenized pipeline KSQL statements.
105 | */
106 | @Internal
107 | def getTokenizedSql() {
108 | //tokenize individual KSQL statements out of each SQL script
109 | def tokenized = []
110 | getPipelineFiles().each { file ->
111 | file.text.trim().tokenize(';').each {
112 | tokenized << it
113 | }
114 | }
115 | log.debug "parsed:"
116 | tokenized.each { log.debug "sql: $it \n" }
117 | }
118 |
119 | /**
120 | * Gets the hierarchical collection of pipeline SQL statements--tokenized and normalized--and sorted using {@link #getPipelineFiles}.
121 | *
122 | * @return The List of pipeline KSQL statements.
123 | */
124 | @Internal
125 | def getPipelineSql() {
126 |
127 |       // clean up each statement: strip comments, line continuations, newlines and extra whitespace
128 | def transformed = tokenizedSql.findResults { String sql ->
129 |
130 | // all the transformations of the statements after tokenization
131 | sql
132 |
133 | .replaceAll(~/(\s)*(?:--.*)?/) { all, statement -> (statement ?: '') } // remove comments
134 | .trim() // basically trim things up
135 | .replace("\\\n", ' ') // replace backslash then newline with a space
136 | .replace("\n", ' ') // replace newline with a space
137 |             .replace('  ', ' ') // replace 2 spaces with 1 space
138 | }
139 |
140 | transformed.removeAll('')
141 | log.debug "cleaned:"
142 | transformed.each { log.debug "sql: $it \n" }
143 |
144 | return transformed
145 | }
146 |
147 | /**
148 | * Returns a List of Map objects of "Comment Directives" from the KSQL source directory. These directives are of the form: "--@", and are used to control certain behaviors.
149 | *
150 | * @return List of Map objects of structure: [type: type, object: stream or table name]. For instance: [type:DeleteTopic, object:events_per_min].
151 | */
152 | @Internal
153 | def getDirectives() {
154 | List directives = []
155 | tokenizedSql.each { String sql ->
156 | sql.find(/(?i)(--@{1,1})(\w+)(\n)(CREATE{1,1})( {1,})(\w+)( {1,})(\w+|"\w+")/) { match, directive, directiveType, s1, create, s2, table, s3, object ->
157 | if (match != null) directives << [type: directiveType, object: object]
158 | }
159 | }
160 |
161 | return directives
162 | }
163 |
164 | /**
165 | * Returns a List of tables or streams that have a specific directive for execution behavior. Directives are defined in KSQL scripts using: "--@DirectiveName".
166 | *
167 | * For instance, the directive that controls whether or not an underlying topic is deleted during {@pipelineExecute} is: --@DeleteTopic.
168 | *
169 | * @param directiveType The type of directive to get included objects for.
170 | *
171 | * @return objects A list of tables/streams that have the specific directive.
172 | */
173 | def getDirectiveObjects(String directiveType) {
174 | directives.collect { map ->
175 | if (map.type == directiveType) map.object
176 | }
177 | }
178 |
179 | /**
180 | * Returns a List of DROP KSQL statements: one for each CREATE statement in the specified pipeline directory.
181 | * The default behavior is to return those DROP statements in the reverse order of the CREATE statement.
182 | * This can be disabled using {@noReverseDrops} in the API, or the task option '--no-reverse-drops'.
183 | *
184 | * @return The List of KSQL DROP statements.
185 | */
186 | @Internal
187 | List getDropSql() {
188 | List script = pipelineSql.collect { String sql ->
189 | sql.find(/(?i)(.*)(CREATE)(\s+)(table|stream|source connector|sink connector)(\s+)(\w+|"\w+")/) { all, x1, create, x3, type, x4, name ->
190 | if (type.toLowerCase() == 'source connector' || type.toLowerCase() == 'sink connector') {
191 | return "DROP CONNECTOR $name;\n"
192 | } else {
193 | return "DROP $type IF EXISTS ${name}${getDirectiveObjects('DeleteTopic').contains(name) ? ' DELETE TOPIC' : ''};\n"
194 | }
195 | }
196 | }
197 |
198 | script.removeAll([null])
199 |
200 | // put the drop statements in reverse order or original order
201 | return noReverseDrops ? script : script.reverse()
202 | }
203 |
204 | }
205 |
--------------------------------------------------------------------------------
/src/test/groovy/LoadConfigTest.groovy:
--------------------------------------------------------------------------------
1 | import groovy.util.logging.Slf4j
2 | import org.gradle.testkit.runner.GradleRunner
3 | import spock.lang.Shared
4 | import spock.lang.Specification
5 | import spock.lang.Title
6 |
7 | @Slf4j
8 | @Title("Check basic configuration")
9 | class LoadConfigTest extends Specification {
10 |
11 | @Shared
12 | File projectDir, buildDir, pipelineDir, pipelineScript, resourcesDir, buildFile, settingsFile, artifact, absoluteDir, absoluteFile, relativeFile, processed, unixScript, windowsScript
13 |
14 | @Shared
15 | def result, taskList
16 |
17 | @Shared
18 | String taskName, projectName = 'load-config'
19 |
20 | def setupSpec() {
21 |
22 | projectDir = new File("${System.getProperty("projectDir")}/$projectName")
23 | buildDir = new File(projectDir, 'build')
24 | pipelineDir = new File(buildDir, 'pipeline')
25 | pipelineScript = new File(pipelineDir, 'ksql-script.ksql')
26 | artifact = new File(buildDir, 'distributions/build-test-pipeline.zip')
27 | taskList = ['clean', 'assemble', 'check', 'pipelineScript', 'pipelineZip', 'build']
28 | absoluteDir = new File(System.getProperty("projectDir"))
29 | absoluteFile = new File(absoluteDir, 'streams.config')
30 | relativeFile = new File(projectDir, 'streams.config')
31 | processed = new File(buildDir, 'resources/main/streams.properties')
32 | unixScript = new File(buildDir, 'scripts/load-config')
33 | windowsScript = new File(buildDir, 'scripts/load-config.bat')
34 |
35 | resourcesDir = new File('src/test/resources')
36 |
37 | new AntBuilder().copy(todir: projectDir) {
38 | fileset(dir: resourcesDir)
39 | }
40 |
41 | settingsFile = new File(projectDir, 'settings.gradle').write("""rootProject.name = '$projectName'""")
42 |
43 | buildFile = new File(projectDir, 'build.gradle').write("""
44 | plugins {
45 | id 'com.redpillanalytics.gradle-confluent'
46 | id 'maven-publish'
47 | id 'application'
48 | }
49 |
50 | archivesBaseName = 'test'
51 | group = 'com.redpillanalytics'
52 | version = '1.0.0'
53 |
54 | mainClassName = "streams.TestClass"
55 | confluent.enableStreams = true
56 | """)
57 | }
58 |
59 | def setup() {
60 |
61 | relativeFile.delete()
62 | absoluteFile.delete()
63 | processed.delete()
64 | }
65 |
66 | // helper method
67 | def executeSingleTask(String taskName, List args, Boolean logOutput = true) {
68 |
69 | args.add(0, taskName)
70 |
71 | log.warn "runner arguments: ${args.toString()}"
72 |
73 | // execute the Gradle test build
74 | result = GradleRunner.create()
75 | .withProjectDir(projectDir)
76 | .withArguments(args)
77 | .withPluginClasspath()
78 | .build()
79 |
80 | // log the results
81 | if (logOutput) log.warn result.getOutput()
82 |
83 | return result
84 | }
85 |
86 | def "Application Plugin expand works with default file"() {
87 |
88 | given:
89 |
90 | relativeFile.write("""
91 | APPLICATION_ID = 'dev-application'
92 | TOPIC_PREFIX = 'dev-'
93 | """)
94 |
95 | taskName = 'build'
96 | result = executeSingleTask(taskName, ['--rerun-tasks', '-Si'])
97 |
98 | expect:
99 | result.task(":${taskName}").outcome.name() != 'FAILED'
100 | processed.exists()
101 | processed.text.contains('APPLICATION_ID = dev-application')
102 | processed.text.contains('TOPIC_PREFIX = dev-')
103 | }
104 |
105 | def "Application Plugin expand works with relative file"() {
106 |
107 | given:
108 |
109 | relativeFile.write("""
110 | APPLICATION_ID = 'dev-application'
111 | TOPIC_PREFIX = 'dev-'
112 | """)
113 |
114 | taskName = 'build'
115 | result = executeSingleTask(taskName, ['--rerun-tasks', '-Si', "-Pconfluent.configPath=streams.config"])
116 |
117 | expect:
118 | result.task(":${taskName}").outcome.name() != 'FAILED'
119 | processed.exists()
120 | processed.text.contains('APPLICATION_ID = dev-application')
121 | processed.text.contains('TOPIC_PREFIX = dev-')
122 | }
123 |
124 | // using executeTask is not working for this test
125 | // something to do with the "-P" parameter referencing a variable.
126 | def "Application Plugin expand works with absolute file"() {
127 |
128 | given:
129 |
130 | absoluteFile.write("""
131 | APPLICATION_ID = 'dev-application'
132 | TOPIC_PREFIX = 'dev-'
133 | """)
134 |
135 | result = GradleRunner.create()
136 | .withProjectDir(projectDir)
137 | .withArguments('-Si', 'build', "-Pconfluent.configPath=${absoluteFile.canonicalPath}", '--rerun-tasks')
138 | .withPluginClasspath()
139 | .build()
140 |
141 | log.warn result.getOutput()
142 |
143 | expect:
144 | ['SUCCESS', 'UP_TO_DATE', 'SKIPPED'].contains(result.task(":build").outcome.toString())
145 | processed.exists()
146 | processed.text.contains('APPLICATION_ID = dev-application')
147 | processed.text.contains('TOPIC_PREFIX = dev-')
148 | }
149 |
150 | // using executeTask is not working for this test
151 | // something to do with the "-P" parameter referencing a variable.
152 | def "Application Plugin expand works with absolute file and environment"() {
153 |
154 | given:
155 |
156 | absoluteFile.write("""
157 | APPLICATION_ID = 'dev-application'
158 | TOPIC_PREFIX = 'dev-'
159 |
160 | environments {
161 | test {
162 | APPLICATION_ID = 'test-application'
163 | TOPIC_PREFIX = 'test-'
164 | }
165 | }
166 | """)
167 |
168 | result = GradleRunner.create()
169 | .withProjectDir(projectDir)
170 | .withArguments('-Si', 'build', "-Pconfluent.configPath=${absoluteFile.canonicalPath}", "-Pconfluent.configEnv=test", '--rerun-tasks')
171 | .withPluginClasspath()
172 | .build()
173 |
174 | log.warn result.getOutput()
175 |
176 | expect:
177 | ['SUCCESS', 'UP_TO_DATE', 'SKIPPED'].contains(result.task(":build").outcome.toString())
178 | processed.exists()
179 | processed.text.contains('APPLICATION_ID = test-application')
180 | processed.text.contains('TOPIC_PREFIX = test-')
181 | }
182 |
183 | // using executeTask is not working for this test
184 | // something to do with the "-P" parameter referencing a variable.
185 | def "Application Plugin expand works with absolute file and bogus environment"() {
186 |
187 | given:
188 |
189 | absoluteFile.write("""
190 | APPLICATION_ID = 'dev-application'
191 | TOPIC_PREFIX = 'dev-'
192 |
193 | environments {
194 | test {
195 | APPLICATION_ID = 'test-application'
196 | TOPIC_PREFIX = 'test-'
197 | }
198 | }
199 | """)
200 |
201 | result = GradleRunner.create()
202 | .withProjectDir(projectDir)
203 | .withArguments('-Si', 'build', "-Pconfluent.configPath=${absoluteFile.canonicalPath}", "-Pconfluent.configEnv=nothing", '--rerun-tasks')
204 | .withPluginClasspath()
205 | .build()
206 |
207 | log.warn result.getOutput()
208 |
209 | expect:
210 | ['SUCCESS', 'UP_TO_DATE', 'SKIPPED'].contains(result.task(":build").outcome.toString())
211 | processed.exists()
212 | processed.text.contains('APPLICATION_ID = dev-application')
213 | processed.text.contains('TOPIC_PREFIX = dev-')
214 | }
215 |
216 | def "Application Plugin applicationDefaultJvmArgs are replaced"() {
217 |
218 | given:
219 |
220 | relativeFile.write("""
221 | APPLICATION_ID = 'dev-application'
222 | TOPIC_PREFIX = 'dev-'
223 | applicationDefaultJvmArgs = '-Djava.io.tmpdir=/tmp'
224 | """)
225 |
226 | taskName = 'build'
227 | result = executeSingleTask(taskName, ['--rerun-tasks', '-Si'])
228 |
229 | expect:
230 | result.task(":${taskName}").outcome.name() != 'FAILED'
231 | unixScript.exists()
232 | unixScript.text.contains('''DEFAULT_JVM_OPTS="-Djava.io.tmpdir=/tmp"''')
233 | windowsScript.exists()
234 | windowsScript.text.contains('''set DEFAULT_JVM_OPTS="-Djava.io.tmpdir=/tmp"''')
235 | }
236 | }
237 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | #
4 | # Copyright © 2015-2021 the original authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | #
21 | # Gradle start up script for POSIX generated by Gradle.
22 | #
23 | # Important for running:
24 | #
25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
26 | # noncompliant, but you have some other compliant shell such as ksh or
27 | # bash, then to run this script, type that shell name before the whole
28 | # command line, like:
29 | #
30 | # ksh Gradle
31 | #
32 | # Busybox and similar reduced shells will NOT work, because this script
33 | # requires all of these POSIX shell features:
34 | # * functions;
35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»;
37 | # * compound commands having a testable exit status, especially «case»;
38 | # * various built-in commands including «command», «set», and «ulimit».
39 | #
40 | # Important for patching:
41 | #
42 | # (2) This script targets any POSIX shell, so it avoids extensions provided
43 | # by Bash, Ksh, etc; in particular arrays are avoided.
44 | #
45 | # The "traditional" practice of packing multiple parameters into a
46 | # space-separated string is a well documented source of bugs and security
47 | # problems, so this is (mostly) avoided, by progressively accumulating
48 | # options in "$@", and eventually passing that to Java.
49 | #
50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
52 | # see the in-line comments for details.
53 | #
54 | # There are tweaks for specific operating systems such as AIX, CygWin,
55 | # Darwin, MinGW, and NonStop.
56 | #
57 | # (3) This script is generated from the Groovy template
58 | # https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
59 | # within the Gradle project.
60 | #
61 | # You can find Gradle at https://github.com/gradle/gradle/.
62 | #
63 | ##############################################################################
64 |
65 | # Attempt to set APP_HOME
66 |
67 | # Resolve links: $0 may be a link
68 | app_path=$0
69 |
70 | # Need this for daisy-chained symlinks.
71 | while
72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
73 | [ -h "$app_path" ]
74 | do
75 | ls=$( ls -ld "$app_path" )
76 | link=${ls#*' -> '}
77 | case $link in #(
78 | /*) app_path=$link ;; #(
79 | *) app_path=$APP_HOME$link ;;
80 | esac
81 | done
82 |
83 | APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
84 |
85 | APP_NAME="Gradle"
86 | APP_BASE_NAME=${0##*/}
87 |
88 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
89 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
90 |
91 | # Use the maximum available, or set MAX_FD != -1 to use that value.
92 | MAX_FD=maximum
93 |
94 | warn () {
95 | echo "$*"
96 | } >&2
97 |
98 | die () {
99 | echo
100 | echo "$*"
101 | echo
102 | exit 1
103 | } >&2
104 |
105 | # OS specific support (must be 'true' or 'false').
106 | cygwin=false
107 | msys=false
108 | darwin=false
109 | nonstop=false
110 | case "$( uname )" in #(
111 | CYGWIN* ) cygwin=true ;; #(
112 | Darwin* ) darwin=true ;; #(
113 | MSYS* | MINGW* ) msys=true ;; #(
114 | NONSTOP* ) nonstop=true ;;
115 | esac
116 |
117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
118 |
119 |
120 | # Determine the Java command to use to start the JVM.
121 | if [ -n "$JAVA_HOME" ] ; then
122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
123 | # IBM's JDK on AIX uses strange locations for the executables
124 | JAVACMD=$JAVA_HOME/jre/sh/java
125 | else
126 | JAVACMD=$JAVA_HOME/bin/java
127 | fi
128 | if [ ! -x "$JAVACMD" ] ; then
129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
130 |
131 | Please set the JAVA_HOME variable in your environment to match the
132 | location of your Java installation."
133 | fi
134 | else
135 | JAVACMD=java
136 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
137 |
138 | Please set the JAVA_HOME variable in your environment to match the
139 | location of your Java installation."
140 | fi
141 |
142 | # Increase the maximum file descriptors if we can.
143 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
144 | case $MAX_FD in #(
145 | max*)
146 | MAX_FD=$( ulimit -H -n ) ||
147 | warn "Could not query maximum file descriptor limit"
148 | esac
149 | case $MAX_FD in #(
150 | '' | soft) :;; #(
151 | *)
152 | ulimit -n "$MAX_FD" ||
153 | warn "Could not set maximum file descriptor limit to $MAX_FD"
154 | esac
155 | fi
156 |
157 | # Collect all arguments for the java command, stacking in reverse order:
158 | # * args from the command line
159 | # * the main class name
160 | # * -classpath
161 | # * -D...appname settings
162 | # * --module-path (only if needed)
163 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
164 |
165 | # For Cygwin or MSYS, switch paths to Windows format before running java
166 | if "$cygwin" || "$msys" ; then
167 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
168 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
169 |
170 | JAVACMD=$( cygpath --unix "$JAVACMD" )
171 |
172 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
173 | for arg do
174 | if
175 | case $arg in #(
176 | -*) false ;; # don't mess with options #(
177 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
178 | [ -e "$t" ] ;; #(
179 | *) false ;;
180 | esac
181 | then
182 | arg=$( cygpath --path --ignore --mixed "$arg" )
183 | fi
184 | # Roll the args list around exactly as many times as the number of
185 | # args, so each arg winds up back in the position where it started, but
186 | # possibly modified.
187 | #
188 | # NB: a `for` loop captures its iteration list before it begins, so
189 | # changing the positional parameters here affects neither the number of
190 | # iterations, nor the values presented in `arg`.
191 | shift # remove old arg
192 | set -- "$@" "$arg" # push replacement arg
193 | done
194 | fi
195 |
196 | # Collect all arguments for the java command;
197 | # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
198 | # shell script including quotes and variable substitutions, so put them in
199 | # double quotes to make sure that they get re-expanded; and
200 | # * put everything else in single quotes, so that it's not re-expanded.
201 |
202 | set -- \
203 | "-Dorg.gradle.appname=$APP_BASE_NAME" \
204 | -classpath "$CLASSPATH" \
205 | org.gradle.wrapper.GradleWrapperMain \
206 | "$@"
207 |
208 | # Stop when "xargs" is not available.
209 | if ! command -v xargs >/dev/null 2>&1
210 | then
211 | die "xargs is not available"
212 | fi
213 |
214 | # Use "xargs" to parse quoted args.
215 | #
216 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed.
217 | #
218 | # In Bash we could simply go:
219 | #
220 | # readarray ARGS < <( xargs -n1 <<<"$var" ) &&
221 | # set -- "${ARGS[@]}" "$@"
222 | #
223 | # but POSIX shell has neither arrays nor command substitution, so instead we
224 | # post-process each arg (as a line of input to sed) to backslash-escape any
225 | # character that might be a shell metacharacter, then use eval to reverse
226 | # that process (while maintaining the separation between arguments), and wrap
227 | # the whole thing up as a single "set" statement.
228 | #
229 | # This will of course break if any of these variables contains a newline or
230 | # an unmatched quote.
231 | #
232 |
233 | eval "set -- $(
234 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
235 | xargs -n1 |
236 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
237 | tr '\n' ' '
238 | )" '"$@"'
239 |
240 | exec "$JAVACMD" "$@"
241 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/gradle/ConfluentPlugin.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics.gradle
2 |
3 | import com.redpillanalytics.common.GradleUtils
4 | import com.redpillanalytics.gradle.containers.TaskGroupContainer
5 | import com.redpillanalytics.gradle.tasks.ListTopicsTask
6 | import com.redpillanalytics.gradle.tasks.PipelineExecuteTask
7 | import com.redpillanalytics.gradle.tasks.PipelineScriptTask
8 | import com.redpillanalytics.gradle.tasks.LoadConfigTask
9 | import groovy.util.logging.Slf4j
10 | import org.gradle.api.Plugin
11 | import org.gradle.api.Project
12 | import org.gradle.api.publish.maven.MavenPublication
13 | import org.gradle.api.tasks.Copy
14 | import org.gradle.api.tasks.Sync
15 | import org.gradle.api.tasks.bundling.Zip
16 |
17 | @Slf4j
18 | class ConfluentPlugin implements Plugin<Project> {
19 |
20 | /**
21 | * Apply the Gradle Plugin.
22 | */
23 | void apply(Project project) {
24 |
25 | // apply Gradle built-in plugins
26 | project.apply plugin: 'base'
27 | project.apply plugin: 'com.redpillanalytics.gradle-properties'
28 |
29 | // apply the Gradle plugin extension and the context container
30 | applyExtension(project)
31 |
32 | project.afterEvaluate {
33 |
34 | // Go look for any -P properties that have "confluent." in them
35 | // If so... update the extension value
36 | project.pluginProps.setParameters(project, 'confluent')
37 |
38 | // add task to show configurations
39 | project.task('showConfigurations') {
40 |
41 | group "help"
42 |
43 | doLast {
44 | project.configurations.each { config ->
45 | log.info config.toString()
46 | }
47 | }
48 | }
49 |
50 | // get the taskGroup
51 | String taskGroup = project.extensions.confluent.taskGroup
52 |
53 | // get the location of the KSQL source files
54 | File pipelineDir = project.file(project.extensions.confluent.getPipelinePath())
55 | log.debug "pipelineDir: ${pipelineDir.getCanonicalPath()}"
56 |
57 | File pipelineBuildDir = project.file("${project.buildDir}/${project.extensions.confluent.pipelineBuildName}")
58 | log.debug "pipelineBuildDir: ${pipelineBuildDir.canonicalPath}"
59 |
60 | File pipelineDeployDir = project.file("${project.buildDir}/${project.extensions.confluent.pipelineDeployName}")
61 | log.debug "pipelineDeployDir: ${pipelineDeployDir.canonicalPath}"
62 |
63 | File functionDeployDir = project.file("${project.buildDir}/${project.extensions.confluent.functionDeployName}")
64 |          log.debug "functionDeployDir: ${functionDeployDir.canonicalPath}"
65 |
66 | String pipelinePattern = project.extensions.confluent.pipelinePattern
67 | log.debug "pipelinePattern: ${pipelinePattern}"
68 |
69 | String functionPattern = project.extensions.confluent.functionPattern
70 | //log.debug "functionPattern: ${functionPattern}"
71 |
72 | String configPath = project.extensions.confluent.configPath
73 | //log.debug "configPath: ${configPath}"
74 |
75 | String configEnv = project.extensions.confluent.configEnv
76 | //log.debug "configEnv: ${configEnv}"
77 |
78 | Boolean enablePipelines = project.extensions.confluent.enablePipelines
79 | //log.debug "enablePipelines: ${enablePipelines}"
80 |
81 | Boolean enableFunctions = project.extensions.confluent.enableFunctions
82 | //log.debug "enableFunctions: ${enableFunctions}"
83 |
84 | Boolean enableStreams = project.extensions.confluent.enableStreams
85 | //log.debug "enableStreams: ${enableStreams}"
86 |
87 | // show all topics
88 | project.task('listTopics', type: ListTopicsTask) {
89 | pipelinePath pipelineDir.canonicalPath
90 | }
91 |
92 | // create deploy task
93 | project.task('deploy') {
94 | group taskGroup
95 | description "Execute any configured deployment tasks."
96 | }
97 |
98 | // configure build groups
99 | project.confluent.taskGroups.all { tg ->
100 |
101 | if (tg.isBuildEnv && enablePipelines) {
102 |
103 | project.task(tg.getTaskName('pipelineSync'), type: Sync) {
104 | group taskGroup
105 | description = "Synchronize the pipeline build directory from the pipeline source directory."
106 | from pipelineDir
107 | into pipelineBuildDir
108 | doFirst {
109 | log.info "Synchronizing '$pipelineBuildDir' from '$pipelineDir'."
110 | }
111 | }
112 |
113 | project.build.dependsOn tg.getTaskName('pipelineSync')
114 |
115 | project.task(tg.getTaskName('pipelineScript'), type: PipelineScriptTask) {
116 | pipelinePath pipelineBuildDir.canonicalPath
117 | onlyIf { dir.exists() }
118 | dependsOn tg.getTaskName('pipelineSync')
119 | }
120 |
121 | project.build.dependsOn tg.getTaskName('pipelineScript')
122 |
123 | project.task(tg.getTaskName('pipelineZip'), type: Zip) {
124 | group taskGroup
125 | description "Build a distribution ZIP file with original pipeline source files plus the KSQL queries file generated by '${tg.getTaskName('pipelineScript')}'."
126 | appendix = project.extensions.confluent.pipelinePattern
127 | includeEmptyDirs false
128 | from pipelineBuildDir
129 | dependsOn tg.getTaskName('pipelineScript')
130 | onlyIf { pipelineBuildDir.exists() }
131 | }
132 |
133 | project.build.dependsOn tg.getTaskName('pipelineZip')
134 |
135 | project.task(tg.getTaskName('pipelineExecute'), type: PipelineExecuteTask) {
136 | pipelinePath pipelineBuildDir.canonicalPath
137 | onlyIf { pipelineBuildDir.exists() }
138 | dependsOn tg.getTaskName('pipelineSync')
139 | }
140 | }
141 |
142 | if (enablePipelines && tg.isDeployEnv) {
143 | if (GradleUtils.isUsableConfiguration(project, 'archives', pipelinePattern)) {
144 |
145 | project.task(tg.getTaskName('pipelineExtract'), type: Copy) {
146 | group taskGroup
147 | description = "Extract the KSQL pipeline deployment dependency (or zip file) into the deployment directory."
148 | from project.zipTree(GradleUtils.getDependency(project, 'archives', pipelinePattern))
149 | into { pipelineDeployDir }
150 |
151 | }
152 |
153 | project.task(tg.getTaskName('pipelineDeploy'), type: PipelineExecuteTask) {
154 | group taskGroup
155 | description = "Execute all KSQL pipelines extracted from an artifact dependency, in hierarchical order, with options for auto-generating and executing DROP and TERMINATE commands."
156 | pipelinePath pipelineDeployDir.canonicalPath
157 | onlyIf { pipelineDeployDir.exists() }
158 | dependsOn tg.getTaskName('pipelineExtract')
159 | }
160 |
161 | project.deploy.dependsOn tg.getTaskName('pipelineDeploy')
162 | }
163 | }
164 |
165 | if (GradleUtils.isUsableConfiguration(project, 'archives', functionPattern) && enableFunctions && tg.isDeployEnv) {
166 |
167 | project.task(tg.getTaskName('functionCopy'), type: Copy) {
168 | group taskGroup
169 | description = "Copy the KSQL custom function deployment dependency (or JAR file) into the deployment directory."
170 | from GradleUtils.getDependency(project, 'archives', functionPattern)
171 | into { functionDeployDir }
172 | if (project.extensions.confluent.functionArtifactName) rename {
173 | project.extensions.confluent.functionArtifactName
174 | }
175 | }
176 |
177 | project.deploy.dependsOn tg.getTaskName('functionCopy')
178 | }
179 |
180 | if (tg.isBuildEnv && enableStreams && project.rootProject.plugins.findPlugin('application')) {
181 | project.task(tg.getTaskName('loadConfig'), type: LoadConfigTask) {
182 | filePath configPath
183 | environment configEnv
184 | onlyIf { configFile.exists() }
185 | }
186 | project.build.dependsOn tg.getTaskName('loadConfig')
187 | }
188 |
189 | }
190 |
191 | // a bit of a hack at the moment
192 |
193 | if (project.tasks.findByName('loadConfig')) {
194 |
195 | project.tasks.each {
196 | task ->
197 | if ((task.group == 'confluent' || task.group == 'build') && task.name != 'loadConfig') {
198 | task.mustRunAfter project.loadConfig
199 | }
200 | }
201 | }
202 |
203 | if (enablePipelines && project.tasks.findByName('pipelineZip') && project.plugins.findPlugin('maven-publish')) {
204 |
205 | project.publishing.publications {
206 |
207 | pipeline(MavenPublication) {
208 | artifact project.pipelineZip {
209 | artifactId project.archivesBaseName + '-' + pipelinePattern
210 | }
211 | }
212 | }
213 | }
214 | }
215 |
216 | // end of afterEvaluate
217 | }
218 | /**
219 | * Apply the Gradle Plugin extension.
220 | */
221 | void applyExtension(Project project) {
222 |
223 | project.configure(project) {
224 | extensions.create('confluent', ConfluentExtension)
225 | }
226 |
227 | project.confluent.extensions.taskGroups = project.container(TaskGroupContainer)
228 |
229 | project.extensions.confluent.taskGroups.add(new TaskGroupContainer('default'))
230 |
231 | }
232 | }
233 |
234 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/gradle/tasks/PipelineExecuteTask.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics.gradle.tasks
2 |
3 | import groovy.util.logging.Slf4j
4 | import org.gradle.api.tasks.Input
5 | import org.gradle.api.tasks.Optional
6 | import org.gradle.api.tasks.TaskAction
7 | import org.gradle.api.tasks.options.Option
8 |
9 | /**
10 | * Use the KSQL RESTful API to execute all pipelines in a particular directory.
11 | */
12 | @Slf4j
13 | class PipelineExecuteTask extends PipelineEndpointTask {
14 |
15 | static final String ANALYTICS_NAME = 'ksqlstatements.json'
16 |
17 | PipelineExecuteTask() {
18 | group = project.extensions.confluent.taskGroup
19 | description = "Execute all KSQL pipelines from the provided source directory, in hierarchical order, with options for auto-generating and executing DROP and TERMINATE commands."
20 |
21 | outputs.upToDateWhen { false }
22 | }
23 |
24 | /**
25 |     * When defined, set "ksql.streams.auto.offset.reset" to "earliest".
26 | */
27 | @Input
28 | @Option(option = "from-beginning",
29 | description = "When defined, set 'ksql.streams.auto.offset.reset' to 'earliest'."
30 | )
31 | boolean fromBeginning = false
32 |
33 | /**
34 | * When defined, applicable TERMINATE statements are not auto-generated and executed.
35 | */
36 | @Input
37 | @Option(option = 'no-terminate',
38 | description = 'When defined, applicable TERMINATE statements are not auto-generated and executed.'
39 | )
40 | boolean noTerminate
41 |
42 | /**
43 | * When defined, applicable DROP statements are not auto-generated and executed.
44 | */
45 | @Input
46 | @Option(option = 'no-drop',
47 | description = 'When defined, applicable DROP statements are not auto-generated and executed.'
48 | )
49 | boolean noDrop
50 |
51 | /**
52 | * When defined, CREATE statements found in KSQL scripts are not executed. Used primarily for auto-generating and executing applicable DROP and/or TERMINATE statements.
53 | */
54 | @Input
55 | @Option(option = 'drop-only',
56 | description = 'When defined, only DROP and TERMINATE statements in KSQL scripts are executed. Used primarily for cleaning existing TABLES/STREAMS and terminating queries.'
57 | )
58 | boolean dropOnly
59 |
60 | /**
61 | * The number of seconds to pause execution after a create statement. Default: the extension property {@link com.redpillanalytics.gradle.ConfluentExtension#statementPause}.
62 | */
63 | @Input
64 | @Optional
65 | @Option(option = "statement-pause",
66 | description = "The number of seconds to pause execution after a create statement. Default: value of 'confluent.statementPause'."
67 | )
68 | String statementPause = project.extensions.confluent.statementPause.toString()
69 |
70 | /**
71 | * The number of seconds to pause execution before retrying a drop statement. Default: value of 'confluent.dropRetryPause'.
72 | */
73 | @Input
74 | @Optional
75 | @Option(option = "drop-retry-pause",
76 | description = "The number of seconds to pause execution before retrying a drop statement. Default: value of 'confluent.dropRetryPause'."
77 | )
78 | String dropRetryPause = project.extensions.confluent.dropRetryPause.toString()
79 |
80 | /**
81 | * The maximum number of times drop statements are to be retried. Default: value of 'confluent.dropMaxRetries'.
82 | */
83 | @Input
84 | @Optional
85 | @Option(option = "drop-max-retries",
86 | description = "The maximum number of times drop statements are to be retried. Default: value of 'confluent.dropMaxRetries'."
87 | )
88 | String dropMaxRetries = project.extensions.confluent.dropMaxRetries.toString()
89 |
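   /**
    * Returns true for statements that should be skipped rather than sent to the KSQL server: SET and
    * UNSET commands are handled locally. For example (illustrative), a script line such as
    * "SET 'auto.offset.reset'='earliest';" simply toggles {@fromBeginning} on, and a matching UNSET
    * toggles it back off.
    */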
90 | def doSkip(it) {
91 | boolean setCmd = it.toString().toLowerCase().startsWith("set ")
92 | boolean unsetCmd = it.toString().toLowerCase().startsWith("unset ")
93 | boolean offsetReset = it.toString().toLowerCase().contains("auto.offset.reset")
94 | if(setCmd && offsetReset) {
95 | boolean earliest = it.toString().toLowerCase().contains("earliest")
96 | setFromBeginning(earliest)
97 | }
98 | if(unsetCmd && offsetReset) {
99 | setFromBeginning(false)
100 | }
101 | return setCmd || unsetCmd
102 | }
103 |
104 | @TaskAction
105 | def executePipelines() {
106 |
107 | Integer numTerminated = 0
108 | Integer numCreated = 0
109 | Integer numDropped = 0
110 |
111 |       Integer retryPause = dropRetryPause.toInteger()
112 |       Integer maxRetries = dropMaxRetries.toInteger()
113 |
114 | // first execute the DROP KSQL statements
115 | // this also catches running statements and terminates them
116 | if (!noDrop) {
117 |
118 | // drop KSQL objects
119 | dropSql.each { sql ->
120 |
121 | if(doSkip(sql))
122 | return
123 |
124 | // extract the object name from the query
125 | String object = ksqlRest.getObjectName(sql)
126 |
127 | // extract the object type from the query
128 | String objectType = ksqlRest.getObjectType(sql)
129 |
130 | // don't bother unless it actually exists
131 | if (ksqlRest.getSourceDescription(object, objectType)) {
132 |
133 | // queries won't exist for connector objects
134 | if (objectType != 'connector') {
135 |
136 | // get any persistent queries reading or writing to this table/stream
137 | List queryIds = ksqlRest.getQueryIds(object)
138 |
139 | if (!queryIds.isEmpty()) {
140 |
141 | if (!noTerminate) {
142 | queryIds.each { query ->
143 | logger.info "Terminating query $query..."
144 | def result = ksqlRest.execKsql("TERMINATE ${query}")
145 | // write the analytics record if the analytics plugin is there
146 | if (project.rootProject.plugins.findPlugin('com.redpillanalytics.gradle-analytics')) {
147 | project.rootProject.extensions.analytics.writeAnalytics(
148 | ANALYTICS_NAME,
149 | project.rootProject.buildDir,
150 | project.rootProject.extensions.analytics.getBuildHeader() <<
151 | [
152 | type : 'terminate',
153 | object : object,
154 | statement : sql,
155 | status : result.status,
156 | statustext: result.statusText
157 | ]
158 | )
159 | }
160 | numTerminated++
161 | }
162 |
163 | } else log.info "Persistent queries exist, but '--no-terminate' option provided."
164 | }
165 | }
166 |
167 | // execute the statement
168 | def result = ksqlRest.dropKsql(sql, [:], dropRetryPause, dropMaxRetries)
169 |
170 | // write the analytics record if the analytics plugin is there
171 | if (project.rootProject.plugins.findPlugin('com.redpillanalytics.gradle-analytics')) {
172 |
173 | project.rootProject.extensions.analytics.writeAnalytics(
174 | ANALYTICS_NAME,
175 | project.rootProject.buildDir,
176 | project.rootProject.extensions.analytics.getBuildHeader() <<
177 | [
178 | type : 'drop',
179 | object : object,
180 | statement : sql,
181 | status : result.status,
182 | statustext: result.statusText
183 | ]
184 | )
185 | }
186 | numDropped++
187 | }
188 | }
189 | }
190 |
191 | // create KSQL objects
192 | if (!dropOnly) {
193 | pipelineSql.each {
194 | if(doSkip(it))
195 | return
196 |
197 | // extract the object name from the query
198 | String object = ksqlRest.getObjectName(it)
199 |
200 | def result = ksqlRest.createKsql(it, fromBeginning)
201 | // write the analytics record if the analytics plugin is there
202 | if (project.rootProject.plugins.findPlugin('com.redpillanalytics.gradle-analytics')) {
203 | project.rootProject.extensions.analytics.writeAnalytics(
204 | ANALYTICS_NAME,
205 | project.rootProject.buildDir,
206 | project.rootProject.extensions.analytics.getBuildHeader() <<
207 | [
208 | type : 'create',
209 | object : object,
210 | statement : it,
211 | status : result.status,
212 | statustext: result.statusText
213 | ]
214 | )
215 | }
216 | numCreated++
217 |
218 | Integer pause = statementPause.toInteger()
219 |
220 | if (pause != 0) {
221 | // pause for the configured number of seconds after executing a create statement
222 | log.info "Pausing for $pause second" + (pause == 1 ? '' : 's') + '...'
223 | sleep(pause * 1000)
224 | }
225 | }
226 | }
227 | log.warn "${numTerminated} queries terminated."
228 | log.warn "${numDropped} objects dropped."
229 | log.warn "${numCreated} objects created."
230 | }
231 | }
232 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/src/main/groovy/com/redpillanalytics/KsqlRest.groovy:
--------------------------------------------------------------------------------
1 | package com.redpillanalytics
2 |
3 | import com.google.gson.Gson
4 | import kong.unirest.HttpResponse
5 | import kong.unirest.Unirest
6 | import groovy.json.JsonOutput
7 | import groovy.json.JsonSlurper
8 | import groovy.util.logging.Slf4j
9 | import org.gradle.api.GradleException
10 |
11 | @Slf4j
12 | /**
13 |  * Class for interacting with the Confluent KSQL RESTful API and normalizing its behavior.
14 | */
15 | class KsqlRest {
16 | /**
17 | * Regular expression for parsing KSQL statements into underlying chunks.
18 | */
19 | static final String KSQLREGEX = /(?i)(?:.*)(create|drop|insert)(?:\s+)(table|stream|into|source connector|sink connector|connector)(?:\s+)(?:IF EXISTS\s+)?(\w+|"\w+")/
20 |
21 | /**
22 | * The base REST endpoint for the KSQL server. Defaults to 'http://localhost:8088', which is handy when developing against Confluent CLI.
23 | */
24 | String restUrl = 'http://localhost:8088'
25 |
26 | /**
27 | * The username for basic authentication for the KSQL server. If unspecified, Basic Authentication credentials are not provided.
28 | */
29 | String username
30 |
31 | /**
32 | * The password for basic authentication for the KSQL server. If unspecified, Basic Authentication credentials are not provided.
33 | */
34 | String password
35 |
36 | /**
37 | * GSON serialization object.
38 | */
39 | Gson gson = new Gson()
40 |
41 | /**
42 | * Prepare a KSQL statement.
43 | *
44 | * @param ksql The KSQL statement to prepare.
45 | *
46 | * @return String The prepared KSQL statement.
47 | */
48 | String prepareSql(String ksql) {
49 | return (ksql + ';').replace('\n', '').replace(';;', ';')
50 | }
51 |
52 | /**
53 | * Executes a KSQL statement using the KSQL RESTful API.
54 | *
55 | * @param ksql the KSQL statement to execute.
56 | *
57 | * @param properties Any KSQL parameters to include with the KSQL execution.
58 | *
59 | * @return Map with meaningful elements returned in the REST call, plus a 'body' key with the full JSON payload.
60 | */
61 | def execKsql(String ksql, Map properties) {
62 | String prepared = prepareSql(ksql)
63 |
64 | if (['create', 'drop'].contains(getStatementType(ksql))) log.info prepared
65 |
66 | if (username && password) {
67 | Unirest.config().setDefaultBasicAuth(username, password)
68 | }
69 |
70 | HttpResponse response = Unirest.post("${restUrl}/ksql")
71 | .header("Content-Type", "application/vnd.ksql.v1+json")
72 | .header("Cache-Control", "no-cache")
73 | .body(JsonOutput.toJson([ksql: prepared, streamsProperties: properties]))
74 | .asString()
75 |
76 | log.debug "unirest response: ${response.dump()}"
77 | def body = new JsonSlurper().parseText(response.body)
78 |
79 | def result = [
80 | status : response.status,
81 | statusText: response.statusText,
82 | body : body
83 | ]
84 | log.debug "status: ${result.status}, statusText: ${result.statusText}"
85 | log.debug "body: $result.body"
86 | return result
87 | }
88 |
89 | /**
90 | * Executes a List of KSQL statements using the KSQL RESTful API.
91 | *
92 | * @param ksql the List of KSQL statements to execute.
93 | *
94 | * @param properties Any KSQL parameters to include with the KSQL execution.
95 | *
96 |  * @return Map with meaningful elements from the JSON payload elevated as attributes, plus a 'body' key with the full JSON payload.
97 | */
98 | def execKsql(List ksql, Map properties) {
99 | ksql.each {
100 | execKsql(it, properties)
101 | }
102 | }
103 |
104 | /**
105 | * Executes a KSQL statement using the KSQL RESTful API.
106 | *
107 | * @param ksql The KSQL statement to execute.
108 | *
109 | * @param earliest Boolean dictating that the statement should set 'auto.offset.reset' to 'earliest'.
110 | *
111 |  * @return Map with meaningful elements from the JSON payload elevated as attributes, plus a 'body' key with the full JSON payload.
112 | */
113 | def execKsql(String ksql, Boolean earliest = false) {
114 | def data = execKsql(ksql, (earliest ? ["ksql.streams.auto.offset.reset": "earliest"] : [:]))
115 | return data
116 | }
117 |
118 | /**
119 | * Executes a List of KSQL statements using the KSQL RESTful API.
120 | *
121 | * @param ksql the List of KSQL statements to execute.
122 | *
123 | * @param earliest Boolean dictating that the statement should set 'auto.offset.reset' to 'earliest'.
124 | *
125 |  * @return Map with meaningful elements from the JSON payload elevated as attributes, plus a 'body' key with the full JSON payload.
126 | */
127 | def execKsql(List ksql, Boolean earliest = false) {
128 | ksql.each {
129 | execKsql(it, earliest)
130 | }
131 | }
132 |
133 | /**
134 | * Executes a KSQL statement using the KSQL RESTful API. Optimized for issuing CREATE TABLE/STREAM statements.
135 | *
136 | * @param ksql the KSQL statement to execute.
137 | *
138 | * @param properties Any KSQL parameters to include with the KSQL execution.
139 | *
140 | * @return Map with meaningful elements returned in the REST call, plus a 'body' key with the full JSON payload.
141 | */
142 | def createKsql(String ksql, Map properties) {
143 |
144 | def response = execKsql(ksql, properties)
145 |
146 | def result = [
147 | status : response.status,
148 | statusText : response.statusText,
149 | error_code : response.body.error_code,
150 | message : response.body.message,
151 | statementText : response.body.statementText,
152 | commandId : response.body.commandId,
153 | commandStatus : response.body.commandStatus,
154 | commandMessage: response.body.commandStatus,
155 | body : response.body
156 | ]
157 |
158 | if (result.error_code.findResult { it }) {
159 | throw new GradleException("error_code: ${result.error_code}: ${result.message}")
160 | }
161 |
162 | // No command id is returned for connectors, so result can be returned.
163 | if (ksql.toLowerCase().startsWith("create sink connector ")
164 | || ksql.toLowerCase().startsWith("create source connector ")) {
165 | return result;
166 | }
167 |
168 | // normalize the commandId, stripping any backticks from quoted identifiers
169 | String commandId = result.commandId[0].toString().replace('`','')
170 |
171 | // ensure the statement is complete
172 | while (['QUEUED', 'PARSING', 'EXECUTING'].contains(getCommandStatus(commandId))) {
173 | log.info "Command ${result.body.commandId} still pending..."
174 | }
175 |
176 | log.debug "result: $result"
177 | return result
178 | }
179 |
180 | /**
181 | * Executes a List of KSQL statements using the KSQL RESTful API. Optimized for issuing CREATE TABLE/STREAM statements.
182 | *
183 | * @param ksql the List of KSQL statements to execute.
184 | *
185 | * @param properties Any KSQL parameters to include with the KSQL execution.
186 | *
187 |  * @return Map with meaningful elements from the JSON payload elevated as attributes, plus a 'body' key with the full JSON payload.
188 | */
189 | def createKsql(List ksql, Map properties) {
190 | ksql.each {
191 | createKsql(it, properties)
192 | }
193 | log.warn "${ksql.size()} objects created."
194 | }
195 |
196 | /**
197 | * Executes a KSQL statement using the KSQL RESTful API. Optimized for issuing CREATE TABLE/STREAM statements.
198 | *
199 | * @param ksql The KSQL statement to execute.
200 | *
201 | * @param earliest Boolean dictating that the statement should set 'auto.offset.reset' to 'earliest'.
202 | *
203 |  * @return Map with meaningful elements from the JSON payload elevated as attributes, plus a 'body' key with the full JSON payload.
204 | */
205 | def createKsql(String ksql, Boolean earliest = false, Boolean latest = false) {
206 | def val = [:]
207 | if (earliest || latest) {
208 | val = ["ksql.streams.auto.offset.reset": (earliest ? "earliest" : "latest")]
209 | }
210 | createKsql(ksql, val)
211 | }
212 |
213 | /**
214 | * Executes a List of KSQL statements using the KSQL RESTful API. Optimized for issuing CREATE TABLE/STREAM statements.
215 | *
216 | * @param ksql the List of KSQL statements to execute.
217 | *
218 | * @param earliest Boolean dictating that the statement should set 'auto.offset.reset' to 'earliest'.
219 | *
220 |  * @return Map with meaningful elements from the JSON payload elevated as attributes, plus a 'body' key with the full JSON payload.
221 | */
222 | def createKsql(List ksql, Boolean earliest = false) {
223 | ksql.each {
224 | createKsql(it, earliest)
225 | }
226 | log.warn "${ksql.size()} objects created."
227 | }
228 |
229 | /**
230 | * Executes a KSQL DROP statement using the KSQL RESTful API. Manages issuing TERMINATE statements as part of the DROP, if desired.
231 | *
232 | * @param ksql the KSQL DROP statement to execute.
233 | *
234 | * @param properties Any KSQL parameters to include with the KSQL execution.
235 | *
236 | * @return Map with meaningful elements from the JSON payload elevated as attributes, plus a 'body' key with the full JSON payload.
237 | */
238 | def dropKsql(String ksql, Map properties, Integer dropRetryPause = 10, Integer dropMaxRetries = 10) {
239 | def result
240 | Integer retryCount = dropMaxRetries
241 |
242 | do {
243 | result = execKsql(ksql, properties)
244 | log.debug "result: ${result}"
245 |
246 | // No command id is returned for connectors, so result can be returned.
247 | if (ksql.toLowerCase().startsWith("drop connector ")) {
248 | return result
249 | }
250 |
251 | if (result.status == 400 && result.body.message.contains('Incompatible data source type is STREAM')) {
252 | log.info "Type is now STREAM. Issuing DROP STREAM..."
253 | result = execKsql(ksql.replace('TABLE', 'STREAM'), properties)
254 | }
255 |
256 | if (result.status == 400 && result.body.message.contains('Incompatible data source type is TABLE')) {
257 | log.info "Type is now TABLE. Issuing DROP TABLE..."
258 | result = execKsql(ksql.replace('STREAM', 'TABLE'), properties)
259 | }
260 |
261 | if (result.body.commandId == null) {
262 | if (retryCount <= 0) {
263 | throw new GradleException("Maximum retry attempts made for drop statements. Failed to get the command id.")
264 | }
265 | retryCount--
266 |
267 | log.info "Command id is null. Pausing for $dropRetryPause seconds before retrying."
268 | sleep(dropRetryPause * 1000)
269 | }
270 | } while (result.body.commandId == null)
271 |
272 | while (['QUEUED', 'PARSING', 'EXECUTING'].contains(getCommandStatus(result.body.commandId))) {
273 | log.info "Command ${result.body.commandId} still pending..."
274 | }
275 | log.debug "final result: ${result}"
276 | return result
277 | }
278 |
279 | /**
280 | * Returns the current command status for a 'commandId'.
281 | *
282 | * @return The status of the current command.
283 | */
284 | String getCommandStatus(String commandId) {
285 | if (username && password) {
286 | Unirest.config().setDefaultBasicAuth(username, password)
287 | }
288 |
289 | HttpResponse response = Unirest.get("${restUrl}/status/${commandId}")
290 | .header("Content-Type", "application/vnd.ksql.v1+json")
291 | .header("Cache-Control", "no-cache")
292 | .asString()
293 |
294 | Map body = gson.fromJson(response.body, Map)
296 | log.debug "Response: $response"
297 | return body.status
298 | }
299 |
300 | /**
301 | * Returns KSQL Server 'sourceDescription' object, containing the results of the 'DESCRIBE' command.
302 | *
303 | * @return sourceDescription object, generated by the KSQL 'DESCRIBE' command.
304 | */
305 | def getSourceDescription(String object, String type = '') {
306 | if (type == 'connector' || type == 'source connector' || type == 'sink connector') {
307 | def response = execKsql("DESCRIBE CONNECTOR ${toLowerCaseIfUnquoted(object)}", false)
308 | return response.body[0].status
309 | } else {
310 | def response = execKsql("DESCRIBE ${toLowerCaseIfUnquoted(object)}", false)
311 | return response.body.sourceDescription
312 | }
313 | }
314 |
315 | /**
316 | * Returns KSQL Server 'readQueries' object, detailing all the queries currently reading a particular table or stream.
317 | *
318 | * @return readQueries object, generated by the KSQL 'DESCRIBE' command.
319 | */
320 | def getReadQueries(String object) {
321 | getSourceDescription(object)?.readQueries?.get(0)
322 | }
323 |
324 | /**
325 | * Returns KSQL Server 'writeQueries' object, detailing all the queries currently writing to a particular table or stream.
326 | *
327 | * @return writeQueries object, generated by the KSQL 'DESCRIBE' command.
328 | */
329 | def getWriteQueries(String object) {
330 | getSourceDescription(object)?.writeQueries?.get(0)
331 | }
332 |
333 | /**
334 | * Returns KSQL Server query IDs for all 'writeQueries' and 'readQueries' associated with a particular object.
335 | *
336 | * @return List of query IDs associated with a particular object.
337 | */
338 | def getQueryIds(String object) {
339 | // null safe all the way
340 | // Return any query IDs from either the write or read queries
341 | return ([] + getReadQueries(object) + getWriteQueries(object)).findResults { query -> query?.id }
342 | }
343 |
344 | /**
345 | * Returns KSQL Server properties from the KSQL RESTful API using the 'LIST PROPERTIES' sql statement.
346 | *
347 | * @return All the KSQL properties. This is a helper method, used to return individual properties in other methods such as {@link #getExtensionPath} and {@link #getSchemaRegistry}.
348 | */
349 | def getProperties() {
350 | def response = execKsql('LIST PROPERTIES', false)
351 | log.debug "response: ${response.toString()}"
352 | def properties = response.body[0].properties
353 | log.debug "properties: ${properties.toString()}"
354 | return properties
355 | }
356 |
357 | /**
358 | * Returns an individual KSQL server property using {@link #getProperties}. This is a helper method, used to return individual properties in other methods such as {@link #getExtensionPath} and {@link #getSchemaRegistry}.
359 | *
360 | * @param property The individual property to return a value for.
361 | *
362 | * @return The value of the property specified in the 'property' parameter.
363 | */
364 | String getProperty(String property) {
365 | def prop = getProperties()."$property"
366 | return prop
367 | }
368 |
369 | /**
370 | * Returns KSQL Server property value for 'ksql.extension.dir'.
371 | *
372 | * @return KSQL Server property value for 'ksql.extension.dir'.
373 | */
374 | String getExtensionPath() {
375 | return getProperty('ksql.extension.dir')
376 | }
377 |
378 | /**
379 | * Returns File object for the KSQL Server property value for 'ksql.extension.dir'.
380 | *
381 | * @return File object for the KSQL Server property value for 'ksql.extension.dir'.
382 | */
383 | File getExtensionDir() {
384 | return new File(getExtensionPath())
385 | }
386 |
387 | /**
388 | * Returns the KSQL Server property value for 'ksql.schema.registry.url'.
389 | *
390 | * @return The KSQL Server property value for 'ksql.schema.registry.url'.
391 | */
392 | String getSchemaRegistry() {
393 | return getProperty('ksql.schema.registry.url')
394 | }
395 |
396 | /**
397 |  * Returns the object name from a KSQL CREATE or DROP statement.
398 |  *
399 |  * @return The name of the table, stream, or connector referenced in the statement.
400 | */
401 | String getObjectName(String sql) {
402 | sql.find(KSQLREGEX) { String all, String statement, String type, String name -> toLowerCaseIfUnquoted(name) }
403 | }
404 |
405 | /**
406 | * Returns the object type from a KSQL CREATE or DROP statement.
407 | *
408 | * @return Either 'table', 'stream', 'into' (denotes it was an INSERT statement) or 'connector'
409 | */
410 | String getObjectType(String sql) {
411 | sql.find(KSQLREGEX) { String all, String statement, String type, String name ->
412 | type.toLowerCase()
413 | }
414 | }
415 |
416 | /**
417 | * Returns the statement type from a KSQL CREATE or DROP statement.
418 | *
419 | * @return Either 'create' or 'drop' or 'insert'
420 | */
421 | String getStatementType(String sql) {
422 | return sql.find(KSQLREGEX) { String all, String statement, String type, String name -> statement.toLowerCase() } ?: 'other'
423 | }
424 |
425 | /**
426 | * Return a list of topic objects
427 | *
428 | * @return List of topic objects
429 | */
430 | def getTopics() {
431 | def topics = execKsql('show topics').body.topics[0]
432 | log.debug "Topics: ${topics}"
433 | return topics
434 | }
435 |
436 | /**
437 | * Return a list of stream objects
438 | *
439 | * @return List of KSQL stream objects
440 | */
441 | def getStreams() {
442 | def streams = execKsql('show streams').body.streams[0]
443 | log.warn "Streams: ${streams}"
444 | return streams
445 | }
446 |
447 | def toLowerCaseIfUnquoted(String value) {
448 | if(value == null || isQuoted(value)) return value
449 | return value?.toLowerCase()
450 | }
451 |
452 | def isQuoted(String value) {
453 | return value.length() >= 2 &&
454 | value.charAt(0) == '"' && value.charAt(value.length() - 1) == '"'
455 | }
456 | }
457 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Gradle Confluent Plugin
2 | You can get this plugin from the [Gradle Plugin Portal](https://plugins.gradle.org/plugin/com.redpillanalytics.gradle-confluent).
3 | You can also read the [API documentation](https://s3.amazonaws.com/documentation.redpillanalytics.com/gradle-confluent/latest/index.html).
4 |
5 | You can run the unit tests by executing:
6 |
7 | ```bash
8 | ./gradlew test
9 | ```
10 |
11 | There are a series of integration tests that use the topics from the [Confluent clickstream quickstart](https://docs.confluent.io/current/ksql/docs/tutorials/clickstream-docker.html#ksql-clickstream-docker) implemented using [TestContainers with docker compose](https://www.testcontainers.org/modules/docker_compose/) using a [compose file](./docker-compose.yml), as well as a standalone [TestContainer for Kafka.](https://www.testcontainers.org/modules/kafka/)
12 | The integration tests, plus the unit tests, can be run with the command below:
13 |
14 | ```bash
15 | ./gradlew runAllTests
16 | ```
17 | # Motivation
18 | This plugin was motivated by a real-world project.
19 | We were struggling to easily deploy all the pieces of our Confluent pipeline: KSQL scripts, KSQL user-defined functions (UDFs), and Kafka Streams microservices.
20 | The biggest gap we had was deploying KSQL scripts to downstream environments, so the majority of this plugin is for remedying that.
21 | Since Gradle already has functionality and plugins for compiling JARS (for UDFs) and building Java applications (for Kafka Streams microservices), this plugin addresses just a few gaps for those patterns.
22 |
23 | # Plugin Extension
24 | Configuration properties for the `gradle-confluent` plugin are specified using the `confluent{}` closure, which adds the `confluent` [*extension*](https://docs.gradle.org/current/userguide/custom_plugins.html#sec:getting_input_from_the_build) to the Gradle project [ExtensionContainer](https://docs.gradle.org/current/javadoc/org/gradle/api/plugins/ExtensionContainer.html).
25 | For instance, if I wanted to disable KSQL Function support and Kafka Streams support (see below), then I could add the following closure to my `build.gradle` file:
26 |
27 | ```Gradle
28 | confluent {
29 | enableFunctions = false
30 | enableStreams = false
31 | }
32 | ```
33 | or
34 | ```Gradle
35 | confluent.enableFunctions = false
36 | confluent.enableStreams = false
37 | ```
38 |
39 | All of the extension properties and their default values are listed [here](https://s3.amazonaws.com/documentation.redpillanalytics.com/gradle-confluent/latest/com/redpillanalytics/gradle/ConfluentPluginExtension.html).
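
For illustration only, here is a sketch of a `confluent{}` closure setting a few of the properties referenced later in this README (the property names come from the `pipelineExecute` option descriptions shown below; check the linked documentation for the authoritative list and defaults):

```gradle
confluent {
   // REST endpoint for the KSQL Server (see '--rest-url' / 'confluent.pipelineEndpoint')
   pipelineEndpoint = 'http://localhost:8088'
   // base directory of KSQL scripts (see '--pipeline-dir' / 'confluent.pipelineSourcePath')
   pipelineSourcePath = 'src/main/pipeline'
   // seconds to pause after each CREATE statement (see '--statement-pause' / 'confluent.statementPause')
   statementPause = 5
}
```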
40 |
41 | # Confluent KSQL
42 | Building streaming pipelines using KSQL is done with a series of SQL statements, similar to the below:
43 |
44 | ```sql
45 | CREATE STREAM clickstream (_time bigint,time varchar, ip varchar, request varchar, status int, userid int, bytes bigint, agent varchar) with (kafka_topic = 'clickstream', value_format = 'json');
46 |
47 | CREATE TABLE clickstream_codes (code int, definition varchar) with (key='code', kafka_topic = 'clickstream_codes', value_format = 'json');
48 |
49 | CREATE TABLE events_per_min AS SELECT userid, count(*) AS events FROM clickstream window TUMBLING (size 60 second) GROUP BY userid;
50 | ```
51 |
52 | The third statement above is called a *persistent query* in KSQL terminology, as it selects data from a KSQL stream or table, creates or uses an underlying Kafka topic, and initializes the streaming process that persists data to that topic.
53 | Because of this, KSQL persistent query statements are regularly dependent on the creation of other KSQL streams and tables.
54 | We wanted to eliminate the need for developers to concern themselves (much) with how to express these dependencies in their KSQL scripts.
55 | We didn't want them to have to write and test *driving* scripts containing DROP or TERMINATE statements, which is time-consuming and error-prone.
56 | We also wanted to make it easy for developers to tweak and rerun their individual pipelines.
57 | So we knew we wanted our approach to auto-generate DROP and TERMINATE statements as a part of the development and deployment processes.
58 | We considered many alternatives for expressing these dependencies, and even briefly considered using the [Gradle Task DAG](https://docs.gradle.org/current/userguide/build_lifecycle.html) to do this.
59 | In the end, we decided on using simple alphanumeric file and directory structure naming.
60 | We use Gradle's built-in [FileTree](https://docs.gradle.org/current/userguide/working_with_files.html#sec:file_trees) functionality which makes this very easy.
61 | You can see a sample of how this is achieved in [the KSQL scripts used for testing this plugin](src/test/resources/src/main/pipeline/).
62 | Notice that none of these sample test scripts have DROP statements or any scripted dependencies.
63 | Scripts and directories can use any naming standard desired, but the script order dependency is managed by a simple `sort()` of the FileTree object.
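
As a purely illustrative sketch, a pipeline source tree following this convention might look like the layout below (the directory and file names are arbitrary; only their alphanumeric ordering matters):

```
src/main/pipeline/
├── 01-clickstream/
│   ├── 01-create.sql
│   ├── 02-integrate.sql
│   └── 03-deliver.sql
└── 02-clickstream-users/
    ├── 01-create.sql
    ├── 02-integrate.sql
    └── 03-deliver.sql
```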
64 |
65 | So let's start preparing our `build.gradle` file.
66 | First, we need to apply the `gradle-confluent` plugin, but we'll also apply the `maven-publish` plugin for handling our artifacts.
67 |
68 | ```gradle
69 | plugins {
70 | id 'maven-publish'
71 | id "com.redpillanalytics.gradle-confluent" version '1.1.11'
72 | }
73 | ```
74 | Now we can use the `./gradlew tasks` command to see the new tasks available under the **Confluent** Task Group:
75 |
76 | ```
77 | Confluent tasks
78 | ---------------
79 | deploy - Calls all dependent deployment tasks.
80 | pipelineExecute - Execute all KSQL pipelines from the provided source directory, in hierarchical order, proceeded by applicable DROP and TERMINATE commands.
81 | pipelineScript - Build a single KSQL deployment script with all the individual pipeline processes ordered. Primarily used for building a KSQL Server start script.
82 | pipelineSync - Synchronize the pipeline build directory from the pipeline source directory.
83 | pipelineZip - Build a distribution ZIP file with the pipeline source files, plus a single KSQL 'create' script.
84 | ```
85 |
86 | ## Executing KSQL Pipelines
87 | The easiest way to use this plugin is to simply execute all of our persistent query statements--or a subset of them--from source control.
88 | We do this using the `pipelineExecute` task, which uses the KSQL REST API to handle all of the heavy-lifting.
89 | I'll turn up the logging a bit this first time with the `-i` option so we can see exactly what's going on.
90 | Apologies in advance for the verbose screen output, but I think it's worth it:
91 |
92 | ```bash
93 | ==> ./gradlew pipelineExecute --console=plain -i
94 |
95 | > Configure project :
96 | Evaluating root project 'ksql-examples' using build file '/Users/stewartbryson/Source/ksql-examples/build.gradle'.
97 | Selected primary task ':jar' from project :
98 | All projects evaluated.
99 | Selected primary task 'pipelineExecute' from project :
100 | Tasks to be executed: [task ':pipelineSync', task ':pipelineExecute']
101 | :pipelineSync (Thread[Execution worker for ':',5,main]) started.
102 |
103 | > Task :pipelineSync
104 | Deleting stale output file: /Users/stewartbryson/Source/ksql-examples/build/pipeline
105 | Task ':pipelineSync' is not up-to-date because:
106 | No history is available.
107 | Custom actions are attached to task ':pipelineSync'.
108 | Synchronizing '/Users/stewartbryson/Source/ksql-examples/build/pipeline' from '/Users/stewartbryson/Source/ksql-examples/src/main/pipeline'.
109 | :pipelineSync (Thread[Execution worker for ':',5,main]) completed. Took 0.018 secs.
110 | :pipelineExecute (Thread[Execution worker for ':',5,main]) started.
111 |
112 | > Task :pipelineExecute
113 | Task ':pipelineExecute' is not up-to-date because:
114 | Task.upToDateWhen is false.
115 | Terminating query CTAS_CLICK_USER_SESSIONS_107...
116 | DROP TABLE IF EXISTS CLICK_USER_SESSIONS;
117 | Terminating query CTAS_USER_IP_ACTIVITY_106...
118 | DROP TABLE IF EXISTS USER_IP_ACTIVITY;
119 | Terminating query CSAS_USER_CLICKSTREAM_105...
120 | DROP STREAM IF EXISTS USER_CLICKSTREAM;
121 | Terminating query CSAS_CUSTOMER_CLICKSTREAM_104...
122 | DROP STREAM IF EXISTS customer_clickstream;
123 | Terminating query CTAS_ERRORS_PER_MIN_103...
124 | DROP table IF EXISTS ERRORS_PER_MIN;
125 | Terminating query CTAS_ERRORS_PER_MIN_ALERT_102...
126 | DROP TABLE IF EXISTS ERRORS_PER_MIN_ALERT;
127 | DROP TABLE IF EXISTS WEB_USERS;
128 | Terminating query CTAS_ENRICHED_ERROR_CODES_COUNT_101...
129 | DROP TABLE IF EXISTS ENRICHED_ERROR_CODES_COUNT DELETE TOPIC;
130 | Terminating query CSAS_ENRICHED_ERROR_CODES_100...
131 | DROP STREAM IF EXISTS ENRICHED_ERROR_CODES;
132 | Terminating query CTAS_PAGES_PER_MIN_99...
133 | DROP TABLE IF EXISTS pages_per_min;
134 | Terminating query CTAS_EVENTS_PER_MIN_98...
135 | DROP table IF EXISTS events_per_min;
136 | DROP TABLE IF EXISTS clickstream_codes;
137 | DROP STREAM IF EXISTS clickstream;
138 | 10 queries terminated.
139 | 13 objects dropped.
140 | CREATE STREAM clickstream (_time bigint,time varchar, ip varchar, request varchar, status int, userid int, bytes bigint, agent varchar) with (kafka_topic = 'clickstream', value_format = 'json');
141 | CREATE TABLE clickstream_codes (code int, definition varchar) with (key='code', kafka_topic = 'clickstream_codes', value_format = 'json');
142 | CREATE table events_per_min AS SELECT userid, count(*) AS events FROM clickstream window TUMBLING (size 60 second) GROUP BY userid;
143 | CREATE TABLE pages_per_min AS SELECT userid, count(*) AS pages FROM clickstream WINDOW HOPPING (size 60 second, advance by 5 second) WHERE request like '%html%' GROUP BY userid;
144 | CREATE STREAM ENRICHED_ERROR_CODES AS SELECT code, definition FROM clickstream LEFT JOIN clickstream_codes ON clickstream.status = clickstream_codes.code;
145 | CREATE TABLE ENRICHED_ERROR_CODES_COUNT AS SELECT code, definition, COUNT(*) AS count FROM ENRICHED_ERROR_CODES WINDOW TUMBLING (size 30 second) GROUP BY code, definition HAVING COUNT(*) > 1;
146 | CREATE TABLE WEB_USERS (user_id int, registered_At bigint, username varchar, first_name varchar, last_name varchar, city varchar, level varchar) with (key='user_id', kafka_topic = 'clickstream_users', value_format = 'json');
147 | CREATE TABLE ERRORS_PER_MIN_ALERT AS SELECT status, count(*) AS errors FROM clickstream window HOPPING ( size 30 second, advance by 20 second) WHERE status > 400 GROUP BY status HAVING count(*) > 5 AND count(*) is not NULL;
148 | CREATE table ERRORS_PER_MIN AS SELECT status, count(*) AS errors FROM clickstream window HOPPING ( size 60 second, advance by 5 second) WHERE status > 400 GROUP BY status;
149 | CREATE STREAM customer_clickstream WITH (PARTITIONS=2) AS SELECT userid, u.first_name, u.last_name, u.level, time, ip, request, status, agent FROM clickstream c LEFT JOIN web_users u ON c.userid = u.user_id;
150 | CREATE STREAM USER_CLICKSTREAM AS SELECT userid, u.username, ip, u.city, request, status, bytes FROM clickstream c LEFT JOIN web_users u ON c.userid = u.user_id;
151 | CREATE TABLE USER_IP_ACTIVITY AS SELECT username, ip, city, COUNT(*) AS count FROM USER_CLICKSTREAM WINDOW TUMBLING (size 60 second) GROUP BY username, ip, city HAVING COUNT(*) > 1;
152 | CREATE TABLE CLICK_USER_SESSIONS AS SELECT username, count(*) AS events FROM USER_CLICKSTREAM window SESSION (300 second) GROUP BY username;
153 | 13 objects created.
154 | :pipelineExecute (Thread[Execution worker for ':',5,main]) completed. Took 5.798 secs.
155 |
156 | BUILD SUCCESSFUL in 7s
157 | 2 actionable tasks: 2 executed
158 | ==>
159 | ```
160 |
161 | First thing to notice is that the plugin automatically constructs and issues the DROP statements for any applicable CREATE statement encountered: no need to write those yourself.
162 | It runs all the DROP statements at the beginning, but also runs them in the reverse order of the CREATE statement dependency ordering: this just makes sense if you think about it.
163 | Additionally, if any DROP statements have persistent queries involving that table or stream, the plugin finds the query ID involved and issues the required TERMINATE statement.
164 | So a triad of statements is run: TERMINATE, DROP, and CREATE.
165 | This behavior can be controlled with command-line options. Here is the output from the help task command:
166 |
167 | ```bash
168 | ==> ./gradlew help --task pipelineExecute
169 |
170 | > Task :help
171 | Detailed task information for pipelineExecute
172 |
173 | Path
174 | :pipelineExecute
175 |
176 | Type
177 | PipelineExecuteTask (com.redpillanalytics.gradle.tasks.PipelineExecuteTask)
178 |
179 | Options
180 | --basic-password The Password for Basic Authentication with the REST API URL for the KSQL Server. Default: value of 'confluent.pipelinePassword' or ''.
181 |
182 | --basic-username The Username for Basic Authentication with the REST API URL for the KSQL Server. Default: value of 'confluent.pipelineUsername' or ''.
183 |
184 | --drop-only When defined, only DROP and TERMINATE statements in KSQL scripts are executed. Used primarily for cleaning existing TABLES/STREAMS and terminating queries.
185 |
186 | --from-beginning When defined, set 'ksql.streams.auto.offset.reset' to 'earliest'.
187 |
188 | --no-drop When defined, applicable DROP statements are not auto-generated and executed.
189 |
190 | --no-reverse-drops When defined, DROP statements are not processed in reverse order of the CREATE statements, which is the default.
191 |
192 | --no-terminate When defined, applicable TERMINATE statements are not auto-generated and executed.
193 |
194 | --pipeline-dir The base directory containing SQL scripts to execute, including recursive subdirectories. Default: value of 'confluent.pipelineSourcePath' or 'src/main/pipeline'.
195 |
196 | --rest-url The REST API URL for the KSQL Server. Default: value of 'confluent.pipelineEndpoint' or 'http://localhost:8088'.
197 |
198 | --statement-pause The number of seconds to pause execution after a create statement. Default: value of 'confluent.statementPause'.
199 |
200 | Description
201 | Execute all KSQL pipelines from the provided source directory, in hierarchical order, proceeded by applicable DROP and TERMINATE commands.
202 |
203 | Group
204 | confluent
205 |
206 | BUILD SUCCESSFUL in 1s
207 | 1 actionable task: 1 executed
208 | ```
209 |
210 | With these command-line options, the `gradle-confluent` plugin is very helpful for developers during the KSQL development phase.
211 | We can process just a single directory of KSQL scripts easily as we iterate on our KSQL code.
212 |
213 | ```bash
214 | ==> ./gradlew pipelineExecute --pipeline-dir 01-clickstream --from-beginning
215 |
216 | > Task :pipelineExecute
217 | 8 queries terminated.
218 | 6 objects dropped.
219 | 6 objects created.
220 |
221 | BUILD SUCCESSFUL in 3s
222 | 2 actionable tasks: 1 executed, 1 up-to-date
223 | ```
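
During iteration it can also be useful to clear out the objects created by a single directory of scripts without recreating them; as a sketch (no output shown), that is just the `--drop-only` option described in the help output above:

```bash
==> ./gradlew pipelineExecute --pipeline-dir 01-clickstream --drop-only
```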
224 |
225 | ## Building Artifacts
226 | While executing KSQL scripts from our source repository is useful for developers using KSQL, and might even suffice for some deployment pipelines, `gradle-confluent` is really designed to build and publish artifacts for downstream deployment.
227 | We of course support this using Gradle's built-in support for Maven.
228 | We simply execute `./gradlew build` to build a .zip distribution artifact with all of our KSQL in it, or `./gradlew build publish` to build and publish the distribution artifact.
229 | Let's make a few changes to our `build.gradle` file to publish to a local Maven repository.
230 | Of course, a local Maven repository is not fit for real environments, and Gradle supports all major Maven repository servers, as well as AWS S3 and Google Cloud Storage as Maven artifact repositories.
231 | We're also hard-coding our version number in the `build.gradle` file... we would normally use a plugin to automatically handle version bumping.
232 |
233 | ```gradle
234 | plugins {
235 | id 'maven-publish'
236 | id "com.redpillanalytics.gradle-confluent" version '1.1.11'
237 | }
238 | publishing {
239 | repositories {
240 | mavenLocal()
241 | }
242 | }
243 | group = 'com.redpillanalytics'
244 | version = '1.0.0'
245 | ```
246 |
247 | Now we can build and publish the artifacts with a single Gradle statement:
248 |
249 | ```bash
250 | ==> ./gradlew --console=plain build publish
251 | > Task :assemble UP-TO-DATE
252 | > Task :check UP-TO-DATE
253 | > Task :pipelineSync UP-TO-DATE
254 | > Task :pipelineScript
255 | > Task :pipelineZip
256 | > Task :build
257 | > Task :generatePomFileForPipelinePublication
258 | > Task :publishPipelinePublicationToMavenLocalRepository
259 | > Task :publish
260 |
261 | BUILD SUCCESSFUL in 1s
262 | 5 actionable tasks: 4 executed, 1 up-to-date
263 | ```
264 |
265 | We can now see our zip distribution file in the `build/distributions` directory:
266 |
267 | ```bash
268 | ==> cd build/distributions/
269 | ==> zipinfo ksql-examples-pipeline-1.0.0.zip
270 | Archive: ksql-examples-pipeline-1.0.0.zip
271 | Zip file size: 3632 bytes, number of entries: 9
272 | drwxr-xr-x 2.0 unx 0 b- defN 19-Jan-11 04:00 01-clickstream/
273 | -rw-r--r-- 2.0 unx 449 b- defN 19-Jan-11 04:00 01-clickstream/01-create.sql
274 | -rw-r--r-- 2.0 unx 633 b- defN 19-Jan-11 04:00 01-clickstream/02-integrate.sql
275 | -rw-r--r-- 2.0 unx 257 b- defN 19-Jan-11 04:00 01-clickstream/03-deliver.sql
276 | drwxr-xr-x 2.0 unx 0 b- defN 19-Jan-11 04:00 02-clickstream-users/
277 | -rw-r--r-- 2.0 unx 248 b- defN 19-Jan-11 04:00 02-clickstream-users/01-create.sql
278 | -rw-r--r-- 2.0 unx 960 b- defN 19-Jan-11 04:00 02-clickstream-users/02-integrate.sql
279 | -rw-r--r-- 2.0 unx 473 b- defN 19-Jan-11 04:00 02-clickstream-users/03-deliver.sql
280 | -rw-r--r-- 2.0 unx 2312 b- defN 19-Jan-11 04:07 ksql-script.sql
281 | 9 files, 5332 bytes uncompressed, 2436 bytes compressed: 54.3%
282 | ```
283 |
284 | Notice our zip file has all the source scripts, but it also has the single, normalized `ksql-script.sql` file, which can be used as a KSQL server start script if we choose to deploy in that manner.
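
For example, as a sketch only (assuming a Confluent Platform installation with KSQL headless mode available, and hypothetical paths), the generated script could drive a headless, non-interactive KSQL server:

```bash
# start the KSQL server with the generated script as its queries file
ksql-server-start /etc/ksql/ksql-server.properties --queries-file ksql-script.sql
```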
285 |
286 | ## Deploying KSQL Artifacts
287 | If we want to deploy our KSQL pipelines from Maven instead of Git (which, let's face it, should be standard), we define a Gradle dependency on the `ksql-examples-pipeline` artifact (or whatever we named the Gradle project building our pipelines) so that Gradle can pull that artifact from Maven to use for deployment.
288 | We are changing our `build.gradle` file again.
289 | Notice we are adding the `repositories{}` and `dependencies{}` closures, and for our dependency version we have specified '+', which simply pulls the most recent version.
290 |
291 | ```gradle
292 | plugins {
293 | id 'maven-publish'
294 | id "com.redpillanalytics.gradle-confluent" version '1.1.11'
295 | }
296 | publishing {
297 | repositories {
298 | mavenLocal()
299 | }
300 | }
301 | group = 'com.redpillanalytics'
302 | version = '1.0.0'
303 |
304 | repositories {
305 | mavenLocal()
306 | }
307 |
308 | dependencies {
309 | archives 'com.redpillanalytics:ksql-examples-pipeline:+'
310 | }
311 | ```
312 |
313 | With our KSQL pipeline dependency added, we get a few more tasks in our **Confluent** task group when we run `./gradlew tasks --group confluent`, specifically the `pipelineExtract` and `pipelineDeploy` tasks:
314 |
315 | ```
316 | Confluent tasks
317 | ---------------
318 | deploy - Execute any configured deployment tasks.
319 | pipelineDeploy - Execute all KSQL pipelines extracted from an artifact dependency, in hierarchical order, proceeded by applicable DROP and TERMINATE commands.
320 | pipelineExecute - Execute all KSQL pipelines from the provided source directory, in hierarchical order, proceeded by applicable DROP and TERMINATE commands.
321 | pipelineExtract - Extract the KSQL pipeline deployment dependency (or zip file) into the deployment directory.
322 | pipelineScript - Build a single KSQL deployment script with all the individual pipeline processes ordered. Primarily used for building a KSQL Server start script.
323 | pipelineSync - Synchronize the pipeline build directory from the pipeline source directory.
324 | pipelineZip - Build a distribution ZIP file with the pipeline source files, plus a single KSQL 'create' script.
325 | ```
326 |
327 | Now we can execute with a simple `./gradlew deploy` task, which calls the `pipelineDeploy` task as a dependency; `pipelineDeploy` functions identically to `pipelineExecute`, except that it operates on the contents of the ZIP artifact instead of what's in source control.
328 |
329 | ```bash
330 | ==> ./gradlew deploy
331 |
332 | > Task :pipelineDeploy
333 | 6 queries terminated.
334 | 13 objects dropped.
335 | 13 objects created.
336 |
337 | BUILD SUCCESSFUL in 4s
338 | 2 actionable tasks: 2 executed
339 | ```
340 |
341 | # KSQL Directives
342 | Because the `gradle-confluent` plugin auto-generates certain statements, we immediately faced an issue defining how options around these statements would be managed.
343 | For the `DROP STREAM/TABLE` statement, for instance, we needed to control whether the `DELETE TOPIC` statement was issued as part of this statement.
344 | A simple command-line option for the Gradle `pipelineExecute` and `pipelineDeploy` tasks was not sufficient, because it didn't provide the stream/table-level granularity that's required.
345 | We introduced *directives* in our KSQL scripts: smart comments that could control certain behaviors.
346 | To date, we've only introduced the `--@DeleteTopic` directive, but others could be introduced as needed.
347 |
348 | Directives are signalled using `--@` followed by a camel-case directive name, placed just above the `CREATE STREAM/TABLE` command.
349 | In this way, directives are similar to *annotations* on classes or methods in Java.
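
For illustration, a directive in a pipeline script looks roughly like this (the statement is adapted from the clickstream examples above):

```sql
--@DeleteTopic
CREATE table events_per_min AS SELECT userid, count(*) AS events FROM clickstream window TUMBLING (size 60 second) GROUP BY userid;
```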
350 |
351 | ## `--@DeleteTopic`
352 | When applied to a table or stream, the `DELETE TOPIC` option is added to the `DROP STREAM/TABLE` command issued during the `pipelineExecute` and `pipelineDeploy` tasks.
353 | An example of this can be seen in [this test script](src/test/resources/src/main/pipeline/01-clickstream/02-integrate.sql).
354 | This would construct the following `DROP` command:
355 |
356 | ```SQL
357 | DROP table IF EXISTS events_per_min DELETE TOPIC;
358 | ```
359 |
360 | See the output generated below:
361 |
362 | ```bash
363 | ==> ./gradlew pipelineExecute --pipeline-dir 01-clickstream -i
364 |
365 | > Configure project :
366 | Evaluating root project 'ksql-examples' using build file '/Users/stewartbryson/Source/ksql-examples/build.gradle'.
367 | Selected primary task ':jar' from project :
368 | All projects evaluated.
369 | Selected primary task 'pipelineExecute' from project :
370 | Tasks to be executed: [task ':pipelineSync', task ':pipelineExecute']
371 | :pipelineSync (Thread[Execution worker for ':',5,main]) started.
372 |
373 | > Task :pipelineSync
374 | Task ':pipelineSync' is not up-to-date because:
375 | Input property 'rootSpec$1' file /Users/stewartbryson/Source/ksql-examples/src/main/pipeline/01-clickstream/01-create.sql has changed.
376 | Input property 'rootSpec$1' file /Users/stewartbryson/Source/ksql-examples/src/main/pipeline/01-clickstream/02-integrate.sql has changed.
377 | Input property 'rootSpec$1' file /Users/stewartbryson/Source/ksql-examples/src/main/pipeline/01-clickstream/03-deliver.sql has changed.
378 | Custom actions are attached to task ':pipelineSync'.
379 | Synchronizing '/Users/stewartbryson/Source/ksql-examples/build/pipeline' from '/Users/stewartbryson/Source/ksql-examples/src/main/pipeline'.
380 | :pipelineSync (Thread[Execution worker for ':',5,main]) completed. Took 0.013 secs.
381 | :pipelineExecute (Thread[Execution worker for ':',5,main]) started.
382 |
383 | > Task :pipelineExecute
384 | Task ':pipelineExecute' is not up-to-date because:
385 | Task.upToDateWhen is false.
386 | Terminating query CTAS_ENRICHED_ERROR_CODES_COUNT_149...
387 | DROP TABLE IF EXISTS ENRICHED_ERROR_CODES_COUNT;
388 | Terminating query CSAS_ENRICHED_ERROR_CODES_148...
389 | DROP STREAM IF EXISTS ENRICHED_ERROR_CODES;
390 | Terminating query CTAS_PAGES_PER_MIN_147...
391 | DROP TABLE IF EXISTS pages_per_min;
392 | Terminating query CTAS_EVENTS_PER_MIN_146...
393 | DROP table IF EXISTS events_per_min DELETE TOPIC;
394 | DROP TABLE IF EXISTS clickstream_codes;
395 | DROP STREAM IF EXISTS clickstream;
396 | 4 queries terminated.
397 | 6 objects dropped.
398 | CREATE STREAM clickstream (_time bigint,time varchar, ip varchar, request varchar, status int, userid int, bytes bigint, agent varchar) with (kafka_topic = 'clickstream', value_format = 'json');
399 | CREATE TABLE clickstream_codes (code int, definition varchar) with (key='code', kafka_topic = 'clickstream_codes', value_format = 'json');
400 | CREATE table events_per_min AS SELECT userid, count(*) AS events FROM clickstream window TUMBLING (size 60 second) GROUP BY userid;
401 | CREATE TABLE pages_per_min AS SELECT userid, count(*) AS pages FROM clickstream WINDOW HOPPING (size 60 second, advance by 5 second) WHERE request like '%html%' GROUP BY userid;
402 | CREATE STREAM ENRICHED_ERROR_CODES AS SELECT code, definition FROM clickstream LEFT JOIN clickstream_codes ON clickstream.status = clickstream_codes.code;
403 | CREATE TABLE ENRICHED_ERROR_CODES_COUNT AS SELECT code, definition, COUNT(*) AS count FROM ENRICHED_ERROR_CODES WINDOW TUMBLING (size 30 second) GROUP BY code, definition HAVING COUNT(*) > 1;
404 | 6 objects created.
405 | :pipelineExecute (Thread[Execution worker for ':',5,main]) completed. Took 1.446 secs.
406 |
407 | BUILD SUCCESSFUL in 2s
408 | 2 actionable tasks: 2 executed
409 | ==>
410 | ```
411 |
412 | # KSQL User-Defined Functions (UDFs)
413 | Coming soon
414 |
415 | # Kafka Streams
416 | Coming soon
417 |
--------------------------------------------------------------------------------