├── .github ├── FUNDING.yml ├── dependabot.yml └── workflows │ ├── main_build.yml │ └── pr_build.yml ├── .gitignore ├── .tool-versions ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── build.gradle.kts ├── examples ├── .gitignore ├── README.md ├── avro-kts │ └── build.gradle.kts ├── avro │ └── build.gradle ├── build.gradle.kts ├── docker-compose.yml ├── extra │ ├── metadata.json │ └── ruleSet.json ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── json │ └── build.gradle ├── protobuf │ └── build.gradle ├── schemas │ ├── avro │ │ ├── company.avsc │ │ ├── company_v2.avsc │ │ ├── location-address.avsc │ │ ├── location-latlong.avsc │ │ ├── my-record.avsc │ │ ├── results │ │ │ └── .keep │ │ └── user.avsc │ ├── json │ │ ├── company.json │ │ ├── company_v2.json │ │ ├── location-address.json │ │ ├── location-latlong.json │ │ ├── my-record.json │ │ ├── results │ │ │ └── .keep │ │ └── user.json │ └── protobuf │ │ ├── com │ │ └── example │ │ │ ├── company.proto │ │ │ ├── company_v2.proto │ │ │ ├── location-address.proto │ │ │ ├── location-latlong.proto │ │ │ ├── my-record.proto │ │ │ └── user.proto │ │ └── results │ │ └── .keep ├── settings.gradle.kts └── ssl │ ├── build.gradle │ └── docker-compose.yml ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle.kts └── src ├── integration ├── kotlin │ └── com │ │ └── github │ │ └── imflog │ │ └── schema │ │ └── registry │ │ ├── security │ │ ├── BasicAuthenticationIT.kt │ │ └── SslIT.kt │ │ ├── tasks │ │ ├── compatibility │ │ │ └── CompatibilityTaskIT.kt │ │ ├── config │ │ │ └── ConfigTaskIT.kt │ │ ├── download │ │ │ └── DownloadTaskIT.kt │ │ └── register │ │ │ └── RegisterTaskIT.kt │ │ └── utils │ │ └── KafkaTestContainersUtils.kt └── resources │ ├── generate-crt.sh │ └── secrets │ ├── cert_creds │ ├── registry.keystore.jks │ └── registry.truststore.jks ├── main └── kotlin │ └── com │ └── github │ └── imflog │ └── schema │ └── registry │ ├── Exceptions.kt │ ├── LocalReference.kt │ ├── LoggingUtils.kt │ ├── RegistryClientWrapper.kt │ ├── SchemaRegistryExtension.kt │ ├── SchemaRegistryPlugin.kt │ ├── SchemaType.kt │ ├── Subject.kt │ ├── parser │ ├── AvroSchemaParser.kt │ ├── JsonSchemaParser.kt │ ├── ProtobufSchemaParser.kt │ └── SchemaParser.kt │ ├── security │ ├── BasicAuthExtension.kt │ └── SslExtension.kt │ └── tasks │ ├── compatibility │ ├── CompatibilitySubjectExtension.kt │ ├── CompatibilityTask.kt │ └── CompatibilityTaskAction.kt │ ├── config │ ├── ConfigSubjectExtension.kt │ ├── ConfigTask.kt │ └── ConfigTaskAction.kt │ ├── download │ ├── DownloadSubjectExtension.kt │ ├── DownloadTask.kt │ └── DownloadTaskAction.kt │ └── register │ ├── RegisterSchemasTask.kt │ ├── RegisterSubjectExtension.kt │ └── RegisterTaskAction.kt └── test ├── kotlin └── com │ └── github │ └── imflog │ └── schema │ └── registry │ ├── SchemaRegistryPluginTest.kt │ ├── parser │ ├── AvroSchemaParserTest.kt │ ├── JsonSchemaParserTest.kt │ └── ProtobufSchemaParserTest.kt │ └── tasks │ ├── compatibility │ └── CompatibilityTaskActionTest.kt │ ├── config │ └── ConfigTaskActionTest.kt │ ├── download │ └── DownloadTaskActionTest.kt │ └── register │ └── RegisterTaskActionTest.kt └── resources ├── NestedArrayType.avsc ├── NestedNestedType.avsc ├── NestedType.avsc ├── ParentArraySubject.avsc ├── ParentSubject.avsc ├── bug_199 ├── a.avsc ├── b.avsc ├── main.avsc └── shared.avsc ├── bug_204 ├── mainMap.avsc └── map.avsc ├── 
bug_206 ├── Common.avsc ├── Main.avsc ├── NestedOne.avsc └── NestedTwo.avsc ├── testMetadata.json ├── testRuleSet.json ├── testSimpleSubject.avsc ├── testSubject.avsc ├── testSubjectComplex.avsc ├── testSubjectWithArrayReference.avsc ├── testSubjectWithTag.avsc └── testType.avsc /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [ImFlog] 2 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "gradle" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | open-pull-requests-limit: 10 8 | -------------------------------------------------------------------------------- /.github/workflows/main_build.yml: -------------------------------------------------------------------------------- 1 | name: "Master" 2 | 3 | on: 4 | push: 5 | branches: 6 | - "master" 7 | 8 | jobs: 9 | build: 10 | name: "Build" 11 | runs-on: "ubuntu-latest" 12 | strategy: 13 | matrix: 14 | kafka_version: [ 15 | "6.2.6", 16 | "7.2.2", 17 | "7.3.1", 18 | "7.6.0" 19 | ] 20 | steps: 21 | - uses: "actions/checkout@v4" 22 | - uses: "gradle/actions/wrapper-validation@v4" 23 | - uses: "actions/setup-java@v4" 24 | with: 25 | distribution: "temurin" 26 | java-version: "8" 27 | cache: "gradle" 28 | - name: "Build with Gradle" 29 | env: 30 | KAFKA_VERSION: ${{ matrix.kafka_version }} 31 | run: "./gradlew integrationTest" -------------------------------------------------------------------------------- /.github/workflows/pr_build.yml: -------------------------------------------------------------------------------- 1 | name: "PR" 2 | 3 | on: [ "pull_request" ] 4 | 5 | jobs: 6 | build: 7 | name: "Build" 8 | runs-on: "ubuntu-latest" 9 | steps: 10 | - uses: "actions/checkout@v4" 11 | - uses: "gradle/actions/wrapper-validation@v4" 12 | - uses: "actions/setup-java@v4" 13 | with: 14 | distribution: "temurin" 15 | java-version: "8" 16 | cache: "gradle" 17 | - name: "Build with Gradle" 18 | run: "./gradlew integrationTest --stacktrace" -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # kotlin 2 | *.iml 3 | /local.properties 4 | /.idea 5 | .DS_Store 6 | /build 7 | /captures 8 | 9 | # Gradle 10 | .gradle 11 | /build/ 12 | /out/ 13 | /gradle 14 | 15 | # Ignore Gradle GUI config 16 | gradle-app.setting 17 | 18 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) 19 | !gradle-wrapper.jar 20 | 21 | # Cache of project 22 | .gradletasknamecache 23 | 24 | # # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 25 | # gradle/wrapper/gradle-wrapper.properties 26 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | java corretto-8.392.08.1 2 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex 
characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | garcia.florian.perso@gmail.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 
87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 129 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /build.gradle.kts: -------------------------------------------------------------------------------- 1 | import org.jetbrains.kotlin.gradle.tasks.KotlinCompile 2 | 3 | group = "com.github.imflog" 4 | version = "2.3.3-SNAPSHOT" 5 | 6 | 7 | plugins { 8 | kotlin("jvm") version "2.1.20" 9 | id("com.gradle.plugin-publish") version "1.3.1" 10 | } 11 | 12 | repositories { 13 | mavenCentral() 14 | maven("https://packages.confluent.io/maven/") 15 | } 16 | 17 | // Dependencies versions 18 | val confluentVersion = "7.6.0" 19 | val avroVersion = "1.11.2" 20 | val wireVersion = "4.9.1" 21 | dependencies { 22 | implementation(gradleApi()) 23 | implementation("io.confluent", "kafka-schema-registry", confluentVersion) { 24 | exclude("org.slf4j", "slf4j-log4j12") 25 | } 26 | // Protobuf schema parser 27 | implementation("com.squareup.wire", "wire-schema", wireVersion) 28 | } 29 | 30 | tasks.withType<KotlinCompile>().configureEach { 31 | kotlinOptions { 32 | jvmTarget = "1.8" 33 | freeCompilerArgs = listOf( 34 | "-Xself-upper-bound-inference" 35 | ) 36 | } 37 | } 38 | 39 | java { 40 | withSourcesJar() 41 | } 42 | 43 | // Unit tests 44 | val junitVersion = "5.7.2" 45 | val mockkVersion = "1.11.0" 46 | val assertJVersion = "3.20.2" 47 | dependencies { 48 | testImplementation(gradleTestKit()) 49 | testImplementation("org.junit.jupiter", "junit-jupiter-api", junitVersion) 50 | testImplementation("org.junit.jupiter", "junit-jupiter-engine", junitVersion) 51 | testImplementation("org.junit.jupiter", "junit-jupiter-params", junitVersion) 52 | testImplementation("org.assertj", "assertj-core", assertJVersion) 53 | testImplementation("io.mockk", "mockk", mockkVersion) 54 | } 55 | 56 | tasks.withType<Test> { 57 | useJUnitPlatform() 58 | } 59 | 60 | // Integration tests 61 | val integrationSource = sourceSets.create("integration") { 62 | compileClasspath += sourceSets.main.get().output 63 | runtimeClasspath += sourceSets.main.get().output 64 | } 65 | 66 | val integrationImplementation: Configuration by configurations.getting { 67 | extendsFrom( 68 | configurations.implementation.get(), 69 | configurations.testImplementation.get() 70 | ) 71 | } 72 | 73 | configurations["integrationImplementation"].extendsFrom(configurations.runtimeOnly.get()) 74 | 75 | val wiremockVersion = "2.28.1" 76 | val testContainersVersion = "1.17.6" 77 | dependencies { 78 | integrationImplementation("com.github.tomakehurst", "wiremock-jre8", wiremockVersion) 79 | integrationImplementation("org.testcontainers", "kafka", testContainersVersion) 80 | } 81 | 82 | task<Test>("integrationTest") { 83 | description = "Runs integration tests."
84 | group = "verification" 85 | 86 | testClassesDirs = sourceSets["integration"].output.classesDirs 87 | classpath = sourceSets["integration"].runtimeClasspath 88 | 89 | dependsOn("build") 90 | } 91 | 92 | // Publish plugin 93 | val registryPluginName = "com.github.imflog.kafka-schema-registry-gradle-plugin" 94 | @Suppress("UnstableApiUsage") 95 | gradlePlugin { 96 | website.set("https://github.com/ImFlog/schema-registry-plugin") 97 | vcsUrl.set("https://github.com/ImFlog/schema-registry-plugin.git") 98 | testSourceSets( 99 | sourceSets["test"], 100 | integrationSource 101 | ) 102 | plugins { 103 | create("schema-registry") { 104 | id = registryPluginName 105 | description = "A plugin to download, register and test schemas from a Kafka Schema Registry" 106 | displayName = "Kafka schema registry gradle plugin" 107 | version = version 108 | tags.set(listOf("schema", "registry", "schema-registry", "kafka")) 109 | 110 | implementationClass = "com.github.imflog.schema.registry.SchemaRegistryPlugin" 111 | } 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /examples/.gitignore: -------------------------------------------------------------------------------- 1 | **/downloaded/ 2 | **/results/ -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | This directory contains examples of how to use the schema-registry plugin. 3 | 4 | If you want to run the samples locally you need Docker and Docker Compose. 5 | Gradle will take care of starting the Docker Compose containers automatically. 6 | 7 | A composite task `run` is available in the build group. It will (in this order): 8 | 1. call the register task 9 | 2. call the configure task 10 | 3. call the download task 11 | 4. call the compatibility task 12 | 13 | You can use `./gradlew run` to run all the examples, 14 | or prefix the task with `:project_name:`, like `./gradlew :avro:run`, to run a single one. 15 | 16 | Each subproject defines the extension configuration according to its use case. 17 | 18 | ## Avro / JSON / Protobuf 19 | These are full examples of how to use the plugin. They all do the same thing, but with different schema types: 20 | * register the schema `company` and the schema `user` (which uses the type `Company`) 21 | * set the compatibility to `FULL_TRANSITIVE` 22 | * download the previously registered schemas (into the `downloaded` folder) 23 | * test the compatibility with `company_v2` 24 | You can find the schemas used, for instance [schemas/avro/company.avsc](schemas/avro/company.avsc), and the downloaded copies alongside them. 25 | 26 | ## Override confluent version 27 | The goal is to show how to override the Confluent version. 28 | The run task should work, but you can run `./gradlew :override-confluent-version:buildEnvironment` to see the resolved Confluent version. 29 | 30 | ## HTTPS 31 | This sample shows how to configure SSL with the schema registry.
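
## Quick configuration sketch

Before diving into the subproject build files that follow, here is a minimal sketch of the `schemaRegistry` extension block each of them configures (Kotlin DSL). The subject name and schema paths are illustrative, borrowed from the Avro example; see `examples/avro-kts/build.gradle.kts` for the complete configuration.

```kotlin
// Minimal sketch only — the real examples below register several subjects,
// plus references, metadata and rule sets.
plugins {
    id("com.github.imflog.kafka-schema-registry-gradle-plugin")
}

schemaRegistry {
    url.set("http://localhost:8081") // local registry started by docker-compose

    register {
        // Register schemas/avro/company.avsc under the "company" subject
        subject("company", "schemas/avro/company.avsc", "AVRO")
    }

    config {
        // Compatibility mode applied to the subject
        subject("company", "FULL_TRANSITIVE")
    }

    download {
        // Fetch the latest registered version into this directory
        subject("company", "schemas/avro/downloaded")
    }

    compatibility {
        // Check that the v2 schema stays compatible with what is registered
        subject("company", "schemas/avro/company_v2.avsc", "AVRO")
    }
}
```

This mirrors the order used by the composite `run` task: register, config, download, compatibility.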
32 | -------------------------------------------------------------------------------- /examples/avro-kts/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import com.github.imflog.schema.registry.tasks.download.MetadataExtension 2 | 3 | plugins { 4 | id("com.github.imflog.kafka-schema-registry-gradle-plugin") 5 | } 6 | 7 | schemaRegistry { 8 | url.set("http://localhost:8081") 9 | quiet.set(false) 10 | outputDirectory.set("schemas/avro/results/") 11 | pretty.set(true) 12 | failFast.set(true) 13 | 14 | register { 15 | subject("company", "schemas/avro/company.avsc", "AVRO") 16 | .addLocalReference("Address", "schemas/avro/location-address.avsc") 17 | subject("user", "schemas/avro/user.avsc", "AVRO") 18 | .addReference("company", "company", -1) 19 | subject("location-address", "schemas/avro/location-address.avsc", "AVRO") 20 | subject("location-latlong", "schemas/avro/location-latlong.avsc", "AVRO") 21 | subject("my-record","schemas/avro/my-record.avsc", "AVRO") 22 | .setMetadata("extra/metadata.json") 23 | .setRuleSet("extra/ruleSet.json") 24 | .setNormalized(true) 25 | } 26 | 27 | config { 28 | subject("company", "FULL_TRANSITIVE") 29 | subject("user", "FULL_TRANSITIVE") 30 | subject("location-address", "FULL_TRANSITIVE") 31 | subject("location-latlong", "FULL_TRANSITIVE") 32 | } 33 | 34 | download { 35 | metadata.set(MetadataExtension(true)) 36 | // subject("company", "schemas/avro/downloaded") // Retrieved via reference from the user subject 37 | subject("user", "schemas/avro/downloaded", true) 38 | subjectPattern("location.*", "schemas/avro/downloaded/location") 39 | } 40 | 41 | compatibility { 42 | subject("company", "schemas/avro/company_v2.avsc", "AVRO") 43 | .addLocalReference("Address", "schemas/avro/location-address.avsc") 44 | subject("user", "schemas/avro/user.avsc", "AVRO") 45 | .addReference("company", "company", -1) 46 | subject("location-address", "schemas/avro/location-address.avsc", "AVRO") 47 | subject("location-latlong", "schemas/avro/location-latlong.avsc", "AVRO") 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /examples/avro/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id "com.github.imflog.kafka-schema-registry-gradle-plugin" 3 | } 4 | import com.github.imflog.schema.registry.Subject 5 | import com.github.imflog.schema.registry.tasks.download.MetadataExtension 6 | 7 | Subject locationAddressSubject = new Subject('location-address', 'schemas/avro/location-address.avsc', 'AVRO') 8 | Subject locationCoordSubject = new Subject('location-latlong', 'schemas/avro/location-latlong.avsc', 'AVRO') 9 | 10 | schemaRegistry { 11 | url = 'http://localhost:8081' 12 | quiet = false 13 | outputDirectory = 'schemas/avro/results/' 14 | pretty = true 15 | failFast = true 16 | 17 | register { 18 | subject('company', 'schemas/avro/company.avsc', 'AVRO') 19 | .addLocalReference("Address", "schemas/avro/location-address.avsc") 20 | subject('user', 'schemas/avro/user.avsc', 'AVRO') 21 | .addReference('company', 'company', -1) 22 | subject(locationAddressSubject) 23 | subject(locationCoordSubject) 24 | subject("my-record","schemas/avro/my-record.avsc", "AVRO") 25 | .setMetadata("extra/metadata.json") 26 | .setRuleSet("extra/ruleSet.json") 27 | .setNormalized(true) 28 | } 29 | 30 | config { 31 | subject('company', 'FULL_TRANSITIVE') 32 | subject('user', 'FULL_TRANSITIVE') 33 | subject('location-address', 'FULL_TRANSITIVE') 34 
| subject('location-latlong', 'FULL_TRANSITIVE') 35 | } 36 | 37 | download { 38 | metadata = new MetadataExtension(true) 39 | // subject('company', 'schemas/avro/downloaded') // is retrieved via reference from the user subject 40 | subject('user', 'schemas/avro/downloaded', true) 41 | subject('my-record', 'schemas/avro/downloaded', true) 42 | subjectPattern('location.*', 'schemas/avro/downloaded/location') 43 | } 44 | 45 | compatibility { 46 | subject('company', 'schemas/avro/company_v2.avsc', 'AVRO') 47 | .addLocalReference("Address", "schemas/avro/location-address.avsc") 48 | subject('user', 'schemas/avro/user.avsc', 'AVRO') 49 | .addReference('company', 'company', -1) 50 | subject(locationAddressSubject) 51 | subject(locationCoordSubject) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /examples/build.gradle.kts: -------------------------------------------------------------------------------- 1 | buildscript { 2 | repositories { 3 | gradlePluginPortal() 4 | mavenCentral() 5 | maven("https://packages.confluent.io/maven/") 6 | mavenLocal() 7 | } 8 | 9 | dependencies { 10 | classpath("com.github.imflog:kafka-schema-registry-gradle-plugin:2.3.3-SNAPSHOT") 11 | } 12 | } 13 | 14 | plugins { 15 | id("com.avast.gradle.docker-compose") version "0.17.6" apply true 16 | } 17 | 18 | subprojects { 19 | apply(plugin = "docker-compose") 20 | 21 | val currentProject = this 22 | // Creates a run task for all the project 23 | tasks.register("run") { 24 | this.group = "build" 25 | val register = currentProject.tasks.getByName("registerSchemasTask") 26 | val configure = currentProject.tasks.getByName("configSubjectsTask") 27 | val download = currentProject.tasks.getByName("downloadSchemasTask") 28 | val test = currentProject.tasks.getByName("testSchemasTask") 29 | dependsOn(register, configure, download, test) 30 | configure.mustRunAfter(register) 31 | download.mustRunAfter(configure) 32 | test.mustRunAfter(download) 33 | 34 | finalizedBy(currentProject.tasks.getByName("composeDown")) 35 | dependsOn(currentProject.tasks.getByName("composeUp")) 36 | } 37 | 38 | dockerCompose { 39 | useComposeFiles.addAll(listOf("${project.rootDir}/docker-compose.yml")) 40 | captureContainersOutput.set(false) 41 | stopContainers.set(true) 42 | removeContainers.set(true) 43 | removeVolumes.set(true) 44 | removeOrphans.set(true) 45 | forceRecreate.set(true) 46 | setProjectName(project.name) 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /examples/docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | services: 3 | zookeeper: 4 | image: confluentinc/cp-zookeeper:7.2.0 5 | hostname: zookeeper 6 | container_name: zookeeper 7 | ports: 8 | - "2181:2181" 9 | environment: 10 | ZOOKEEPER_CLIENT_PORT: 2181 11 | ZOOKEEPER_TICK_TIME: 2000 12 | 13 | broker: 14 | image: confluentinc/cp-kafka:7.2.0 15 | hostname: broker 16 | container_name: broker 17 | depends_on: 18 | - zookeeper 19 | ports: 20 | - "29092:29092" 21 | - "9092:9092" 22 | environment: 23 | KAFKA_BROKER_ID: 1 24 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' 25 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT 26 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092 27 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 28 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 29 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 30 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 
0 31 | KAFKA_LOG4J_LOGGERS: 'kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO' 32 | 33 | schema-registry: 34 | image: confluentinc/cp-schema-registry:7.2.0 35 | hostname: schema-registry 36 | container_name: schema-registry 37 | depends_on: 38 | - zookeeper 39 | - broker 40 | ports: 41 | - "8081:8081" 42 | environment: 43 | SCHEMA_REGISTRY_HOST_NAME: schema-registry 44 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' 45 | SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'PLAINTEXT://broker:29092' 46 | -------------------------------------------------------------------------------- /examples/extra/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "tags": { 3 | "**.ssn": [ "PII" ] 4 | }, 5 | "properties": { 6 | "owner": "Bob Jones", 7 | "email": "bob@acme.com" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /examples/extra/ruleSet.json: -------------------------------------------------------------------------------- 1 | { 2 | "domainRules": [ 3 | { 4 | "name": "encryptPII", 5 | "kind": "TRANSFORM", 6 | "type": "ENCRYPT", 7 | "mode": "WRITEREAD", 8 | "tags": [ 9 | "PII" 10 | ], 11 | "params": { 12 | "encrypt.kek.name": "kafka-csfle", 13 | "encrypt.kms.key.id": "projects/gcp-project/locations/europe-west6/keyRings/gcp-keyring/cryptoKeys/kafka-csfle", 14 | "encrypt.kms.type": "gcp-kms" 15 | }, 16 | "onFailure": "ERROR,NONE" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /examples/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ImFlog/schema-registry-plugin/cfd09be7c4606b2cfdf121431295cafea40618b0/examples/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /examples/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /examples/gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | # This is normally unused 84 | # shellcheck disable=SC2034 85 | APP_BASE_NAME=${0##*/} 86 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) 87 | APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit 88 | 89 | # Use the maximum available, or set MAX_FD != -1 to use that value. 90 | MAX_FD=maximum 91 | 92 | warn () { 93 | echo "$*" 94 | } >&2 95 | 96 | die () { 97 | echo 98 | echo "$*" 99 | echo 100 | exit 1 101 | } >&2 102 | 103 | # OS specific support (must be 'true' or 'false'). 104 | cygwin=false 105 | msys=false 106 | darwin=false 107 | nonstop=false 108 | case "$( uname )" in #( 109 | CYGWIN* ) cygwin=true ;; #( 110 | Darwin* ) darwin=true ;; #( 111 | MSYS* | MINGW* ) msys=true ;; #( 112 | NONSTOP* ) nonstop=true ;; 113 | esac 114 | 115 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 116 | 117 | 118 | # Determine the Java command to use to start the JVM. 119 | if [ -n "$JAVA_HOME" ] ; then 120 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 121 | # IBM's JDK on AIX uses strange locations for the executables 122 | JAVACMD=$JAVA_HOME/jre/sh/java 123 | else 124 | JAVACMD=$JAVA_HOME/bin/java 125 | fi 126 | if [ ! 
-x "$JAVACMD" ] ; then 127 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 128 | 129 | Please set the JAVA_HOME variable in your environment to match the 130 | location of your Java installation." 131 | fi 132 | else 133 | JAVACMD=java 134 | if ! command -v java >/dev/null 2>&1 135 | then 136 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | fi 142 | 143 | # Increase the maximum file descriptors if we can. 144 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 145 | case $MAX_FD in #( 146 | max*) 147 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 148 | # shellcheck disable=SC2039,SC3045 149 | MAX_FD=$( ulimit -H -n ) || 150 | warn "Could not query maximum file descriptor limit" 151 | esac 152 | case $MAX_FD in #( 153 | '' | soft) :;; #( 154 | *) 155 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 156 | # shellcheck disable=SC2039,SC3045 157 | ulimit -n "$MAX_FD" || 158 | warn "Could not set maximum file descriptor limit to $MAX_FD" 159 | esac 160 | fi 161 | 162 | # Collect all arguments for the java command, stacking in reverse order: 163 | # * args from the command line 164 | # * the main class name 165 | # * -classpath 166 | # * -D...appname settings 167 | # * --module-path (only if needed) 168 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 169 | 170 | # For Cygwin or MSYS, switch paths to Windows format before running java 171 | if "$cygwin" || "$msys" ; then 172 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 173 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 174 | 175 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 176 | 177 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 178 | for arg do 179 | if 180 | case $arg in #( 181 | -*) false ;; # don't mess with options #( 182 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 183 | [ -e "$t" ] ;; #( 184 | *) false ;; 185 | esac 186 | then 187 | arg=$( cygpath --path --ignore --mixed "$arg" ) 188 | fi 189 | # Roll the args list around exactly as many times as the number of 190 | # args, so each arg winds up back in the position where it started, but 191 | # possibly modified. 192 | # 193 | # NB: a `for` loop captures its iteration list before it begins, so 194 | # changing the positional parameters here affects neither the number of 195 | # iterations, nor the values presented in `arg`. 196 | shift # remove old arg 197 | set -- "$@" "$arg" # push replacement arg 198 | done 199 | fi 200 | 201 | 202 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 203 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 204 | 205 | # Collect all arguments for the java command: 206 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, 207 | # and any embedded shellness will be escaped. 208 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be 209 | # treated as '${Hostname}' itself on the command line. 210 | 211 | set -- \ 212 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 213 | -classpath "$CLASSPATH" \ 214 | org.gradle.wrapper.GradleWrapperMain \ 215 | "$@" 216 | 217 | # Stop when "xargs" is not available. 218 | if ! 
command -v xargs >/dev/null 2>&1 219 | then 220 | die "xargs is not available" 221 | fi 222 | 223 | # Use "xargs" to parse quoted args. 224 | # 225 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 226 | # 227 | # In Bash we could simply go: 228 | # 229 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 230 | # set -- "${ARGS[@]}" "$@" 231 | # 232 | # but POSIX shell has neither arrays nor command substitution, so instead we 233 | # post-process each arg (as a line of input to sed) to backslash-escape any 234 | # character that might be a shell metacharacter, then use eval to reverse 235 | # that process (while maintaining the separation between arguments), and wrap 236 | # the whole thing up as a single "set" statement. 237 | # 238 | # This will of course break if any of these variables contains a newline or 239 | # an unmatched quote. 240 | # 241 | 242 | eval "set -- $( 243 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 244 | xargs -n1 | 245 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 246 | tr '\n' ' ' 247 | )" '"$@"' 248 | 249 | exec "$JAVACMD" "$@" 250 | -------------------------------------------------------------------------------- /examples/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%"=="" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%"=="" set DIRNAME=. 29 | @rem This is normally unused 30 | set APP_BASE_NAME=%~n0 31 | set APP_HOME=%DIRNAME% 32 | 33 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 34 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 35 | 36 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 37 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 38 | 39 | @rem Find java.exe 40 | if defined JAVA_HOME goto findJavaFromJavaHome 41 | 42 | set JAVA_EXE=java.exe 43 | %JAVA_EXE% -version >NUL 2>&1 44 | if %ERRORLEVEL% equ 0 goto execute 45 | 46 | echo. 1>&2 47 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 48 | echo. 1>&2 49 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 50 | echo location of your Java installation. 1>&2 51 | 52 | goto fail 53 | 54 | :findJavaFromJavaHome 55 | set JAVA_HOME=%JAVA_HOME:"=% 56 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 57 | 58 | if exist "%JAVA_EXE%" goto execute 59 | 60 | echo. 
1>&2 61 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 62 | echo. 1>&2 63 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 64 | echo location of your Java installation. 1>&2 65 | 66 | goto fail 67 | 68 | :execute 69 | @rem Setup the command line 70 | 71 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 72 | 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if %ERRORLEVEL% equ 0 goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | set EXIT_CODE=%ERRORLEVEL% 85 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 86 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 87 | exit /b %EXIT_CODE% 88 | 89 | :mainEnd 90 | if "%OS%"=="Windows_NT" endlocal 91 | 92 | :omega 93 | -------------------------------------------------------------------------------- /examples/json/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id "com.github.imflog.kafka-schema-registry-gradle-plugin" 3 | } 4 | 5 | import com.github.imflog.schema.registry.Subject 6 | import com.github.imflog.schema.registry.tasks.download.MetadataExtension 7 | 8 | Subject locationAddressSubject = new Subject('location-address', 'schemas/json/location-address.json', 'JSON') 9 | Subject locationCoordSubject = new Subject('location-latlong', 'schemas/json/location-latlong.json', 'JSON') 10 | 11 | schemaRegistry { 12 | url = 'http://localhost:8081' 13 | quiet = false 14 | outputDirectory = 'schemas/json/results/' 15 | pretty = true 16 | failFast = true 17 | 18 | register { 19 | subject('company', 'schemas/json/company.json', 'JSON') 20 | .addLocalReference('Address', 'schemas/json/location-address.json') 21 | subject('user', 'schemas/json/user.json', 'JSON') 22 | .addReference('company', 'company', -1) 23 | subject(locationAddressSubject) 24 | subject(locationCoordSubject) 25 | subject('my-record','schemas/json/my-record.json','JSON') 26 | .setMetadata('extra/metadata.json') 27 | .setRuleSet('extra/ruleSet.json') 28 | .setNormalized(true) 29 | } 30 | 31 | config { 32 | subject('company', 'BACKWARD') 33 | subject('user', 'FULL_TRANSITIVE') 34 | subject('location-address', 'FULL_TRANSITIVE') 35 | subject('location-latlong', 'FULL_TRANSITIVE') 36 | } 37 | 38 | download { 39 | metadata = new MetadataExtension(true) 40 | // subject('company', 'schemas/json/downloaded') // Retrieved via reference from the user subject 41 | subject('user', 'schemas/json/downloaded', true) 42 | subjectPattern('location.*', 'schemas/json/downloaded/location') 43 | } 44 | 45 | compatibility { 46 | subject('company', 'schemas/json/company_v2.json', 'JSON') 47 | .addLocalReference("address", "schemas/json/location-address.json") 48 | subject('user', 'schemas/json/user.json', 'JSON') 49 | .addReference('company', 'company', -1) 50 | subject(locationAddressSubject) 51 | subject(locationCoordSubject) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /examples/protobuf/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id "com.github.imflog.kafka-schema-registry-gradle-plugin" 3 | } 4 | 5 | import 
com.github.imflog.schema.registry.Subject 6 | import com.github.imflog.schema.registry.tasks.download.MetadataExtension 7 | 8 | Subject locationAddressSubject = new Subject('location-address', 'schemas/protobuf/com/example/location-address.proto', 'PROTOBUF') 9 | Subject locationCoordSubject = new Subject('location-latlong', 'schemas/protobuf/com/example/location-latlong.proto', 'PROTOBUF') 10 | 11 | schemaRegistry { 12 | url = 'http://localhost:8081' 13 | quiet = false 14 | outputDirectory = 'schemas/protobuf/results/' 15 | pretty = true 16 | failFast = true 17 | 18 | register { 19 | subject('company', 'schemas/protobuf/com/example/company.proto', 'PROTOBUF') 20 | subject('user', 'schemas/protobuf/com/example/user.proto', 'PROTOBUF') 21 | .addReference('company', 'company', 1) 22 | subject(locationAddressSubject) 23 | subject(locationCoordSubject) 24 | subject('my-record','schemas/protobuf/com/example/my-record.proto','PROTOBUF') 25 | .setMetadata('extra/metadata.json') 26 | .setRuleSet('extra/ruleSet.json') 27 | .setNormalized(true) 28 | } 29 | 30 | config { 31 | subject('company', 'FULL_TRANSITIVE') 32 | subject('user', 'FULL_TRANSITIVE') 33 | subject('location-address', 'FULL_TRANSITIVE') 34 | subject('location-latlong', 'FULL_TRANSITIVE') 35 | } 36 | 37 | download { 38 | metadata = new MetadataExtension(true) 39 | 40 | // subject('company', 'schemas/protobuf/downloaded') // Retrieved via reference from the user subject 41 | subject('user', 'schemas/protobuf/downloaded', true) 42 | subjectPattern('location.*', 'schemas/protobuf/downloaded/location') 43 | } 44 | 45 | compatibility { 46 | subject('company', 'schemas/protobuf/com/example/company_v2.proto', 'PROTOBUF') 47 | subject('user', 'schemas/protobuf/com/example/user.proto', 'PROTOBUF') 48 | .addReference('company', 'company', 1) 49 | subject(locationAddressSubject) 50 | subject(locationCoordSubject) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /examples/schemas/avro/company.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "namespace": "com.github.imflog", 3 | "name": "Company", 4 | "type": "record", 5 | "fields": [ 6 | {"name": "id", "type": "string"}, 7 | {"name": "name", "type": "string"}, 8 | {"name": "address", "type": "Address"}, 9 | {"name": "thumbnail_url", "type": "string" }, 10 | {"name": "company_type", "type": { 11 | "type": "enum", "name": "companyTypes", "symbols": [ 12 | "COMPANY_TYPE_NOT_DEFINED", 13 | "CLIENT", 14 | "SERVICEPROVIDER" 15 | ]} 16 | }, 17 | {"name": "sectors", "type": {"type": "array", "name": "sectorList", "items": "string", "default": []}} 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /examples/schemas/avro/company_v2.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "namespace": "com.github.imflog", 3 | "name": "Company", 4 | "type": "record", 5 | "fields": [ 6 | {"name": "id", "type": "string"}, 7 | {"name": "name", "type": "string"}, 8 | {"name": "address", "type": "Address"}, 9 | {"name": "thumbnail_url", "type": "string" }, 10 | {"name": "company_type", "type": { 11 | "type": "enum", "name": "companyTypes", "symbols": [ 12 | "COMPANY_TYPE_NOT_DEFINED", 13 | "CLIENT", 14 | "SERVICEPROVIDER" 15 | ]} 16 | }, 17 | {"name": "sectors", "type": {"type": "array", "name": "sectorList", "items": "string", "default": []}}, 18 | { 19 | "name": "sub_sectors", 20 | "type": ["null", {"type": "array", "name": 
"subSectorList", "items": "string", "default": []}], 21 | "default": null 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /examples/schemas/avro/location-address.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "namespace": "com.github.imflog", 3 | "name": "Address", 4 | "type": "record", 5 | "fields": [ 6 | {"name": "id", "type": "string"}, 7 | {"name": "address_line_1", "type": "string"}, 8 | {"name": "address_line_2", "type": "string" }, 9 | {"name": "post_code", "type": "string" }, 10 | {"name": "country", "type": "string" } 11 | ] 12 | } -------------------------------------------------------------------------------- /examples/schemas/avro/location-latlong.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "namespace": "com.github.imflog", 3 | "name": "LatLong", 4 | "type": "record", 5 | "fields": [ 6 | {"name": "id", "type": "string"}, 7 | {"name": "latitude", "type": "double"}, 8 | {"name": "longitude", "type": "double" } 9 | ] 10 | } -------------------------------------------------------------------------------- /examples/schemas/avro/my-record.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "namespace": "com.github.imflog", 3 | "type": "record", 4 | "name": "MyRecord", 5 | "fields":[{ 6 | "name":"ssn", 7 | "type":"string", 8 | "confluent:tags": [ "PII" ] 9 | }] 10 | } 11 | -------------------------------------------------------------------------------- /examples/schemas/avro/results/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ImFlog/schema-registry-plugin/cfd09be7c4606b2cfdf121431295cafea40618b0/examples/schemas/avro/results/.keep -------------------------------------------------------------------------------- /examples/schemas/avro/user.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "namespace": "com.github.imflog", 3 | "name": "User", 4 | "type": "record", 5 | "fields": [ 6 | {"name": "id", "type": "string"}, 7 | {"name": "fist_name", "type": "string"}, 8 | {"name": "last_name", "type": "string"}, 9 | {"name": "thumbnail_url", "type": "string" }, 10 | {"name": "email", "type": "string"}, 11 | {"name": "company", "type": "Company"} 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /examples/schemas/json/company.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "$id": "company", 4 | "type": "object", 5 | "properties": { 6 | "id": { 7 | "type": "string" 8 | }, 9 | "name": { 10 | "type": "string" 11 | }, 12 | "address": { 13 | "$ref": "#address" 14 | }, 15 | "thumbnail_url": { 16 | "type": "string" 17 | }, 18 | "company_type": { 19 | "type": "string" 20 | }, 21 | "sectors": { 22 | "type": "array", 23 | "items": { 24 | "type": "string" 25 | } 26 | } 27 | }, 28 | "required": [ 29 | "id" 30 | ], 31 | "additionalProperties": false 32 | } 33 | -------------------------------------------------------------------------------- /examples/schemas/json/company_v2.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "$id": "company", 4 | "type": "object", 5 | "properties": { 6 | "id": { 7 | "type": "string" 8 | }, 9 | 
"name": { 10 | "type": "string" 11 | }, 12 | "address": { 13 | "$ref": "#address" 14 | }, 15 | "thumbnail_url": { 16 | "type": "string" 17 | }, 18 | "company_type": { 19 | "type": "string" 20 | }, 21 | "sectors": { 22 | "type": "array", 23 | "items": { 24 | "type": "string" 25 | } 26 | }, 27 | "sub_sectors": { 28 | "type": [ 29 | "null", 30 | "array" 31 | ], 32 | "items": { 33 | "type": "string" 34 | } 35 | } 36 | }, 37 | "required": [ 38 | "id" 39 | ], 40 | "additionalProperties": false 41 | } 42 | -------------------------------------------------------------------------------- /examples/schemas/json/location-address.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "$id": "address", 4 | "type": "object", 5 | "properties": { 6 | "id": { 7 | "type": "string" 8 | }, 9 | "address_line_1": { 10 | "type": "string" 11 | }, 12 | "address_line_2": { 13 | "type": "string" 14 | }, 15 | "post_code": { 16 | "type": "string" 17 | }, 18 | "country": { 19 | "type": "string" 20 | } 21 | }, 22 | "additionalProperties": false 23 | } 24 | -------------------------------------------------------------------------------- /examples/schemas/json/location-latlong.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "$id": "latlong", 4 | 5 | "type": "object", 6 | "properties": { 7 | "id": {"type": "string"}, 8 | "latitude": {"type": "number"}, 9 | "longitude": {"type": "number"} 10 | }, 11 | "additionalProperties": false 12 | } 13 | -------------------------------------------------------------------------------- /examples/schemas/json/my-record.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "$id": "MyRecord", 4 | "type": "object", 5 | "properties": { 6 | "ssn": { 7 | "type": "string", 8 | "confluent:tags": [ "PII" ] 9 | } 10 | }, 11 | "additionalProperties": false 12 | } 13 | -------------------------------------------------------------------------------- /examples/schemas/json/results/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ImFlog/schema-registry-plugin/cfd09be7c4606b2cfdf121431295cafea40618b0/examples/schemas/json/results/.keep -------------------------------------------------------------------------------- /examples/schemas/json/user.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "$id": "user", 4 | "type": "object", 5 | "properties": { 6 | "id": { 7 | "type": "string" 8 | }, 9 | "first_name": { 10 | "type": "string" 11 | }, 12 | "last_name": { 13 | "type": "string" 14 | }, 15 | "thumbnail_url": { 16 | "type": "string" 17 | }, 18 | "email": { 19 | "type": "string" 20 | }, 21 | "company": { 22 | "$ref": "#company" 23 | } 24 | }, 25 | "additionalProperties": false 26 | } 27 | -------------------------------------------------------------------------------- /examples/schemas/protobuf/com/example/company.proto: -------------------------------------------------------------------------------- 1 | // [START declaration] 2 | syntax = "proto3"; 3 | package proto; 4 | // [END declaration] 5 | 6 | // [START java_declaration] 7 | option java_package = "com.example"; 8 | option java_outer_classname = "CompanyProtos"; 9 | // [END 
java_declaration] 10 | 11 | message Company { 12 | enum CompanyTypeEnum { 13 | COMPANY_TYPE_NOT_DEFINED = 0; 14 | CLIENT = 1; 15 | SERVICEPROVIDER = 2; 16 | } 17 | 18 | string id = 1; 19 | string name = 2; 20 | string thumbnail_url = 3; 21 | CompanyTypeEnum company_type = 4; 22 | repeated string sectors = 5; 23 | } 24 | -------------------------------------------------------------------------------- /examples/schemas/protobuf/com/example/company_v2.proto: -------------------------------------------------------------------------------- 1 | // [START declaration] 2 | syntax = "proto3"; 3 | package proto; 4 | // [END declaration] 5 | 6 | // [START java_declaration] 7 | option java_package = "com.example"; 8 | option java_outer_classname = "CompanyProtos"; 9 | // [END java_declaration] 10 | 11 | message Company { 12 | enum CompanyTypeEnum { 13 | COMPANY_TYPE_NOT_DEFINED = 0; 14 | CLIENT = 1; 15 | SERVICEPROVIDER = 2; 16 | } 17 | 18 | string id = 1; 19 | string name = 2; 20 | string thumbnail_url = 3; 21 | CompanyTypeEnum company_type = 4; 22 | repeated string sectors = 5; 23 | repeated string sub_sectors = 6; 24 | } 25 | -------------------------------------------------------------------------------- /examples/schemas/protobuf/com/example/location-address.proto: -------------------------------------------------------------------------------- 1 | // [START declaration] 2 | syntax = "proto3"; 3 | package proto; 4 | import "company"; 5 | // [END declaration] 6 | 7 | // [START java_declaration] 8 | option java_package = "com.example"; 9 | option java_outer_classname = "AddressProtos"; 10 | // [END java_declaration] 11 | 12 | message Address { 13 | string id = 1; 14 | string address_line_1 = 3; 15 | string address_line_2 = 4; 16 | string post_code = 5; 17 | string country = 6; 18 | } 19 | -------------------------------------------------------------------------------- /examples/schemas/protobuf/com/example/location-latlong.proto: -------------------------------------------------------------------------------- 1 | // [START declaration] 2 | syntax = "proto3"; 3 | package proto; 4 | import "company"; 5 | // [END declaration] 6 | 7 | // [START java_declaration] 8 | option java_package = "com.example"; 9 | option java_outer_classname = "LatLongProtos"; 10 | // [END java_declaration] 11 | 12 | message LatLong { 13 | string id = 1; 14 | double latitude = 3; 15 | double longitude = 4; 16 | } 17 | -------------------------------------------------------------------------------- /examples/schemas/protobuf/com/example/my-record.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | import "confluent/meta.proto"; 4 | 5 | 6 | message MyRecord { 7 | string ssn = 1 [ 8 | (confluent.field_meta).tags = "PII" 9 | ]; 10 | } 11 | -------------------------------------------------------------------------------- /examples/schemas/protobuf/com/example/user.proto: -------------------------------------------------------------------------------- 1 | // [START declaration] 2 | syntax = "proto3"; 3 | package proto; 4 | import "company"; 5 | // [END declaration] 6 | 7 | // [START java_declaration] 8 | option java_package = "com.example"; 9 | option java_outer_classname = "UserProtos"; 10 | // [END java_declaration] 11 | 12 | message User { 13 | string id = 1; 14 | string first_name = 3; 15 | string last_name = 4; 16 | string thumbnail_url = 5; 17 | string email = 6; 18 | oneof companyid_or_company { 19 | string company_id = 7; 20 | Company company = 8; 21 | }; 22 | 
} 23 | -------------------------------------------------------------------------------- /examples/schemas/protobuf/results/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ImFlog/schema-registry-plugin/cfd09be7c4606b2cfdf121431295cafea40618b0/examples/schemas/protobuf/results/.keep -------------------------------------------------------------------------------- /examples/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "examples" 2 | include( 3 | "avro", 4 | "avro-kts", 5 | "protobuf", 6 | "json", 7 | "ssl" 8 | ) 9 | -------------------------------------------------------------------------------- /examples/ssl/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id "com.github.imflog.kafka-schema-registry-gradle-plugin" 3 | } 4 | 5 | dockerCompose { 6 | useComposeFiles = ["$project.rootDir/ssl/docker-compose.yml"] 7 | captureContainersOutput = false 8 | stopContainers = true 9 | removeContainers = true 10 | removeVolumes = true 11 | removeOrphans = true 12 | forceRecreate = true 13 | projectName = project.name 14 | } 15 | 16 | schemaRegistry { 17 | url = 'https://localhost:8181' 18 | ssl { 19 | configs = [ 20 | "ssl.truststore.location": "$project.rootDir/../src/integration/resources/secrets/registry.truststore.jks", 21 | "ssl.truststore.password": "registry", 22 | "ssl.keystore.location" : "$project.rootDir/../src/integration/resources/secrets/registry.keystore.jks", 23 | "ssl.keystore.password" : "registry" 24 | ] 25 | } 26 | 27 | register { 28 | subject('company', 'schemas/avro/company.avsc', 'AVRO') 29 | .addLocalReference("Address", "schemas/avro/location-address.avsc") 30 | subject('user', 'schemas/avro/user.avsc', 'AVRO') 31 | .addReference('company', 'company', -1) 32 | } 33 | 34 | config { 35 | subject('company', 'FULL_TRANSITIVE') 36 | subject('user', 'FULL_TRANSITIVE') 37 | } 38 | 39 | download { 40 | subject('company', 'schemas/avro/downloaded') 41 | subject('user', 'schemas/avro/downloaded') 42 | } 43 | 44 | compatibility { 45 | subject('company', 'schemas/avro/company_v2.avsc', 'AVRO') 46 | .addLocalReference("Address", "schemas/avro/location-address.avsc") 47 | subject('user', 'schemas/avro/user.avsc', 'AVRO') 48 | .addReference('company', 'company', -1) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /examples/ssl/docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: '3' 3 | services: 4 | zookeeper: 5 | image: confluentinc/cp-zookeeper:7.2.0 6 | hostname: zookeeper 7 | ports: 8 | - "2181:2181" 9 | environment: 10 | ZOOKEEPER_CLIENT_PORT: 2181 11 | 12 | broker: 13 | image: confluentinc/cp-kafka:7.2.0 14 | hostname: broker 15 | depends_on: 16 | - zookeeper 17 | ports: 18 | - "29092:29092" 19 | - "9092:9092" 20 | environment: 21 | KAFKA_BROKER_ID: 1 22 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' 23 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT 24 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092 25 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 26 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 27 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 28 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 29 | KAFKA_LOG4J_LOGGERS: 
'kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO' 30 | 31 | schema-registry: 32 | image: confluentinc/cp-schema-registry:7.2.0 33 | hostname: 'schema-registry' 34 | depends_on: 35 | - zookeeper 36 | - broker 37 | ports: 38 | - "8181:8181" 39 | environment: 40 | SCHEMA_REGISTRY_LISTENERS: 'https://0.0.0.0:8181' 41 | SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'PLAINTEXT://broker:29092' 42 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' 43 | SCHEMA_REGISTRY_HOST_NAME: 'schema-registry' 44 | SCHEMA_REGISTRY_SSL_KEYSTORE_LOCATION: '/etc/schema-registry/secrets/registry.keystore.jks' 45 | SCHEMA_REGISTRY_SSL_KEYSTORE_PASSWORD: 'registry' 46 | SCHEMA_REGISTRY_SSL_KEY_PASSWORD: 'registry' 47 | SCHEMA_REGISTRY_SSL_TRUSTSTORE_LOCATION: '/etc/schema-registry/secrets/registry.truststore.jks' 48 | SCHEMA_REGISTRY_SSL_TRUSTSTORE_PASSWORD: 'registry' 49 | SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: 'https' 50 | SCHEMA_REGISTRY_SCHEMA_REGISTRY_GROUP_ID: 'ssl-group' 51 | SCHEMA_REGISTRY_KAFKASTORE_TOPIC: '_ssl_schemas' 52 | SCHEMA_REGISTRY_SSL_CLIENT_AUTHENTICATION: 'REQUIRED' 53 | volumes: 54 | - ../../src/integration/resources/secrets:/etc/schema-registry/secrets 55 | -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | kotlin.code.style=official 2 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ImFlog/schema-registry-plugin/cfd09be7c4606b2cfdf121431295cafea40618b0/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip 4 | networkTimeout=10000 5 | zipStoreBase=GRADLE_USER_HOME 6 | zipStorePath=wrapper/dists 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | # This is normally unused 84 | # shellcheck disable=SC2034 85 | APP_BASE_NAME=${0##*/} 86 | APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit 87 | 88 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 89 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 90 | 91 | # Use the maximum available, or set MAX_FD != -1 to use that value. 92 | MAX_FD=maximum 93 | 94 | warn () { 95 | echo "$*" 96 | } >&2 97 | 98 | die () { 99 | echo 100 | echo "$*" 101 | echo 102 | exit 1 103 | } >&2 104 | 105 | # OS specific support (must be 'true' or 'false'). 106 | cygwin=false 107 | msys=false 108 | darwin=false 109 | nonstop=false 110 | case "$( uname )" in #( 111 | CYGWIN* ) cygwin=true ;; #( 112 | Darwin* ) darwin=true ;; #( 113 | MSYS* | MINGW* ) msys=true ;; #( 114 | NONSTOP* ) nonstop=true ;; 115 | esac 116 | 117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 118 | 119 | 120 | # Determine the Java command to use to start the JVM. 
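# As a quick illustration of the lookup below (the paths and values are examples only, not
# project defaults), pointing JAVA_HOME at a JDK installation before invoking the wrapper is
# enough; GRADLE_OPTS and JAVA_OPTS may likewise be exported to pass extra JVM options, as
# described above:
#
#   JAVA_HOME=/usr/lib/jvm/java-8-temurin ./gradlew build
#   GRADLE_OPTS='"-Xmx1g"' ./gradlew build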
121 | if [ -n "$JAVA_HOME" ] ; then 122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 123 | # IBM's JDK on AIX uses strange locations for the executables 124 | JAVACMD=$JAVA_HOME/jre/sh/java 125 | else 126 | JAVACMD=$JAVA_HOME/bin/java 127 | fi 128 | if [ ! -x "$JAVACMD" ] ; then 129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 130 | 131 | Please set the JAVA_HOME variable in your environment to match the 132 | location of your Java installation." 133 | fi 134 | else 135 | JAVACMD=java 136 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | 142 | # Increase the maximum file descriptors if we can. 143 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 144 | case $MAX_FD in #( 145 | max*) 146 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 147 | # shellcheck disable=SC3045 148 | MAX_FD=$( ulimit -H -n ) || 149 | warn "Could not query maximum file descriptor limit" 150 | esac 151 | case $MAX_FD in #( 152 | '' | soft) :;; #( 153 | *) 154 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 155 | # shellcheck disable=SC3045 156 | ulimit -n "$MAX_FD" || 157 | warn "Could not set maximum file descriptor limit to $MAX_FD" 158 | esac 159 | fi 160 | 161 | # Collect all arguments for the java command, stacking in reverse order: 162 | # * args from the command line 163 | # * the main class name 164 | # * -classpath 165 | # * -D...appname settings 166 | # * --module-path (only if needed) 167 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 168 | 169 | # For Cygwin or MSYS, switch paths to Windows format before running java 170 | if "$cygwin" || "$msys" ; then 171 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 172 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 173 | 174 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 175 | 176 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 177 | for arg do 178 | if 179 | case $arg in #( 180 | -*) false ;; # don't mess with options #( 181 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 182 | [ -e "$t" ] ;; #( 183 | *) false ;; 184 | esac 185 | then 186 | arg=$( cygpath --path --ignore --mixed "$arg" ) 187 | fi 188 | # Roll the args list around exactly as many times as the number of 189 | # args, so each arg winds up back in the position where it started, but 190 | # possibly modified. 191 | # 192 | # NB: a `for` loop captures its iteration list before it begins, so 193 | # changing the positional parameters here affects neither the number of 194 | # iterations, nor the values presented in `arg`. 195 | shift # remove old arg 196 | set -- "$@" "$arg" # push replacement arg 197 | done 198 | fi 199 | 200 | # Collect all arguments for the java command; 201 | # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of 202 | # shell script including quotes and variable substitutions, so put them in 203 | # double quotes to make sure that they get re-expanded; and 204 | # * put everything else in single quotes, so that it's not re-expanded. 205 | 206 | set -- \ 207 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 208 | -classpath "$CLASSPATH" \ 209 | org.gradle.wrapper.GradleWrapperMain \ 210 | "$@" 211 | 212 | # Stop when "xargs" is not available. 213 | if ! 
command -v xargs >/dev/null 2>&1 214 | then 215 | die "xargs is not available" 216 | fi 217 | 218 | # Use "xargs" to parse quoted args. 219 | # 220 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 221 | # 222 | # In Bash we could simply go: 223 | # 224 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 225 | # set -- "${ARGS[@]}" "$@" 226 | # 227 | # but POSIX shell has neither arrays nor command substitution, so instead we 228 | # post-process each arg (as a line of input to sed) to backslash-escape any 229 | # character that might be a shell metacharacter, then use eval to reverse 230 | # that process (while maintaining the separation between arguments), and wrap 231 | # the whole thing up as a single "set" statement. 232 | # 233 | # This will of course break if any of these variables contains a newline or 234 | # an unmatched quote. 235 | # 236 | 237 | eval "set -- $( 238 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 239 | xargs -n1 | 240 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 241 | tr '\n' ' ' 242 | )" '"$@"' 243 | 244 | exec "$JAVACMD" "$@" 245 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%"=="" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%"=="" set DIRNAME=. 29 | @rem This is normally unused 30 | set APP_BASE_NAME=%~n0 31 | set APP_HOME=%DIRNAME% 32 | 33 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 34 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 35 | 36 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 37 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 38 | 39 | @rem Find java.exe 40 | if defined JAVA_HOME goto findJavaFromJavaHome 41 | 42 | set JAVA_EXE=java.exe 43 | %JAVA_EXE% -version >NUL 2>&1 44 | if %ERRORLEVEL% equ 0 goto execute 45 | 46 | echo. 47 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 48 | echo. 49 | echo Please set the JAVA_HOME variable in your environment to match the 50 | echo location of your Java installation. 51 | 52 | goto fail 53 | 54 | :findJavaFromJavaHome 55 | set JAVA_HOME=%JAVA_HOME:"=% 56 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 57 | 58 | if exist "%JAVA_EXE%" goto execute 59 | 60 | echo. 61 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 62 | echo. 
63 | echo Please set the JAVA_HOME variable in your environment to match the 64 | echo location of your Java installation. 65 | 66 | goto fail 67 | 68 | :execute 69 | @rem Setup the command line 70 | 71 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 72 | 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if %ERRORLEVEL% equ 0 goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | set EXIT_CODE=%ERRORLEVEL% 85 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 86 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 87 | exit /b %EXIT_CODE% 88 | 89 | :mainEnd 90 | if "%OS%"=="Windows_NT" endlocal 91 | 92 | :omega 93 | -------------------------------------------------------------------------------- /settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "kafka-schema-registry-gradle-plugin" 2 | 3 | -------------------------------------------------------------------------------- /src/integration/kotlin/com/github/imflog/schema/registry/security/SslIT.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.security 2 | 3 | import com.github.imflog.schema.registry.tasks.config.ConfigTask 4 | import com.github.imflog.schema.registry.utils.KafkaTestContainersUtils 5 | import org.assertj.core.api.Assertions 6 | import org.gradle.internal.impldep.org.junit.rules.TemporaryFolder 7 | import org.gradle.testkit.runner.BuildResult 8 | import org.gradle.testkit.runner.GradleRunner 9 | import org.gradle.testkit.runner.TaskOutcome 10 | import org.junit.jupiter.api.AfterEach 11 | import org.junit.jupiter.api.BeforeEach 12 | import org.junit.jupiter.api.Test 13 | import java.io.File 14 | 15 | class SslIT : KafkaTestContainersUtils() { 16 | 17 | private val folderRule: TemporaryFolder = TemporaryFolder() 18 | private lateinit var buildFile: File 19 | 20 | @BeforeEach 21 | fun init() { 22 | folderRule.create() 23 | 24 | val keystoreFile = folderRule.newFile("registry.keystore.jks") 25 | keystoreFile.writeBytes( 26 | SslIT::class.java 27 | .getResource("/secrets/registry.keystore.jks") 28 | .readBytes() 29 | ) 30 | val truststore = folderRule.newFile("registry.truststore.jks") 31 | truststore.writeBytes( 32 | SslIT::class.java 33 | .getResource("/secrets/registry.truststore.jks") 34 | .readBytes() 35 | ) 36 | } 37 | 38 | @AfterEach 39 | fun tearDown() { 40 | folderRule.delete() 41 | } 42 | 43 | @Test 44 | fun `Should fail with incorrect ssl property`() { 45 | buildFile = folderRule.newFile("build.gradle") 46 | buildFile.writeText( 47 | """ 48 | plugins { 49 | id 'java' 50 | id 'com.github.imflog.kafka-schema-registry-gradle-plugin' 51 | } 52 | schemaRegistry { 53 | url = '$schemaRegistrySslEndpoint' 54 | ssl { 55 | configs = [ 56 | "ssl.truststore.location": "${folderRule.root.absolutePath}/registry.truststore.jks", 57 | "ssl.truststore.password": "registry", 58 | "ssl.keystore.location": "${folderRule.root.absolutePath}/registry.keystore.jks", 59 | "ssl.keystore.password": "registry", 60 | "foo": "bar" 61 | ] 62 | } 63 | config { 64 | subject('testSubject1', 'FULL_TRANSITIVE') 65 | } 66 | } 67 | """ 68 | ) 69 | 70 | val result: 
BuildResult? = GradleRunner.create() 71 | .withGradleVersion("8.6") 72 | .withProjectDir(folderRule.root) 73 | .withArguments(ConfigTask.TASK_NAME) 74 | .withPluginClasspath() 75 | .withDebug(true) 76 | .buildAndFail() 77 | Assertions 78 | .assertThat(result?.task(":configSubjectsTask")?.outcome) 79 | .isEqualTo(TaskOutcome.FAILED) 80 | } 81 | 82 | @Test 83 | fun `Should use SSL correctly`() { 84 | buildFile = folderRule.newFile("build.gradle") 85 | buildFile.writeText( 86 | """ 87 | plugins { 88 | id 'java' 89 | id 'com.github.imflog.kafka-schema-registry-gradle-plugin' 90 | } 91 | schemaRegistry { 92 | url = '$schemaRegistrySslEndpoint' 93 | ssl { 94 | configs = [ 95 | "ssl.truststore.location": "${folderRule.root.absolutePath}/registry.truststore.jks", 96 | "ssl.truststore.password": "registry", 97 | "ssl.keystore.location": "${folderRule.root.absolutePath}/registry.keystore.jks", 98 | "ssl.keystore.password": "registry" 99 | ] 100 | } 101 | config { 102 | subject('testSubject1', 'FULL_TRANSITIVE') 103 | } 104 | } 105 | """ 106 | ) 107 | 108 | val result: BuildResult? = GradleRunner.create() 109 | .withGradleVersion("8.6") 110 | .withProjectDir(folderRule.root) 111 | .withArguments(ConfigTask.TASK_NAME) 112 | .withPluginClasspath() 113 | .withDebug(true) 114 | .build() 115 | Assertions 116 | .assertThat(result?.task(":configSubjectsTask")?.outcome) 117 | .isEqualTo(TaskOutcome.SUCCESS) 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /src/integration/kotlin/com/github/imflog/schema/registry/tasks/config/ConfigTaskIT.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.config 2 | 3 | import com.github.imflog.schema.registry.utils.KafkaTestContainersUtils 4 | import org.assertj.core.api.Assertions 5 | import org.gradle.internal.impldep.org.junit.rules.TemporaryFolder 6 | import org.gradle.testkit.runner.BuildResult 7 | import org.gradle.testkit.runner.GradleRunner 8 | import org.gradle.testkit.runner.TaskOutcome 9 | import org.junit.jupiter.api.AfterEach 10 | import org.junit.jupiter.api.BeforeEach 11 | import org.junit.jupiter.api.Test 12 | import java.io.File 13 | 14 | class ConfigTaskIT : KafkaTestContainersUtils() { 15 | private lateinit var folderRule: TemporaryFolder 16 | private lateinit var buildFile: File 17 | 18 | @BeforeEach 19 | fun init() { 20 | folderRule = TemporaryFolder() 21 | } 22 | 23 | @AfterEach 24 | fun tearDown() { 25 | folderRule.delete() 26 | } 27 | 28 | @Test 29 | fun `ConfigTask should set subject compatibility`() { 30 | folderRule.create() 31 | buildFile = folderRule.newFile("build.gradle") 32 | buildFile.writeText( 33 | """ 34 | plugins { 35 | id 'java' 36 | id 'com.github.imflog.kafka-schema-registry-gradle-plugin' 37 | } 38 | 39 | schemaRegistry { 40 | url = '$schemaRegistryEndpoint' 41 | config { 42 | subject('testSubject1', 'FULL_TRANSITIVE') 43 | } 44 | } 45 | """.trimIndent() 46 | ) 47 | 48 | val result: BuildResult? 
= GradleRunner.create() 49 | .withGradleVersion("8.6") 50 | .withProjectDir(folderRule.root) 51 | .withArguments(ConfigTask.TASK_NAME) 52 | .withPluginClasspath() 53 | .withDebug(true) 54 | .build() 55 | Assertions.assertThat(result?.task(":configSubjectsTask")?.outcome).isEqualTo(TaskOutcome.SUCCESS) 56 | } 57 | 58 | @Test 59 | fun `ConfigTask should detect and reject invalid compatibility settings`() { 60 | folderRule.create() 61 | buildFile = folderRule.newFile("build.gradle") 62 | buildFile.writeText( 63 | """ 64 | plugins { 65 | id 'java' 66 | id 'com.github.imflog.kafka-schema-registry-gradle-plugin' 67 | } 68 | 69 | schemaRegistry { 70 | url = '$schemaRegistryEndpoint' 71 | config { 72 | subject('testSubject1', 'FULL_TRANSITIVE') 73 | subject('testSubject2', 'FUL_TRANSITIVE') // intentionally broken 74 | } 75 | } 76 | """.trimIndent() 77 | ) 78 | 79 | val result: BuildResult? = GradleRunner.create() 80 | .withGradleVersion("8.6") 81 | .withProjectDir(folderRule.root) 82 | .withArguments(ConfigTask.TASK_NAME) 83 | .withPluginClasspath() 84 | .withDebug(true) 85 | .buildAndFail() 86 | Assertions.assertThat(result?.task(":configSubjectsTask")?.outcome).isEqualTo(TaskOutcome.FAILED) 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /src/integration/kotlin/com/github/imflog/schema/registry/utils/KafkaTestContainersUtils.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.utils 2 | 3 | import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider 4 | import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient 5 | import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider 6 | import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider 7 | import org.junit.jupiter.api.AfterAll 8 | import org.junit.jupiter.api.BeforeAll 9 | import org.testcontainers.containers.GenericContainer 10 | import org.testcontainers.containers.KafkaContainer 11 | import org.testcontainers.containers.Network 12 | import org.testcontainers.utility.DockerImageName 13 | import java.io.File 14 | 15 | abstract class KafkaTestContainersUtils { 16 | 17 | companion object { 18 | private const val SCHEMA_REGISTRY_INTERNAL_PORT = 8081 19 | 20 | private val CONFLUENT_VERSION = System.getenv().getOrDefault("KAFKA_VERSION", "7.6.0") 21 | private val KAFKA_NETWORK_ALIAS = "kafka-${CONFLUENT_VERSION}" 22 | 23 | private val network: Network = Network.newNetwork() 24 | private val kafkaContainer: KafkaContainer by lazy { 25 | KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:$CONFLUENT_VERSION")) 26 | .withNetwork(network) 27 | .withNetworkAliases(KAFKA_NETWORK_ALIAS) 28 | } 29 | val schemaRegistryContainer: GenericContainer<*> by lazy { 30 | GenericContainer("confluentinc/cp-schema-registry:$CONFLUENT_VERSION") 31 | .withNetwork(network) 32 | .withExposedPorts(SCHEMA_REGISTRY_INTERNAL_PORT) 33 | .withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", "PLAINTEXT://$KAFKA_NETWORK_ALIAS:9092") 34 | .withEnv("SCHEMA_REGISTRY_HOST_NAME", "schema-registry") 35 | } 36 | 37 | val schemaRegistrySslContainer: GenericContainer<*> by lazy { 38 | GenericContainer("confluentinc/cp-schema-registry:$CONFLUENT_VERSION") 39 | .withNetwork(network) 40 | .withExposedPorts(SCHEMA_REGISTRY_INTERNAL_PORT) 41 | .withEnv("SCHEMA_REGISTRY_LISTENERS", "https://0.0.0.0:$SCHEMA_REGISTRY_INTERNAL_PORT") 42 | .withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", 
"PLAINTEXT://$KAFKA_NETWORK_ALIAS:9092") 43 | .withEnv("SCHEMA_REGISTRY_HOST_NAME", "registry-ssl") 44 | .withEnv("SCHEMA_REGISTRY_SSL_KEYSTORE_LOCATION", "/etc/schema-registry/secrets/registry.keystore.jks") 45 | .withEnv("SCHEMA_REGISTRY_SSL_KEYSTORE_PASSWORD", "registry") 46 | .withEnv("SCHEMA_REGISTRY_SSL_KEY_PASSWORD", "registry") 47 | .withEnv( 48 | "SCHEMA_REGISTRY_SSL_TRUSTSTORE_LOCATION", 49 | "/etc/schema-registry/secrets/registry.truststore.jks" 50 | ) 51 | .withEnv("SCHEMA_REGISTRY_SSL_TRUSTSTORE_PASSWORD", "registry") 52 | .withEnv("SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL", "https") 53 | .withEnv("SCHEMA_REGISTRY_SCHEMA_REGISTRY_GROUP_ID", "schema-registry-ssl") 54 | .withEnv("SCHEMA_REGISTRY_KAFKASTORE_TOPIC", "_ssl_schemas") 55 | .withEnv("SCHEMA_REGISTRY_SSL_CLIENT_AUTHENTICATION", "REQUIRED") 56 | .withFileSystemBind( 57 | File(KafkaTestContainersUtils::class.java.getResource("/secrets").toURI()).absolutePath, 58 | "/etc/schema-registry/secrets" 59 | ) 60 | } 61 | 62 | init { 63 | kafkaContainer.start() 64 | schemaRegistryContainer.start() 65 | schemaRegistrySslContainer.start() 66 | } 67 | } 68 | 69 | val schemaRegistryEndpoint: String by lazy { 70 | val port = schemaRegistryContainer.getMappedPort(SCHEMA_REGISTRY_INTERNAL_PORT) 71 | "http://${schemaRegistryContainer.host}:$port" 72 | } 73 | 74 | val schemaRegistrySslEndpoint: String by lazy { 75 | val port = schemaRegistrySslContainer.getMappedPort(SCHEMA_REGISTRY_INTERNAL_PORT) 76 | "https://${schemaRegistrySslContainer.host}:$port" 77 | } 78 | 79 | val client by lazy { 80 | CachedSchemaRegistryClient( 81 | listOf(schemaRegistryEndpoint), 82 | 100, 83 | listOf(AvroSchemaProvider(), ProtobufSchemaProvider(), JsonSchemaProvider()), 84 | mapOf() 85 | ) 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /src/integration/resources/generate-crt.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # extracted and adapted from https://github.com/Pierrotws/kafka-ssl-compose 4 | 5 | set -o nounset -o errexit 6 | 7 | printf "Deleting previous (if any)..." 8 | rm -rf secrets 9 | mkdir secrets 10 | mkdir -p tmp 11 | echo " OK!" 12 | # Generate CA key 13 | printf "Creating CA..." 14 | openssl req -new -x509 -keyout tmp/registry-ca.key -out tmp/registry-ca.crt -days 365 -subj '/CN=ca.registry/OU=registry/O=registry/L=paris/C=fr' -passin pass:registry -passout pass:registry 15 | 16 | echo " OK!" 17 | 18 | printf "Creating cert and keystore of registry..." 
19 | # Create keystores 20 | keytool -genkey -noprompt \ 21 | -alias registry \ 22 | -dname "CN=localhost, OU=registry, O=registry, L=paris, C=fr" \ 23 | -keystore secrets/registry.keystore.jks \ 24 | -keyalg RSA \ 25 | -storepass registry \ 26 | -keypass registry 27 | 28 | # Create CSR, sign the key and import back into keystore 29 | keytool -keystore secrets/registry.keystore.jks -alias registry -certreq -file tmp/registry.csr -storepass registry -keypass registry 30 | 31 | openssl x509 -req -CA tmp/registry-ca.crt -CAkey tmp/registry-ca.key -in tmp/registry.csr -out tmp/registry-ca-signed.crt -days 3650 -CAcreateserial -passin pass:registry 32 | 33 | keytool -keystore secrets/registry.keystore.jks -alias CARoot -import -noprompt -file tmp/registry-ca.crt -storepass registry -keypass registry 34 | 35 | keytool -keystore secrets/registry.keystore.jks -alias registry -import -file tmp/registry-ca-signed.crt -storepass registry -keypass registry 36 | 37 | # Create truststore and import the CA cert. 38 | keytool -keystore secrets/registry.truststore.jks -alias CARoot -import -noprompt -file tmp/registry-ca.crt -storepass registry -keypass registry 39 | echo " OK!" 40 | 41 | echo "registry" >secrets/cert_creds 42 | rm -rf tmp 43 | 44 | echo "SUCCEEDED" 45 | -------------------------------------------------------------------------------- /src/integration/resources/secrets/cert_creds: -------------------------------------------------------------------------------- 1 | registry 2 | -------------------------------------------------------------------------------- /src/integration/resources/secrets/registry.keystore.jks: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ImFlog/schema-registry-plugin/cfd09be7c4606b2cfdf121431295cafea40618b0/src/integration/resources/secrets/registry.keystore.jks -------------------------------------------------------------------------------- /src/integration/resources/secrets/registry.truststore.jks: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ImFlog/schema-registry-plugin/cfd09be7c4606b2cfdf121431295cafea40618b0/src/integration/resources/secrets/registry.truststore.jks -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/Exceptions.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry 2 | 3 | class UnknownSchemaTypeException(schemaType: String) : Exception("Unknown schema type provider $schemaType") 4 | class SchemaParsingException( 5 | subject: String, 6 | type: SchemaType, message: String? 
7 | ) : 8 | Exception("Could not parse schema $subject of type ${type.registryType}" + (message?.let { ": $it" } ?: "")) { 9 | 10 | constructor( 11 | subject: String, 12 | type: SchemaType 13 | ) : this(subject, type, null) 14 | } 15 | 16 | class MixedReferenceException : Exception("You cannot mix local and remote references") 17 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/LocalReference.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry 2 | 3 | import java.io.File 4 | 5 | data class LocalReference( 6 | val name: String, 7 | val path: String 8 | ) { 9 | fun content(rootDir: File) = rootDir.resolve(path).readText() 10 | } 11 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/LoggingUtils.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry 2 | 3 | import org.slf4j.Logger 4 | 5 | object LoggingUtils { 6 | 7 | var quietLogging: Boolean = false 8 | 9 | /** 10 | * Utility method that checks if the quiet logging is activated before logging. 11 | * This is needed because we cannot set a log level per task. 12 | * See https://github.com/gradle/gradle/issues/1010 13 | */ 14 | fun Logger.infoIfNotQuiet(message: String) { 15 | if (!quietLogging) this.info(message) 16 | } 17 | } -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/RegistryClientWrapper.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry 2 | 3 | import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider 4 | import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient 5 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient 6 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClientConfig 7 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClientConfig.CLIENT_NAMESPACE 8 | import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider 9 | import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider 10 | import org.gradle.api.GradleException 11 | import org.slf4j.LoggerFactory 12 | 13 | 14 | /** 15 | * This is a singleton. 16 | * We can reuse the registryClient without instantiating new ones. 17 | */ 18 | object RegistryClientWrapper { 19 | 20 | private val logger = LoggerFactory.getLogger(RegistryClientWrapper::class.java) 21 | 22 | private const val BASIC_AUTH_SOURCE: String = "USER_INFO" 23 | 24 | fun client(url: String, basicAuth: String, sslConfigs: Map<String, String>): SchemaRegistryClient = 25 | CachedSchemaRegistryClient( 26 | listOf(url), 27 | 100, 28 | listOf(AvroSchemaProvider(), JsonSchemaProvider(), ProtobufSchemaProvider()), 29 | getConfig(basicAuth) + getValidatedSslConfig(sslConfigs) 30 | ) 31 | 32 | /** 33 | * Retrieves configuration from the plugin extension. 34 | * Note that BASIC_AUTH_CREDENTIALS_SOURCE is not configurable as the plugin only supports 35 | * a single schema registry URL, so there is no additional utility of the URL source.
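 * For example (illustrative values only), a basicAuth of "user:password" is forwarded as the
 * USER_INFO credentials, whereas the ":" value (no username or password configured) results in
 * an empty configuration map.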
36 | */ 37 | private fun getConfig(basicAuth: String): Map<String, String> = if (basicAuth == ":") 38 | mapOf() 39 | else 40 | mapOf( 41 | SchemaRegistryClientConfig.BASIC_AUTH_CREDENTIALS_SOURCE to BASIC_AUTH_SOURCE, 42 | SchemaRegistryClientConfig.USER_INFO_CONFIG to basicAuth 43 | ) 44 | 45 | /** 46 | * Validates that an SSLConfig map only contains keys that start with "ssl.xxx". 47 | * @see org.apache.kafka.common.config.SslConfigs 48 | */ 49 | private fun getValidatedSslConfig(sslConfigs: Map<String, String>): Map<String, String> { 50 | sslConfigs 51 | .keys 52 | .filterNot { property -> property.startsWith("ssl.") } 53 | .let { wrongProperties -> 54 | if (wrongProperties.any()) { 55 | wrongProperties.forEach { property -> logger.error("$property is not a valid sslConfig") } 56 | throw GradleException( 57 | "SSL configuration only accepts keys from org.apache.kafka.common.config.SslConfigs" 58 | ) 59 | } 60 | } 61 | return sslConfigs.mapKeys { CLIENT_NAMESPACE + it.key } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/SchemaRegistryExtension.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry 2 | 3 | import org.gradle.api.model.ObjectFactory 4 | import org.gradle.api.provider.Property 5 | 6 | open class SchemaRegistryExtension(objects: ObjectFactory) { 7 | 8 | companion object { 9 | const val EXTENSION_NAME = "schemaRegistry" 10 | } 11 | 12 | val url: Property<String> = objects.property(String::class.java).apply { 13 | // Default value 14 | convention("http://localhost:8081") 15 | } 16 | 17 | val quiet: Property<Boolean> = objects.property(Boolean::class.java).apply { 18 | convention(false) 19 | } 20 | 21 | val outputDirectory: Property<String> = objects.property(String::class.java) 22 | 23 | val pretty: Property<Boolean> = objects.property(Boolean::class.java).apply { 24 | convention(false) 25 | } 26 | 27 | val failFast: Property<Boolean> = objects.property(Boolean::class.java).apply { 28 | convention(false) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/SchemaRegistryPlugin.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry 2 | 3 | import com.github.imflog.schema.registry.security.BasicAuthExtension 4 | import com.github.imflog.schema.registry.security.SslExtension 5 | import com.github.imflog.schema.registry.tasks.compatibility.CompatibilitySubjectExtension 6 | import com.github.imflog.schema.registry.tasks.compatibility.CompatibilityTask 7 | import com.github.imflog.schema.registry.tasks.config.ConfigSubjectExtension 8 | import com.github.imflog.schema.registry.tasks.config.ConfigTask 9 | import com.github.imflog.schema.registry.tasks.download.DownloadSubjectExtension 10 | import com.github.imflog.schema.registry.tasks.download.DownloadTask 11 | import com.github.imflog.schema.registry.tasks.register.RegisterSchemasTask 12 | import com.github.imflog.schema.registry.tasks.register.RegisterSubjectExtension 13 | import org.gradle.api.Plugin 14 | import org.gradle.api.Project 15 | import org.gradle.api.plugins.ExtensionAware 16 | 17 | class SchemaRegistryPlugin : Plugin<Project> { 18 | 19 | override fun apply(project: Project) { 20 | with(project) { 21 | val globalExtension = extensions.create( 22 | SchemaRegistryExtension.EXTENSION_NAME, 23 | SchemaRegistryExtension::class.java 24 | ) 25 | 26 | val basicAuthExtension
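// The nested extensions registered below form the user-facing DSL. As a rough sketch of how a
// consumer build script exercises them (Groovy syntax, mirroring the examples shipped with the
// plugin; subject names and file paths are illustrative only):
//
//   schemaRegistry {
//       url = 'http://localhost:8081'
//       register {
//           subject('company', 'schemas/avro/company.avsc', 'AVRO')
//               .addLocalReference('Address', 'schemas/avro/location-address.avsc')
//       }
//       config {
//           subject('company', 'FULL_TRANSITIVE')
//       }
//       download {
//           subject('company', 'schemas/avro/downloaded')
//       }
//   }
//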
= (globalExtension as ExtensionAware).extensions.create( 27 | BasicAuthExtension.EXTENSION_NAME, 28 | BasicAuthExtension::class.java 29 | ) 30 | val sslExtension = (globalExtension as ExtensionAware).extensions.create( 31 | SslExtension.EXTENSION_NAME, 32 | SslExtension::class.java 33 | ) 34 | val downloadExtension = (globalExtension as ExtensionAware).extensions.create( 35 | DownloadSubjectExtension.EXTENSION_NAME, 36 | DownloadSubjectExtension::class.java, 37 | ) 38 | val registerExtension = (globalExtension as ExtensionAware).extensions.create( 39 | RegisterSubjectExtension.EXTENSION_NAME, 40 | RegisterSubjectExtension::class.java 41 | ) 42 | val compatibilityExtension = (globalExtension as ExtensionAware).extensions.create( 43 | CompatibilitySubjectExtension.EXTENSION_NAME, 44 | CompatibilitySubjectExtension::class.java 45 | ) 46 | val configExtension = (globalExtension as ExtensionAware).extensions.create( 47 | ConfigSubjectExtension.EXTENSION_NAME, 48 | ConfigSubjectExtension::class.java 49 | ) 50 | 51 | globalExtension.quiet.map { 52 | LoggingUtils.quietLogging = it 53 | } 54 | 55 | tasks.register(DownloadTask.TASK_NAME, DownloadTask::class.java) 56 | .configure { downloadTask -> 57 | downloadTask.url.set(globalExtension.url) 58 | downloadTask.basicAuth.set(basicAuthExtension.basicAuth) 59 | downloadTask.ssl.set(sslExtension.configs) 60 | downloadTask.subjects.set(downloadExtension.subjects) 61 | downloadTask.metadataConfig.set(downloadExtension.metadata) 62 | downloadTask.pretty.set(globalExtension.pretty) 63 | downloadTask.failFast.set(globalExtension.failFast) 64 | downloadTask.rootDir.set(project.rootDir) 65 | } 66 | 67 | tasks.register(RegisterSchemasTask.TASK_NAME, RegisterSchemasTask::class.java) 68 | .configure { registerSchemasTask -> 69 | registerSchemasTask.url.set(globalExtension.url) 70 | registerSchemasTask.basicAuth.set(basicAuthExtension.basicAuth) 71 | registerSchemasTask.ssl.set(sslExtension.configs) 72 | registerSchemasTask.subjects.set(registerExtension.subjects) 73 | registerSchemasTask.outputDirectory.set(globalExtension.outputDirectory) 74 | registerSchemasTask.failFast.set(globalExtension.failFast) 75 | registerSchemasTask.rootDir.set(project.rootDir) 76 | } 77 | 78 | tasks.register(CompatibilityTask.TASK_NAME, CompatibilityTask::class.java) 79 | .configure { compatibilityTask -> 80 | compatibilityTask.url.set(globalExtension.url) 81 | compatibilityTask.basicAuth.set(basicAuthExtension.basicAuth) 82 | compatibilityTask.ssl.set(sslExtension.configs) 83 | compatibilityTask.subjects.set(compatibilityExtension.subjects) 84 | compatibilityTask.failFast.set(globalExtension.failFast) 85 | compatibilityTask.rootDir.set(project.rootDir) 86 | } 87 | 88 | tasks.register(ConfigTask.TASK_NAME, ConfigTask::class.java) 89 | .configure { configTask -> 90 | configTask.url.set(globalExtension.url) 91 | configTask.basicAuth.set(basicAuthExtension.basicAuth) 92 | configTask.ssl.set(sslExtension.configs) 93 | configTask.subjects.set(configExtension.subjects) 94 | configTask.failFast.set(globalExtension.failFast) 95 | } 96 | } 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/SchemaType.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry 2 | 3 | import io.confluent.kafka.schemaregistry.avro.AvroSchema 4 | import io.confluent.kafka.schemaregistry.json.JsonSchema 5 | import 
io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema 6 | 7 | enum class SchemaType(val registryType: String, val extension: String) { 8 | AVRO(AvroSchema.TYPE, "avsc"), 9 | PROTOBUF(ProtobufSchema.TYPE, "proto"), 10 | JSON(JsonSchema.TYPE, "json") 11 | } 12 | 13 | fun String.toSchemaType(): SchemaType = when (this) { 14 | AvroSchema.TYPE -> SchemaType.AVRO 15 | ProtobufSchema.TYPE -> SchemaType.PROTOBUF 16 | JsonSchema.TYPE -> SchemaType.JSON 17 | else -> throw UnknownSchemaTypeException(this) 18 | } 19 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/Subject.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry 2 | 3 | import com.google.gson.Gson 4 | import io.confluent.kafka.schemaregistry.client.rest.entities.Metadata 5 | import io.confluent.kafka.schemaregistry.client.rest.entities.RuleSet 6 | import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference 7 | import java.io.File 8 | 9 | data class Subject( 10 | val inputSubject: String, 11 | val file: String, 12 | val type: String 13 | ) { 14 | val references: MutableList<SchemaReference> = mutableListOf() 15 | val localReferences: MutableList<LocalReference> = mutableListOf() 16 | var metadata: Metadata? = null 17 | var ruleSet: RuleSet? = null 18 | var normalize: Boolean = false 19 | 20 | 21 | fun addReference(name: String, subject: String, version: Int): Subject { 22 | references.add(SchemaReference(name, subject, version)) 23 | return this 24 | } 25 | 26 | fun addReference(name: String, subject: String): Subject { 27 | references.add(SchemaReference(name, subject, -1)) 28 | return this 29 | } 30 | 31 | fun addLocalReference(name: String, path: String): Subject { 32 | localReferences.add(LocalReference(name, path)) 33 | return this 34 | } 35 | 36 | fun setMetadata(path: String): Subject { 37 | val metadataContent = File(path).readText(Charsets.UTF_8) 38 | metadata = Gson().fromJson(metadataContent, Metadata::class.java) 39 | return this 40 | } 41 | 42 | fun setRuleSet(path: String): Subject { 43 | val ruleSetContent = File(path).readText(Charsets.UTF_8) 44 | ruleSet = Gson().fromJson(ruleSetContent, RuleSet::class.java) 45 | return this 46 | } 47 | 48 | fun setNormalized(normalize: Boolean): Subject { 49 | this.normalize = normalize 50 | return this 51 | } 52 | 53 | 54 | } 55 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/parser/AvroSchemaParser.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.parser 2 | 3 | import com.github.imflog.schema.registry.LocalReference 4 | import com.github.imflog.schema.registry.SchemaType 5 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient 6 | import org.json.JSONArray 7 | import org.json.JSONObject 8 | import java.io.File 9 | 10 | class AvroSchemaParser( 11 | client: SchemaRegistryClient, 12 | rootDir: File 13 | ) : SchemaParser(client, rootDir) { 14 | 15 | override val schemaType: SchemaType = SchemaType.AVRO 16 | 17 | override fun resolveLocalReferences( 18 | subject: String, 19 | schemaPath: String, 20 | localReferences: List<LocalReference> 21 | ): String { 22 | // Load and parse the main schema 23 | val mainSchema = JSONObject(loadContent(schemaPath)) 24 | 25 | // Create a map of reference name to schema content 26 | val referenceSchemas = localReferences.associate { reference ->
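// Each LocalReference originates from an addLocalReference(name, path) call in the build
// script (illustratively, addLocalReference('Address', 'schemas/avro/location-address.avsc')),
// so this map ties the declared name to the parsed content of the referenced .avsc file,
// ready to be spliced into the main schema wherever that name is used as a type.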
27 | reference.name to JSONObject(reference.content(rootDir)) 28 | } 29 | 30 | // Set to track which references have already been inserted 31 | val insertedReferences = mutableSetOf() 32 | 33 | // Process the schema recursively 34 | val resolvedSchema = resolveReferences(mainSchema, referenceSchemas, null, insertedReferences) 35 | return resolvedSchema.toString() 36 | } 37 | 38 | private fun resolveReferences( 39 | schema: JSONObject, 40 | references: Map, 41 | parentNamespace: String?, 42 | insertedReferences: MutableSet 43 | ): JSONObject { 44 | // Get the current namespace, falling back to parent namespace if not present 45 | val currentNamespace = schema.optString("namespace", parentNamespace) 46 | 47 | when (schema.opt("type")) { 48 | // If it's a record type, process its fields 49 | "record" -> { 50 | val fields = schema.getJSONArray("fields") 51 | for (i in 0 until fields.length()) { 52 | val field = fields.getJSONObject(i) 53 | fields.put(i, resolveReferences(field, references, currentNamespace, insertedReferences)) 54 | } 55 | } 56 | // If it's an array type, process its items 57 | "array" -> { 58 | if (schema.opt("items") is JSONObject) { 59 | schema.put( 60 | "items", 61 | resolveReferences( 62 | schema.getJSONObject("items"), 63 | references, 64 | currentNamespace, 65 | insertedReferences 66 | ) 67 | ) 68 | } 69 | if (schema.opt("items") is org.json.JSONArray) { 70 | val items = schema.getJSONArray("items") 71 | for (i in 0 until items.length()) { 72 | items.put( 73 | i, 74 | resolveReferences(items.getJSONObject(i), references, currentNamespace, insertedReferences) 75 | ) 76 | } 77 | } 78 | if (schema.opt("items") is String) { 79 | val items = schema.getString("items") 80 | val ref = handleStringRef(items, currentNamespace, references, insertedReferences) 81 | schema.put("items", ref) 82 | } 83 | } 84 | // If it's a map type, process its values 85 | "map" -> { 86 | if (schema.opt("values") is JSONObject) { 87 | schema.put( 88 | "values", 89 | resolveReferences( 90 | schema.getJSONObject("values"), 91 | references, 92 | currentNamespace, 93 | insertedReferences 94 | ) 95 | ) 96 | } 97 | if (schema.opt("values") is JSONArray) { 98 | val values = schema.getJSONArray("values") 99 | for (i in 0 until values.length()) { 100 | values.put( 101 | i, 102 | resolveReferences(values.getJSONObject(i), references, currentNamespace, insertedReferences) 103 | ) 104 | } 105 | } 106 | if (schema.opt("values") is String) { 107 | val values = schema.getString("values") 108 | val ref = handleStringRef(values, currentNamespace, references, insertedReferences) 109 | schema.put("values", ref) 110 | } 111 | } 112 | // If it's a JsonObject type, process its properties 113 | is JSONObject -> { 114 | schema.put( 115 | "type", 116 | resolveReferences(schema.getJSONObject("type"), references, currentNamespace, insertedReferences) 117 | ) 118 | } 119 | 120 | // If it's a union type, process each type in the union 121 | is JSONArray -> { 122 | val types = schema.getJSONArray("type") 123 | for (i in 0 until types.length()) { 124 | when (val type = types.get(i)) { 125 | is String -> { 126 | val ref = handleStringRef(type, currentNamespace, references, insertedReferences) 127 | types.put(i, ref) 128 | } 129 | 130 | is JSONObject -> { 131 | types.put(i, resolveReferences(type, references, currentNamespace, insertedReferences)) 132 | } 133 | } 134 | } 135 | } 136 | 137 | // If it's a string type reference 138 | is String -> { 139 | val ref = handleStringRef(schema.getString("type"), currentNamespace, references, 
insertedReferences) 140 | schema.put("type", ref) 141 | } 142 | } 143 | return schema 144 | } 145 | 146 | private fun handleStringRef( 147 | items: String, 148 | currentNamespace: String?, 149 | references: Map, 150 | insertedReferences: MutableSet 151 | ): Any { 152 | val referenceKey = findReferenceKey(items, currentNamespace, references) 153 | if (referenceKey != null) { 154 | // deep copy the reference and resolve local references in references (as local references can have references) 155 | val refSchemaCopy = JSONObject(references[referenceKey]!!.toString()) 156 | val referencedSchema = resolveReferences(refSchemaCopy, references, null, insertedReferences) 157 | val refNamespace = referencedSchema.optString("namespace") 158 | 159 | if (refNamespace == currentNamespace) { 160 | referencedSchema.remove("namespace") 161 | } 162 | if (!insertedReferences.contains(referenceKey)) { 163 | insertedReferences.add(referenceKey) 164 | return referencedSchema 165 | } 166 | return items 167 | } 168 | return items 169 | } 170 | 171 | private fun findReferenceKey(typeStr: String, namespace: String?, references: Map): String? { 172 | // First try the type as is 173 | if (references.containsKey(typeStr)) { 174 | return typeStr 175 | } 176 | 177 | // If there's a namespace, try with the namespace 178 | if (namespace != null && !typeStr.contains(".")) { 179 | val fullyQualifiedType = "$namespace.$typeStr" 180 | // Check if the fully qualified name exists in references 181 | if (references.containsKey(fullyQualifiedType)) { 182 | return fullyQualifiedType 183 | } 184 | 185 | // Check if the simple name exists in references and has matching namespace 186 | references[typeStr]?.let { schema -> 187 | if (schema.optString("namespace") == namespace) { 188 | return typeStr 189 | } 190 | } 191 | } 192 | 193 | // If the type already has a namespace (contains a dot), try to extract the simple name 194 | if (typeStr.contains(".")) { 195 | val simpleName = typeStr.substringAfterLast(".") 196 | if (references.containsKey(simpleName)) { 197 | return simpleName 198 | } 199 | } 200 | 201 | return null 202 | } 203 | } 204 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/parser/JsonSchemaParser.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.parser 2 | 3 | import com.github.imflog.schema.registry.LocalReference 4 | import com.github.imflog.schema.registry.SchemaType 5 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient 6 | import org.json.JSONObject 7 | import java.io.File 8 | 9 | class JsonSchemaParser( 10 | client: SchemaRegistryClient, 11 | rootDir: File 12 | ) : SchemaParser(client, rootDir) { 13 | 14 | override val schemaType: SchemaType = SchemaType.JSON 15 | 16 | companion object { 17 | private const val DEFS_NODE = "${"$"}defs" 18 | } 19 | 20 | override fun resolveLocalReferences( 21 | subject: String, 22 | schemaPath: String, 23 | localReferences: List 24 | ): String { 25 | val jsonObj = JSONObject(loadContent(schemaPath)) 26 | val localDefNodes = JSONObject() 27 | localReferences.forEach { reference -> 28 | localDefNodes.put( 29 | reference.name, 30 | JSONObject(reference.content(rootDir)) 31 | ) 32 | } 33 | jsonObj.append(DEFS_NODE, localDefNodes) 34 | return jsonObj.toString() 35 | } 36 | } 37 | -------------------------------------------------------------------------------- 
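A note on the JSON parser just shown: resolveLocalReferences parses the main schema, collects the local references into an object keyed by reference name, and appends that object under the schema's $defs node. A minimal build-script sketch that would exercise this path follows; the registry URL, subject name and file paths are illustrative placeholders, not taken from the project:

plugins {
    id 'java'
    id 'com.github.imflog.kafka-schema-registry-gradle-plugin'
}

schemaRegistry {
    // URL and paths below are placeholders
    url = 'http://localhost:8081'
    register {
        // JSON subject whose schema refers to an Address definition kept in a separate file
        subject('user-value', 'schemas/json/user.json', 'JSON')
            .addLocalReference('Address', 'schemas/json/address.json')
    }
}

With such a configuration the Address definition ends up inlined under $defs of the schema that gets sent to the registry, so no separate subject has to be registered for the local reference.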
/src/main/kotlin/com/github/imflog/schema/registry/parser/ProtobufSchemaParser.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.parser 2 | 3 | import com.github.imflog.schema.registry.LocalReference 4 | import com.github.imflog.schema.registry.SchemaParsingException 5 | import com.github.imflog.schema.registry.SchemaType 6 | import com.squareup.wire.schema.* 7 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient 8 | import okio.FileSystem 9 | import org.slf4j.LoggerFactory 10 | import java.io.File 11 | 12 | class ProtobufSchemaParser( 13 | client: SchemaRegistryClient, 14 | rootDir: File 15 | ) : SchemaParser(client, rootDir) { 16 | override val schemaType: SchemaType = SchemaType.PROTOBUF 17 | 18 | override fun resolveLocalReferences( 19 | subject: String, 20 | schemaPath: String, 21 | localReferences: List 22 | ): String { 23 | val schema = schemaFor(rootDir) 24 | val source = schema.protoFile(File(schemaPath).relativeTo(rootDir).path) 25 | ?: throw SchemaParsingException( 26 | subject, 27 | schemaType, 28 | "File not found at schema path $schemaPath in ${rootDir.path}" 29 | ) 30 | val refs: Map = parseRefs(localReferences) 31 | 32 | return LocalReferenceTransformer(subject, rootDir, schema, refs).transform(source) 33 | } 34 | 35 | private fun schemaFor(schemaDirectory: File): Schema { 36 | val loader = loaderFor(schemaDirectory) 37 | return loader.loadSchema() 38 | } 39 | 40 | private fun loaderFor(schemaDirectory: File): SchemaLoader { 41 | val loader = SchemaLoader(FileSystem.SYSTEM) 42 | loader.initRoots( 43 | listOf(Location.get(schemaDirectory.absolutePath)), 44 | listOf(Location.get(schemaDirectory.absolutePath)) 45 | ) 46 | return loader 47 | } 48 | 49 | private fun parseRefs(localReferences: List): Map { 50 | return localReferences.associate { 51 | Pair(it.name, File(it.path).normalize()) 52 | } 53 | } 54 | 55 | inner class LocalReferenceTransformer( 56 | private val subject: String, 57 | private val rootDir: File, 58 | private val schema: Schema, 59 | private val refs: Map 60 | ) { 61 | private val log = LoggerFactory.getLogger(LocalReferenceTransformer::class.java) 62 | 63 | fun transform(source: ProtoFile): String { 64 | val hierarchy = DependencyHierarchy(source, schema) 65 | 66 | val (filesToFlatten, filesToRetain) = hierarchy.partition { 67 | val import = it.location.path 68 | 69 | // Unknown local reference 70 | val ref = refs[import] 71 | if (ref == null && it != source) { // Source ends up in the filesToFlatten even if it was in the local references 72 | if (!isBuiltInImport(import)) { 73 | // It's normal to ignore built-ins, so no warning in that case. 74 | log.warn( 75 | "Unknown reference '{}' encountered while processing local references, it will be retained as is. Known references: {}", 76 | import, refs 77 | ) 78 | } 79 | false 80 | } else { 81 | true 82 | } 83 | } 84 | 85 | val typesToRetain = filesToFlatten.flatMap { file -> 86 | val import = file.location.path 87 | val ref = refs[import] 88 | 89 | // This would normally resolve into the same exact file as the reference itself, 90 | // but the way LocalReference is constructed implies it could be elsewhere, so we'll have to 91 | // follow through with the API. 
92 | val dependency = if (ref != null) { 93 | schema.protoFile(ref.relativeTo(rootDir).path) 94 | ?: throw SchemaParsingException( 95 | subject, 96 | schemaType, 97 | "Dependency not found for local reference $import at ${ref.absolutePath}" 98 | ) 99 | } else { 100 | file 101 | } 102 | dependency.types.map { 103 | standardizeNames(it, dependency) 104 | } 105 | } 106 | 107 | val result = source.copy( 108 | imports = filesToRetain.map { it.location.path }.toList(), 109 | publicImports = emptyList(), 110 | types = typesToRetain.toList(), 111 | extendList = source.extendList.map { standardizeNames(it, source) }, 112 | services = source.services.map { standardizeNames(it, source) }, 113 | ).toSchema() 114 | 115 | log.info( 116 | "Local reference schema conversion for {}:\n{}\nto:\n{}", 117 | source.location.path, 118 | source.toSchema(), 119 | result 120 | ) 121 | 122 | return result 123 | } 124 | 125 | private fun isBuiltInImport(import: String): Boolean { 126 | return import.startsWith("google/protobuf") 127 | } 128 | 129 | private fun standardizeNames(protoType: ProtoType, file: ProtoFile): ProtoType { 130 | return when { 131 | protoType.isScalar -> protoType 132 | protoType.isMap -> { 133 | // keyType and valueType are always present for `isMap=true`, hence the non-null assertion operator. 134 | val newKey = standardizeNames(protoType.keyType!!, file) 135 | val newValue = standardizeNames(protoType.valueType!!, file) 136 | ProtoType.get( 137 | keyType = newKey, 138 | valueType = newValue, 139 | name = "map<$newKey, $newValue>" 140 | ) 141 | } 142 | 143 | isLocalReference(protoType, file) -> trimToLocalName(protoType, findSource(protoType, file)) 144 | else -> protoType // Not in a local reference, keep as is 145 | } 146 | } 147 | 148 | private fun standardizeNames(type: Type, file: ProtoFile): Type { 149 | return when (type) { 150 | is EnclosingType -> type.copy(nestedTypes = type.nestedTypes.map { standardizeNames(it, file) }) 151 | is MessageType -> type.copy( 152 | declaredFields = type.declaredFields.map { standardizeNames(it, file) }, 153 | extensionFields = type.extensionFields.map { standardizeNames(it, file) }.toMutableList(), 154 | oneOfs = type.oneOfs.map { standardizeNames(it, file) }, 155 | nestedTypes = type.nestedTypes.map { standardizeNames(it, file) }, 156 | nestedExtendList = type.nestedExtendList.map { standardizeNames(it, file) }, 157 | extensionsList = type.extensionsList.map { standardizeNames(it, file) } 158 | ) 159 | 160 | is EnumType -> type // Keep it as is 161 | } 162 | } 163 | 164 | private fun standardizeNames(oneOf: OneOf, file: ProtoFile): OneOf { 165 | return oneOf.copy(fields = oneOf.fields.map { standardizeNames(it, file) }) 166 | } 167 | 168 | private fun standardizeNames(field: Field, file: ProtoFile): Field { 169 | // This is a somewhat unfortunate hack. We had problems with our Schema Registry when mixing outer-scope 170 | // package references (.some.package.Message) and standard package imports (some.package.Message). 171 | // `Field.elementType` contains the actual parsed type, and, unlike `Field.type`, it preserves 172 | // the leading dot. Unfortunately, it's private within `Field`, so the only option for accessing it is 173 | // by converting it first into its corresponding Element. 
174 | val fieldElement = Field.toElements(listOf(field)).first() 175 | val fieldType = ProtoType.get(fieldElement.type) 176 | val newType = standardizeNames(fieldType, file) 177 | return field.copy(elementType = newType.toString()) 178 | } 179 | 180 | private fun standardizeNames(extensions: Extensions, file: ProtoFile): Extensions { 181 | log.warn("Extension overwrite is not supported, but one was found at ${file.location.path} for $subject") 182 | return extensions 183 | } 184 | 185 | private fun standardizeNames(extend: Extend, file: ProtoFile): Extend { 186 | log.warn("Extend clause overwrite is not supported, but one was found at ${file.location.path} for $subject") 187 | return extend 188 | } 189 | 190 | private fun standardizeNames(service: Service, file: ProtoFile): Service { 191 | // RPCs are not fully parsed by Wire, so getting them supported would be a lot of work. 192 | log.warn("Service definition overwrite is not supported, but one was found at ${file.location.path} for $subject") 193 | return service 194 | } 195 | 196 | private fun isLocalReference(type: ProtoType, file: ProtoFile): Boolean { 197 | val import = findSource(type, file)?.location?.path 198 | return refs.contains(import) 199 | } 200 | 201 | private fun findSource(type: ProtoType, root: ProtoFile): ProtoFile? { 202 | val typeVariants = toTypeVariants(type, root) 203 | // Wire loses the leading . on linking, making `Field.type="package.Type"` not equal 204 | // `".package.Type"`. 205 | .map { ProtoType.get(it.toString().removePrefix(".")) } 206 | .toSet() 207 | val containingFile = DependencyHierarchy(root, schema).find { file -> 208 | val allTypes = file.typesAndNestedTypes() 209 | .map { it.type } 210 | .toSet() 211 | allTypes.intersect(typeVariants).isNotEmpty() 212 | } 213 | return containingFile 214 | } 215 | 216 | private fun toTypeVariants(type: ProtoType, source: ProtoFile): List { 217 | val packageName = source.packageName 218 | return when { 219 | type.toString().startsWith(".") -> { 220 | // Absolute import 221 | listOf(type) 222 | } 223 | 224 | packageName != null -> { 225 | // for type T and package segments "a", "b" it would produce "T", "b.T", "a.b.T" 226 | 227 | val prefixes = packageName.split('.') 228 | .runningFold("") { acc, segment -> 229 | when (acc) { 230 | "" -> segment 231 | else -> "${acc}.${segment}" 232 | } 233 | } 234 | 235 | prefixes 236 | .map { ProtoType.get("${it}.${type}") } 237 | .toList() 238 | } 239 | 240 | else -> { 241 | // Top level package already 242 | listOf(type) 243 | 244 | } 245 | } 246 | } 247 | 248 | private fun trimToLocalName(type: ProtoType, source: ProtoFile?): ProtoType { 249 | if (source == null) { 250 | return type 251 | } 252 | val withoutDot = type.toString() 253 | // Handle absolute package names (the post-linkage `Field.type` doesn't have those, 254 | // so it'll never match unless we strip the dot). 255 | .removePrefix(".") 256 | val fullType = source.typesAndNestedTypes().find { it.type.toString().endsWith(withoutDot) } 257 | ?: throw SchemaParsingException( 258 | subject, 259 | schemaType, 260 | "Type $type could not be found in ${source.location.path}" 261 | ) 262 | val relativePackageString = fullType.type.toString() 263 | // Strip the package -> make it a "local" reference. 
264 | .removePrefix((source.packageName ?: "") + ".") 265 | return ProtoType.get(relativePackageString) 266 | } 267 | } 268 | 269 | class DependencyHierarchy(private val root: ProtoFile, private val schema: Schema) : Iterable { 270 | override fun iterator(): Iterator { 271 | return DependencyHierarchyIterator(root, schema) 272 | } 273 | } 274 | 275 | /** 276 | * It's the [ImportsIterator] that includes the file where the imports are taken from, i.e. the root. 277 | * 278 | * It'll first return the root, and then the imports, looking somewhat like: `[root, *ImportsIterator(root)]` 279 | */ 280 | class DependencyHierarchyIterator(private val root: ProtoFile, schema: Schema) : Iterator { 281 | private var rootReturned = false 282 | private val delegate = ImportsIterator(schema, root) 283 | override fun hasNext(): Boolean { 284 | return !rootReturned || delegate.hasNext() 285 | } 286 | 287 | override fun next(): ProtoFile { 288 | return if (!rootReturned) { 289 | rootReturned = true 290 | root 291 | } else { 292 | delegate.next() 293 | } 294 | } 295 | } 296 | 297 | /** 298 | * Recursively resolves [ProtoFile] instances for the file, recursively. 299 | * 300 | * It will iterate over all the imports and public imports of the supplied file, as well as any transitive 301 | * dependencies. It won't return the same import twice, even if they belong to different files in the hierarchy. 302 | */ 303 | class ImportsIterator(private val schema: Schema, root: ProtoFile) : Iterator { 304 | 305 | private val imports = ArrayDeque(root.imports + root.publicImports) 306 | private val knownImports = mutableSetOf() 307 | 308 | init { 309 | knownImports.addAll(imports) 310 | } 311 | 312 | override fun hasNext(): Boolean { 313 | return imports.isNotEmpty() 314 | } 315 | 316 | override fun next(): ProtoFile { 317 | val current = imports.removeFirstOrNull() ?: throw NoSuchElementException("No more imports") 318 | val result = 319 | schema.protoFile(current) 320 | ?: throw IllegalStateException("Import '$current' is not in the schema path") 321 | addAll(result.imports) 322 | addAll(result.publicImports) 323 | return result 324 | } 325 | 326 | private fun addAll(newImports: Iterable) { 327 | newImports.filter(knownImports::add).forEach(imports::addLast) 328 | } 329 | } 330 | } 331 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/parser/SchemaParser.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.parser 2 | 3 | import com.github.imflog.schema.registry.LocalReference 4 | import com.github.imflog.schema.registry.SchemaParsingException 5 | import com.github.imflog.schema.registry.SchemaType 6 | import com.github.imflog.schema.registry.Subject 7 | import io.confluent.kafka.schemaregistry.ParsedSchema 8 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient 9 | import io.confluent.kafka.schemaregistry.client.rest.entities.Metadata 10 | import io.confluent.kafka.schemaregistry.client.rest.entities.RuleSet 11 | import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference 12 | import java.io.File 13 | 14 | abstract class SchemaParser( 15 | protected val client: SchemaRegistryClient, 16 | protected val rootDir: File 17 | ) { 18 | abstract val schemaType: SchemaType 19 | 20 | companion object { 21 | /** 22 | * This is like a factory for our local parsers. 
23 | * We can afford to recreate the parser each time as it's not a long-running process. 24 | */ 25 | fun provide(schemaType: SchemaType, client: SchemaRegistryClient, rootDir: File): SchemaParser = 26 | when (schemaType) { 27 | SchemaType.AVRO -> AvroSchemaParser(client, rootDir) 28 | SchemaType.JSON -> JsonSchemaParser(client, rootDir) 29 | SchemaType.PROTOBUF -> ProtobufSchemaParser(client, rootDir) 30 | } 31 | } 32 | 33 | @Throws(SchemaParsingException::class, NotImplementedError::class) 34 | fun parseSchemaFromFile( 35 | subject: Subject 36 | ): ParsedSchema { 37 | val parsedLocalSchemaString = if (subject.localReferences.isNotEmpty()) { 38 | resolveLocalReferences(subject.inputSubject, subject.file, subject.localReferences) 39 | } else loadContent(subject.file) 40 | 41 | return client 42 | .parseSchema(schemaType.registryType, parsedLocalSchemaString, subject.references, subject.metadata, subject.ruleSet) 43 | .orElseThrow { SchemaParsingException(subject.inputSubject, schemaType) } 44 | } 45 | 46 | abstract fun resolveLocalReferences( 47 | subject: String, 48 | schemaPath: String, 49 | localReferences: List<LocalReference> 50 | ): String 51 | 52 | protected fun loadContent(schemaPath: String): String { 53 | return rootDir.resolve(schemaPath).readText() 54 | } 55 | } 56 | 57 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/security/BasicAuthExtension.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.security 2 | 3 | import org.gradle.api.model.ObjectFactory 4 | import org.gradle.api.provider.Property 5 | import org.gradle.api.provider.Provider 6 | 7 | open class BasicAuthExtension(objects: ObjectFactory) { 8 | 9 | companion object { 10 | const val EXTENSION_NAME = "credentials" 11 | } 12 | 13 | val username: Property<String> = objects.property(String::class.java).apply { 14 | convention("") 15 | } 16 | val password: Property<String> = objects.property(String::class.java).apply { 17 | convention("") 18 | } 19 | 20 | val basicAuth: Provider<String> = 21 | username.flatMap { usernameStr -> password.map { passwordStr -> "$usernameStr:$passwordStr" } } 22 | } 23 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/security/SslExtension.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.security 2 | 3 | import org.gradle.api.model.ObjectFactory 4 | import org.gradle.api.provider.MapProperty 5 | 6 | open class SslExtension(objects: ObjectFactory) { 7 | 8 | companion object { 9 | const val EXTENSION_NAME = "ssl" 10 | } 11 | 12 | val configs: MapProperty<String, String> = objects.mapProperty(String::class.java, String::class.java).apply { 13 | convention(mapOf()) 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/compatibility/CompatibilitySubjectExtension.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.compatibility 2 | 3 | import com.github.imflog.schema.registry.Subject 4 | import com.github.imflog.schema.registry.toSchemaType 5 | import io.confluent.kafka.schemaregistry.avro.AvroSchema 6 | import org.gradle.api.model.ObjectFactory 7 | import org.gradle.api.provider.ListProperty 8 | 9 | open class
CompatibilitySubjectExtension(objects: ObjectFactory) { 10 | companion object { 11 | const val EXTENSION_NAME = "compatibility" 12 | } 13 | 14 | val subjects: ListProperty = objects.listProperty(Subject::class.java) 15 | 16 | fun subject(inputSubject: String, file: String) = subject(inputSubject, file, AvroSchema.TYPE) 17 | 18 | fun subject( 19 | inputSubject: String, 20 | file: String, 21 | type: String 22 | ): Subject { 23 | val compatibilitySubject = Subject(inputSubject, file, type) 24 | subjects.add(compatibilitySubject) 25 | return compatibilitySubject 26 | } 27 | 28 | fun subject(subject: Subject) = subject.apply { subjects.add(subject) } 29 | } 30 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/compatibility/CompatibilityTask.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.compatibility 2 | 3 | import com.github.imflog.schema.registry.RegistryClientWrapper 4 | import com.github.imflog.schema.registry.Subject 5 | import org.gradle.api.DefaultTask 6 | import org.gradle.api.GradleScriptException 7 | import org.gradle.api.model.ObjectFactory 8 | import org.gradle.api.provider.ListProperty 9 | import org.gradle.api.provider.MapProperty 10 | import org.gradle.api.provider.Property 11 | import org.gradle.api.tasks.Input 12 | import org.gradle.api.tasks.TaskAction 13 | import java.io.File 14 | import javax.inject.Inject 15 | 16 | 17 | open class CompatibilityTask @Inject constructor(objects: ObjectFactory) : DefaultTask() { 18 | init { 19 | group = "registry" 20 | description = "Test compatibility against registry" 21 | } 22 | 23 | companion object { 24 | const val TASK_NAME = "testSchemasTask" 25 | } 26 | 27 | @Input 28 | val url: Property = objects.property(String::class.java) 29 | 30 | @Input 31 | val basicAuth: Property = objects.property(String::class.java) 32 | 33 | @Input 34 | val ssl: MapProperty = objects.mapProperty(String::class.java, String::class.java) 35 | 36 | @Input 37 | val subjects: ListProperty = objects.listProperty(Subject::class.java) 38 | 39 | @Input 40 | val failFast: Property = objects.property(Boolean::class.java) 41 | 42 | @Input 43 | val rootDir: Property = objects.property(File::class.java) 44 | 45 | @TaskAction 46 | fun testCompatibility() { 47 | val errorCount = CompatibilityTaskAction( 48 | RegistryClientWrapper.client(url.get(), basicAuth.get(), ssl.get()), 49 | rootDir.get(), 50 | subjects.get(), 51 | failFast.getOrElse(false) 52 | ).run() 53 | if (errorCount > 0) { 54 | throw GradleScriptException("$errorCount schemas not compatible, see logs for details.", Throwable()) 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/compatibility/CompatibilityTaskAction.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.compatibility 2 | 3 | import com.github.imflog.schema.registry.LoggingUtils.infoIfNotQuiet 4 | import com.github.imflog.schema.registry.Subject 5 | import com.github.imflog.schema.registry.parser.SchemaParser 6 | import com.github.imflog.schema.registry.toSchemaType 7 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient 8 | import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException 9 | import 
io.confluent.kafka.schemaregistry.rest.exceptions.Errors 10 | import org.gradle.api.GradleScriptException 11 | import org.gradle.api.logging.Logging 12 | import java.io.File 13 | import java.io.IOException 14 | 15 | class CompatibilityTaskAction( 16 | private val client: SchemaRegistryClient, 17 | private val rootDir: File, 18 | private val subjects: List, 19 | private val failFast: Boolean = false, 20 | ) { 21 | 22 | private val logger = Logging.getLogger(CompatibilityTaskAction::class.java) 23 | 24 | fun run(): Int { 25 | var errorCount = 0 26 | subjects.forEach { subject -> 27 | logger.debug("Loading schema for subject(${subject.inputSubject}) from ${subject.file}.") 28 | val isCompatible = try { 29 | val parsedSchema = SchemaParser 30 | .provide(subject.type.toSchemaType(), client, rootDir) 31 | .parseSchemaFromFile(subject) 32 | val isCompatible = client.testCompatibility(subject.inputSubject, parsedSchema) 33 | if (!isCompatible) { 34 | try { 35 | client.testCompatibilityVerbose(subject.inputSubject, parsedSchema).forEach { 36 | logger.error("Returned errors : $it") 37 | } 38 | } catch (_: Exception) { 39 | // If we use a confluent version < 6.1.0 this call may fail as the API response would be a boolean instead of the expected String list. 40 | } finally { 41 | if (failFast) { 42 | throw GradleScriptException("Schema ${subject.file} is not compatible with subject: ${subject.inputSubject}", Throwable()) 43 | } 44 | } 45 | } 46 | isCompatible 47 | } catch (ioEx: IOException) { 48 | logger.error("", ioEx) 49 | if (failFast) { 50 | throw ioEx 51 | } 52 | false 53 | } catch (restEx: RestClientException) { 54 | // If the subject does not exist, it is compatible 55 | if (restEx.errorCode == Errors.SUBJECT_NOT_FOUND_ERROR_CODE) { 56 | true 57 | } else { 58 | logger.error("", restEx) 59 | if (failFast) { 60 | throw restEx 61 | } 62 | false 63 | } 64 | } 65 | if (isCompatible) { 66 | logger.infoIfNotQuiet("Schema ${subject.file} is compatible with subject: ${subject.inputSubject}") 67 | } else { 68 | logger.error("Schema ${subject.file} is not compatible with subject: ${subject.inputSubject}") 69 | errorCount++ 70 | } 71 | } 72 | return errorCount 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/config/ConfigSubjectExtension.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.config 2 | 3 | import org.gradle.api.model.ObjectFactory 4 | import org.gradle.api.provider.ListProperty 5 | 6 | open class ConfigSubjectExtension(objects: ObjectFactory) { 7 | 8 | companion object { 9 | const val EXTENSION_NAME = "config" 10 | } 11 | 12 | val subjects: ListProperty = objects.listProperty(ConfigSubject::class.java) 13 | 14 | fun subject(inputSubject: String, compatibility: String) { 15 | subjects.add(ConfigSubject(inputSubject, compatibility)) 16 | } 17 | } 18 | 19 | data class ConfigSubject( 20 | val inputSubject: String, 21 | val compatibility: String 22 | ) 23 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/config/ConfigTask.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.config 2 | 3 | import com.github.imflog.schema.registry.RegistryClientWrapper 4 | import org.gradle.api.DefaultTask 5 | import org.gradle.api.GradleScriptException 6 
| import org.gradle.api.model.ObjectFactory 7 | import org.gradle.api.provider.ListProperty 8 | import org.gradle.api.provider.MapProperty 9 | import org.gradle.api.provider.Property 10 | import org.gradle.api.tasks.Input 11 | import org.gradle.api.tasks.TaskAction 12 | import javax.inject.Inject 13 | 14 | 15 | open class ConfigTask @Inject constructor(objects: ObjectFactory) : DefaultTask() { 16 | init { 17 | group = "registry" 18 | description = "Set subject compatibility in registry" 19 | } 20 | 21 | companion object { 22 | const val TASK_NAME = "configSubjectsTask" 23 | } 24 | 25 | @Input 26 | val url: Property = objects.property(String::class.java) 27 | 28 | @Input 29 | val basicAuth: Property = objects.property(String::class.java) 30 | 31 | @Input 32 | val ssl: MapProperty = objects.mapProperty(String::class.java, String::class.java) 33 | 34 | @Input 35 | val subjects: ListProperty = objects.listProperty(ConfigSubject::class.java) 36 | 37 | @Input 38 | val failFast: Property = objects.property(Boolean::class.java) 39 | 40 | @TaskAction 41 | fun configureSubjects() { 42 | val errorCount = ConfigTaskAction( 43 | RegistryClientWrapper.client(url.get(), basicAuth.get(), ssl.get()), 44 | subjects.get(), 45 | failFast.getOrElse(false) 46 | ).run() 47 | if (errorCount > 0) { 48 | throw GradleScriptException("$errorCount subject configuration not set, see logs for details", Throwable()) 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/config/ConfigTaskAction.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.config 2 | 3 | import io.confluent.kafka.schemaregistry.CompatibilityLevel 4 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient 5 | import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException 6 | import org.gradle.api.logging.Logging 7 | 8 | class ConfigTaskAction( 9 | private val client: SchemaRegistryClient, 10 | private val subjects: List, 11 | private val failFast: Boolean = false, 12 | ) { 13 | 14 | private val logger = Logging.getLogger(ConfigTaskAction::class.java) 15 | 16 | fun run(): Int { 17 | var errorCount = 0 18 | for ((subject, config) in subjects) { 19 | logger.debug("$subject: setting config $config") 20 | try { 21 | if (CompatibilityLevel.forName(config) == null) { 22 | logger.error("'$config' is not a valid schema registry compatibility level") 23 | errorCount++ 24 | if (failFast) { 25 | throw IllegalArgumentException("'$config' is not a valid schema registry compatibility level") 26 | } 27 | } else { 28 | client.updateCompatibility(subject, config) 29 | } 30 | } catch (ex: RestClientException) { 31 | logger.error("Error during compatibility update for $subject", ex) 32 | errorCount++ 33 | if (failFast) { 34 | throw ex 35 | } 36 | } 37 | } 38 | return errorCount 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/download/DownloadSubjectExtension.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.download 2 | 3 | import org.gradle.api.model.ObjectFactory 4 | import org.gradle.api.provider.ListProperty 5 | import org.gradle.api.provider.Property 6 | 7 | open class DownloadSubjectExtension(objects: ObjectFactory) { 8 | 9 | companion object { 10 
| const val EXTENSION_NAME = "download" 11 | } 12 | 13 | val metadata: Property = objects.property(MetadataExtension::class.java).apply { 14 | convention(MetadataExtension(false, null)) 15 | } 16 | 17 | val subjects: ListProperty = objects.listProperty(DownloadSubject::class.java).apply { 18 | convention(listOf()) 19 | } 20 | 21 | fun subject(inputSubject: String, outputPath: String) { 22 | subjects.add(DownloadSubject(inputSubject, outputPath)) 23 | } 24 | 25 | fun subject(inputSubject: String, outputPath: String, downloadReferences: Boolean) { 26 | subjects.add(DownloadSubject(inputSubject, outputPath, downloadReferences = downloadReferences)) 27 | } 28 | 29 | fun subject(inputSubject: String, outputPath: String, outputFileName: String) { 30 | subjects.add(DownloadSubject(inputSubject, outputPath, outputFileName = outputFileName)) 31 | } 32 | 33 | fun subject(inputSubject: String, outputPath: String, outputFileName: String, downloadReferences: Boolean) { 34 | subjects.add(DownloadSubject(inputSubject, outputPath, outputFileName = outputFileName, downloadReferences = downloadReferences)) 35 | } 36 | 37 | fun subject(inputSubject: String, outputPath: String, version: Int) { 38 | subjects.add(DownloadSubject(inputSubject, outputPath, version)) 39 | } 40 | 41 | fun subject(inputSubject: String, outputPath: String, version: Int, downloadReferences: Boolean) { 42 | subjects.add(DownloadSubject(inputSubject, outputPath, version, downloadReferences = downloadReferences)) 43 | } 44 | 45 | fun subject(inputSubject: String, outputPath: String, version: Int, outputFileName: String) { 46 | subjects.add(DownloadSubject(inputSubject, outputPath, version, outputFileName = outputFileName)) 47 | } 48 | 49 | fun subject(inputSubject: String, outputPath: String, version: Int, outputFileName: String, downloadReferences: Boolean) { 50 | subjects.add(DownloadSubject(inputSubject, outputPath, version, outputFileName = outputFileName, downloadReferences = downloadReferences)) 51 | } 52 | 53 | fun subjectPattern(inputPattern: String, outputPath: String) { 54 | subjects.add(DownloadSubject(inputPattern, outputPath, null, true)) 55 | } 56 | 57 | fun subjectPattern(inputPattern: String, outputPath: String, downloadReferences: Boolean) { 58 | subjects.add(DownloadSubject(inputPattern, outputPath, null, true, downloadReferences = downloadReferences)) 59 | } 60 | } 61 | 62 | data class DownloadSubject( 63 | val subject: String, 64 | val outputPath: String, 65 | val version: Int? = null, 66 | val regex: Boolean = false, 67 | val outputFileName: String? = null, 68 | val downloadReferences: Boolean = false 69 | ) 70 | 71 | data class MetadataExtension( 72 | val enabled: Boolean = false, 73 | val outputPath: String? 
= null 74 | ) { 75 | constructor(enabled: Boolean) : this(enabled, null) 76 | } 77 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/download/DownloadTask.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.download 2 | 3 | import com.github.imflog.schema.registry.RegistryClientWrapper 4 | import org.gradle.api.DefaultTask 5 | import org.gradle.api.GradleScriptException 6 | import org.gradle.api.model.ObjectFactory 7 | import org.gradle.api.provider.ListProperty 8 | import org.gradle.api.provider.MapProperty 9 | import org.gradle.api.provider.Property 10 | import org.gradle.api.tasks.Input 11 | import org.gradle.api.tasks.TaskAction 12 | import java.io.File 13 | import javax.inject.Inject 14 | 15 | 16 | open class DownloadTask @Inject constructor(objects: ObjectFactory) : DefaultTask() { 17 | 18 | companion object { 19 | const val TASK_NAME = "downloadSchemasTask" 20 | } 21 | 22 | init { 23 | group = "registry" 24 | description = "Download schemas from the registry" 25 | } 26 | 27 | @Input 28 | val metadataConfig: Property = objects.property(MetadataExtension::class.java) 29 | 30 | @Input 31 | val url: Property = objects.property(String::class.java) 32 | 33 | @Input 34 | val subjects: ListProperty = objects.listProperty(DownloadSubject::class.java) 35 | 36 | @Input 37 | val basicAuth: Property = objects.property(String::class.java) 38 | 39 | @Input 40 | val ssl: MapProperty = objects.mapProperty(String::class.java, String::class.java) 41 | 42 | @Input 43 | val pretty: Property = objects.property(Boolean::class.java) 44 | 45 | @Input 46 | val failFast: Property = objects.property(Boolean::class.java) 47 | 48 | @Input 49 | val rootDir: Property = objects.property(File::class.java) 50 | 51 | @TaskAction 52 | fun downloadSchemas() { 53 | val errorCount = DownloadTaskAction( 54 | RegistryClientWrapper.client(url.get(), basicAuth.get(), ssl.get()), 55 | rootDir.get(), 56 | subjects.get(), 57 | metadataConfig.get(), 58 | pretty.get(), 59 | failFast.getOrElse(false), 60 | ).run() 61 | if (errorCount > 0) { 62 | throw GradleScriptException("$errorCount schemas not downloaded, see logs for details", Throwable()) 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/download/DownloadTaskAction.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.download 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper 4 | import com.fasterxml.jackson.databind.PropertyNamingStrategies 5 | import com.fasterxml.jackson.databind.SerializationFeature 6 | import com.github.imflog.schema.registry.LoggingUtils.infoIfNotQuiet 7 | import com.github.imflog.schema.registry.SchemaParsingException 8 | import com.github.imflog.schema.registry.SchemaType 9 | import com.github.imflog.schema.registry.toSchemaType 10 | import com.google.common.base.Suppliers 11 | import io.confluent.kafka.schemaregistry.ParsedSchema 12 | import io.confluent.kafka.schemaregistry.avro.AvroSchema 13 | import io.confluent.kafka.schemaregistry.client.SchemaMetadata 14 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient 15 | import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference 16 | import io.confluent.kafka.schemaregistry.json.JsonSchema 17 | 
import org.gradle.api.logging.Logging 18 | import java.io.File 19 | import java.util.regex.PatternSyntaxException 20 | 21 | class DownloadTaskAction( 22 | private val client: SchemaRegistryClient, 23 | private val rootDir: File, 24 | private val subjects: List, 25 | private val metadataConfiguration: MetadataExtension, 26 | private val pretty: Boolean = false, 27 | private val failFast: Boolean = false, 28 | ) { 29 | 30 | private val logger = Logging.getLogger(DownloadTaskAction::class.java) 31 | private val objectMapper = ObjectMapper() 32 | .configure(SerializationFeature.INDENT_OUTPUT, true) 33 | .setPropertyNamingStrategy(PropertyNamingStrategies.SNAKE_CASE) 34 | 35 | fun run(): Int { 36 | var errorCount = 0 37 | expandSubjectPatterns().forEach { downloadSubject -> 38 | logger.infoIfNotQuiet("Start loading schemas for ${downloadSubject.subject}") 39 | try { 40 | val metadata = getSchemaMetadata(downloadSubject) 41 | val outputDir = File(rootDir.toURI()).resolve(downloadSubject.outputPath) 42 | outputDir.mkdirs() 43 | val metadataDirectory = if (metadataConfiguration.enabled) { 44 | val metadataDirectory = metadataConfiguration.outputPath?.run { 45 | File(rootDir.toURI()).resolve(this) 46 | } ?: outputDir 47 | metadataDirectory.mkdirs() 48 | metadataDirectory 49 | } else { 50 | outputDir 51 | } 52 | 53 | if (metadataConfiguration.enabled) { 54 | writeSchemaMetadata(downloadSubject, metadata, metadataDirectory) 55 | } 56 | writeSchemaFile(downloadSubject, metadata, pretty, outputDir) 57 | 58 | if (downloadSubject.downloadReferences) { 59 | logger.infoIfNotQuiet("Start downloading referenced schema for ${downloadSubject.subject}") 60 | downloadReference(metadata, downloadSubject.outputPath, metadataDirectory, outputDir) 61 | logger.infoIfNotQuiet("Referenced schema downloaded for ${downloadSubject.subject}") 62 | } 63 | } catch (e: Exception) { 64 | logger.error("Error during schema retrieval for ${downloadSubject.subject}", e) 65 | errorCount++ 66 | if (failFast) { 67 | throw e 68 | } 69 | } 70 | } 71 | return errorCount 72 | } 73 | 74 | private fun expandSubjectPatterns(): List { 75 | val subjectsSupplier = Suppliers.memoize { client.allSubjects } 76 | return subjects.flatMap { downloadSubject -> 77 | if (downloadSubject.regex) { 78 | parseSubjectRegex(downloadSubject.subject)?.let { regex -> 79 | subjectsSupplier.get() 80 | .filter { subject -> regex.matches(subject) } 81 | .map { subject -> 82 | DownloadSubject( 83 | subject, 84 | downloadSubject.outputPath, 85 | downloadSubject.version 86 | ) 87 | } 88 | .toList() 89 | } ?: emptyList() 90 | } else { 91 | listOf(downloadSubject) 92 | } 93 | } 94 | } 95 | 96 | private fun parseSubjectRegex(regex: String): Regex? 
{ 97 | return try { 98 | Regex(regex) 99 | } catch (exception: PatternSyntaxException) { 100 | logger.error("Unable to compile subject pattern of $regex, skipping", exception) 101 | null 102 | } 103 | } 104 | 105 | private fun getSchemaMetadata(subject: DownloadSubject): SchemaMetadata = 106 | if (subject.version == null) client.getLatestSchemaMetadata(subject.subject) 107 | else client.getSchemaMetadata(subject.subject, subject.version) 108 | 109 | private fun writeSchemaFile( 110 | downloadSubject: DownloadSubject, 111 | schemaMetadata: SchemaMetadata, 112 | pretty: Boolean, 113 | outputDir: File 114 | ) { 115 | val parsedSchema = parseSchemaWithRemoteReferences( 116 | downloadSubject.subject, 117 | schemaMetadata.schemaType.toSchemaType(), 118 | schemaMetadata.schema, 119 | schemaMetadata.references 120 | ) 121 | val fileName = downloadSubject.outputFileName ?: downloadSubject.subject 122 | val outputFile = File(outputDir, "${fileName}.${parsedSchema.schemaType().toSchemaType().extension}") 123 | outputFile.createNewFile() 124 | logger.infoIfNotQuiet("Writing file $outputFile") 125 | outputFile.printWriter().use { out -> 126 | out.println(getSchemaString(parsedSchema, pretty)) 127 | } 128 | } 129 | 130 | private fun getSchemaString(parsedSchema: ParsedSchema, pretty: Boolean): String { 131 | return if (pretty && isSupportedPrettyType(parsedSchema)) objectMapper.readTree(parsedSchema.toString()) 132 | .toPrettyString() else parsedSchema.toString() 133 | } 134 | 135 | /** 136 | * Checks whether the current schema type should be pretty-printed. 137 | * Avro and Json are considered eligible for pretty formatting, Protobuf is not. 138 | */ 139 | private fun isSupportedPrettyType(parsedSchema: ParsedSchema): Boolean { 140 | return parsedSchema.schemaType() == AvroSchema.TYPE || parsedSchema.schemaType() == JsonSchema.TYPE 141 | } 142 | 143 | private fun writeSchemaMetadata(downloadSubject: DownloadSubject, schemaMetadata: SchemaMetadata, outputDir: File) { 144 | val fileName = downloadSubject.outputFileName ?: downloadSubject.subject 145 | val outputFile = File(outputDir, "${fileName}-metadata.json") 146 | outputFile.createNewFile() 147 | logger.infoIfNotQuiet("Writing metadata file $outputFile") 148 | outputFile.printWriter().use { out -> 149 | out.println( 150 | objectMapper.writeValueAsString(schemaMetadata) 151 | ) 152 | } 153 | } 154 | 155 | private fun parseSchemaWithRemoteReferences( 156 | subject: String, 157 | schemaType: SchemaType, 158 | schemaContent: String, 159 | references: List, 160 | ): ParsedSchema = client 161 | .parseSchema(schemaType.registryType, schemaContent, references) 162 | .orElseThrow { SchemaParsingException(subject, schemaType) } 163 | 164 | private fun downloadReference( 165 | metadata: SchemaMetadata, 166 | outputPath: String, 167 | metadataDirectory: File, 168 | outputDir: File 169 | ) { 170 | metadata.references.forEach { 171 | logger.infoIfNotQuiet("Start downloading referenced schema ${it.subject}/${it.version}") 172 | val referenceSubject = DownloadSubject( 173 | subject = it.subject, 174 | outputPath = outputPath, 175 | version = it.version 176 | ) 177 | val referenceMetadata = getSchemaMetadata(referenceSubject) 178 | downloadReference(referenceMetadata, outputPath, metadataDirectory, outputDir) 179 | 180 | if (metadataConfiguration.enabled) { 181 | writeSchemaMetadata(referenceSubject, referenceMetadata, metadataDirectory) 182 | } 183 | writeSchemaFile(referenceSubject, referenceMetadata, pretty, outputDir) 184 | } 185 | } 186 | } 187 | 
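Before moving on to registration, here is how the download action above is typically driven from a build script (with the plugin applied via the plugins block, as in the earlier sketch): it expands regex subjectPattern entries against the registry's full subject list, optionally pretty-prints Avro and JSON schemas, and can pull referenced subjects recursively. The registry URL, subject names and output paths below are illustrative only:

schemaRegistry {
    url = 'http://localhost:8081'
    download {
        // Latest version of a single subject
        subject('company-value', 'schemas/results')
        // Same, but also download every subject it references
        subject('user-value', 'schemas/results', true)
        // Every subject matching the regex
        subjectPattern('.*-value', 'schemas/results')
    }
}

Running ./gradlew downloadSchemasTask then invokes DownloadTaskAction once, which loops over the expanded subject list and writes one schema file per subject into the configured output path.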
-------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/register/RegisterSchemasTask.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.register 2 | 3 | import com.github.imflog.schema.registry.RegistryClientWrapper 4 | import com.github.imflog.schema.registry.Subject 5 | import org.gradle.api.DefaultTask 6 | import org.gradle.api.GradleScriptException 7 | import org.gradle.api.model.ObjectFactory 8 | import org.gradle.api.provider.ListProperty 9 | import org.gradle.api.provider.MapProperty 10 | import org.gradle.api.provider.Property 11 | import org.gradle.api.tasks.Input 12 | import org.gradle.api.tasks.Optional 13 | import org.gradle.api.tasks.TaskAction 14 | import java.io.File 15 | import javax.inject.Inject 16 | 17 | abstract class RegisterSchemasTask @Inject constructor(objects: ObjectFactory) : DefaultTask() { 18 | 19 | companion object { 20 | const val TASK_NAME = "registerSchemasTask" 21 | } 22 | 23 | init { 24 | group = "registry" 25 | description = "Register schemas in the registry" 26 | } 27 | 28 | @Input 29 | val url: Property = objects.property(String::class.java) 30 | 31 | @Input 32 | val basicAuth: Property = objects.property(String::class.java) 33 | 34 | @Input 35 | val ssl: MapProperty = objects.mapProperty(String::class.java, String::class.java) 36 | 37 | @Input 38 | val subjects: ListProperty = objects.listProperty(Subject::class.java) 39 | 40 | @Input 41 | @Optional 42 | val outputDirectory: Property = objects.property(String::class.java) 43 | 44 | @Input 45 | val failFast: Property = objects.property(Boolean::class.java) 46 | 47 | @Input 48 | val rootDir: Property = objects.property(File::class.java) 49 | 50 | @TaskAction 51 | fun registerSchemas() { 52 | val errorCount = RegisterTaskAction( 53 | RegistryClientWrapper.client(url.get(), basicAuth.get(), ssl.get()), 54 | rootDir.get(), 55 | subjects.get(), 56 | outputDirectory.orNull, 57 | failFast.getOrElse(false) 58 | ).run() 59 | if (errorCount > 0) { 60 | throw GradleScriptException("$errorCount schemas not registered, see logs for details", Throwable()) 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/register/RegisterSubjectExtension.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.register 2 | 3 | import com.github.imflog.schema.registry.Subject 4 | import com.github.imflog.schema.registry.toSchemaType 5 | import io.confluent.kafka.schemaregistry.avro.AvroSchema 6 | import org.gradle.api.model.ObjectFactory 7 | import org.gradle.api.provider.ListProperty 8 | 9 | open class RegisterSubjectExtension(objects: ObjectFactory) { 10 | companion object { 11 | const val EXTENSION_NAME = "register" 12 | } 13 | 14 | val subjects: ListProperty = objects.listProperty(Subject::class.java) 15 | 16 | fun subject(inputSubject: String, file: String) = subject(inputSubject, file, AvroSchema.TYPE) 17 | 18 | fun subject( 19 | inputSubject: String, 20 | file: String, 21 | type: String 22 | ): Subject { 23 | val subject = Subject(inputSubject, file, type) 24 | subjects.add(subject) 25 | return subject 26 | } 27 | 28 | fun subject(subject: Subject) = subject.apply { subjects.add(subject) } 29 | } 30 | 
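The register block wired to this extension accepts the Subject helpers shown earlier in Subject.kt: remote references, local references, metadata, rule sets and normalization can all be chained on the return value of subject(...). A sketch with illustrative URL, subject names and paths (plugin applied via the usual plugins block):

schemaRegistry {
    url = 'http://localhost:8081'
    register {
        // Type defaults to AVRO when omitted
        subject('company-value', 'schemas/avro/company.avsc')
        // Reference an already registered subject at version 1 and normalize before registering
        subject('user-value', 'schemas/avro/user.avsc', 'AVRO')
            .addReference('Address', 'address-value', 1)
            .setNormalized(true)
    }
}

The registration itself is performed by RegisterTaskAction in the next file when ./gradlew registerSchemasTask runs; with an outputDirectory configured on the global extension it also writes a registered.csv summary of the registered schema ids.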
-------------------------------------------------------------------------------- /src/main/kotlin/com/github/imflog/schema/registry/tasks/register/RegisterTaskAction.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.register 2 | 3 | import com.github.imflog.schema.registry.LoggingUtils.infoIfNotQuiet 4 | import com.github.imflog.schema.registry.Subject 5 | import com.github.imflog.schema.registry.parser.SchemaParser 6 | import com.github.imflog.schema.registry.toSchemaType 7 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient 8 | import org.gradle.api.logging.Logging 9 | import java.io.File 10 | 11 | 12 | class RegisterTaskAction( 13 | private val client: SchemaRegistryClient, 14 | private val rootDir: File, 15 | private val subjects: List<Subject>, 16 | outputDir: String?, 17 | private val failFast: Boolean = false, 18 | ) { 19 | 20 | private val logger = Logging.getLogger(RegisterTaskAction::class.java) 21 | private val outputFile = outputDir?.let { 22 | rootDir.resolve(it).resolve("registered.csv") 23 | } 24 | 25 | fun run(): Int { 26 | var errorCount = 0 27 | writeOutputFileHeader() 28 | subjects.forEach { subject -> 29 | try { 30 | val schemaId = registerSchema(subject) 31 | writeRegisteredSchemaOutput(subject.inputSubject, subject.file, schemaId) 32 | } catch (e: Exception) { 33 | logger.error("Could not register schema for '$subject'", e) 34 | if (failFast) { 35 | throw e 36 | } 37 | errorCount++ 38 | } 39 | } 40 | return errorCount 41 | } 42 | 43 | private fun registerSchema( 44 | subject: Subject 45 | ): Int { 46 | val parsedSchema = SchemaParser 47 | .provide(subject.type.toSchemaType(), client, rootDir) 48 | .parseSchemaFromFile(subject) 49 | logger.infoIfNotQuiet("Registering $subject (from ${subject.file})") 50 | val schemaId = client.register(subject.inputSubject, parsedSchema, subject.normalize) 51 | logger.infoIfNotQuiet("$subject (from ${subject.file}) has been registered with id $schemaId") 52 | return schemaId 53 | } 54 | 55 | private fun writeOutputFileHeader() { 56 | if (subjects.isNotEmpty() && outputFile != null) { 57 | outputFile.writeText("subject, path, id\n") 58 | } 59 | } 60 | 61 | private fun writeRegisteredSchemaOutput(subject: String, path: String, schemaId: Int) { 62 | outputFile?.appendText("$subject, $path, $schemaId\n") 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/test/kotlin/com/github/imflog/schema/registry/SchemaRegistryPluginTest.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry 2 | 3 | import com.github.imflog.schema.registry.tasks.download.DownloadTask 4 | import org.assertj.core.api.Assertions 5 | import org.gradle.api.Project 6 | import org.gradle.testfixtures.ProjectBuilder 7 | import org.gradle.testkit.runner.GradleRunner 8 | import org.gradle.testkit.runner.UnexpectedBuildFailure 9 | import org.junit.jupiter.api.BeforeEach 10 | import org.junit.jupiter.api.Test 11 | import org.junit.jupiter.api.io.TempDir 12 | import java.io.File 13 | import java.nio.file.Files 14 | import java.nio.file.Path 15 | 16 | class SchemaRegistryPluginTest { 17 | lateinit var project: Project 18 | @TempDir 19 | lateinit var folderRule: Path 20 | lateinit var buildFile: File 21 | 22 | private val subject = "test-subject" 23 | 24 | @BeforeEach 25 | fun init() { 26 | project = ProjectBuilder.builder().build() 27 |
project.pluginManager.apply(SchemaRegistryPlugin::class.java) 28 | Files.createFile(folderRule.resolve("build.gradle")) 29 | } 30 | 31 | @Test 32 | fun `plugin should add tasks when applied`() { 33 | project.afterEvaluate { 34 | val downloadSchemaTask = project.tasks.getByName(DownloadTask.TASK_NAME) 35 | Assertions.assertThat(downloadSchemaTask).isNotNull() 36 | } 37 | } 38 | 39 | @Test 40 | fun `plugin should fail with wrong url extension configuration`() { 41 | buildFile = File(folderRule.toFile(), "build.gradle") 42 | buildFile.writeText( 43 | """ 44 | plugins { 45 | id 'java' 46 | id 'com.github.imflog.kafka-schema-registry-gradle-plugin' 47 | } 48 | 49 | schemaRegistry { 50 | urlFoo = 'http://localhost:1234/' 51 | output = 'src/main/avro' 52 | subjects = ['$subject'] 53 | } 54 | """ 55 | ) 56 | 57 | try { 58 | GradleRunner.create() 59 | .withGradleVersion("8.6") 60 | .withProjectDir(folderRule.toFile()) 61 | .withArguments(DownloadTask.TASK_NAME) 62 | .withPluginClasspath() 63 | .withDebug(true) 64 | .build() 65 | Assertions.fail("Should not reach this point") 66 | } catch (ex: UnexpectedBuildFailure) { 67 | Assertions.assertThat(ex.message).containsIgnoringCase("unknown property 'urlFoo'") 68 | } 69 | } 70 | 71 | @Test 72 | fun `plugin should fail with wrong credentials extension configuration`() { 73 | buildFile = File(folderRule.toFile(), "build.gradle") 74 | buildFile.writeText( 75 | """ 76 | plugins { 77 | id 'java' 78 | id 'com.github.imflog.kafka-schema-registry-gradle-plugin' 79 | } 80 | 81 | schemaRegistry { 82 | url = 'http://localhost:1234/' 83 | credentials { 84 | username = 'user' 85 | password = 'pass' 86 | } 87 | credentialsBar.username = 'user' 88 | output = 'src/main/avro' 89 | subjects = ['$subject'] 90 | } 91 | """ 92 | ) 93 | 94 | try { 95 | GradleRunner.create() 96 | .withGradleVersion("8.6") 97 | .withProjectDir(folderRule.toFile()) 98 | .withArguments(DownloadTask.TASK_NAME) 99 | .withPluginClasspath() 100 | .withDebug(true) 101 | .build() 102 | Assertions.fail("Should not reach this point") 103 | } catch (ex: UnexpectedBuildFailure) { 104 | Assertions.assertThat(ex.message).containsIgnoringCase("unknown property 'credentialsBar'") 105 | } 106 | } 107 | 108 | @Test 109 | fun `plugin should only parse nested extensions`() { 110 | buildFile = File(folderRule.toFile(), "build.gradle") 111 | buildFile.writeText( 112 | """ 113 | plugins { 114 | id 'java' 115 | id 'com.github.imflog.kafka-schema-registry-gradle-plugin' 116 | } 117 | 118 | // This should not be taken into account 119 | credentials { 120 | username = 'User' 121 | password = 'Passw0rd' 122 | } 123 | 124 | schemaRegistry { 125 | url = 'http://localhost:1234/' 126 | } 127 | """ 128 | ) 129 | 130 | try { 131 | GradleRunner.create() 132 | .withGradleVersion("8.6") 133 | .withProjectDir(folderRule.toFile()) 134 | .withArguments(DownloadTask.TASK_NAME) 135 | .withPluginClasspath() 136 | .withDebug(true) 137 | .build() 138 | Assertions.fail("Should not reach this point") 139 | } catch (ex: UnexpectedBuildFailure) { 140 | Assertions.assertThat(ex.message).containsIgnoringCase("Could not find method credentials()") 141 | } 142 | } 143 | } 144 | -------------------------------------------------------------------------------- /src/test/kotlin/com/github/imflog/schema/registry/parser/AvroSchemaParserTest.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.parser 2 | 3 | import com.github.imflog.schema.registry.LocalReference 4 | import 
io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient 5 | import org.assertj.core.api.Assertions 6 | import org.intellij.lang.annotations.Language 7 | import org.json.JSONObject 8 | import org.junit.jupiter.api.Test 9 | import org.junit.jupiter.api.TestInstance 10 | import org.junit.jupiter.api.assertDoesNotThrow 11 | import org.junit.jupiter.api.io.TempDir 12 | import java.io.File 13 | import java.nio.file.Path 14 | import java.nio.file.Paths 15 | 16 | @TestInstance(TestInstance.Lifecycle.PER_CLASS) 17 | class AvroSchemaParserTest { 18 | 19 | private val schemaRegistryClient = MockSchemaRegistryClient() 20 | private val testFilesPath = "${Paths.get("").toAbsolutePath()}/src/test/resources/" 21 | 22 | @TempDir 23 | lateinit var folderRule: Path 24 | 25 | companion object { 26 | private const val ADDRESS_REFERENCE_NAME = "Address" 27 | private const val USER_REFERENCE_NAME = "User" 28 | private const val ADDRESS_SCHEMA = """{ 29 | "type": "record", 30 | "name": "$ADDRESS_REFERENCE_NAME", 31 | "fields": [ 32 | {"name": "street", "type": "string" } 33 | ] 34 | }""" 35 | private const val USER_SCHEMA = """{ 36 | "type": "record", 37 | "name": "$USER_REFERENCE_NAME", 38 | "fields": [ 39 | {"name": "name", "type": "string" }, 40 | {"name": "address", "type": "$ADDRESS_REFERENCE_NAME"} 41 | ] 42 | }""" 43 | 44 | } 45 | 46 | @Test 47 | fun `Should format local references correctly`() { 48 | // Given 49 | val parser = AvroSchemaParser(schemaRegistryClient, folderRule.toFile()) 50 | val aLocalReference = givenALocalReference() 51 | val aUserSchemaFile = givenASchemaFile() 52 | 53 | // When 54 | val resolvedSchema = parser.resolveLocalReferences( 55 | USER_REFERENCE_NAME, 56 | aUserSchemaFile.path, 57 | listOf(aLocalReference) 58 | ) 59 | 60 | // Then 61 | localSchemaShouldBeAppended(resolvedSchema) 62 | } 63 | 64 | private fun givenALocalReference(): LocalReference { 65 | val addressLocalFile = folderRule.resolve("Address.avsc").toFile() 66 | addressLocalFile.writeText(ADDRESS_SCHEMA) 67 | return LocalReference(ADDRESS_REFERENCE_NAME, addressLocalFile.path) 68 | } 69 | 70 | private fun givenASchemaFile(): File { 71 | val userLocalFile = folderRule.resolve("User.avsc").toFile() 72 | userLocalFile.writeText(USER_SCHEMA) 73 | return userLocalFile 74 | } 75 | 76 | private fun localSchemaShouldBeAppended(resolvedSchema: String) { 77 | Assertions 78 | .assertThat( 79 | JSONObject(resolvedSchema) 80 | .getJSONArray("fields") 81 | .getJSONObject(1) 82 | .getJSONObject("type") 83 | ) 84 | .usingRecursiveComparison() // To check the content of the JSON Objects instead of the equal method 85 | .isEqualTo(JSONObject(ADDRESS_SCHEMA)) 86 | } 87 | 88 | @Test 89 | fun `Should resolve duplicated references with same namespace correctly`() { 90 | // Given 91 | val parser = AvroSchemaParser(schemaRegistryClient, File(testFilesPath)) 92 | val reference = LocalReference("B", "${testFilesPath}testType.avsc") 93 | val schema = File("${testFilesPath}testSubject.avsc") 94 | // When 95 | val resolvedSchema = parser.resolveLocalReferences( 96 | "test", 97 | schema.path, 98 | listOf(reference) 99 | ) 100 | // Then 101 | val resolved = JSONObject(resolvedSchema).toString() 102 | 103 | @Language("JSON") 104 | val expected = """{ 105 | "name": "A", 106 | "namespace": "com.mycompany", 107 | "type": "record", 108 | "fields": [ 109 | { 110 | "name": "nested", 111 | "type": { 112 | "name":"B", 113 | "type": "enum", 114 | "symbols": ["X1", "X2"] 115 | } 116 | }, 117 | { 118 | "name": "nested1", 119 | "type": "B" 120 | } 
121 | ] 122 | }""" 123 | 124 | Assertions.assertThat(resolved).isEqualTo( 125 | JSONObject(expected).toString() 126 | ) 127 | } 128 | 129 | @Test 130 | fun `Should resolve complex nested array example correctly`() { 131 | // Given 132 | val parser = AvroSchemaParser(schemaRegistryClient, File(testFilesPath)) 133 | val schema = File("${testFilesPath}ParentArraySubject.avsc") 134 | 135 | assertDoesNotThrow { 136 | parser.resolveLocalReferences( 137 | "test", 138 | schema.path, 139 | listOf( 140 | LocalReference("NestedArrayType", "${testFilesPath}NestedArrayType.avsc"), 141 | LocalReference("NestedNestedType", "${testFilesPath}NestedNestedType.avsc"), 142 | ) 143 | ) 144 | } 145 | } 146 | 147 | @Test 148 | fun `Should resolve complex nested record example correctly`() { 149 | // Given 150 | val parser = AvroSchemaParser(schemaRegistryClient, File(testFilesPath)) 151 | val schema = File("${testFilesPath}ParentSubject.avsc") 152 | 153 | assertDoesNotThrow { 154 | parser.resolveLocalReferences( 155 | "test", 156 | schema.path, 157 | listOf( 158 | LocalReference("NestedType", "${testFilesPath}NestedType.avsc"), 159 | LocalReference("NestedNestedType", "${testFilesPath}NestedNestedType.avsc"), 160 | ) 161 | ) 162 | } 163 | } 164 | 165 | @Test 166 | fun `Should resolve duplicated array references correctly`() { 167 | // Given 168 | val parser = AvroSchemaParser(schemaRegistryClient, File(testFilesPath)) 169 | val reference = LocalReference("B", "${testFilesPath}testType.avsc") 170 | val schema = File("${testFilesPath}testSubjectWithArrayReference.avsc") 171 | // When 172 | val resolvedSchema = parser.resolveLocalReferences( 173 | "test", 174 | schema.path, 175 | listOf(reference) 176 | ) 177 | // Then 178 | val resolved = JSONObject(resolvedSchema).toString() 179 | 180 | @Language("JSON") 181 | val expected = """{ 182 | "name": "A", 183 | "namespace": "com.mycompany", 184 | "type": "record", 185 | "fields": [ 186 | { 187 | "name": "nested", 188 | "type": { 189 | "type": "array", 190 | "items": { 191 | "name":"B", 192 | "type":"enum", 193 | "symbols": ["X1","X2"] 194 | } 195 | } 196 | }, 197 | { 198 | "name":"nested1", 199 | "type":{ 200 | "type":"array", 201 | "items":"com.mycompany.B" 202 | } 203 | } 204 | ] 205 | }""" 206 | 207 | Assertions.assertThat(resolved).isEqualTo( 208 | JSONObject(expected).toString() 209 | ) 210 | } 211 | 212 | @Test 213 | fun `Should parse with unknown remote references correctly`() { 214 | // Given 215 | val parser = AvroSchemaParser(schemaRegistryClient, File(testFilesPath)) 216 | val schema = File("${testFilesPath}testSubject.avsc") 217 | // When 218 | val resolvedSchema = parser.resolveLocalReferences( 219 | "test", 220 | schema.path, 221 | listOf() 222 | ) 223 | // Then 224 | val resolved = JSONObject(resolvedSchema).toString() 225 | 226 | @Language("JSON") 227 | val expected = """{ 228 | "type": "record", 229 | "name": "A", 230 | "namespace": "com.mycompany", 231 | "fields": [ 232 | { 233 | "name": "nested", 234 | "type": "B" 235 | }, 236 | { 237 | "name": "nested1", 238 | "type": "B" 239 | } 240 | ] 241 | }""" 242 | 243 | Assertions.assertThat(resolved).isEqualTo( 244 | JSONObject(expected).toString() 245 | ) 246 | } 247 | 248 | @Test 249 | fun `Should fix #199`() { 250 | // Given 251 | val parser = AvroSchemaParser(schemaRegistryClient, File(testFilesPath)) 252 | val mainRecord = File("${testFilesPath}/bug_199/main.avsc") 253 | 254 | // When 255 | val resolved = parser.resolveLocalReferences( 256 | "test", 257 | mainRecord.path, 258 | listOf( 259 | 
LocalReference("TypeA", "${testFilesPath}/bug_199/a.avsc"), 260 | LocalReference("TypeB", "${testFilesPath}/bug_199/b.avsc"), 261 | LocalReference("Shared", "${testFilesPath}/bug_199/shared.avsc") 262 | ) 263 | ) 264 | 265 | // Then 266 | val expected = """ 267 | { 268 | "type": "record", 269 | "name": "MainRecord", 270 | "namespace": "com.example", 271 | "fields": [ 272 | { 273 | "name": "example", 274 | "type": [ 275 | { 276 | "name": "TypeA", 277 | "type": "record", 278 | "fields": [ 279 | { 280 | "name": "fooA", 281 | "type": "string" 282 | }, 283 | { 284 | "name": "shared", 285 | "type": [ 286 | "null", 287 | { 288 | "name": "Shared", 289 | "type": "record", 290 | "namespace": "com.example.common", 291 | "fields": [ 292 | { 293 | "name": "foobar", 294 | "type": "string" 295 | }, 296 | { 297 | "name": "bar", 298 | "type": "string" 299 | } 300 | ] 301 | } 302 | ] 303 | } 304 | 305 | ] 306 | }, 307 | { 308 | "name": "TypeB", 309 | "type": "record", 310 | "fields": [ 311 | { 312 | "name": "fooB", 313 | "type": "string" 314 | }, 315 | { 316 | "name": "shared", 317 | "type": ["null", "com.example.common.Shared"] 318 | } 319 | 320 | ] 321 | } 322 | ] 323 | } 324 | ] 325 | } 326 | """ 327 | Assertions.assertThat(resolved).isEqualTo( 328 | JSONObject(expected).toString() 329 | ) 330 | } 331 | 332 | @Test 333 | fun `Should fix #204`() { 334 | // Given 335 | val parser = AvroSchemaParser(schemaRegistryClient, File(testFilesPath)) 336 | val mainRecord = File("${testFilesPath}/bug_204/mainMap.avsc") 337 | 338 | // When 339 | val resolved = parser.resolveLocalReferences( 340 | "test", 341 | mainRecord.path, 342 | listOf( 343 | LocalReference("Map", "${testFilesPath}/bug_204/map.avsc") 344 | ) 345 | ) 346 | 347 | // Then 348 | val expected = """ 349 | { 350 | "type": "record", 351 | "name": "MainMap", 352 | "namespace": "com.example", 353 | "fields": [ 354 | { 355 | "name": "foo", 356 | "type": "string" 357 | }, 358 | { 359 | "default": {}, 360 | "name": "types", 361 | "type": { 362 | "type": "map", 363 | "values": { 364 | "name": "Map", 365 | "type": "record", 366 | "fields": [ 367 | { 368 | "name": "foo", 369 | "type": ["null", "string"], 370 | "default": null 371 | }, 372 | { 373 | "name": "bar", 374 | "type": ["null", 375 | { 376 | "type": "array", 377 | "items": "string" 378 | } 379 | ], 380 | "default": null 381 | }, 382 | { 383 | "name": "foobar", 384 | "type": [ 385 | "null", 386 | { 387 | "type": "map", 388 | "values": "string" 389 | } 390 | ], 391 | "default": null 392 | } 393 | ] 394 | } 395 | } 396 | } 397 | ] 398 | } 399 | """ 400 | Assertions.assertThat(resolved).isEqualTo( 401 | JSONObject(expected).toString() 402 | ) 403 | } 404 | 405 | @Test 406 | fun `Should fix #206`() { 407 | // Given 408 | val parser = AvroSchemaParser(schemaRegistryClient, File(testFilesPath)) 409 | val mainRecord = File("${testFilesPath}/bug_206/Main.avsc") 410 | 411 | // When 412 | val resolved = parser.resolveLocalReferences( 413 | "test", 414 | mainRecord.path, 415 | listOf( 416 | LocalReference("Common", "${testFilesPath}/bug_206/Common.avsc"), 417 | LocalReference("NestedOne", "${testFilesPath}/bug_206/NestedOne.avsc"), 418 | LocalReference("NestedTwo", "${testFilesPath}/bug_206/NestedTwo.avsc") 419 | 420 | ) 421 | ) 422 | 423 | val expected = """ 424 | { 425 | "name": "MainRecord", 426 | "namespace": "com.example", 427 | "type": "record", 428 | "fields": 429 | [ 430 | { 431 | "name": "foo", 432 | "type": 433 | { 434 | "name": "NestedOne", 435 | "namespace": "com.example.one", 436 | "type": "record", 437 
| "fields": 438 | [ 439 | { 440 | "name": "foo", 441 | "type": "string" 442 | }, 443 | { 444 | "name": "bar", 445 | "type": 446 | { 447 | "name": "Common", 448 | "namespace": "com.common.example", 449 | "type": "record", 450 | "fields": 451 | [ 452 | { 453 | "name": "foo", 454 | "type": "string" 455 | } 456 | ] 457 | } 458 | } 459 | ] 460 | } 461 | }, 462 | { 463 | "name": "bar", 464 | "type": 465 | { 466 | "name": "NestedTwo", 467 | "namespace": "com.common.example", 468 | "type": "record", 469 | "fields": 470 | [ 471 | { 472 | "name": "foo", 473 | "type": "string" 474 | }, 475 | { 476 | "name": "bar", 477 | "type": "com.common.example.Common" 478 | } 479 | ] 480 | } 481 | } 482 | ] 483 | } 484 | """ 485 | Assertions.assertThat(resolved).isEqualTo( 486 | JSONObject(expected).toString() 487 | ) 488 | } 489 | } 490 | -------------------------------------------------------------------------------- /src/test/kotlin/com/github/imflog/schema/registry/parser/JsonSchemaParserTest.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.parser 2 | 3 | import com.github.imflog.schema.registry.LocalReference 4 | import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient 5 | import org.assertj.core.api.Assertions 6 | import org.json.JSONObject 7 | import org.junit.jupiter.api.Test 8 | import org.junit.jupiter.api.TestInstance 9 | import org.junit.jupiter.api.io.TempDir 10 | import java.io.File 11 | import java.nio.file.Path 12 | 13 | @TestInstance(TestInstance.Lifecycle.PER_CLASS) 14 | class JsonSchemaParserTest { 15 | 16 | private val schemaRegistryClient = MockSchemaRegistryClient() 17 | @TempDir 18 | lateinit var folderRule: Path 19 | 20 | companion object { 21 | private const val ADDRESS_REFERENCE_NAME = "Address" 22 | private const val USER_REFERENCE_NAME = "User" 23 | private const val ADDRESS_SCHEMA = """{ 24 | "${"$"}schema": "http://json-schema.org/draft-07/schema#", 25 | "${"$"}id": "$ADDRESS_REFERENCE_NAME", 26 | "type": "object", 27 | "properties": { 28 | "street": {"type": "string"} 29 | }, 30 | "additionalProperties": false 31 | }""" 32 | private const val USER_SCHEMA = """{ 33 | "${"$"}schema": "http://json-schema.org/draft-07/schema#", 34 | "${"$"}id": "$USER_REFERENCE_NAME", 35 | "type": "object", 36 | "properties": { 37 | "name": {"type": "string"}, 38 | "address": {"${"$"}ref": "$ADDRESS_REFERENCE_NAME"} 39 | }, 40 | "additionalProperties": false 41 | }""" 42 | } 43 | 44 | @Test 45 | fun `Should format local references correctly`() { 46 | // Given 47 | val parser = JsonSchemaParser(schemaRegistryClient, folderRule.toFile()) 48 | val aLocalReference = givenALocalReference() 49 | val aUserSchemaFile = givenASchemaFile() 50 | 51 | // When 52 | val resolvedSchema = parser.resolveLocalReferences( 53 | USER_REFERENCE_NAME, 54 | aUserSchemaFile.path, 55 | listOf(aLocalReference) 56 | ) 57 | 58 | // Then 59 | localSchemaShouldBeAppended(resolvedSchema) 60 | } 61 | 62 | private fun givenALocalReference(): LocalReference { 63 | val addressLocalFile = folderRule.resolve("Address.json").toFile() 64 | addressLocalFile.writeText(ADDRESS_SCHEMA) 65 | return LocalReference(ADDRESS_REFERENCE_NAME, addressLocalFile.path) 66 | } 67 | 68 | private fun givenASchemaFile(): File { 69 | val userLocalFile = folderRule.resolve("User.json").toFile() 70 | userLocalFile.writeText(USER_SCHEMA) 71 | return userLocalFile 72 | } 73 | 74 | private fun localSchemaShouldBeAppended(resolvedSchema: String) { 75 | Assertions 76 | 
.assertThat(JSONObject(resolvedSchema).getJSONArray("${"$"}defs").get(0)) 77 | .usingRecursiveComparison() // To check the content of the JSON Objects instead of the equal method 78 | .isEqualTo(JSONObject(mapOf(ADDRESS_REFERENCE_NAME to JSONObject(ADDRESS_SCHEMA)))) 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/test/kotlin/com/github/imflog/schema/registry/tasks/config/ConfigTaskActionTest.kt: -------------------------------------------------------------------------------- 1 | package com.github.imflog.schema.registry.tasks.config 2 | 3 | import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider 4 | import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient 5 | import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider 6 | import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider 7 | import org.assertj.core.api.Assertions 8 | import org.junit.jupiter.api.Test 9 | 10 | class ConfigTaskActionTest { 11 | 12 | @Test 13 | fun `Should set subject config`() { 14 | // given 15 | val registryClient = 16 | MockSchemaRegistryClient(listOf(AvroSchemaProvider(), JsonSchemaProvider(), ProtobufSchemaProvider())) 17 | val subjects = listOf(ConfigSubject("test", "FULL_TRANSITIVE")) 18 | 19 | // when 20 | val errorCount = ConfigTaskAction(registryClient, subjects).run() 21 | 22 | // then 23 | Assertions.assertThat(errorCount).isEqualTo(0) 24 | } 25 | 26 | @Test 27 | fun `Should fail if avro compatibility level does not exist`() { 28 | // given 29 | val registryClient = 30 | MockSchemaRegistryClient(listOf(AvroSchemaProvider(), JsonSchemaProvider(), ProtobufSchemaProvider())) 31 | val subjects = listOf(ConfigSubject("test", "FOO")) 32 | 33 | // when 34 | val errorCount = ConfigTaskAction(registryClient, subjects).run() 35 | 36 | // then 37 | Assertions.assertThat(errorCount).isEqualTo(1) 38 | } 39 | 40 | @Test 41 | fun `Should fail silently by default`() { 42 | // given 43 | val registryClient = 44 | MockSchemaRegistryClient(listOf(AvroSchemaProvider(), JsonSchemaProvider(), ProtobufSchemaProvider())) 45 | val subjects = listOf( 46 | ConfigSubject("test", "FULL_TRANSITIVE"), 47 | ConfigSubject("test", "FOO"), 48 | ConfigSubject("test", "FULL_TRANSITIVE"), 49 | ) 50 | 51 | // when 52 | val errorCount = ConfigTaskAction(registryClient, subjects).run() 53 | 54 | // then 55 | Assertions.assertThat(errorCount).isEqualTo(1) 56 | } 57 | 58 | @Test 59 | fun `Should fail fast correctly`() { 60 | // given 61 | val registryClient = 62 | MockSchemaRegistryClient(listOf(AvroSchemaProvider(), JsonSchemaProvider(), ProtobufSchemaProvider())) 63 | val subjects = listOf( 64 | ConfigSubject("test", "FULL_TRANSITIVE"), 65 | ConfigSubject("test", "FOO"), 66 | ConfigSubject("test", "FULL_TRANSITIVE"), 67 | ) 68 | 69 | // when 70 | try { 71 | ConfigTaskAction(registryClient, subjects, failFast = true).run() 72 | Assertions.fail("Should have thrown an exception") 73 | } catch (e: IllegalArgumentException) { 74 | // then 75 | // Nothing specific to check here 76 | } 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/test/resources/NestedArrayType.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "NestedArrayType", 4 | "namespace": "com.test.company", 5 | "fields": [ 6 | { 7 | "name": "someNestedArray", 8 | "type": { 9 | "type": "array", 10 | "items": "NestedNestedType" 11 | } 12 | } 13 | ] 14 | } 15 | 
-------------------------------------------------------------------------------- /src/test/resources/NestedNestedType.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "NestedNestedType", 4 | "namespace": "com.test.company", 5 | "fields": [ 6 | { 7 | "name": "name", 8 | "type": { 9 | "type": "string", 10 | "avro.java.string": "String" 11 | } 12 | }, 13 | { 14 | "name": "number", 15 | "type": "int" 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /src/test/resources/NestedType.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "NestedType", 4 | "namespace": "com.test.company", 5 | "fields": [ 6 | { 7 | "name": "someNestedRecord", 8 | "type": "NestedNestedType" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /src/test/resources/ParentArraySubject.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "ParentSubjectArray", 4 | "namespace": "com.test.company", 5 | "fields": [ 6 | { 7 | "name": "element", 8 | "type": "NestedArrayType" 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /src/test/resources/ParentSubject.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "ParentSubject", 4 | "namespace": "com.test.company", 5 | "fields": [ 6 | { 7 | "name": "element", 8 | "type": "NestedType" 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /src/test/resources/bug_199/a.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "name": "TypeA", 3 | "type": "record", 4 | "namespace": "com.example", 5 | "fields": [ 6 | { 7 | "name": "fooA", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "shared", 12 | "type": [ 13 | "null", 14 | "com.example.common.Shared" 15 | ] 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /src/test/resources/bug_199/b.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "name": "TypeB", 3 | "type": "record", 4 | "namespace": "com.example", 5 | "fields": [ 6 | { 7 | "name": "fooB", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "shared", 12 | "type": [ 13 | "null", 14 | "com.example.common.Shared" 15 | ] 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /src/test/resources/bug_199/main.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "MainRecord", 4 | "namespace": "com.example", 5 | "fields": [ 6 | { 7 | "name": "example", 8 | "type": [ 9 | "TypeA", 10 | "TypeB" 11 | ] 12 | } 13 | ] 14 | } -------------------------------------------------------------------------------- /src/test/resources/bug_199/shared.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Shared", 3 | "type": "record", 4 | "namespace": "com.example.common", 5 | "fields": [ 6 | { 7 | "name": "foobar", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "bar", 12 | "type": "string" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- 
/src/test/resources/bug_204/mainMap.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "MainMap", 4 | "namespace": "com.example", 5 | "fields": [ 6 | { 7 | "name": "foo", 8 | "type": "string", 9 | }, 10 | { 11 | "name": "types", 12 | "type": { 13 | "type": "map", 14 | "values": "Map" 15 | }, 16 | "default": {}, 17 | } 18 | ] 19 | } -------------------------------------------------------------------------------- /src/test/resources/bug_204/map.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "Map", 4 | "namespace": "com.example", 5 | "fields": [ 6 | { 7 | "name": "foo", 8 | "type": [ 9 | "null", 10 | "string" 11 | ], 12 | "default": null, 13 | }, 14 | { 15 | "name": "bar", 16 | "type": [ 17 | "null", 18 | { 19 | "type": "array", 20 | "items": "string" 21 | } 22 | ], 23 | "default": null, 24 | }, 25 | { 26 | "name": "foobar", 27 | "type": [ 28 | "null", 29 | { 30 | "type": "map", 31 | "values": "string" 32 | } 33 | ], 34 | "default": null, 35 | } 36 | ] 37 | } -------------------------------------------------------------------------------- /src/test/resources/bug_206/Common.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "Common", 4 | "namespace": "com.common.example", 5 | "fields": [ 6 | { 7 | "name": "foo", 8 | "type": "string" 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /src/test/resources/bug_206/Main.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "MainRecord", 4 | "namespace": "com.example", 5 | "fields": [ 6 | { 7 | "name": "foo", 8 | "type": "com.example.one.NestedOne" 9 | }, 10 | { 11 | "name": "bar", 12 | "type": "com.common.example.NestedTwo" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /src/test/resources/bug_206/NestedOne.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "NestedOne", 4 | "namespace": "com.example.one", 5 | "fields": [ 6 | { 7 | "name": "foo", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "bar", 12 | "type": "com.common.example.Common" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /src/test/resources/bug_206/NestedTwo.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "NestedTwo", 4 | "namespace": "com.common.example", 5 | "fields": [ 6 | { 7 | "name": "foo", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "bar", 12 | "type": "com.common.example.Common" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /src/test/resources/testMetadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "tags": { 3 | "**.ssn": [ "PII" ] 4 | }, 5 | "properties": { 6 | "owner": "Bob Jones", 7 | "email": "bob@acme.com" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /src/test/resources/testRuleSet.json: -------------------------------------------------------------------------------- 1 | { 2 | "domainRules": [ 3 | { 4 | "name": "encryptPII", 5 | "kind": "TRANSFORM", 6 | "type": "ENCRYPT", 7 | "mode": "WRITEREAD", 8 
| "tags": [ 9 | "PII" 10 | ], 11 | "params": { 12 | "encrypt.kek.name": "kafka-csfle", 13 | "encrypt.kms.key.id": "projects/gcp-project/locations/europe-west6/keyRings/gcp-keyring/cryptoKeys/kafka-csfle", 14 | "encrypt.kms.type": "gcp-kms" 15 | }, 16 | "onFailure": "ERROR,NONE" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /src/test/resources/testSimpleSubject.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "A", 4 | "namespace": "com.mycompany", 5 | "fields": [ 6 | { 7 | "name": "foo", 8 | "type": "string" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /src/test/resources/testSubject.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "A", 4 | "namespace": "com.mycompany", 5 | "fields": [ 6 | { 7 | "name": "nested", 8 | "type": "B" 9 | }, 10 | { 11 | "name": "nested1", 12 | "type": "B" 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /src/test/resources/testSubjectComplex.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "A", 4 | "namespace": "com.mycompany", 5 | "fields": [ 6 | { 7 | "name": "element", 8 | "type": [ 9 | "B", 10 | "C" 11 | ] 12 | } 13 | ] 14 | } -------------------------------------------------------------------------------- /src/test/resources/testSubjectWithArrayReference.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "A", 4 | "namespace": "com.mycompany", 5 | "fields": [ 6 | { 7 | "name": "nested", 8 | "type": { 9 | "type": "array", 10 | "items": "com.mycompany.B" 11 | } 12 | }, 13 | { 14 | "name": "nested1", 15 | "type": { 16 | "type": "array", 17 | "items": "com.mycompany.B" 18 | } 19 | } 20 | ] 21 | } -------------------------------------------------------------------------------- /src/test/resources/testSubjectWithTag.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "A", 4 | "namespace": "com.mycompany", 5 | "fields": [ 6 | { 7 | "name": "ssn", 8 | "type": "string", 9 | "confluent:tags": [ "PII" ] 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /src/test/resources/testType.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "enum", 3 | "name": "B", 4 | "namespace": "com.mycompany", 5 | "symbols" : ["X1", "X2"] 6 | } --------------------------------------------------------------------------------