├── .circleci └── config.yml ├── .gitignore ├── .mvn └── wrapper │ ├── MavenWrapperDownloader.java │ └── maven-wrapper.properties ├── LICENSE ├── README.md ├── mvnw ├── mvnw.cmd ├── pom.xml ├── settings.xml └── src ├── main ├── java │ └── cricket │ │ └── jmoore │ │ ├── kafka │ │ └── connect │ │ │ └── transforms │ │ │ ├── ConnectSchemaUtil.java │ │ │ └── SchemaRegistryTransfer.java │ │ └── security │ │ └── basicauth │ │ ├── DestSaslBasicAuthCredentialProvider.java │ │ ├── DestUrlBasicAuthCredentialProvider.java │ │ ├── DestUserInfoCredentialProvider.java │ │ ├── SrcSaslBasicAuthCredentialProvider.java │ │ ├── SrcUrlBasicAuthCredentialProvider.java │ │ └── SrcUserInfoCredentialProvider.java └── resources │ └── META-INF │ └── services │ └── io.confluent.kafka.schemaregistry.client.security.basicauth.BasicAuthCredentialProvider └── test ├── java └── cricket │ └── jmoore │ └── kafka │ └── connect │ └── transforms │ ├── Constants.java │ ├── SchemaRegistryMock.java │ ├── SchemaTest.java │ └── TransformTest.java └── resources └── logback-test.xml /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | # Java Maven CircleCI 2.0 configuration file 2 | # 3 | # Check https://circleci.com/docs/2.0/language-java/ for more details 4 | # 5 | version: 2 6 | jobs: 7 | build: 8 | docker: 9 | # specify the version you desire here 10 | - image: circleci/openjdk:8-jdk 11 | 12 | working_directory: ~/repo 13 | 14 | environment: 15 | # Customize the JVM maximum heap limit 16 | MAVEN_OPTS: -Xmx2048m 17 | 18 | steps: 19 | - checkout 20 | 21 | # Download and cache dependencies 22 | - restore_cache: 23 | keys: 24 | - v1-dependencies-{{ checksum "pom.xml" }} 25 | # fallback to using the latest cache if no exact match is found 26 | - v1-dependencies- 27 | 28 | - run: mvn dependency:go-offline 29 | 30 | - save_cache: 31 | paths: 32 | - ~/.m2 33 | key: v1-dependencies-{{ checksum "pom.xml" }} 34 | 35 | # run tests! 36 | - run: mvn verify 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | *.iml 3 | 4 | *.class 5 | 6 | *.log 7 | 8 | *.jar 9 | *.zip 10 | *.tar.gz 11 | *.rar 12 | 13 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 14 | hs_err_pid* 15 | 16 | ### macOS ### 17 | .DS_Store 18 | 19 | # Icon must end with two \r 20 | Icon 21 | 22 | # Thumbnails 23 | ._* 24 | 25 | ### Maven ### 26 | target/ 27 | dependency-reduced-pom.xml 28 | .mvn/timing.properties 29 | .mvn/wrapper/maven-wrapper.jar 30 | -------------------------------------------------------------------------------- /.mvn/wrapper/MavenWrapperDownloader.java: -------------------------------------------------------------------------------- 1 | /* 2 | Licensed to the Apache Software Foundation (ASF) under one 3 | or more contributor license agreements. See the NOTICE file 4 | distributed with this work for additional information 5 | regarding copyright ownership. The ASF licenses this file 6 | to you under the Apache License, Version 2.0 (the 7 | "License"); you may not use this file except in compliance 8 | with the License. 
You may obtain a copy of the License at 9 | 10 | http://www.apache.org/licenses/LICENSE-2.0 11 | 12 | Unless required by applicable law or agreed to in writing, 13 | software distributed under the License is distributed on an 14 | "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | KIND, either express or implied. See the License for the 16 | specific language governing permissions and limitations 17 | under the License. 18 | */ 19 | 20 | import java.net.*; 21 | import java.io.*; 22 | import java.nio.channels.*; 23 | import java.util.Properties; 24 | 25 | public class MavenWrapperDownloader { 26 | 27 | /** 28 | * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. 29 | */ 30 | private static final String DEFAULT_DOWNLOAD_URL = 31 | "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"; 32 | 33 | /** 34 | * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to 35 | * use instead of the default one. 36 | */ 37 | private static final String MAVEN_WRAPPER_PROPERTIES_PATH = 38 | ".mvn/wrapper/maven-wrapper.properties"; 39 | 40 | /** 41 | * Path where the maven-wrapper.jar will be saved to. 42 | */ 43 | private static final String MAVEN_WRAPPER_JAR_PATH = 44 | ".mvn/wrapper/maven-wrapper.jar"; 45 | 46 | /** 47 | * Name of the property which should be used to override the default download url for the wrapper. 48 | */ 49 | private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; 50 | 51 | public static void main(String args[]) { 52 | System.out.println("- Downloader started"); 53 | File baseDirectory = new File(args[0]); 54 | System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); 55 | 56 | // If the maven-wrapper.properties exists, read it and check if it contains a custom 57 | // wrapperUrl parameter. 58 | File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); 59 | String url = DEFAULT_DOWNLOAD_URL; 60 | if(mavenWrapperPropertyFile.exists()) { 61 | FileInputStream mavenWrapperPropertyFileInputStream = null; 62 | try { 63 | mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); 64 | Properties mavenWrapperProperties = new Properties(); 65 | mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); 66 | url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); 67 | } catch (IOException e) { 68 | System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); 69 | } finally { 70 | try { 71 | if(mavenWrapperPropertyFileInputStream != null) { 72 | mavenWrapperPropertyFileInputStream.close(); 73 | } 74 | } catch (IOException e) { 75 | // Ignore ... 
76 | } 77 | } 78 | } 79 | System.out.println("- Downloading from: " + url); 80 | 81 | File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); 82 | if(!outputFile.getParentFile().exists()) { 83 | if(!outputFile.getParentFile().mkdirs()) { 84 | System.out.println( 85 | "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); 86 | } 87 | } 88 | System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); 89 | try { 90 | downloadFileFromURL(url, outputFile); 91 | System.out.println("Done"); 92 | System.exit(0); 93 | } catch (Throwable e) { 94 | System.out.println("- Error downloading"); 95 | e.printStackTrace(); 96 | System.exit(1); 97 | } 98 | } 99 | 100 | private static void downloadFileFromURL(String urlString, File destination) throws Exception { 101 | URL website = new URL(urlString); 102 | ReadableByteChannel rbc; 103 | rbc = Channels.newChannel(website.openStream()); 104 | FileOutputStream fos = new FileOutputStream(destination); 105 | fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); 106 | fos.close(); 107 | rbc.close(); 108 | } 109 | 110 | } 111 | -------------------------------------------------------------------------------- /.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Schema Registry Transfer SMT 2 | ============================ 3 | 4 | A [Kafka Connect Single Message Transformation (SMT)][smt] that reads the serialized [wire format header][wire-format] of Confluent's `KafkaAvroSerializer`, looks up the schema for the ID in the message against a source [Confluent Schema Registry][schema-registry], and registers that schema for the same topic/subject in a destination Registry, where it receives a new ID. 5 | 6 | Use this transform where it is not feasible to make the destination Schema Registry a follower of the source Registry, or when migrating topics to a new cluster. 7 | 8 | > _Requires that the Kafka Connect tasks can reach both Schema Registries._ 9 | 10 | This transform does not mirror the contents of the `_schemas` topic, so each registry remains completely isolated from the other. As a side effect, any subject configuration applied via the `/config` endpoint of the source registry is not copied to the destination; in other words, be aware that you may get schema registration errors if the registries use differing compatibility levels. 11 | 12 | Example Kafka Connectors where this could be applied: 13 | 14 | - [Comcast/MirrorTool-for-Kafka-Connect](https://github.com/Comcast/MirrorTool-for-Kafka-Connect) - The code was tested with this connector first, verifying that its topic-renaming logic works with this SMT. 15 | - [Salesforce/mirus](https://github.com/salesforce/mirus) 16 | - [Confluent Replicator](https://docs.confluent.io/current/connect/kafka-connect-replicator/index.html) - While Replicator can already copy schemas, it does so only via the `AvroConverter`, which must first parse the entire message into a Kafka Connect `Struct` object. The class here is therefore a "shallow" copier — it only inspects [the first 5 bytes][wire-format] of the keys and values for the schema ids (see the sketch below). 17 | - [KIP-382 (MirrorMaker 2.0)](https://cwiki.apache.org/confluence/display/KAFKA/KIP-382%3A+MirrorMaker+2.0) - Still open at the time of writing. 18 | 
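For reference, that wire format is just a zero "magic" byte, a 4-byte big-endian schema ID, and then the Avro-encoded payload. Below is a minimal sketch of reading the header, using only the standard library; the class and method names are illustrative and not part of this project:

```java
import java.nio.ByteBuffer;

public class WireFormatSketch {
    private static final byte MAGIC_BYTE = (byte) 0x0;

    /** Returns the schema registry id embedded in a KafkaAvroSerializer payload. */
    public static int readSchemaId(byte[] payload) {
        // Header is 5 bytes: magic byte + 32-bit big-endian schema id; Avro data follows.
        if (payload.length <= 5) {
            throw new IllegalArgumentException("Payload too short for the wire format header");
        }
        ByteBuffer buffer = ByteBuffer.wrap(payload);
        if (buffer.get() != MAGIC_BYTE) {
            throw new IllegalArgumentException("Unknown magic byte!");
        }
        return buffer.getInt(); // the remaining bytes are the serialized Avro record
    }
}
```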
19 | 20 | ## Installation 21 | 22 | 1. Edit the Kafka Connect worker properties file on each worker to include a new plugin directory, for example `/opt/kafka-connect/plugins`: 23 | 24 | ```sh 25 | plugin.path=/usr/share/java,/opt/kafka-connect/plugins 26 | ``` 27 | 28 | 2. Build this project: 29 | 30 | ```sh 31 | ./mvnw clean package 32 | ``` 33 | 34 | 3. Copy the JAR from `target` to all Kafka Connect workers, under a directory set by `plugin.path` 35 | 36 | 4. (Re)start the Kafka Connect processes 37 | 38 | ## Usage 39 | 40 | Standalone Kafka Connect configuration section: 41 | 42 | ```properties 43 | # Requires that records are entirely byte-arrays. These can go in the worker or connector configuration. 44 | key.converter=org.apache.kafka.connect.converters.ByteArrayConverter 45 | value.converter=org.apache.kafka.connect.converters.ByteArrayConverter 46 | 47 | # Setup the SMT 48 | transforms=AvroSchemaTransfer 49 | 50 | transforms.AvroSchemaTransfer.type=cricket.jmoore.kafka.connect.transforms.SchemaRegistryTransfer 51 | transforms.AvroSchemaTransfer.src.schema.registry.url=http://schema-registry-1:8081 52 | transforms.AvroSchemaTransfer.dest.schema.registry.url=http://schema-registry-2:8081 53 | ``` 54 | 55 | Distributed Kafka Connect configuration section: 56 | 57 | ```json 58 | "config" : { 59 | ... 60 | 61 | "__comment": "Requires that records are entirely byte-arrays. These can go in the worker or connector configuration.", 62 | "key.converter": "org.apache.kafka.connect.converters.ByteArrayConverter", 63 | "value.converter": "org.apache.kafka.connect.converters.ByteArrayConverter", 64 | 65 | "__comment": "Setup the SMT", 66 | "transforms": "AvroSchemaTransfer", 67 | 68 | "transforms.AvroSchemaTransfer.type": "cricket.jmoore.kafka.connect.transforms.SchemaRegistryTransfer", 69 | "transforms.AvroSchemaTransfer.src.schema.registry.url": "http://schema-registry-1:8081", 70 | "transforms.AvroSchemaTransfer.dest.schema.registry.url": "http://schema-registry-2:8081" 71 | } 72 | ``` 73 | 74 | ## Advanced Configuration 75 | 76 | Configuration Parameter | Default | Description 77 | ----------------------- | ------- | ----------- 78 | **transfer.message.keys** | true | Indicates whether Avro schemas from message keys in source records should be copied to the destination Registry. 79 | **include.message.headers** | true | Indicates whether message headers from source records should be preserved after the transform. 80 | **schema.capacity** | 100 | The maximum number of schemas that can be cached by each `CachedSchemaRegistryClient`. 81 | 82 | ## Embedded Schema Registry Client Configuration 83 | 84 | Schema Registry Transfer SMT passes some properties, prefixed by either `src.` or `dest.`, 85 | through to its embedded schema registry clients, after stripping away the `src.` or `dest.` 86 | prefix that disambiguates which client receives which configuration value. 87 | 88 | Properties prefixed by `src.` are passed through to the source consumer's schema registry 89 | client. Properties prefixed by `dest.` are passed through to the target producer's schema 90 | registry client. 91 | 92 | Configuration Parameter | Default | Description 93 | ----------------------- | ------- | ----------- 94 | (src\|dest).basic.auth.credentials.source | URL | Specifies how to pick credentials for the Basic Auth header. Supported values are `URL`, `USER_INFO`, and `SASL_INHERIT`. 95 | (src\|dest).basic.auth.user.info | | Specifies the Basic Auth credentials, in the form `{username}:{password}`, when the credentials source is `USER_INFO`. 96 | 
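For example, to authenticate only the destination client with `USER_INFO` credentials, the SMT configuration might look like the following (the URLs and credentials are placeholders):

```properties
transforms.AvroSchemaTransfer.src.schema.registry.url=http://schema-registry-1:8081

transforms.AvroSchemaTransfer.dest.schema.registry.url=https://schema-registry-2:8081
transforms.AvroSchemaTransfer.dest.basic.auth.credentials.source=USER_INFO
transforms.AvroSchemaTransfer.dest.basic.auth.user.info=connect-user:connect-password
```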
97 | ## Subject Renaming 98 | 99 | A subject can be renamed by applying the `RegexRouter` Transform **before** this one. 100 | 101 | Example configuration: 102 | 103 | ```properties 104 | transforms=TopicRename,AvroSchemaTransfer 105 | 106 | transforms.TopicRename.type=org.apache.kafka.connect.transforms.RegexRouter 107 | transforms.TopicRename.regex=(.*) 108 | transforms.TopicRename.replacement=replica.$1 109 | 110 | transforms.AvroSchemaTransfer.type=... 111 | ``` 112 | 113 | 114 | [smt]: https://docs.confluent.io/current/connect/concepts.html#connect-transforms 115 | [schema-registry]: https://docs.confluent.io/current/schema-registry/docs/index.html 116 | [wire-format]: https://docs.confluent.io/current/schema-registry/docs/serializer-formatter.html#wire-format 117 | -------------------------------------------------------------------------------- /mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements.  See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership.  The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License.  You may obtain a copy of the License at 10 | # 11 | #    http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied.  See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven2 Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | #   JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | #   M2_HOME - location of maven2's installed home dir 31 | #   MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | #     e.g. to debug Maven itself, use 33 | #     set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | #   MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ] ; then 38 | 39 |   if [ -f /etc/mavenrc ] ; then 40 |     . /etc/mavenrc 41 |   fi 42 | 43 |   if [ -f "$HOME/.mavenrc" ] ; then 44 |     . "$HOME/.mavenrc" 45 |   fi 46 | 47 | fi 48 | 49 | # OS specific support.  $var _must_ be set to either true or false. 
50 | cygwin=false; 51 | darwin=false; 52 | mingw=false 53 | case "`uname`" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true;; 56 | Darwin*) darwin=true 57 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home 58 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html 59 | if [ -z "$JAVA_HOME" ]; then 60 | if [ -x "/usr/libexec/java_home" ]; then 61 | export JAVA_HOME="`/usr/libexec/java_home`" 62 | else 63 | export JAVA_HOME="/Library/Java/Home" 64 | fi 65 | fi 66 | ;; 67 | esac 68 | 69 | if [ -z "$JAVA_HOME" ] ; then 70 | if [ -r /etc/gentoo-release ] ; then 71 | JAVA_HOME=`java-config --jre-home` 72 | fi 73 | fi 74 | 75 | if [ -z "$M2_HOME" ] ; then 76 | ## resolve links - $0 may be a link to maven's home 77 | PRG="$0" 78 | 79 | # need this for relative symlinks 80 | while [ -h "$PRG" ] ; do 81 | ls=`ls -ld "$PRG"` 82 | link=`expr "$ls" : '.*-> \(.*\)$'` 83 | if expr "$link" : '/.*' > /dev/null; then 84 | PRG="$link" 85 | else 86 | PRG="`dirname "$PRG"`/$link" 87 | fi 88 | done 89 | 90 | saveddir=`pwd` 91 | 92 | M2_HOME=`dirname "$PRG"`/.. 93 | 94 | # make it fully qualified 95 | M2_HOME=`cd "$M2_HOME" && pwd` 96 | 97 | cd "$saveddir" 98 | # echo Using m2 at $M2_HOME 99 | fi 100 | 101 | # For Cygwin, ensure paths are in UNIX format before anything is touched 102 | if $cygwin ; then 103 | [ -n "$M2_HOME" ] && 104 | M2_HOME=`cygpath --unix "$M2_HOME"` 105 | [ -n "$JAVA_HOME" ] && 106 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"` 107 | [ -n "$CLASSPATH" ] && 108 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"` 109 | fi 110 | 111 | # For Mingw, ensure paths are in UNIX format before anything is touched 112 | if $mingw ; then 113 | [ -n "$M2_HOME" ] && 114 | M2_HOME="`(cd "$M2_HOME"; pwd)`" 115 | [ -n "$JAVA_HOME" ] && 116 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" 117 | # TODO classpath? 118 | fi 119 | 120 | if [ -z "$JAVA_HOME" ]; then 121 | javaExecutable="`which javac`" 122 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then 123 | # readlink(1) is not available as standard on Solaris 10. 124 | readLink=`which readlink` 125 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then 126 | if $darwin ; then 127 | javaHome="`dirname \"$javaExecutable\"`" 128 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" 129 | else 130 | javaExecutable="`readlink -f \"$javaExecutable\"`" 131 | fi 132 | javaHome="`dirname \"$javaExecutable\"`" 133 | javaHome=`expr "$javaHome" : '\(.*\)/bin'` 134 | JAVA_HOME="$javaHome" 135 | export JAVA_HOME 136 | fi 137 | fi 138 | fi 139 | 140 | if [ -z "$JAVACMD" ] ; then 141 | if [ -n "$JAVA_HOME" ] ; then 142 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 143 | # IBM's JDK on AIX uses strange locations for the executables 144 | JAVACMD="$JAVA_HOME/jre/sh/java" 145 | else 146 | JAVACMD="$JAVA_HOME/bin/java" 147 | fi 148 | else 149 | JAVACMD="`which java`" 150 | fi 151 | fi 152 | 153 | if [ ! -x "$JAVACMD" ] ; then 154 | echo "Error: JAVA_HOME is not defined correctly." >&2 155 | echo " We cannot execute $JAVACMD" >&2 156 | exit 1 157 | fi 158 | 159 | if [ -z "$JAVA_HOME" ] ; then 160 | echo "Warning: JAVA_HOME environment variable is not set." 
161 | fi 162 | 163 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 164 | 165 | # traverses directory structure from process work directory to filesystem root 166 | # first directory with .mvn subdirectory is considered project base directory 167 | find_maven_basedir() { 168 | 169 | if [ -z "$1" ] 170 | then 171 | echo "Path not specified to find_maven_basedir" 172 | return 1 173 | fi 174 | 175 | basedir="$1" 176 | wdir="$1" 177 | while [ "$wdir" != '/' ] ; do 178 | if [ -d "$wdir"/.mvn ] ; then 179 | basedir=$wdir 180 | break 181 | fi 182 | # workaround for JBEAP-8937 (on Solaris 10/Sparc) 183 | if [ -d "${wdir}" ]; then 184 | wdir=`cd "$wdir/.."; pwd` 185 | fi 186 | # end of workaround 187 | done 188 | echo "${basedir}" 189 | } 190 | 191 | # concatenates all lines of a file 192 | concat_lines() { 193 | if [ -f "$1" ]; then 194 | echo "$(tr -s '\n' ' ' < "$1")" 195 | fi 196 | } 197 | 198 | BASE_DIR=`find_maven_basedir "$(pwd)"` 199 | if [ -z "$BASE_DIR" ]; then 200 | exit 1; 201 | fi 202 | 203 | ########################################################################################## 204 | # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 205 | # This allows using the maven wrapper in projects that prohibit checking in binary data. 206 | ########################################################################################## 207 | if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then 208 | if [ "$MVNW_VERBOSE" = true ]; then 209 | echo "Found .mvn/wrapper/maven-wrapper.jar" 210 | fi 211 | else 212 | if [ "$MVNW_VERBOSE" = true ]; then 213 | echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." 214 | fi 215 | jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" 216 | while IFS="=" read key value; do 217 | case "$key" in (wrapperUrl) jarUrl="$value"; break ;; 218 | esac 219 | done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" 220 | if [ "$MVNW_VERBOSE" = true ]; then 221 | echo "Downloading from: $jarUrl" 222 | fi 223 | wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" 224 | 225 | if command -v wget > /dev/null; then 226 | if [ "$MVNW_VERBOSE" = true ]; then 227 | echo "Found wget ... using wget" 228 | fi 229 | wget "$jarUrl" -O "$wrapperJarPath" 230 | elif command -v curl > /dev/null; then 231 | if [ "$MVNW_VERBOSE" = true ]; then 232 | echo "Found curl ... using curl" 233 | fi 234 | curl -o "$wrapperJarPath" "$jarUrl" 235 | else 236 | if [ "$MVNW_VERBOSE" = true ]; then 237 | echo "Falling back to using Java to download" 238 | fi 239 | javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" 240 | if [ -e "$javaClass" ]; then 241 | if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 242 | if [ "$MVNW_VERBOSE" = true ]; then 243 | echo " - Compiling MavenWrapperDownloader.java ..." 244 | fi 245 | # Compiling the Java class 246 | ("$JAVA_HOME/bin/javac" "$javaClass") 247 | fi 248 | if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then 249 | # Running the downloader 250 | if [ "$MVNW_VERBOSE" = true ]; then 251 | echo " - Running MavenWrapperDownloader.java ..." 
252 | fi 253 | ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") 254 | fi 255 | fi 256 | fi 257 | fi 258 | ########################################################################################## 259 | # End of extension 260 | ########################################################################################## 261 | 262 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} 263 | if [ "$MVNW_VERBOSE" = true ]; then 264 | echo $MAVEN_PROJECTBASEDIR 265 | fi 266 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 267 | 268 | # For Cygwin, switch paths to Windows format before running java 269 | if $cygwin; then 270 | [ -n "$M2_HOME" ] && 271 | M2_HOME=`cygpath --path --windows "$M2_HOME"` 272 | [ -n "$JAVA_HOME" ] && 273 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` 274 | [ -n "$CLASSPATH" ] && 275 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"` 276 | [ -n "$MAVEN_PROJECTBASEDIR" ] && 277 | MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` 278 | fi 279 | 280 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 281 | 282 | exec "$JAVACMD" \ 283 | $MAVEN_OPTS \ 284 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 285 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 286 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" 287 | -------------------------------------------------------------------------------- /mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM http://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven2 Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. 
to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM set title of command window 39 | title %0 40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' 41 | @if "%MAVEN_BATCH_ECHO%" == "on"  echo %MAVEN_BATCH_ECHO% 42 | 43 | @REM set %HOME% to equivalent of $HOME 44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 45 | 46 | @REM Execute a user defined script before this one 47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 49 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 50 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 51 | :skipRcPre 52 | 53 | @setlocal 54 | 55 | set ERROR_CODE=0 56 | 57 | @REM To isolate internal variables from possible post scripts, we use another setlocal 58 | @setlocal 59 | 60 | @REM ==== START VALIDATION ==== 61 | if not "%JAVA_HOME%" == "" goto OkJHome 62 | 63 | echo. 64 | echo Error: JAVA_HOME not found in your environment. >&2 65 | echo Please set the JAVA_HOME variable in your environment to match the >&2 66 | echo location of your Java installation. >&2 67 | echo. 68 | goto error 69 | 70 | :OkJHome 71 | if exist "%JAVA_HOME%\bin\java.exe" goto init 72 | 73 | echo. 74 | echo Error: JAVA_HOME is set to an invalid directory. >&2 75 | echo JAVA_HOME = "%JAVA_HOME%" >&2 76 | echo Please set the JAVA_HOME variable in your environment to match the >&2 77 | echo location of your Java installation. >&2 78 | echo. 79 | goto error 80 | 81 | @REM ==== END VALIDATION ==== 82 | 83 | :init 84 | 85 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 86 | @REM Fallback to current working directory if not found. 87 | 88 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 89 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 90 | 91 | set EXEC_DIR=%CD% 92 | set WDIR=%EXEC_DIR% 93 | :findBaseDir 94 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 95 | cd .. 96 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 97 | set WDIR=%CD% 98 | goto findBaseDir 99 | 100 | :baseDirFound 101 | set MAVEN_PROJECTBASEDIR=%WDIR% 102 | cd "%EXEC_DIR%" 103 | goto endDetectBaseDir 104 | 105 | :baseDirNotFound 106 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 107 | cd "%EXEC_DIR%" 108 | 109 | :endDetectBaseDir 110 | 111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 112 | 113 | @setlocal EnableExtensions EnableDelayedExpansion 114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! 
%%a 115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 116 | 117 | :endReadAdditionalConfig 118 | 119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 122 | 123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" 124 | FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO ( 125 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B 126 | ) 127 | 128 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central 129 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data. 130 | if exist %WRAPPER_JAR% ( 131 | echo Found %WRAPPER_JAR% 132 | ) else ( 133 | echo Couldn't find %WRAPPER_JAR%, downloading it ... 134 | echo Downloading from: %DOWNLOAD_URL% 135 | powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')" 136 | echo Finished downloading %WRAPPER_JAR% 137 | ) 138 | @REM End of extension 139 | 140 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 141 | if ERRORLEVEL 1 goto error 142 | goto end 143 | 144 | :error 145 | set ERROR_CODE=1 146 | 147 | :end 148 | @endlocal & set ERROR_CODE=%ERROR_CODE% 149 | 150 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 151 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 152 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 153 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 154 | :skipRcPost 155 | 156 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 157 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 158 | 159 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 160 | 161 | exit /B %ERROR_CODE% 162 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | 5 | cricket.jmoore 6 | schema-registry-transfer-smt 7 | 0.2.1-SNAPSHOT 8 | schema-registry-transfer-smt 9 | 10 | A Kafka Connect Transform for copying Confluent Schema Registry schemas between different registries. 
11 | 12 | https://github.com/cricket007/schema-registry-transfer-smt 13 | 2019 14 | 15 | 16 | Apache License 2.0 17 | https://github.com/cricket007/schema-registry-transfer-smt/LICENSE 18 | repo 19 | 20 | 21 | 22 | 23 | cricket007 24 | Jordan Moore 25 | https://github.com/cricket007 26 | 27 | Owner 28 | 29 | 30 | 31 | 32 | scm:git:https://github.com/cricket007/schema-registry-transfer-smt.git 33 | scm:git:git@github.com:cricket007/schema-registry-transfer-smt.git 34 | https://github.com/cricket007/schema-registry-transfer-smt 35 | 36 | 37 | github 38 | https://github.com/cricket007/schema-registry-transfer-smt/issues 39 | 40 | 41 | 42 | bintray 43 | bintray-releases 44 | https://api.bintray.com/maven/${env.BINTRAY_REPO_OWNER}/${env.BINTRAY_REPO}/${project.artifactId}/;publish=1 45 | 46 | 47 | 48 | 49 | 50 | confluent 51 | https://packages.confluent.io/maven 52 | 53 | 54 | repo.eclipse.org 55 | sensiNact Repository - Snapshots 56 | https://repo.eclipse.org/content/repositories/sensinact-snapshots/ 57 | 58 | true 59 | 60 | 61 | 62 | 63 | 64 | UTF-8 65 | 1.8 66 | 1.8 67 | 68 | 1.7.25 69 | 2.1.0 70 | 5.1.0 71 | -cp1 72 | 2.9.7 73 | 1.9.13 74 | 75 | 1.20.0 76 | 77 | cricket.jmoore 78 | 79 | 80 | 81 | 82 | 83 | 84 | org.slf4j 85 | slf4j-api 86 | ${slf4j.version} 87 | provided 88 | 89 | 90 | 91 | org.apache.kafka 92 | kafka-clients 93 | ${kafka.version}${confluent.patch.version} 94 | provided 95 | 96 | 97 | 98 | org.apache.kafka 99 | connect-transforms 100 | ${kafka.version}${confluent.patch.version} 101 | provided 102 | 103 | 104 | 105 | org.apache.zookeeper 106 | zookeeper 107 | 3.4.13 108 | provided 109 | 110 | 111 | log4j 112 | log4j 113 | 114 | 115 | io.netty 116 | netty 117 | 118 | 119 | org.slf4j 120 | slf4j-log4j12 121 | 122 | 123 | 124 | 125 | 126 | com.101tec 127 | zkclient 128 | 0.10 129 | provided 130 | 131 | 132 | 133 | com.fasterxml.jackson.core 134 | jackson-databind 135 | ${jackson.version} 136 | provided 137 | 138 | 139 | 140 | org.codehaus.jackson 141 | jackson-core-asl 142 | ${jackson.asl.version} 143 | provided 144 | 145 | 146 | 147 | org.codehaus.jackson 148 | jackson-mapper-asl 149 | ${jackson.asl.version} 150 | provided 151 | 152 | 153 | 154 | com.thoughtworks.paranamer 155 | paranamer 156 | 2.7 157 | provided 158 | 159 | 160 | 161 | org.tukaani 162 | xz 163 | 1.5 164 | provided 165 | 166 | 167 | 168 | org.xerial.snappy 169 | snappy-java 170 | 1.1.7.1 171 | provided 172 | 173 | 174 | 175 | org.apache.commons 176 | commons-compress 177 | 1.8.1 178 | provided 179 | 180 | 181 | 182 | 183 | org.apache.avro 184 | avro 185 | 1.8.1 186 | 187 | 188 | 189 | io.confluent 190 | kafka-avro-serializer 191 | ${confluent.version} 192 | 193 | 194 | 195 | io.confluent 196 | kafka-schema-registry-client 197 | ${confluent.version} 198 | 199 | 200 | org.apache.kafka 201 | kafka-clients 202 | 203 | 204 | com.fasterxml.jackson.core 205 | jackson-databind 206 | 207 | 208 | 209 | 210 | 211 | 212 | org.junit.jupiter 213 | junit-jupiter-engine 214 | 5.3.1 215 | test 216 | 217 | 218 | 219 | org.slf4j 220 | log4j-over-slf4j 221 | ${slf4j.version} 222 | test 223 | 224 | 225 | 226 | ch.qos.logback 227 | logback-classic 228 | 1.2.3 229 | test 230 | 231 | 232 | 233 | com.github.tomakehurst 234 | wiremock 235 | 2.20.0 236 | test 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | org.apache.maven.plugins 245 | maven-surefire-plugin 246 | 3.0.0-M3 247 | 248 | 249 | 250 | 251 | 252 | maven-release-plugin 253 | 2.5.3 254 | 255 | 256 | org.apache.maven.plugins 257 | maven-shade-plugin 258 | 3.2.1 259 | 260 | false 
261 | 262 | 263 | *:* 264 | 265 | META-INF/maven/** 266 | META-INF/LICENSE* 267 | META-INF/NOTICE* 268 | META-INF/DEPENDENCIES 269 | 270 | 271 | 272 | 273 | 274 | io.confluent 275 | ${shade.prefix}.confluent 276 | 277 | 278 | org.apache.avro 279 | ${shade.prefix}.avro 280 | 281 | 282 | avro.shaded 283 | ${shade.prefix}.avroshaded 284 | 285 | 286 | 287 | 288 | 289 | 290 | 291 | 292 | package 293 | 294 | shade 295 | 296 | 297 | 298 | 299 | 300 | io.confluent 301 | kafka-connect-maven-plugin 302 | 0.11.1 303 | 304 | Avro Schema Transformer 305 | 306 | false 307 | true 308 | 309 | transform 310 | 311 | cricket007 312 | 313 | avro 314 | 315 | ${project.issueManagement.url} 316 | user 317 | Support provided through community involvement. 318 | true 319 | 320 | 321 | 322 | com.diffplug.spotless 323 | spotless-maven-plugin 324 | ${spotless.version} 325 | 326 | 327 | 328 | /* Licensed under Apache-2.0 */ 329 | 330 | 331 | 332 | 333 | 334 | 335 | 336 | java,org,io.confluent,com,cricket.jmoore, 337 | 338 | 339 | 340 | 341 | 342 | process-sources 343 | 344 | apply 345 | 346 | 347 | 348 | 349 | 350 | 351 | 352 | 353 | -------------------------------------------------------------------------------- /settings.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | bintray 7 | ${env.BINTRAY_USER} 8 | ${env.BINTRAY_API_KEY} 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /src/main/java/cricket/jmoore/kafka/connect/transforms/ConnectSchemaUtil.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.kafka.connect.transforms; 3 | 4 | import org.apache.kafka.connect.data.Schema; 5 | 6 | public class ConnectSchemaUtil { 7 | public static boolean isBytesSchema(Schema connectSchema) { 8 | if (connectSchema == null) { 9 | return false; 10 | } 11 | return connectSchema.type() == Schema.BYTES_SCHEMA.type() || 12 | connectSchema.type() == Schema.OPTIONAL_BYTES_SCHEMA.type(); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/main/java/cricket/jmoore/kafka/connect/transforms/SchemaRegistryTransfer.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.kafka.connect.transforms; 3 | 4 | import java.io.IOException; 5 | import java.nio.ByteBuffer; 6 | import java.util.HashMap; 7 | import java.util.List; 8 | import java.util.Map; 9 | import java.util.Objects; 10 | import java.util.Optional; 11 | 12 | import org.apache.kafka.common.cache.Cache; 13 | import org.apache.kafka.common.cache.LRUCache; 14 | import org.apache.kafka.common.cache.SynchronizedCache; 15 | import org.apache.kafka.common.config.ConfigDef; 16 | import org.apache.kafka.common.errors.SerializationException; 17 | import org.apache.kafka.connect.connector.ConnectRecord; 18 | import org.apache.kafka.connect.data.Schema; 19 | import org.apache.kafka.connect.errors.ConnectException; 20 | import org.apache.kafka.connect.transforms.Transformation; 21 | import org.apache.kafka.connect.transforms.util.NonEmptyListValidator; 22 | import org.apache.kafka.connect.transforms.util.SimpleConfig; 23 | import org.slf4j.Logger; 24 | import org.slf4j.LoggerFactory; 25 | 26 | import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; 27 | import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; 
28 | import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig; 29 | import io.confluent.kafka.serializers.subject.TopicNameStrategy; 30 | import io.confluent.kafka.serializers.subject.strategy.SubjectNameStrategy; 31 | 32 | @SuppressWarnings("unused") 33 | public class SchemaRegistryTransfer<R extends ConnectRecord<R>> implements Transformation<R> { 34 | public static final String OVERVIEW_DOC = "Inspect the Confluent KafkaAvroSerializer's wire-format header to copy schemas from one Schema Registry to another."; 35 | private static final Logger log = LoggerFactory.getLogger(SchemaRegistryTransfer.class); 36 | 37 | private static final byte MAGIC_BYTE = (byte) 0x0; 38 | // wire-format is magic byte + an integer, then data 39 | private static final short WIRE_FORMAT_PREFIX_LENGTH = 1 + (Integer.SIZE / Byte.SIZE); 40 | 41 | public static final ConfigDef CONFIG_DEF; 42 | public static final String SCHEMA_CAPACITY_CONFIG_DOC = "The maximum number of schemas to be stored for each Schema Registry client."; 43 | public static final Integer SCHEMA_CAPACITY_CONFIG_DEFAULT = 100; 44 | 45 | public static final String SRC_PREAMBLE = "For source consumer's schema registry, "; 46 | public static final String SRC_SCHEMA_REGISTRY_CONFIG_DOC = "A list of addresses for the Schema Registry to copy from. The consumer's Schema Registry."; 47 | public static final String SRC_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DOC = SRC_PREAMBLE + AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE_DOC; 48 | public static final String SRC_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DEFAULT = AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE_DEFAULT; 49 | public static final String SRC_USER_INFO_CONFIG_DOC = SRC_PREAMBLE + AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_USER_INFO_DOC; 50 | public static final String SRC_USER_INFO_CONFIG_DEFAULT = AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_USER_INFO_DEFAULT; 51 | 52 | public static final String DEST_PREAMBLE = "For target producer's schema registry, "; 53 | public static final String DEST_SCHEMA_REGISTRY_CONFIG_DOC = "A list of addresses for the Schema Registry to copy to. 
The producer's Schema Registry."; 54 | public static final String DEST_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DOC = DEST_PREAMBLE + AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE_DOC; 55 | public static final String DEST_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DEFAULT = AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE_DEFAULT; 56 | public static final String DEST_USER_INFO_CONFIG_DOC = DEST_PREAMBLE + AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_USER_INFO_DOC; 57 | public static final String DEST_USER_INFO_CONFIG_DEFAULT = AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_USER_INFO_DEFAULT; 58 | 59 | public static final String TRANSFER_KEYS_CONFIG_DOC = "Whether or not to copy message key schemas between registries."; 60 | public static final Boolean TRANSFER_KEYS_CONFIG_DEFAULT = true; 61 | public static final String INCLUDE_HEADERS_CONFIG_DOC = "Whether or not to preserve the Kafka Connect Record headers."; 62 | public static final Boolean INCLUDE_HEADERS_CONFIG_DEFAULT = true; 63 | 64 | private CachedSchemaRegistryClient sourceSchemaRegistryClient; 65 | private CachedSchemaRegistryClient destSchemaRegistryClient; 66 | private SubjectNameStrategy<org.apache.avro.Schema> subjectNameStrategy; 67 | private boolean transferKeys, includeHeaders; 68 | 69 | // maps schema ids in the source registry to their schema and id in the destination registry 70 | private Cache<Integer, SchemaAndId> schemaCache; 71 | 72 | public SchemaRegistryTransfer() { 73 | } 74 | 75 | static { 76 | CONFIG_DEF = (new ConfigDef()) 77 | .define(ConfigName.SRC_SCHEMA_REGISTRY_URL, ConfigDef.Type.LIST, ConfigDef.NO_DEFAULT_VALUE, new NonEmptyListValidator(), ConfigDef.Importance.HIGH, SRC_SCHEMA_REGISTRY_CONFIG_DOC) 78 | .define(ConfigName.DEST_SCHEMA_REGISTRY_URL, ConfigDef.Type.LIST, ConfigDef.NO_DEFAULT_VALUE, new NonEmptyListValidator(), ConfigDef.Importance.HIGH, DEST_SCHEMA_REGISTRY_CONFIG_DOC) 79 | .define(ConfigName.SRC_BASIC_AUTH_CREDENTIALS_SOURCE, ConfigDef.Type.STRING, SRC_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, SRC_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DOC) 80 | .define(ConfigName.SRC_USER_INFO, ConfigDef.Type.PASSWORD, SRC_USER_INFO_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, SRC_USER_INFO_CONFIG_DOC) 81 | .define(ConfigName.DEST_BASIC_AUTH_CREDENTIALS_SOURCE, ConfigDef.Type.STRING, DEST_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, DEST_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DOC) 82 | .define(ConfigName.DEST_USER_INFO, ConfigDef.Type.PASSWORD, DEST_USER_INFO_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, DEST_USER_INFO_CONFIG_DOC) 83 | .define(ConfigName.SCHEMA_CAPACITY, ConfigDef.Type.INT, SCHEMA_CAPACITY_CONFIG_DEFAULT, ConfigDef.Importance.LOW, SCHEMA_CAPACITY_CONFIG_DOC) 84 | .define(ConfigName.TRANSFER_KEYS, ConfigDef.Type.BOOLEAN, TRANSFER_KEYS_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, TRANSFER_KEYS_CONFIG_DOC) 85 | .define(ConfigName.INCLUDE_HEADERS, ConfigDef.Type.BOOLEAN, INCLUDE_HEADERS_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, INCLUDE_HEADERS_CONFIG_DOC) 86 | ; 87 | // TODO: Other properties might be useful, e.g. 
the Subject Strategies 88 | } 89 | 90 | @Override 91 | public ConfigDef config() { 92 | return CONFIG_DEF; 93 | } 94 | 95 | @Override 96 | public void configure(Map<String, ?> props) { 97 | SimpleConfig config = new SimpleConfig(CONFIG_DEF, props); 98 | 99 | List<String> sourceUrls = config.getList(ConfigName.SRC_SCHEMA_REGISTRY_URL); 100 | final Map<String, String> sourceProps = new HashMap<>(); 101 | sourceProps.put(AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE, 102 | "SRC_" + config.getString(ConfigName.SRC_BASIC_AUTH_CREDENTIALS_SOURCE)); 103 | sourceProps.put(AbstractKafkaAvroSerDeConfig.USER_INFO_CONFIG, 104 | config.getPassword(ConfigName.SRC_USER_INFO) 105 | .value()); 106 | 107 | List<String> destUrls = config.getList(ConfigName.DEST_SCHEMA_REGISTRY_URL); 108 | final Map<String, String> destProps = new HashMap<>(); 109 | destProps.put(AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE, 110 | "DEST_" + config.getString(ConfigName.DEST_BASIC_AUTH_CREDENTIALS_SOURCE)); 111 | destProps.put(AbstractKafkaAvroSerDeConfig.USER_INFO_CONFIG, 112 | config.getPassword(ConfigName.DEST_USER_INFO) 113 | .value()); 114 | 115 | Integer schemaCapacity = config.getInt(ConfigName.SCHEMA_CAPACITY); 116 | 117 | this.schemaCache = new SynchronizedCache<>(new LRUCache<>(schemaCapacity)); 118 | this.sourceSchemaRegistryClient = new CachedSchemaRegistryClient(sourceUrls, schemaCapacity, sourceProps); 119 | this.destSchemaRegistryClient = new CachedSchemaRegistryClient(destUrls, schemaCapacity, destProps); 120 | 121 | this.transferKeys = config.getBoolean(ConfigName.TRANSFER_KEYS); 122 | this.includeHeaders = config.getBoolean(ConfigName.INCLUDE_HEADERS); 123 | 124 | // TODO: Make the Strategy configurable, may be different for src and dest 125 | // Strategy for the -key and -value subjects 126 | this.subjectNameStrategy = new TopicNameStrategy(); 127 | } 128 | 129 | @Override 130 | public R apply(R r) { 131 | final String topic = r.topic(); 132 | 133 | // Transcribe the key's schema id 134 | final Object key = r.key(); 135 | final Schema keySchema = r.keySchema(); 136 | 137 | Object updatedKey = key; 138 | Optional<Integer> destKeySchemaId; 139 | if (transferKeys) { 140 | if (ConnectSchemaUtil.isBytesSchema(keySchema) || key instanceof byte[]) { 141 | if (key == null) { 142 | log.trace("Passing through null record key."); 143 | } else { 144 | byte[] keyAsBytes = (byte[]) key; 145 | int keyByteLength = keyAsBytes.length; 146 | if (keyByteLength <= 5) { 147 | throw new SerializationException("Unexpected byte[] length " + keyByteLength + " for Avro record key."); 148 | } 149 | ByteBuffer b = ByteBuffer.wrap(keyAsBytes); 150 | destKeySchemaId = copySchema(b, topic, true); 151 | b.putInt(1, destKeySchemaId.orElseThrow(() 152 | -> new ConnectException("Transform failed. Unable to update record schema id. (isKey=true)"))); 153 | updatedKey = b.array(); 154 | } 155 | } else { 156 | throw new ConnectException("Transform failed. Record key does not have a byte[] schema."); 157 | } 158 | } else { 159 | log.trace("Skipping record key translation. {} has been set to false. Keys will be passed as-is." 160 | , ConfigName.TRANSFER_KEYS); 161 | } 162 | 163 | // Transcribe the value's schema id 164 | final Object value = r.value(); 165 | final Schema valueSchema = r.valueSchema(); 166 | 167 | Object updatedValue = value; 168 | Optional<Integer> destValueSchemaId; 169 | if (ConnectSchemaUtil.isBytesSchema(valueSchema) || value instanceof byte[]) { 170 | if (value == null) { 171 | log.trace("Passing through null record value"); 172 | } else { 173 | byte[] valueAsBytes = (byte[]) value; 174 | int valueByteLength = valueAsBytes.length; 175 | if (valueByteLength <= 5) { 176 | throw new SerializationException("Unexpected byte[] length " + valueByteLength + " for Avro record value."); 177 | } 178 | ByteBuffer b = ByteBuffer.wrap(valueAsBytes); 179 | destValueSchemaId = copySchema(b, topic, false); 180 | b.putInt(1, destValueSchemaId.orElseThrow(() 181 | -> new ConnectException("Transform failed. Unable to update record schema id. (isKey=false)"))); 182 | updatedValue = b.array(); 183 | } 184 | } else { 185 | throw new ConnectException("Transform failed. Record value does not have a byte[] schema."); 186 | } 187 | 188 | 189 | return includeHeaders ? 190 | r.newRecord(topic, r.kafkaPartition(), 191 | keySchema, updatedKey, 192 | valueSchema, updatedValue, 193 | r.timestamp(), 194 | r.headers()) 195 | : 196 | r.newRecord(topic, r.kafkaPartition(), 197 | keySchema, updatedKey, 198 | valueSchema, updatedValue, 199 | r.timestamp()); 200 | } 201 | 
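/**
 * Reads the wire-format header from {@code buffer} (the magic byte, then a 4-byte schema id),
 * looks that schema up in the source registry, and registers it in the destination registry
 * under the subject derived from {@code topic} and {@code isKey}, caching the result.
 *
 * @return the destination registry's id for the schema, or an empty Optional if
 *         registering it in the destination registry failed
 */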
160 | , ConfigName.TRANSFER_KEYS);
161 | }
162 |
163 | // Transcribe the value's schema id
164 | final Object value = r.value();
165 | final Schema valueSchema = r.valueSchema();
166 |
167 | Object updatedValue = value;
168 | Optional<Integer> destValueSchemaId;
169 | if (ConnectSchemaUtil.isBytesSchema(valueSchema) || value instanceof byte[]) {
170 | if (value == null) {
171 | log.trace("Passing through null record value.");
172 | } else {
173 | byte[] valueAsBytes = (byte[]) value;
174 | int valueByteLength = valueAsBytes.length;
175 | if (valueByteLength <= 5) {
176 | throw new SerializationException("Unexpected byte[] length " + valueByteLength + " for Avro record value.");
177 | }
178 | ByteBuffer b = ByteBuffer.wrap(valueAsBytes);
179 | destValueSchemaId = copySchema(b, topic, false);
180 | b.putInt(1, destValueSchemaId.orElseThrow(()
181 | -> new ConnectException("Transform failed. Unable to update record schema id. (isKey=false)")));
182 | updatedValue = b.array();
183 | }
184 | } else {
185 | throw new ConnectException("Transform failed. Record value does not have a byte[] schema.");
186 | }
187 |
188 |
189 | return includeHeaders ?
190 | r.newRecord(topic, r.kafkaPartition(),
191 | keySchema, updatedKey,
192 | valueSchema, updatedValue,
193 | r.timestamp(),
194 | r.headers())
195 | :
196 | r.newRecord(topic, r.kafkaPartition(),
197 | keySchema, updatedKey,
198 | valueSchema, updatedValue,
199 | r.timestamp());
200 | }
201 |
202 | protected Optional<Integer> copySchema(ByteBuffer buffer, String topic, boolean isKey) {
203 | SchemaAndId schemaAndDestId;
204 | if (buffer.get() == MAGIC_BYTE) {
205 | int sourceSchemaId = buffer.getInt();
206 |
207 | schemaAndDestId = schemaCache.get(sourceSchemaId);
208 | if (schemaAndDestId != null) {
209 | log.trace("Schema id {} has been seen before. Not registering with destination registry again.", sourceSchemaId);
210 | } else { // cache miss
211 | log.trace("Schema id {} has not been seen before", sourceSchemaId);
212 | schemaAndDestId = new SchemaAndId();
213 | try {
214 | log.trace("Looking up schema id {} in source registry", sourceSchemaId);
215 | // Can't do getBySubjectAndId because that requires a Schema object for the strategy
216 | schemaAndDestId.schema = sourceSchemaRegistryClient.getById(sourceSchemaId);
217 | } catch (IOException | RestClientException e) {
218 | log.error(String.format("Unable to fetch source schema for id %d.", sourceSchemaId), e);
219 | throw new ConnectException(e);
220 | }
221 |
222 | try {
223 | log.trace("Registering schema {} to destination registry", schemaAndDestId.schema);
224 | // The destination subject naming strategy may differ from the source's
225 | String subjectName = subjectNameStrategy.subjectName(topic, isKey, schemaAndDestId.schema);
226 | schemaAndDestId.id = destSchemaRegistryClient.register(subjectName, schemaAndDestId.schema);
227 | schemaCache.put(sourceSchemaId, schemaAndDestId);
228 | } catch (IOException | RestClientException e) {
229 | log.error(String.format("Unable to register source schema id %d to destination registry.",
230 | sourceSchemaId), e);
231 | return Optional.empty();
232 | }
233 | }
234 | } else {
235 | throw new SerializationException("Unknown magic byte!");
236 | }
237 | return Optional.ofNullable(schemaAndDestId.id);
238 | }
239 |
240 | @Override
241 | public void close() {
242 | this.sourceSchemaRegistryClient = null;
243 | this.destSchemaRegistryClient = null;
244 | }
245 |
246 | interface ConfigName {
247 | String SRC_SCHEMA_REGISTRY_URL = "src."
+ AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; 248 | String SRC_BASIC_AUTH_CREDENTIALS_SOURCE = "src." + AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE; 249 | String SRC_USER_INFO = "src." + AbstractKafkaAvroSerDeConfig.USER_INFO_CONFIG; 250 | String DEST_SCHEMA_REGISTRY_URL = "dest." + AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; 251 | String DEST_BASIC_AUTH_CREDENTIALS_SOURCE = "dest." + AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE; 252 | String DEST_USER_INFO = "dest." + AbstractKafkaAvroSerDeConfig.USER_INFO_CONFIG; 253 | String SCHEMA_CAPACITY = "schema.capacity"; 254 | String TRANSFER_KEYS = "transfer.message.keys"; 255 | String INCLUDE_HEADERS = "include.message.headers"; 256 | } 257 | 258 | private static class SchemaAndId { 259 | private Integer id; 260 | private org.apache.avro.Schema schema; 261 | 262 | SchemaAndId() { 263 | } 264 | 265 | SchemaAndId(int id, org.apache.avro.Schema schema) { 266 | this.id = id; 267 | this.schema = schema; 268 | } 269 | 270 | @Override 271 | public boolean equals(Object o) { 272 | if (this == o) return true; 273 | if (o == null || getClass() != o.getClass()) return false; 274 | SchemaAndId schemaAndId = (SchemaAndId) o; 275 | return Objects.equals(id, schemaAndId.id) && 276 | Objects.equals(schema, schemaAndId.schema); 277 | } 278 | 279 | @Override 280 | public int hashCode() { 281 | return Objects.hash(id, schema); 282 | } 283 | 284 | @Override 285 | public String toString() { 286 | return "SchemaAndId{" + 287 | "id=" + id + 288 | ", schema=" + schema + 289 | '}'; 290 | } 291 | } 292 | 293 | } 294 | -------------------------------------------------------------------------------- /src/main/java/cricket/jmoore/security/basicauth/DestSaslBasicAuthCredentialProvider.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.security.basicauth; 3 | 4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.SaslBasicAuthCredentialProvider; 5 | 6 | public class DestSaslBasicAuthCredentialProvider extends SaslBasicAuthCredentialProvider { 7 | @Override 8 | public String alias() { 9 | return "DEST_SASL_INHERIT"; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/main/java/cricket/jmoore/security/basicauth/DestUrlBasicAuthCredentialProvider.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.security.basicauth; 3 | 4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.UrlBasicAuthCredentialProvider; 5 | 6 | public class DestUrlBasicAuthCredentialProvider extends UrlBasicAuthCredentialProvider { 7 | @Override 8 | public String alias() { 9 | return "DEST_URL"; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/main/java/cricket/jmoore/security/basicauth/DestUserInfoCredentialProvider.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.security.basicauth; 3 | 4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.UserInfoCredentialProvider; 5 | 6 | public class DestUserInfoCredentialProvider extends UserInfoCredentialProvider 7 | { 8 | @Override 9 | public String alias() { 10 | return "DEST_USER_INFO"; 11 | } 12 | } 13 | 
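A note on the wire format handled by SchemaRegistryTransfer above: the SMT only rewrites keys and values that already use the Confluent Schema Registry wire format, i.e. one magic byte (0x0), a 4-byte big-endian schema id, and then the Avro-encoded payload. That is why copySchema(...) first checks the magic byte, why payloads of 5 bytes or fewer are rejected, and why apply(...) overwrites the int at offset 1 once the destination id is known. The following minimal sketch illustrates that framing; the WireFormatSketch class and its method names are illustrative only and are not part of this repository:

    import java.nio.ByteBuffer;

    public class WireFormatSketch {

        private static final byte MAGIC_BYTE = (byte) 0x0;

        // Frame an already-serialized Avro payload in the registry wire format:
        // [magic byte][4-byte schema id][Avro bytes].
        static byte[] frame(int schemaId, byte[] avroPayload) {
            return ByteBuffer.allocate(1 + Integer.BYTES + avroPayload.length)
                    .put(MAGIC_BYTE)
                    .putInt(schemaId) // ByteBuffer defaults to big-endian, matching the registry format
                    .put(avroPayload)
                    .array();
        }

        // Swap the schema id in place, mirroring what apply(...) does after
        // copySchema(...) returns the id assigned by the destination registry.
        static byte[] reframe(byte[] framed, int newSchemaId) {
            ByteBuffer b = ByteBuffer.wrap(framed);
            if (b.get() != MAGIC_BYTE) {
                throw new IllegalArgumentException("Unknown magic byte!");
            }
            b.putInt(1, newSchemaId); // offset 1: immediately after the magic byte
            return b.array();
        }
    }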
-------------------------------------------------------------------------------- /src/main/java/cricket/jmoore/security/basicauth/SrcSaslBasicAuthCredentialProvider.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.security.basicauth; 3 | 4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.SaslBasicAuthCredentialProvider; 5 | 6 | public class SrcSaslBasicAuthCredentialProvider extends SaslBasicAuthCredentialProvider { 7 | @Override 8 | public String alias() { 9 | return "SRC_SASL_INHERIT"; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/main/java/cricket/jmoore/security/basicauth/SrcUrlBasicAuthCredentialProvider.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.security.basicauth; 3 | 4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.UrlBasicAuthCredentialProvider; 5 | 6 | public class SrcUrlBasicAuthCredentialProvider extends UrlBasicAuthCredentialProvider { 7 | @Override 8 | public String alias() { 9 | return "SRC_URL"; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/main/java/cricket/jmoore/security/basicauth/SrcUserInfoCredentialProvider.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.security.basicauth; 3 | 4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.UserInfoCredentialProvider; 5 | 6 | public class SrcUserInfoCredentialProvider extends UserInfoCredentialProvider 7 | { 8 | @Override 9 | public String alias() { 10 | return "SRC_USER_INFO"; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/main/resources/META-INF/services/io.confluent.kafka.schemaregistry.client.security.basicauth.BasicAuthCredentialProvider: -------------------------------------------------------------------------------- 1 | cricket.jmoore.security.basicauth.DestSaslBasicAuthCredentialProvider 2 | cricket.jmoore.security.basicauth.DestUrlBasicAuthCredentialProvider 3 | cricket.jmoore.security.basicauth.DestUserInfoCredentialProvider 4 | cricket.jmoore.security.basicauth.SrcSaslBasicAuthCredentialProvider 5 | cricket.jmoore.security.basicauth.SrcUrlBasicAuthCredentialProvider 6 | cricket.jmoore.security.basicauth.SrcUserInfoCredentialProvider -------------------------------------------------------------------------------- /src/test/java/cricket/jmoore/kafka/connect/transforms/Constants.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.kafka.connect.transforms; 3 | 4 | public interface Constants { 5 | public static final String USE_BASIC_AUTH_SOURCE_TAG = "useBasicAuthSource"; 6 | 7 | public static final String USE_BASIC_AUTH_DEST_TAG = "useBasicAuthDest"; 8 | 9 | public static final String USER_INFO_SOURCE = "USER_INFO"; 10 | 11 | public static final String URL_SOURCE = "URL"; 12 | 13 | public static final String HTTP_AUTH_SOURCE_CREDENTIALS_FIXTURE = "sourceuser:sourcepass"; 14 | 15 | public static final String HTTP_AUTH_DEST_CREDENTIALS_FIXTURE = "destuser:destpass"; 16 | } 17 | -------------------------------------------------------------------------------- 
/src/test/java/cricket/jmoore/kafka/connect/transforms/SchemaRegistryMock.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.kafka.connect.transforms; 3 | 4 | import static java.net.HttpURLConnection.HTTP_NOT_FOUND; 5 | 6 | import java.io.IOException; 7 | import java.util.List; 8 | import java.util.function.Function; 9 | import java.util.stream.Collectors; 10 | import java.util.stream.StreamSupport; 11 | 12 | import org.apache.avro.Schema; 13 | import org.junit.jupiter.api.extension.AfterEachCallback; 14 | import org.junit.jupiter.api.extension.BeforeEachCallback; 15 | import org.junit.jupiter.api.extension.ExtensionContext; 16 | import org.slf4j.Logger; 17 | import org.slf4j.LoggerFactory; 18 | 19 | import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; 20 | import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; 21 | import io.confluent.kafka.schemaregistry.client.SchemaMetadata; 22 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; 23 | import io.confluent.kafka.schemaregistry.client.rest.entities.Config; 24 | import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString; 25 | import io.confluent.kafka.schemaregistry.client.rest.entities.requests.RegisterSchemaRequest; 26 | import io.confluent.kafka.schemaregistry.client.rest.entities.requests.RegisterSchemaResponse; 27 | import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; 28 | import io.confluent.kafka.serializers.subject.TopicNameStrategy; 29 | import io.confluent.kafka.serializers.subject.strategy.SubjectNameStrategy; 30 | 31 | import com.github.tomakehurst.wiremock.WireMockServer; 32 | import com.github.tomakehurst.wiremock.client.MappingBuilder; 33 | import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder; 34 | import com.github.tomakehurst.wiremock.client.WireMock; 35 | import com.github.tomakehurst.wiremock.common.FileSource; 36 | import com.github.tomakehurst.wiremock.core.WireMockConfiguration; 37 | import com.github.tomakehurst.wiremock.extension.Parameters; 38 | import com.github.tomakehurst.wiremock.extension.ResponseDefinitionTransformer; 39 | import com.github.tomakehurst.wiremock.http.Request; 40 | import com.github.tomakehurst.wiremock.http.ResponseDefinition; 41 | import com.github.tomakehurst.wiremock.stubbing.StubMapping; 42 | import com.google.common.base.Splitter; 43 | import com.google.common.collect.Iterables; 44 | 45 | /** 46 | *
<p>The schema registry mock implements a few basic HTTP endpoints that are used by the Avro serdes.</p>
47 | * In particular,
48 | * <ul>
49 | * <li>you can register a schema and</li>
50 | * <li>retrieve a schema by id.</li>
51 | * </ul>
52 | *
53 | * <p>Additionally, the server-side mock can be toggled from its default authentication behavior (no authentication)
54 | * to a variant that requires basic HTTP authentication using the fixed credentials `username:password` by placing a
55 | * `@Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG)` and/or `@Tag(Constants.USE_BASIC_AUTH_DEST_TAG)` annotation after the
56 | * {@literal @Test} annotation of any test code that depends on basic HTTP authentication.</p>
57 | *
58 | * <p>If you use the TestTopology of the fluent Kafka Streams tests framework, you don't have to interact with this class at
59 | * all.</p>
60 | *
61 | * <p>Without the test framework, you can use the mock as follows:</p>
62 | * <pre><code>
63 |  * class SchemaRegistryMockTest {
64 |  *     {@literal @RegisterExtension}
65 |  *     final SchemaRegistryMock schemaRegistry = new SchemaRegistryMock(Role.SOURCE);
66 |  *
67 |  *     {@literal @Test}
68 |  *     void shouldRegisterKeySchema() throws IOException, RestClientException {
69 |  *         final Schema keySchema = this.createSchema("key_schema");
70 |  *         final int id = this.schemaRegistry.registerSchema("test-topic", true, keySchema);
71 |  *
72 |  *         final Schema retrievedSchema = this.schemaRegistry.getSchemaRegistryClient().getById(id);
73 |  *         assertThat(retrievedSchema).isEqualTo(keySchema);
74 |  *     }
75 |  *
76 |  *     {@literal @Test}
77 |  *     {@literal @Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG)}
78 |  *     {@literal @Tag(Constants.USE_BASIC_AUTH_DEST_TAG)}
79 |  *     void shouldUseBasicAuth() {
80 |  *         final Map<String, Object> smtConfiguration = new HashMap<>();
81 |  *         // ...
82 |  *         smtConfiguration.put(ConfigName.SRC_BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO");
83 |  *         smtConfiguration.put(ConfigName.SRC_USER_INFO, "username:password");
84 |  *         smtConfiguration.put(ConfigName.DEST_BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO");
85 |  *         smtConfiguration.put(ConfigName.DEST_USER_INFO, "username:password");
86 |  *         // ...
87 |  *         final SchemaRegistryTransfer smt = new SchemaRegistryTransfer();
88 |  *         smt.configure(smtConfiguration);
89 |  *         // ...
90 |  *         smt.apply(...);
91 |  *     }
92 |  * }
93 | * </code></pre>
94 | * To retrieve the url of the schema registry for a Kafka Streams config, please use {@link #getUrl()}
95 | */
96 | public class SchemaRegistryMock implements BeforeEachCallback, AfterEachCallback {
97 | public enum Role {
98 | SOURCE,
99 | DESTINATION;
100 | }
101 |
102 | private static final String SCHEMA_REGISTRATION_PATTERN = "/subjects/[^/]+/versions";
103 | private static final String SCHEMA_BY_ID_PATTERN = "/schemas/ids/";
104 | private static final String CONFIG_PATTERN = "/config";
105 | private static final int IDENTITY_MAP_CAPACITY = 1000;
106 | private final ListVersionsHandler listVersionsHandler = new ListVersionsHandler();
107 | private final GetVersionHandler getVersionHandler = new GetVersionHandler();
108 | private final AutoRegistrationHandler autoRegistrationHandler = new AutoRegistrationHandler();
109 | private final GetConfigHandler getConfigHandler = new GetConfigHandler();
110 | private final WireMockServer mockSchemaRegistry = new WireMockServer(
111 | WireMockConfiguration.wireMockConfig().dynamicPort().extensions(
112 | this.autoRegistrationHandler, this.listVersionsHandler, this.getVersionHandler,
113 | this.getConfigHandler));
114 | private final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
115 | private final String basicAuthTag;
116 | private final String basicAuthCredentials;
117 | private Function<MappingBuilder, StubMapping> stubFor;
118 |
119 | private static final Logger log = LoggerFactory.getLogger(SchemaRegistryMock.class);
120 |
121 | public SchemaRegistryMock(Role role) {
122 | if (role == null) {
123 | throw new IllegalArgumentException("Role must be either SOURCE or DESTINATION");
124 | }
125 |
126 | this.basicAuthTag = (role == Role.SOURCE) ? Constants.USE_BASIC_AUTH_SOURCE_TAG : Constants.USE_BASIC_AUTH_DEST_TAG;
127 | this.basicAuthCredentials =
128 | (role == Role.SOURCE) ?
Constants.HTTP_AUTH_SOURCE_CREDENTIALS_FIXTURE : Constants.HTTP_AUTH_DEST_CREDENTIALS_FIXTURE;
129 | }
130 |
131 | @Override
132 | public void afterEach(final ExtensionContext context) {
133 | this.mockSchemaRegistry.stop();
134 | }
135 |
136 | @Override
137 | public void beforeEach(final ExtensionContext context) {
138 | if (context.getTags().contains(this.basicAuthTag)) {
139 | final String[] userPass = this.basicAuthCredentials.split(":");
140 | this.stubFor = (MappingBuilder mappingBuilder) -> this.mockSchemaRegistry.stubFor(
141 | mappingBuilder.withBasicAuth(userPass[0], userPass[1]));
142 | } else {
143 | this.stubFor = (MappingBuilder mappingBuilder) -> this.mockSchemaRegistry.stubFor(mappingBuilder);
144 | }
145 |
146 | this.mockSchemaRegistry.start();
147 | this.stubFor.apply(WireMock.get(WireMock.urlPathMatching(SCHEMA_REGISTRATION_PATTERN))
148 | .willReturn(WireMock.aResponse().withTransformers(this.listVersionsHandler.getName())));
149 | this.stubFor.apply(WireMock.post(WireMock.urlPathMatching(SCHEMA_REGISTRATION_PATTERN))
150 | .willReturn(WireMock.aResponse().withTransformers(this.autoRegistrationHandler.getName())));
151 | this.stubFor.apply(WireMock.get(WireMock.urlPathMatching(SCHEMA_REGISTRATION_PATTERN + "/(?:latest|\\d+)"))
152 | .willReturn(WireMock.aResponse().withTransformers(this.getVersionHandler.getName())));
153 | this.stubFor.apply(WireMock.get(WireMock.urlPathMatching(CONFIG_PATTERN))
154 | .willReturn(WireMock.aResponse().withTransformers(this.getConfigHandler.getName())));
155 | this.stubFor.apply(WireMock.get(WireMock.urlPathMatching(SCHEMA_BY_ID_PATTERN + "\\d+"))
156 | .willReturn(WireMock.aResponse().withStatus(HTTP_NOT_FOUND)));
157 | }
158 |
159 | public int registerSchema(final String topic, boolean isKey, final Schema schema) {
160 | return this.registerSchema(topic, isKey, schema, new TopicNameStrategy());
161 | }
162 |
163 | public int registerSchema(final String topic, boolean isKey, final Schema schema, SubjectNameStrategy strategy) {
164 | return this.register(strategy.subjectName(topic, isKey, schema), schema);
165 | }
166 |
167 | private int register(final String subject, final Schema schema) {
168 | try {
169 | final int id = this.schemaRegistryClient.register(subject, schema);
170 | this.stubFor.apply(WireMock.get(WireMock.urlEqualTo(SCHEMA_BY_ID_PATTERN + id))
171 | .willReturn(ResponseDefinitionBuilder.okForJson(new SchemaString(schema.toString()))));
172 | log.debug("Registered schema {}", id);
173 | return id;
174 | } catch (final IOException | RestClientException e) {
175 | throw new IllegalStateException("Internal error in mock schema registry client", e);
176 | }
177 | }
178 |
179 | private List<Integer> listVersions(String subject) {
180 | log.debug("Listing all versions for subject {}", subject);
181 | try {
182 | return this.schemaRegistryClient.getAllVersions(subject);
183 | } catch (IOException | RestClientException e) {
184 | throw new IllegalStateException("Internal error in mock schema registry client", e);
185 | }
186 | }
187 |
188 | private SchemaMetadata getSubjectVersion(String subject, Object version) {
189 | log.debug("Requesting version {} for subject {}", version, subject);
190 | try {
191 | if (version instanceof String && version.equals("latest")) {
192 | return this.schemaRegistryClient.getLatestSchemaMetadata(subject);
193 | } else if (version instanceof Number) {
194 | return this.schemaRegistryClient.getSchemaMetadata(subject, ((Number) version).intValue());
195 | } else {
196 | throw new IllegalArgumentException("Only
'latest' or integer versions are allowed");
197 | }
198 | } catch (IOException | RestClientException e) {
199 | throw new IllegalStateException("Internal error in mock schema registry client", e);
200 | }
201 | }
202 |
203 | private String getCompatibility(String subject) {
204 | if (subject == null) {
205 | log.debug("Requesting registry base compatibility");
206 | } else {
207 | log.debug("Requesting compatibility for subject {}", subject);
208 | }
209 | try {
210 | return this.schemaRegistryClient.getCompatibility(subject);
211 | } catch (IOException | RestClientException e) {
212 | throw new IllegalStateException("Internal error in mock schema registry client", e);
213 | }
214 | }
215 |
216 | public SchemaRegistryClient getSchemaRegistryClient() {
217 | return new CachedSchemaRegistryClient(this.getUrl(), IDENTITY_MAP_CAPACITY);
218 | }
219 |
220 | public String getUrl() {
221 | return "http://localhost:" + this.mockSchemaRegistry.port();
222 | }
223 |
224 | private abstract class SubjectsVersionHandler extends ResponseDefinitionTransformer {
225 | // Expected url pattern /subjects/.*-value/versions
226 | protected final Splitter urlSplitter = Splitter.on('/').omitEmptyStrings();
227 |
228 | protected String getSubject(Request request) {
229 | return Iterables.get(this.urlSplitter.split(request.getUrl()), 1);
230 | }
231 |
232 | @Override
233 | public boolean applyGlobally() {
234 | return false;
235 | }
236 | }
237 |
238 | private class AutoRegistrationHandler extends SubjectsVersionHandler {
239 |
240 | @Override
241 | public ResponseDefinition transform(final Request request, final ResponseDefinition responseDefinition,
242 | final FileSource files, final Parameters parameters) {
243 | try {
244 | final int id = SchemaRegistryMock.this.register(getSubject(request),
245 | new Schema.Parser()
246 | .parse(RegisterSchemaRequest.fromJson(request.getBodyAsString()).getSchema()));
247 | final RegisterSchemaResponse registerSchemaResponse = new RegisterSchemaResponse();
248 | registerSchemaResponse.setId(id);
249 | return ResponseDefinitionBuilder.jsonResponse(registerSchemaResponse);
250 | } catch (final IOException e) {
251 | throw new IllegalArgumentException("Cannot parse schema registration request", e);
252 | }
253 | }
254 |
255 | @Override
256 | public String getName() {
257 | return AutoRegistrationHandler.class.getSimpleName();
258 | }
259 | }
260 |
261 | private class ListVersionsHandler extends SubjectsVersionHandler {
262 |
263 | @Override
264 | public ResponseDefinition transform(final Request request, final ResponseDefinition responseDefinition,
265 | final FileSource files, final Parameters parameters) {
266 | final List<Integer> versions = SchemaRegistryMock.this.listVersions(getSubject(request));
267 | log.debug("Got versions {}", versions);
268 | return ResponseDefinitionBuilder.jsonResponse(versions);
269 | }
270 |
271 | @Override
272 | public String getName() {
273 | return ListVersionsHandler.class.getSimpleName();
274 | }
275 | }
276 |
277 | private class GetVersionHandler extends SubjectsVersionHandler {
278 |
279 | @Override
280 | public ResponseDefinition transform(final Request request, final ResponseDefinition responseDefinition,
281 | final FileSource files, final Parameters parameters) {
282 | String versionStr = Iterables.get(this.urlSplitter.split(request.getUrl()), 3);
283 | SchemaMetadata metadata;
284 | if (versionStr.equals("latest")) {
285 | metadata = SchemaRegistryMock.this.getSubjectVersion(getSubject(request), versionStr);
286 | } else {
287 | int version =
Integer.parseInt(versionStr);
288 | metadata = SchemaRegistryMock.this.getSubjectVersion(getSubject(request), version);
289 | }
290 | return ResponseDefinitionBuilder.jsonResponse(metadata);
291 | }
292 |
293 | @Override
294 | public String getName() {
295 | return GetVersionHandler.class.getSimpleName();
296 | }
297 | }
298 |
299 | private class GetConfigHandler extends SubjectsVersionHandler {
300 |
301 | @Override
302 | protected String getSubject(Request request) {
303 | List<String> parts =
304 | StreamSupport.stream(this.urlSplitter.split(request.getUrl()).spliterator(), false)
305 | .collect(Collectors.toList());
306 |
307 | // return null when this is just /config
308 | return parts.size() < 2 ? null : parts.get(1);
309 | }
310 |
311 | @Override
312 | public ResponseDefinition transform(final Request request, final ResponseDefinition responseDefinition,
313 | final FileSource files, final Parameters parameters) {
314 | Config config = new Config(SchemaRegistryMock.this.getCompatibility(getSubject(request)));
315 | return ResponseDefinitionBuilder.jsonResponse(config);
316 | }
317 |
318 | @Override
319 | public String getName() {
320 | return GetConfigHandler.class.getSimpleName();
321 | }
322 | }
323 |
324 | }
325 |
-------------------------------------------------------------------------------- /src/test/java/cricket/jmoore/kafka/connect/transforms/SchemaTest.java: --------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.kafka.connect.transforms;
3 |
4 | import static org.junit.jupiter.api.Assertions.*;
5 |
6 | import org.apache.kafka.connect.data.Schema;
7 | import org.apache.kafka.connect.data.SchemaBuilder;
8 | import org.junit.jupiter.api.Test;
9 |
10 | public class SchemaTest {
11 |
12 | @Test
13 | public void testBytesSchema() {
14 | assertTrue(ConnectSchemaUtil.isBytesSchema(Schema.BYTES_SCHEMA));
15 | assertTrue(ConnectSchemaUtil.isBytesSchema(Schema.OPTIONAL_BYTES_SCHEMA));
16 | }
17 |
18 | @Test
19 | public void testNullIsNotBytes() {
20 | assertFalse(ConnectSchemaUtil.isBytesSchema(null));
21 | }
22 |
23 | @Test
24 | public void testNonByteTypeSchemas() {
25 | Schema[] schemas = new Schema[]{
26 | // Boolean
27 | Schema.BOOLEAN_SCHEMA,
28 | Schema.OPTIONAL_BOOLEAN_SCHEMA,
29 | // Integers
30 | Schema.INT8_SCHEMA,
31 | Schema.INT16_SCHEMA,
32 | Schema.INT32_SCHEMA,
33 | Schema.INT64_SCHEMA,
34 | Schema.OPTIONAL_INT8_SCHEMA,
35 | Schema.OPTIONAL_INT16_SCHEMA,
36 | Schema.OPTIONAL_INT32_SCHEMA,
37 | Schema.OPTIONAL_INT64_SCHEMA,
38 | // Floats
39 | Schema.FLOAT32_SCHEMA,
40 | Schema.FLOAT64_SCHEMA,
41 | Schema.OPTIONAL_FLOAT32_SCHEMA,
42 | Schema.OPTIONAL_FLOAT64_SCHEMA,
43 | // String
44 | Schema.STRING_SCHEMA,
45 | Schema.OPTIONAL_STRING_SCHEMA,
46 | // Struct with a field of bytes
47 | SchemaBuilder.struct().name("record").
48 | field("foo", Schema.BYTES_SCHEMA)
49 | .build(),
50 | SchemaBuilder.struct().name("record").
51 | field("foo", Schema.OPTIONAL_BYTES_SCHEMA) 52 | .build(), 53 | // map 54 | SchemaBuilder.map(Schema.BYTES_SCHEMA, Schema.OPTIONAL_BYTES_SCHEMA).build(), 55 | // array 56 | SchemaBuilder.array(Schema.OPTIONAL_BYTES_SCHEMA).build() 57 | }; 58 | 59 | for (Schema s : schemas) { 60 | assertFalse(ConnectSchemaUtil.isBytesSchema(s)); 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/test/java/cricket/jmoore/kafka/connect/transforms/TransformTest.java: -------------------------------------------------------------------------------- 1 | /* Licensed under Apache-2.0 */ 2 | package cricket.jmoore.kafka.connect.transforms; 3 | 4 | import static cricket.jmoore.kafka.connect.transforms.SchemaRegistryTransfer.ConfigName; 5 | import static org.apache.avro.Schema.Type.BOOLEAN; 6 | import static org.apache.avro.Schema.Type.INT; 7 | import static org.apache.avro.Schema.Type.STRING; 8 | import static org.junit.jupiter.api.Assertions.*; 9 | 10 | import java.io.ByteArrayOutputStream; 11 | import java.io.IOException; 12 | import java.nio.ByteBuffer; 13 | import java.util.Arrays; 14 | import java.util.HashMap; 15 | import java.util.List; 16 | import java.util.Map; 17 | import java.util.UUID; 18 | 19 | import org.apache.avro.SchemaBuilder; 20 | import org.apache.avro.generic.GenericData; 21 | import org.apache.avro.generic.GenericDatumWriter; 22 | import org.apache.avro.generic.GenericRecordBuilder; 23 | import org.apache.avro.io.BinaryEncoder; 24 | import org.apache.avro.io.DatumWriter; 25 | import org.apache.avro.io.EncoderFactory; 26 | import org.apache.kafka.common.errors.SerializationException; 27 | import org.apache.kafka.connect.connector.ConnectRecord; 28 | import org.apache.kafka.connect.data.Schema; 29 | import org.apache.kafka.connect.errors.ConnectException; 30 | import org.apache.kafka.connect.source.SourceRecord; 31 | import org.junit.jupiter.api.BeforeEach; 32 | import org.junit.jupiter.api.Disabled; 33 | import org.junit.jupiter.api.Tag; 34 | import org.junit.jupiter.api.Test; 35 | import org.junit.jupiter.api.extension.RegisterExtension; 36 | import org.slf4j.Logger; 37 | import org.slf4j.LoggerFactory; 38 | 39 | import io.confluent.kafka.schemaregistry.client.SchemaMetadata; 40 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; 41 | import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; 42 | import io.confluent.kafka.serializers.NonRecordContainer; 43 | 44 | @SuppressWarnings("unchecked") 45 | public class TransformTest { 46 | 47 | private enum ExplicitAuthType { 48 | USER_INFO, 49 | URL, 50 | NULL; 51 | } 52 | 53 | private static final Logger log = LoggerFactory.getLogger(TransformTest.class); 54 | 55 | public static final String HELLO_WORLD_VALUE = "Hello, world!"; 56 | 57 | public static final String TOPIC = TransformTest.class.getSimpleName(); 58 | 59 | private static final byte MAGIC_BYTE = (byte) 0x0; 60 | public static final int ID_SIZE = Integer.SIZE / Byte.SIZE; 61 | private static final int AVRO_CONTENT_OFFSET = 1 + ID_SIZE; 62 | public static final org.apache.avro.Schema INT_SCHEMA = org.apache.avro.Schema.create(INT); 63 | public static final org.apache.avro.Schema STRING_SCHEMA = org.apache.avro.Schema.create(STRING); 64 | public static final org.apache.avro.Schema BOOLEAN_SCHEMA = org.apache.avro.Schema.create(BOOLEAN); 65 | public static final org.apache.avro.Schema NAME_SCHEMA = SchemaBuilder.record("FullName") 66 | 
.namespace("cricket.jmoore.kafka.connect.transforms").fields()
67 | .requiredString("first")
68 | .requiredString("last")
69 | .endRecord();
70 | public static final org.apache.avro.Schema NAME_SCHEMA_ALIASED = SchemaBuilder.record("FullName")
71 | .namespace("cricket.jmoore.kafka.connect.transforms").fields()
72 | .requiredString("first")
73 | .name("surname").aliases("last").type().stringType().noDefault()
74 | .endRecord();
75 |
76 | @RegisterExtension
77 | final SchemaRegistryMock sourceSchemaRegistry =
78 | new SchemaRegistryMock(SchemaRegistryMock.Role.SOURCE);
79 |
80 | @RegisterExtension
81 | final SchemaRegistryMock destSchemaRegistry =
82 | new SchemaRegistryMock(SchemaRegistryMock.Role.DESTINATION);
83 |
84 | private SchemaRegistryTransfer smt;
85 | private Map<String, Object> smtConfiguration;
86 |
87 | private ConnectRecord createRecord(Schema keySchema, Object key, Schema valueSchema, Object value) {
88 | // partition and offset aren't needed
89 | return new SourceRecord(null, null, TOPIC, keySchema, key, valueSchema, value);
90 | }
91 |
92 | private ConnectRecord createRecord(byte[] key, byte[] value) {
93 | return createRecord(Schema.OPTIONAL_BYTES_SCHEMA, key, Schema.OPTIONAL_BYTES_SCHEMA, value);
94 | }
95 |
96 | private Map<String, Object> getRequiredTransformConfigs() {
97 | Map<String, Object> configs = new HashMap<>();
98 | configs.put(ConfigName.SRC_SCHEMA_REGISTRY_URL, sourceSchemaRegistry.getUrl());
99 | configs.put(ConfigName.DEST_SCHEMA_REGISTRY_URL, destSchemaRegistry.getUrl());
100 | return configs;
101 | }
102 |
103 | private void configure(boolean copyKeys) {
104 | smtConfiguration.put(ConfigName.TRANSFER_KEYS, copyKeys);
105 | smt.configure(smtConfiguration);
106 | }
107 |
108 | private void configure(boolean copyKeys, boolean copyHeaders) {
109 | smtConfiguration.put(ConfigName.TRANSFER_KEYS, copyKeys);
110 | smtConfiguration.put(ConfigName.INCLUDE_HEADERS, copyHeaders);
111 | smt.configure(smtConfiguration);
112 | }
113 |
114 | private void configure(final String sourceUserInfo, final String destUserInfo, ExplicitAuthType credentialSource) {
115 | if (credentialSource == ExplicitAuthType.USER_INFO) {
116 | if (sourceUserInfo != null) {
117 | smtConfiguration.put(ConfigName.SRC_BASIC_AUTH_CREDENTIALS_SOURCE, Constants.USER_INFO_SOURCE);
118 | smtConfiguration.put(ConfigName.SRC_USER_INFO, sourceUserInfo);
119 | }
120 |
121 | if (destUserInfo != null) {
122 | smtConfiguration.put(ConfigName.DEST_BASIC_AUTH_CREDENTIALS_SOURCE, Constants.USER_INFO_SOURCE);
123 | smtConfiguration.put(ConfigName.DEST_USER_INFO, destUserInfo);
124 | }
125 | } else {
126 | if (sourceUserInfo != null) {
127 | String url = sourceSchemaRegistry.getUrl();
128 | url = url.replace("://", "://" + sourceUserInfo + "@");
129 | smtConfiguration.put(ConfigName.SRC_SCHEMA_REGISTRY_URL, url);
130 |
131 | if (credentialSource == ExplicitAuthType.URL) {
132 | smtConfiguration.put(ConfigName.SRC_BASIC_AUTH_CREDENTIALS_SOURCE, Constants.URL_SOURCE);
133 | } else if (credentialSource == ExplicitAuthType.NULL) {
134 | // For an explicit null case, set both the URL and UserInfo to confirm that neither is found
135 | smtConfiguration.put(ConfigName.SRC_BASIC_AUTH_CREDENTIALS_SOURCE, null);
136 | smtConfiguration.put(ConfigName.SRC_USER_INFO, sourceUserInfo);
137 | } else {
138 | // For null ExplicitAuthType, insert no key and rely on implicit default.
139 | }
140 | }
141 |
142 | if (destUserInfo != null) {
143 | String url = destSchemaRegistry.getUrl();
144 | url = url.replace("://", "://" + destUserInfo + "@");
145 | smtConfiguration.put(ConfigName.DEST_SCHEMA_REGISTRY_URL, url);
146 |
147 | if (credentialSource == ExplicitAuthType.URL) {
148 | smtConfiguration.put(ConfigName.DEST_BASIC_AUTH_CREDENTIALS_SOURCE, Constants.URL_SOURCE);
149 | } else if (credentialSource == ExplicitAuthType.NULL) {
150 | // For an explicit null case, set both the URL and UserInfo to confirm that neither is found
151 | smtConfiguration.put(ConfigName.DEST_BASIC_AUTH_CREDENTIALS_SOURCE, null);
152 | smtConfiguration.put(ConfigName.DEST_USER_INFO, destUserInfo);
153 | } else {
154 | // For null ExplicitAuthType, insert no key and rely on implicit default.
155 | }
156 | }
157 | }
158 |
159 | smt.configure(smtConfiguration);
160 | }
161 |
162 | private ByteArrayOutputStream encodeAvroObject(org.apache.avro.Schema schema, int sourceId, Object datum) throws IOException {
163 | ByteArrayOutputStream out = new ByteArrayOutputStream();
164 |
165 | out.write(MAGIC_BYTE);
166 | out.write(ByteBuffer.allocate(ID_SIZE).putInt(sourceId).array());
167 |
168 | EncoderFactory encoderFactory = EncoderFactory.get();
169 | BinaryEncoder encoder = encoderFactory.directBinaryEncoder(out, null);
170 | Object
171 | value =
172 | datum instanceof NonRecordContainer ? ((NonRecordContainer) datum).getValue()
173 | : datum;
174 | DatumWriter<Object> writer = new GenericDatumWriter<>(schema);
175 | writer.write(value, encoder);
176 | encoder.flush();
177 |
178 | return out;
179 | }
180 |
181 | // Used to run a message through the SMT when testing authentication modes, which only need to
182 | // know if there was a communication error, but rely on other tests to verify schema transfers
183 | // are making the correct API calls.
184 | private void passSimpleMessage() throws IOException { 185 | // Create key/value schemas for source registry 186 | log.info("Registering key/value string schemas in source registry"); 187 | final int sourceKeyId = sourceSchemaRegistry.registerSchema(TOPIC, true, STRING_SCHEMA); 188 | final int sourceValId = sourceSchemaRegistry.registerSchema(TOPIC, false, STRING_SCHEMA); 189 | 190 | final ByteArrayOutputStream keyOut = 191 | encodeAvroObject(STRING_SCHEMA, sourceKeyId, HELLO_WORLD_VALUE); 192 | final ByteArrayOutputStream valOut = 193 | encodeAvroObject(STRING_SCHEMA, sourceValId, HELLO_WORLD_VALUE); 194 | final ConnectRecord record = 195 | createRecord(keyOut.toByteArray(), valOut.toByteArray()); 196 | 197 | smt.apply(record); 198 | } 199 | 200 | @BeforeEach 201 | public void setup() { 202 | smt = new SchemaRegistryTransfer(); 203 | smtConfiguration = getRequiredTransformConfigs(); 204 | } 205 | 206 | @Test 207 | public void applyKeySchemaNotBytes() { 208 | configure(true); 209 | 210 | ConnectRecord record = createRecord(null, null, null, null); 211 | 212 | // The key schema is not a byte[] 213 | assertThrows(ConnectException.class, () -> smt.apply(record)); 214 | } 215 | 216 | @Test 217 | public void applyValueSchemaNotBytes() { 218 | configure(false); 219 | 220 | ConnectRecord record = createRecord(null, null, null, null); 221 | 222 | // The value schema is not a byte[] 223 | assertThrows(ConnectException.class, () -> smt.apply(record)); 224 | } 225 | 226 | @Test 227 | public void applySchemalessKeyBytesTooShort() { 228 | configure(true); 229 | 230 | // allocate enough space for the magic-byte 231 | byte[] b = ByteBuffer.allocate(1).array(); 232 | ConnectRecord record = createRecord(null, b, null, null); 233 | 234 | // The key payload is not long enough for schema registry wire-format 235 | assertThrows(SerializationException.class, () -> smt.apply(record)); 236 | } 237 | 238 | @Test 239 | public void applySchemalessValueBytesTooShort() { 240 | configure(false); 241 | 242 | // allocate enough space for the magic-byte 243 | byte[] b = ByteBuffer.allocate(1).array(); 244 | ConnectRecord record = createRecord(null, null, null, b); 245 | 246 | // The value payload is not long enough for schema registry wire-format 247 | assertThrows(SerializationException.class, () -> smt.apply(record)); 248 | } 249 | 250 | @Test 251 | public void testKeySchemaLookupFailure() { 252 | configure(true); 253 | 254 | byte[] b = ByteBuffer.allocate(6).array(); 255 | ConnectRecord record = createRecord(null, b, null, null); 256 | 257 | // tries to lookup schema id 0, but that isn't a valid id 258 | assertThrows(ConnectException.class, () -> smt.apply(record)); 259 | } 260 | 261 | @Test 262 | public void testValueSchemaLookupFailure() { 263 | configure(false); 264 | 265 | byte[] b = ByteBuffer.allocate(6).array(); 266 | ConnectRecord record = createRecord(null, null, null, b); 267 | 268 | // tries to lookup schema id 0, but that isn't a valid id 269 | assertThrows(ConnectException.class, () -> smt.apply(record)); 270 | } 271 | 272 | @Test 273 | @Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG) 274 | @Tag(Constants.USE_BASIC_AUTH_DEST_TAG) 275 | public void testBothBasicHttpAuthUserInfo() throws IOException { 276 | configure( 277 | Constants.HTTP_AUTH_SOURCE_CREDENTIALS_FIXTURE, 278 | Constants.HTTP_AUTH_DEST_CREDENTIALS_FIXTURE, 279 | ExplicitAuthType.USER_INFO); 280 | 281 | this.passSimpleMessage(); 282 | } 283 | 284 | 285 | @Test 286 | @Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG) 287 | public void 
testSourceBasicHttpAuthUserInfo() throws IOException { 288 | configure(Constants.HTTP_AUTH_SOURCE_CREDENTIALS_FIXTURE, null, ExplicitAuthType.USER_INFO); 289 | 290 | this.passSimpleMessage(); 291 | } 292 | 293 | @Test 294 | @Tag(Constants.USE_BASIC_AUTH_DEST_TAG) 295 | public void testDestinationBasicHttpAuthUserInfo() throws IOException { 296 | configure(null, Constants.HTTP_AUTH_DEST_CREDENTIALS_FIXTURE, ExplicitAuthType.USER_INFO); 297 | 298 | this.passSimpleMessage(); 299 | } 300 | 301 | @Test 302 | @Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG) 303 | public void testSourceBasicHttpAuthUrl() throws IOException { 304 | configure(Constants.HTTP_AUTH_SOURCE_CREDENTIALS_FIXTURE, null, ExplicitAuthType.URL); 305 | 306 | this.passSimpleMessage(); 307 | } 308 | 309 | @Test 310 | @Tag(Constants.USE_BASIC_AUTH_DEST_TAG) 311 | public void testDestinationBasicHttpAuthUrl() throws IOException { 312 | configure(null, Constants.HTTP_AUTH_DEST_CREDENTIALS_FIXTURE, ExplicitAuthType.URL); 313 | 314 | this.passSimpleMessage(); 315 | } 316 | 317 | @Test 318 | @Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG) 319 | public void testSourceBasicHttpAuthNull() throws IOException { 320 | configure(Constants.HTTP_AUTH_SOURCE_CREDENTIALS_FIXTURE, null, ExplicitAuthType.NULL); 321 | 322 | assertThrows(ConnectException.class, () -> this.passSimpleMessage()); 323 | } 324 | 325 | @Test 326 | @Tag(Constants.USE_BASIC_AUTH_DEST_TAG) 327 | public void testDestinationBasicHttpAuthNull() throws IOException { 328 | configure(null, Constants.HTTP_AUTH_DEST_CREDENTIALS_FIXTURE, ExplicitAuthType.NULL); 329 | 330 | assertThrows(ConnectException.class, () -> this.passSimpleMessage()); 331 | } 332 | 333 | @Test 334 | @Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG) 335 | public void testSourceBasicHttpAuthImplicitDefault() throws IOException { 336 | configure(Constants.HTTP_AUTH_SOURCE_CREDENTIALS_FIXTURE, null, null); 337 | 338 | this.passSimpleMessage(); 339 | } 340 | 341 | @Test 342 | @Tag(Constants.USE_BASIC_AUTH_DEST_TAG) 343 | public void testDestinationBasicHttpAuthImplicitDefault() throws IOException { 344 | configure(null, Constants.HTTP_AUTH_DEST_CREDENTIALS_FIXTURE, null); 345 | 346 | this.passSimpleMessage(); 347 | } 348 | 349 | @Test 350 | @Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG) 351 | public void testSourceBasicHttpAuthWrong() throws IOException { 352 | configure(Constants.HTTP_AUTH_DEST_CREDENTIALS_FIXTURE, null, null); 353 | 354 | assertThrows(ConnectException.class, () -> this.passSimpleMessage()); 355 | } 356 | 357 | @Test 358 | @Tag(Constants.USE_BASIC_AUTH_DEST_TAG) 359 | public void testDestinationBasicHttpAuthWrong() throws IOException { 360 | configure(null, Constants.HTTP_AUTH_SOURCE_CREDENTIALS_FIXTURE, null); 361 | 362 | assertThrows(ConnectException.class, () -> this.passSimpleMessage()); 363 | } 364 | 365 | @Test 366 | @Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG) 367 | public void testSourceBasicHttpAuthOmit() throws IOException { 368 | configure(null, null, null); 369 | 370 | assertThrows(ConnectException.class, () -> this.passSimpleMessage()); 371 | } 372 | 373 | @Test 374 | @Tag(Constants.USE_BASIC_AUTH_DEST_TAG) 375 | public void testDestinationBasicHttpAuthOmit() throws IOException { 376 | configure(null, null, null); 377 | 378 | assertThrows(ConnectException.class, () -> this.passSimpleMessage()); 379 | } 380 | 381 | @Test 382 | public void testKeySchemaTransfer() { 383 | configure(true); 384 | 385 | // Create bogus schema in destination so that source and destination ids differ 386 | log.info("Registering schema in 
destination registry");
387 | destSchemaRegistry.registerSchema(UUID.randomUUID().toString(), true, INT_SCHEMA);
388 |
389 | // Create new schema for source registry
390 | org.apache.avro.Schema schema = STRING_SCHEMA;
391 | log.info("Registering schema in source registry");
392 | int sourceId = sourceSchemaRegistry.registerSchema(TOPIC, true, schema);
393 | final String subject = TOPIC + "-key";
394 | assertEquals(1, sourceId, "An empty registry starts at id=1");
395 |
396 | SchemaRegistryClient sourceClient = sourceSchemaRegistry.getSchemaRegistryClient();
397 | int numSourceVersions = 0;
398 | try {
399 | numSourceVersions = sourceClient.getAllVersions(subject).size();
400 | assertEquals(1, numSourceVersions, "the source registry subject contains the pre-registered schema");
401 | } catch (IOException | RestClientException e) {
402 | fail(e);
403 | }
404 |
405 | try {
406 | ByteArrayOutputStream out = encodeAvroObject(schema, sourceId, "hello, world");
407 |
408 | ConnectRecord record = createRecord(Schema.OPTIONAL_BYTES_SCHEMA, out.toByteArray(), null, null);
409 |
410 | // check the destination has no versions for this subject
411 | SchemaRegistryClient destClient = destSchemaRegistry.getSchemaRegistryClient();
412 | List<Integer> destVersions = destClient.getAllVersions(subject);
413 | assertTrue(destVersions.isEmpty(), "the destination registry starts empty");
414 |
415 | // The transform will fail on the byte[]-less record value.
416 | // TODO: Allow only key schemas to be copied?
417 | log.info("applying transformation");
418 | ConnectException connectException = assertThrows(ConnectException.class, () -> smt.apply(record));
419 | assertEquals("Transform failed. Record value does not have a byte[] schema.", connectException.getMessage());
420 |
421 | // In any case, we can still check the key schema was copied, and the destination now has some version
422 | destVersions = destClient.getAllVersions(subject);
423 | assertEquals(numSourceVersions, destVersions.size(),
424 | "source and destination registries have the same amount of schemas for the same subject");
425 |
426 | // Verify that the ids for the source and destination are different
427 | SchemaMetadata metadata = destClient.getSchemaMetadata(subject, destVersions.get(0));
428 | int destinationId = metadata.getId();
429 | log.debug("source_id={} ; dest_id={}", sourceId, destinationId);
430 | assertTrue(sourceId < destinationId,
431 | "destination id should be different and higher since that registry already had schemas");
432 |
433 | // Verify the schema is the same
434 | org.apache.avro.Schema sourceSchema = sourceClient.getById(sourceId);
435 | org.apache.avro.Schema destSchema = new org.apache.avro.Schema.Parser().parse(metadata.getSchema());
436 | assertEquals(schema, sourceSchema, "source server returned same schema");
437 | assertEquals(schema, destSchema, "destination server returned same schema");
438 | assertEquals(sourceSchema, destSchema, "both servers' schemas match");
439 | } catch (IOException | RestClientException e) {
440 | fail(e);
441 | }
442 | }
443 |
444 | @Test
445 | public void testValueSchemaTransfer() {
446 | configure(true);
447 |
448 | // Create bogus schema in destination so that source and destination ids differ
449 | log.info("Registering schema in destination registry");
450 | destSchemaRegistry.registerSchema(UUID.randomUUID().toString(), false, INT_SCHEMA);
451 |
452 | // Create new schema for source registry
453 | org.apache.avro.Schema schema = STRING_SCHEMA;
454 | log.info("Registering schema in
source registry");
455 | int sourceId = sourceSchemaRegistry.registerSchema(TOPIC, false, schema);
456 | final String subject = TOPIC + "-value";
457 | assertEquals(1, sourceId, "An empty registry starts at id=1");
458 |
459 | SchemaRegistryClient sourceClient = sourceSchemaRegistry.getSchemaRegistryClient();
460 | int numSourceVersions = 0;
461 | try {
462 | numSourceVersions = sourceClient.getAllVersions(subject).size();
463 | assertEquals(1, numSourceVersions, "the source registry subject contains the pre-registered schema");
464 | } catch (IOException | RestClientException e) {
465 | fail(e);
466 | }
467 |
468 | byte[] value = null;
469 | ConnectRecord appliedRecord = null;
470 | int destinationId = -1;
471 | try {
472 | ByteArrayOutputStream out = encodeAvroObject(schema, sourceId, "hello, world");
473 |
474 | value = out.toByteArray();
475 | ConnectRecord record = createRecord(null, value);
476 |
477 | // check the destination has no versions for this subject
478 | SchemaRegistryClient destClient = destSchemaRegistry.getSchemaRegistryClient();
479 | List<Integer> destVersions = destClient.getAllVersions(subject);
480 | assertTrue(destVersions.isEmpty(), "the destination registry starts empty");
481 |
482 | // The transform will pass for key and value with byte schemas
483 | log.info("applying transformation");
484 | appliedRecord = assertDoesNotThrow(() -> smt.apply(record));
485 |
486 | assertEquals(record.keySchema(), appliedRecord.keySchema(), "key schema unchanged");
487 | assertEquals(record.key(), appliedRecord.key(), "null key not modified");
488 | assertEquals(record.valueSchema(), appliedRecord.valueSchema(), "value schema unchanged");
489 |
490 | // check the value schema was copied, and the destination now has some version
491 | destVersions = destClient.getAllVersions(subject);
492 | assertEquals(numSourceVersions, destVersions.size(),
493 | "source and destination registries have the same amount of schemas for the same subject");
494 |
495 | // Verify that the ids for the source and destination are different
496 | SchemaMetadata metadata = destClient.getSchemaMetadata(subject, destVersions.get(0));
497 | destinationId = metadata.getId();
498 | log.debug("source_id={} ; dest_id={}", sourceId, destinationId);
499 | assertTrue(sourceId < destinationId,
500 | "destination id should be different and higher since that registry already had schemas");
501 |
502 | // Verify the schema is the same
503 | org.apache.avro.Schema sourceSchema = sourceClient.getById(sourceId);
504 | org.apache.avro.Schema destSchema = new org.apache.avro.Schema.Parser().parse(metadata.getSchema());
505 | assertEquals(schema, sourceSchema, "source server returned same schema");
506 | assertEquals(schema, destSchema, "destination server returned same schema");
507 | assertEquals(sourceSchema, destSchema, "both servers' schemas match");
508 | } catch (IOException | RestClientException e) {
509 | fail(e);
510 | }
511 |
512 | // Verify the record's byte value was transformed, and avro content is same
513 | byte[] appliedValue = (byte[]) appliedRecord.value();
514 | ByteBuffer appliedValueBuffer = ByteBuffer.wrap(appliedValue);
515 | assertEquals(value.length, appliedValue.length, "byte[] values sizes unchanged");
516 | assertEquals(MAGIC_BYTE, appliedValueBuffer.get(), "record value starts with magic byte");
517 | int transformedRecordSchemaId = appliedValueBuffer.getInt();
518 | assertNotEquals(sourceId, transformedRecordSchemaId, "transformed record's schema id changed");
519 | assertEquals(destinationId,
transformedRecordSchemaId, "record value's schema id matches destination id");
520 | assertArrayEquals(Arrays.copyOfRange(value, AVRO_CONTENT_OFFSET, value.length),
521 | Arrays.copyOfRange(appliedValueBuffer.array(), AVRO_CONTENT_OFFSET, appliedValue.length),
522 | "the avro data is not modified");
523 | }
524 |
525 | @Test
526 | public void testKeyValueSchemaTransfer() {
527 | configure(true);
528 |
529 | // Create bogus schema in destination so that source and destination ids differ
530 | log.info("Registering schema in destination registry");
531 | destSchemaRegistry.registerSchema(UUID.randomUUID().toString(), false, BOOLEAN_SCHEMA);
532 |
533 | // Create new schemas for source registry
534 | org.apache.avro.Schema keySchema = INT_SCHEMA;
535 | org.apache.avro.Schema valueSchema = STRING_SCHEMA;
536 | log.info("Registering schemas in source registry");
537 | int sourceKeyId = sourceSchemaRegistry.registerSchema(TOPIC, true, keySchema);
538 | final String keySubject = TOPIC + "-key";
539 | assertEquals(1, sourceKeyId, "An empty registry starts at id=1");
540 | int sourceValueId = sourceSchemaRegistry.registerSchema(TOPIC, false, valueSchema);
541 | final String valueSubject = TOPIC + "-value";
542 | assertEquals(2, sourceValueId, "unique schema ids monotonically increase");
543 |
544 | SchemaRegistryClient sourceClient = sourceSchemaRegistry.getSchemaRegistryClient();
545 | int numSourceKeyVersions = 0;
546 | int numSourceValueVersions = 0;
547 | try {
548 | numSourceKeyVersions = sourceClient.getAllVersions(keySubject).size();
549 | assertEquals(1, numSourceKeyVersions, "the source registry subject contains the pre-registered key schema");
550 | numSourceValueVersions = sourceClient.getAllVersions(valueSubject).size();
551 | assertEquals(1, numSourceValueVersions, "the source registry subject contains the pre-registered value schema");
552 | } catch (IOException | RestClientException e) {
553 | fail(e);
554 | }
555 |
556 | byte[] key = null;
557 | byte[] value = null;
558 | ConnectRecord appliedRecord = null;
559 | int destinationKeyId = -1;
560 | int destinationValueId = -1;
561 | try {
562 | ByteArrayOutputStream keyStream = encodeAvroObject(keySchema, sourceKeyId, AVRO_CONTENT_OFFSET);
563 | ByteArrayOutputStream valueStream = encodeAvroObject(valueSchema, sourceValueId, "hello, world");
564 |
565 | key = keyStream.toByteArray();
566 | value = valueStream.toByteArray();
567 | ConnectRecord record = createRecord(key, value);
568 |
569 | // check the destination has no versions for this subject
570 | SchemaRegistryClient destClient = destSchemaRegistry.getSchemaRegistryClient();
571 | List<Integer> destKeyVersions = destClient.getAllVersions(keySubject);
572 | assertTrue(destKeyVersions.isEmpty(), "the destination registry starts empty");
573 | List<Integer> destValueVersions = destClient.getAllVersions(valueSubject);
574 | assertTrue(destValueVersions.isEmpty(), "the destination registry starts empty");
575 |
576 | // The transform will pass for key and value with byte schemas
577 | log.info("applying transformation");
578 | appliedRecord = assertDoesNotThrow(() -> smt.apply(record));
579 |
580 | assertEquals(record.keySchema(), appliedRecord.keySchema(), "key schema unchanged");
581 | assertEquals(record.valueSchema(), appliedRecord.valueSchema(), "value schema unchanged");
582 |
583 | // check the value schema was copied, and the destination now has some version
584 | destKeyVersions = destClient.getAllVersions(keySubject);
585 | assertEquals(numSourceKeyVersions, destKeyVersions.size(),
586 |
"source and destination registries have the same amount of schemas for the key subject"); 587 | destValueVersions = destClient.getAllVersions(valueSubject); 588 | assertEquals(numSourceValueVersions, destValueVersions.size(), 589 | "source and destination registries have the same amount of schemas for the value subject"); 590 | 591 | // Verify that the ids for the source and destination are different 592 | SchemaMetadata keyMetadata = destClient.getSchemaMetadata(keySubject, destKeyVersions.get(0)); 593 | destinationKeyId = keyMetadata.getId(); 594 | log.debug("source_keyId={} ; dest_keyId={}", sourceKeyId, destinationKeyId); 595 | assertTrue(sourceKeyId < destinationKeyId, 596 | "destination id should be different and higher since that registry already had schemas"); 597 | SchemaMetadata valueMetadata = destClient.getSchemaMetadata(valueSubject, destValueVersions.get(0)); 598 | destinationValueId = valueMetadata.getId(); 599 | log.debug("source_valueId={} ; dest_valueId={}", sourceValueId, destinationValueId); 600 | assertTrue(sourceValueId < destinationValueId, 601 | "destination id should be different and higher since that registry already had schemas"); 602 | 603 | // Verify the schemas are the same 604 | org.apache.avro.Schema sourceKeySchema = sourceClient.getById(sourceKeyId); 605 | org.apache.avro.Schema destKeySchema = new org.apache.avro.Schema.Parser().parse(keyMetadata.getSchema()); 606 | assertEquals(destKeySchema, sourceKeySchema, "source server returned same key schema"); 607 | assertEquals(keySchema, destKeySchema, "destination server returned same key schema"); 608 | assertEquals(sourceKeySchema, destKeySchema, "both servers' key schemas match"); 609 | org.apache.avro.Schema sourceValueSchema = sourceClient.getById(sourceValueId); 610 | org.apache.avro.Schema destValueSchema = new org.apache.avro.Schema.Parser().parse(valueMetadata.getSchema()); 611 | assertEquals(destValueSchema, sourceValueSchema, "source server returned same value schema"); 612 | assertEquals(valueSchema, destValueSchema, "destination server returned same value schema"); 613 | assertEquals(sourceValueSchema, destValueSchema, "both servers' value schemas match"); 614 | 615 | } catch (IOException | RestClientException e) { 616 | fail(e); 617 | } 618 | 619 | // Verify the record's byte key was transformed, and avro content is same 620 | byte[] appliedKey = (byte[]) appliedRecord.key(); 621 | ByteBuffer appliedKeyBuffer = ByteBuffer.wrap(appliedKey); 622 | assertEquals(key.length, appliedKey.length, "key byte[] sizes unchanged"); 623 | assertEquals(MAGIC_BYTE, appliedKeyBuffer.get(), "record key starts with magic byte"); 624 | int transformedRecordKeySchemaId = appliedKeyBuffer.getInt(); 625 | assertNotEquals(sourceKeyId, transformedRecordKeySchemaId, "transformed record's key schema id changed"); 626 | assertEquals(destinationKeyId, transformedRecordKeySchemaId, "record key's schema id matches destination id"); 627 | assertArrayEquals(Arrays.copyOfRange(key, AVRO_CONTENT_OFFSET, key.length), 628 | Arrays.copyOfRange(appliedKeyBuffer.array(), AVRO_CONTENT_OFFSET, appliedKey.length), 629 | "the key's avro data is not modified"); 630 | 631 | // Verify the record's byte value was transformed, and avro content is same 632 | byte[] appliedValue = (byte[]) appliedRecord.value(); 633 | ByteBuffer appliedValueBuffer = ByteBuffer.wrap(appliedValue); 634 | assertEquals(value.length, appliedValue.length, "value byte[] sizes unchanged"); 635 | assertEquals(MAGIC_BYTE, appliedValueBuffer.get(), "record value starts with 
magic byte"); 636 | int transformedRecordValueSchemaId = appliedValueBuffer.getInt(); 637 | assertNotEquals(sourceValueId, transformedRecordValueSchemaId, "transformed record's schema id changed"); 638 | assertEquals(destinationValueId, transformedRecordValueSchemaId, "record value's schema id matches destination id"); 639 | assertArrayEquals(Arrays.copyOfRange(value, AVRO_CONTENT_OFFSET, value.length), 640 | Arrays.copyOfRange(appliedValueBuffer.array(), AVRO_CONTENT_OFFSET, appliedValue.length), 641 | "the value's avro data is not modified"); 642 | } 643 | 644 | @Test 645 | public void testTombstoneRecord() { 646 | configure(false); 647 | 648 | ConnectRecord record = createRecord(null, null, Schema.OPTIONAL_BYTES_SCHEMA, null); 649 | 650 | log.info("applying transformation"); 651 | ConnectRecord appliedRecord = assertDoesNotThrow(() -> smt.apply(record)); 652 | 653 | assertEquals(record.valueSchema(), appliedRecord.valueSchema(), "value schema unchanged"); 654 | assertNull(appliedRecord.value()); 655 | } 656 | 657 | @Test 658 | public void testEvolvingValueSchemaTransfer() { 659 | configure(true); 660 | 661 | // Create bogus schema in destination so that source and destination ids differ 662 | log.info("Registering schema in destination registry"); 663 | destSchemaRegistry.registerSchema(UUID.randomUUID().toString(), false, INT_SCHEMA); 664 | 665 | log.info("Registering schema in source registry"); 666 | int sourceId = sourceSchemaRegistry.registerSchema(TOPIC, false, NAME_SCHEMA); 667 | int nextSourceId = sourceSchemaRegistry.registerSchema(TOPIC, false, NAME_SCHEMA_ALIASED); 668 | final String subject = TOPIC + "-value"; 669 | assertEquals(1, sourceId, "An empty registry starts at id=1"); 670 | assertEquals(2, nextSourceId, "The next schema is id=2"); 671 | 672 | SchemaRegistryClient sourceClient = sourceSchemaRegistry.getSchemaRegistryClient(); 673 | int numSourceVersions = 0; 674 | try { 675 | numSourceVersions = sourceClient.getAllVersions(subject).size(); 676 | assertEquals(2, numSourceVersions, "the source registry subject contains the pre-registered schema"); 677 | } catch (IOException | RestClientException e) { 678 | fail(e); 679 | } 680 | 681 | try { 682 | GenericData.Record record1 = new GenericRecordBuilder(NAME_SCHEMA) 683 | .set("first", "fname") 684 | .set("last", "lname") 685 | .build(); 686 | ByteArrayOutputStream out = encodeAvroObject(NAME_SCHEMA, sourceId, record1); 687 | 688 | byte[] value = out.toByteArray(); 689 | ConnectRecord record = createRecord(null, value); 690 | 691 | GenericData.Record record2 = new GenericRecordBuilder(NAME_SCHEMA_ALIASED) 692 | .set("first", "fname") 693 | .set("surname", "lname") 694 | .build(); 695 | out = encodeAvroObject(NAME_SCHEMA_ALIASED, nextSourceId, record2); 696 | 697 | byte[] nextValue = out.toByteArray(); 698 | ConnectRecord nextRecord = createRecord(null, nextValue); 699 | 700 | // check the destination has no versions for this subject 701 | SchemaRegistryClient destClient = destSchemaRegistry.getSchemaRegistryClient(); 702 | List destVersions = destClient.getAllVersions(subject); 703 | assertTrue(destVersions.isEmpty(), "the destination registry starts empty"); 704 | 705 | // The transform will pass for key and value with byte schemas 706 | log.info("applying transformation"); 707 | assertDoesNotThrow(() -> smt.apply(record)); 708 | 709 | // check the value schema was copied, and the destination now has some version 710 | destVersions = destClient.getAllVersions(subject); 711 | assertEquals(1, destVersions.size(), 712 | 
"the destination registry has been updated with first schema"); 713 | 714 | log.info("applying transformation"); 715 | assertDoesNotThrow(() -> smt.apply(nextRecord)); 716 | 717 | destVersions = destClient.getAllVersions(subject); 718 | assertEquals(numSourceVersions, destVersions.size(), 719 | "the destination registry has been updated with the second schema"); 720 | 721 | } catch (IOException | RestClientException e) { 722 | fail(e); 723 | } 724 | } 725 | 726 | @Test 727 | @Disabled("TODO: Find scenario where a backwards compatible change cannot be undone") 728 | public void testIncompatibleEvolvingValueSchemaTransfer() { 729 | configure(true); 730 | 731 | // Create bogus schema in destination so that source and destination ids differ 732 | log.info("Registering schema in destination registry"); 733 | destSchemaRegistry.registerSchema(UUID.randomUUID().toString(), false, INT_SCHEMA); 734 | 735 | // Create new schema for source registry 736 | log.info("Registering schema in source registry"); 737 | 738 | // TODO: Figure out what these should be, where if order is flipped, destination will not accept 739 | org.apache.avro.Schema schema = null; 740 | org.apache.avro.Schema nextSchema = null; 741 | 742 | int sourceId = sourceSchemaRegistry.registerSchema(TOPIC, false, schema); 743 | int nextSourceId = sourceSchemaRegistry.registerSchema(TOPIC, false, nextSchema); 744 | final String subject = TOPIC + "-value"; 745 | assertEquals(1, sourceId, "An empty registry starts at id=1"); 746 | assertEquals(2, nextSourceId, "The next schema is id=2"); 747 | 748 | SchemaRegistryClient sourceClient = sourceSchemaRegistry.getSchemaRegistryClient(); 749 | int numSourceVersions = 0; 750 | try { 751 | numSourceVersions = sourceClient.getAllVersions(subject).size(); 752 | assertEquals(2, numSourceVersions, "the source registry subject contains the pre-registered schema"); 753 | } catch (IOException | RestClientException e) { 754 | fail(e); 755 | } 756 | 757 | try { 758 | // TODO: Depending on schemas above, then build Avro records for them 759 | // ensure second id is encoded first 760 | ByteArrayOutputStream out = encodeAvroObject(nextSchema, nextSourceId, null); 761 | 762 | byte[] value = out.toByteArray(); 763 | ConnectRecord record = createRecord(null, value); 764 | 765 | out = encodeAvroObject(schema, sourceId, null); 766 | 767 | byte[] nextValue = out.toByteArray(); 768 | ConnectRecord nextRecord = createRecord(null, nextValue); 769 | 770 | // check the destination has no versions for this subject 771 | SchemaRegistryClient destClient = destSchemaRegistry.getSchemaRegistryClient(); 772 | List destVersions = destClient.getAllVersions(subject); 773 | assertTrue(destVersions.isEmpty(), "the destination registry starts empty"); 774 | 775 | // The transform will pass for key and value with byte schemas 776 | log.info("applying transformation"); 777 | assertDoesNotThrow(() -> smt.apply(record)); 778 | 779 | // check the value schema was copied, and the destination now has some version 780 | destVersions = destClient.getAllVersions(subject); 781 | assertEquals(1, destVersions.size(), 782 | "the destination registry has been updated with first schema"); 783 | 784 | log.info("applying transformation"); 785 | assertThrows(ConnectException.class, () -> smt.apply(nextRecord)); 786 | 787 | } catch (IOException | RestClientException e) { 788 | fail(e); 789 | } 790 | } 791 | } 792 | -------------------------------------------------------------------------------- /src/test/resources/logback-test.xml: 
-------------------------------------------------------------------------------- /src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | <configuration> 2 | <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> 3 | <encoder> 4 | <pattern>[%d{yyyy-MM-dd HH:mm:ss:SSS}]\(%thread\) %-5level - %-26logger{26} - %msg%n</pattern> 5 | <charset>UTF-8</charset> 6 | </encoder> 7 | </appender> 8 | 9 | <root level="info"> 10 | <appender-ref ref="STDOUT" /> 11 | </root> 12 | </configuration> --------------------------------------------------------------------------------