├── .circleci
└── config.yml
├── .gitignore
├── .mvn
└── wrapper
│ ├── MavenWrapperDownloader.java
│ └── maven-wrapper.properties
├── LICENSE
├── README.md
├── mvnw
├── mvnw.cmd
├── pom.xml
├── settings.xml
└── src
├── main
├── java
│ └── cricket
│ │ └── jmoore
│ │ ├── kafka
│ │ └── connect
│ │ │ └── transforms
│ │ │ ├── ConnectSchemaUtil.java
│ │ │ └── SchemaRegistryTransfer.java
│ │ └── security
│ │ └── basicauth
│ │ ├── DestSaslBasicAuthCredentialProvider.java
│ │ ├── DestUrlBasicAuthCredentialProvider.java
│ │ ├── DestUserInfoCredentialProvider.java
│ │ ├── SrcSaslBasicAuthCredentialProvider.java
│ │ ├── SrcUrlBasicAuthCredentialProvider.java
│ │ └── SrcUserInfoCredentialProvider.java
└── resources
│ └── META-INF
│ └── services
│ └── io.confluent.kafka.schemaregistry.client.security.basicauth.BasicAuthCredentialProvider
└── test
├── java
└── cricket
│ └── jmoore
│ └── kafka
│ └── connect
│ └── transforms
│ ├── Constants.java
│ ├── SchemaRegistryMock.java
│ ├── SchemaTest.java
│ └── TransformTest.java
└── resources
└── logback-test.xml
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | # Java Maven CircleCI 2.0 configuration file
2 | #
3 | # Check https://circleci.com/docs/2.0/language-java/ for more details
4 | #
5 | version: 2
6 | jobs:
7 | build:
8 | docker:
9 | # specify the version you desire here
10 | - image: circleci/openjdk:8-jdk
11 |
12 | working_directory: ~/repo
13 |
14 | environment:
15 | # Customize the JVM maximum heap limit
16 | MAVEN_OPTS: -Xmx2048m
17 |
18 | steps:
19 | - checkout
20 |
21 | # Download and cache dependencies
22 | - restore_cache:
23 | keys:
24 | - v1-dependencies-{{ checksum "pom.xml" }}
25 | # fallback to using the latest cache if no exact match is found
26 | - v1-dependencies-
27 |
28 | - run: mvn dependency:go-offline
29 |
30 | - save_cache:
31 | paths:
32 | - ~/.m2
33 | key: v1-dependencies-{{ checksum "pom.xml" }}
34 |
35 | # run tests!
36 | - run: mvn verify
37 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | *.iml
3 |
4 | *.class
5 |
6 | *.log
7 |
8 | *.jar
9 | *.zip
10 | *.tar.gz
11 | *.rar
12 |
13 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
14 | hs_err_pid*
15 |
16 | ### macOS ###
17 | .DS_Store
18 |
19 | # Icon must end with two \r
20 | Icon
21 |
22 | # Thumbnails
23 | ._*
24 |
25 | ### Maven ###
26 | target/
27 | dependency-reduced-pom.xml
28 | .mvn/timing.properties
29 | .mvn/wrapper/maven-wrapper.jar
30 |
--------------------------------------------------------------------------------
/.mvn/wrapper/MavenWrapperDownloader.java:
--------------------------------------------------------------------------------
1 | /*
2 | Licensed to the Apache Software Foundation (ASF) under one
3 | or more contributor license agreements. See the NOTICE file
4 | distributed with this work for additional information
5 | regarding copyright ownership. The ASF licenses this file
6 | to you under the Apache License, Version 2.0 (the
7 | "License"); you may not use this file except in compliance
8 | with the License. You may obtain a copy of the License at
9 |
10 | http://www.apache.org/licenses/LICENSE-2.0
11 |
12 | Unless required by applicable law or agreed to in writing,
13 | software distributed under the License is distributed on an
14 | "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | KIND, either express or implied. See the License for the
16 | specific language governing permissions and limitations
17 | under the License.
18 | */
19 |
20 | import java.net.*;
21 | import java.io.*;
22 | import java.nio.channels.*;
23 | import java.util.Properties;
24 |
public class MavenWrapperDownloader {

    /**
     * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
     */
    private static final String DEFAULT_DOWNLOAD_URL =
            "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar";

    /**
     * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
     * use instead of the default one.
     */
    private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
            ".mvn/wrapper/maven-wrapper.properties";

    /**
     * Path where the maven-wrapper.jar will be saved to.
     */
    private static final String MAVEN_WRAPPER_JAR_PATH =
            ".mvn/wrapper/maven-wrapper.jar";

    /**
     * Name of the property which should be used to override the default download url for the wrapper.
     */
    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";

    /**
     * Entry point. {@code args[0]} is the project base directory. Reads an optional
     * {@code wrapperUrl} override from maven-wrapper.properties, then downloads the
     * wrapper jar into .mvn/wrapper/. Exits with status 0 on success, 1 on failure.
     */
    public static void main(String[] args) {
        System.out.println("- Downloader started");
        File baseDirectory = new File(args[0]);
        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());

        // If the maven-wrapper.properties exists, read it and check if it contains a custom
        // wrapperUrl parameter.
        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
        String url = DEFAULT_DOWNLOAD_URL;
        if(mavenWrapperPropertyFile.exists()) {
            // try-with-resources closes the stream even if Properties.load throws,
            // replacing the former manual null-check/close in a finally block.
            try (FileInputStream mavenWrapperPropertyFileInputStream =
                         new FileInputStream(mavenWrapperPropertyFile)) {
                Properties mavenWrapperProperties = new Properties();
                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
            } catch (IOException e) {
                // Non-fatal: fall back to the default download URL.
                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
            }
        }
        // Fixed doubled colon in the former "- Downloading from: : " message.
        System.out.println("- Downloading from: " + url);

        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
        if(!outputFile.getParentFile().exists()) {
            if(!outputFile.getParentFile().mkdirs()) {
                System.out.println(
                        "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
            }
        }
        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
        try {
            downloadFileFromURL(url, outputFile);
            System.out.println("Done");
            System.exit(0);
        } catch (Throwable e) {
            System.out.println("- Error downloading");
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Downloads the content at {@code urlString} into {@code destination}.
     *
     * @param urlString   URL to fetch the wrapper jar from
     * @param destination file to write; overwritten if it already exists
     * @throws Exception on any network or I/O failure
     */
    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
        URL website = new URL(urlString);
        // try-with-resources ensures both channel and stream are closed even when
        // the transfer fails part-way (the original leaked them on exception).
        try (ReadableByteChannel rbc = Channels.newChannel(website.openStream());
             FileOutputStream fos = new FileOutputStream(destination)) {
            fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
        }
    }

}
111 |
--------------------------------------------------------------------------------
/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Schema Registry Transfer SMT
2 | ============================
3 |
4 | A [Kafka Connect Single Message Transformation (SMT)][smt] that reads the serialized [wire format header][wire-format] of Confluent's `KafkaAvroSerializer`, performs a lookup against a source [Confluent Schema Registry][schema-registry] for the ID in the message, and registers that schema into a destination Registry for that topic/subject under a new ID.
5 |
6 | To be used where it is not feasible to make the destination Schema Registry as a follower to the source Registry, or when migrating topics to a new cluster.
7 |
8 | > _Requires that the Kafka Connect tasks can reach both Schema Registries._
9 |
10 | This transform doesn't mirror the contents of the `_schemas` topic, so each registry can be completely isolated from the other. As a side-effect of this, the subject configurations that might be applied to the `/config` endpoint in the source registry are not copied to the destination. In other words, you might get schema registration errors if using differing compatibility levels on the registries. Just a heads-up.
11 |
12 | Example Kafka Connectors where this could be applied.
13 |
14 | - [Comcast/MirrorTool-for-Kafka-Connect](https://github.com/Comcast/MirrorTool-for-Kafka-Connect) - Code was tested with this first, and verified that the topic-renaming logic of this connector worked fine with this SMT.
15 | - [Salesforce/mirus](https://github.com/salesforce/mirus)
16 | - [Confluent Replicator](https://docs.confluent.io/current/connect/kafka-connect-replicator/index.html) - While this already can copy the schema, we observed it is only possible via the `AvroConverter`, which must first parse the entire message into a Kafka Connect `Struct` object. Thus, the class here is considered a "shallow" copier — it only inspects [the first 5 bytes][wire-format] of the keys and values for the schema ids.
17 | - [KIP-382 (MirrorMaker 2.0)](https://cwiki.apache.org/confluence/display/KAFKA/KIP-382%3A+MirrorMaker+2.0) - Still open at the time of writing.
18 |
19 |
20 | ## Installation
21 |
22 | 1. Edit the Kafka Connect worker properties file on each worker to include a new directory. For example, `/opt/kafka-connect/plugins`
23 |
24 | ```sh
25 | plugin.path=/usr/share/java,/opt/kafka-connect/plugins
26 | ```
27 |
28 | 2. Build this project
29 |
30 | ```sh
31 | ./mvnw clean package
32 | ```
33 |
34 | 3. Copy the JAR from `target` to all Kafka Connect workers under a directory set by `plugin.path`
35 |
36 | 4. (Re)start Kafka Connect processes
37 |
38 | ## Usage
39 |
40 | Standalone Kafka Connect configuration section
41 |
42 | ```properties
43 | # Requires that records are entirely byte-arrays. These can go in the worker or connector configuration.
44 | key.converter=org.apache.kafka.connect.converters.ByteArrayConverter
45 | value.converter=org.apache.kafka.connect.converters.ByteArrayConverter
46 |
47 | # Setup the SMT
48 | transforms=AvroSchemaTransfer
49 |
50 | transforms.AvroSchemaTransfer.type=cricket.jmoore.kafka.connect.transforms.SchemaRegistryTransfer
51 | transforms.AvroSchemaTransfer.src.schema.registry.url=http://schema-registry-1:8081
52 | transforms.AvroSchemaTransfer.dest.schema.registry.url=http://schema-registry-2:8081
53 | ```
54 |
55 | Distributed Kafka Connect configuration section
56 |
57 | ```json
58 | "config" : {
59 | ...
60 |
61 | "__comment": "Requires that records are entirely byte-arrays. These can go in the worker or connector configuration.",
62 | "key.converter": "org.apache.kafka.connect.converters.ByteArrayConverter",
63 | "value.converter": "org.apache.kafka.connect.converters.ByteArrayConverter",
64 |
65 | "__comment": "Setup the SMT",
66 | "transforms": "AvroSchemaTransfer",
67 |
68 | "transforms.AvroSchemaTransfer.type": "cricket.jmoore.kafka.connect.transforms.SchemaRegistryTransfer",
69 | "transforms.AvroSchemaTransfer.src.schema.registry.url": "http://schema-registry-1:8081",
70 | "transforms.AvroSchemaTransfer.dest.schema.registry.url": "http://schema-registry-2:8081"
71 | }
72 | ```
73 |
74 | ## Advanced Configuration
75 |
76 | Configuration Parameter | Default | Description
77 | ----------------------- | ------- | -----------
78 | **transfer.message.keys** | true | Indicates whether Avro schemas from message keys in source records should be copied to the destination Registry.
79 | **include.message.headers** | true | Indicates whether message headers from source records should be preserved after the transform.
80 | **schema.capacity** | 100 | Capacity of schemas that can be cached in each `CachedSchemaRegistryClient`
81 |
82 | ## Embedded Schema Registry Client Configuration
83 |
84 | Schema Registry Transfer SMT passes some properties prefixed by either `src.` or `dest.`
85 | through to its embedded schema registry clients, after stripping away `src.` or `dest.`
86 | prefix used to disambiguate which client is to receive which configuration value.
87 |
88 | Properties prefixed by `src.` are passed through to the source consumer's schema registry
89 | client. Properties prefixed by `dest.` are passed through to the target producer's schema
90 | registry client.
91 |
92 | Configuration Parameter | Default | Description
93 | ----------------------- | ------- | -----------
94 | (src\|dest).basic.auth.credentials.source | URL | Specify how to pick credentials for Basic Auth header. Supported values are `URL`, `USER_INFO` and `SASL_INHERIT`
95 | (src\|dest).basic.auth.user.info | | Specify credentials for Basic Auth in form of `{username}:{password}` when source is `USER_INFO`
96 |
97 | ## Subject Renaming
98 |
99 | Renaming of a subject can be done with the `RegexRouter` Transform **before** this one.
100 |
101 | Example Configuration
102 |
103 | ```properties
104 | transforms=TopicRename,AvroSchemaTransfer
105 |
106 | transforms.TopicRename.type=org.apache.kafka.connect.transforms.RegexRouter
107 | transforms.TopicRename.regex=(.*)
108 | transforms.TopicRename.replacement=replica.$1
109 |
110 | transforms.AvroSchemaTransfer.type=...
111 | ```
112 |
113 |
114 | [smt]: https://docs.confluent.io/current/connect/concepts.html#connect-transforms
115 | [schema-registry]: https://docs.confluent.io/current/schema-registry/docs/index.html
116 | [wire-format]: https://docs.confluent.io/current/schema-registry/docs/serializer-formatter.html#wire-format
117 |
--------------------------------------------------------------------------------
/mvnw:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # ----------------------------------------------------------------------------
3 | # Licensed to the Apache Software Foundation (ASF) under one
4 | # or more contributor license agreements. See the NOTICE file
5 | # distributed with this work for additional information
6 | # regarding copyright ownership. The ASF licenses this file
7 | # to you under the Apache License, Version 2.0 (the
8 | # "License"); you may not use this file except in compliance
9 | # with the License. You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing,
14 | # software distributed under the License is distributed on an
15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
16 | # KIND, either express or implied. See the License for the
17 | # specific language governing permissions and limitations
18 | # under the License.
19 | # ----------------------------------------------------------------------------
20 |
21 | # ----------------------------------------------------------------------------
22 | # Maven2 Start Up Batch script
23 | #
24 | # Required ENV vars:
25 | # ------------------
26 | # JAVA_HOME - location of a JDK home dir
27 | #
28 | # Optional ENV vars
29 | # -----------------
30 | # M2_HOME - location of maven2's installed home dir
31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven
32 | # e.g. to debug Maven itself, use
33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files
35 | # ----------------------------------------------------------------------------
36 |
37 | if [ -z "$MAVEN_SKIP_RC" ] ; then
38 |
39 | if [ -f /etc/mavenrc ] ; then
40 | . /etc/mavenrc
41 | fi
42 |
43 | if [ -f "$HOME/.mavenrc" ] ; then
44 | . "$HOME/.mavenrc"
45 | fi
46 |
47 | fi
48 |
49 | # OS specific support. $var _must_ be set to either true or false.
50 | cygwin=false;
51 | darwin=false;
52 | mingw=false
53 | case "`uname`" in
54 | CYGWIN*) cygwin=true ;;
55 | MINGW*) mingw=true;;
56 | Darwin*) darwin=true
57 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
58 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
59 | if [ -z "$JAVA_HOME" ]; then
60 | if [ -x "/usr/libexec/java_home" ]; then
61 | export JAVA_HOME="`/usr/libexec/java_home`"
62 | else
63 | export JAVA_HOME="/Library/Java/Home"
64 | fi
65 | fi
66 | ;;
67 | esac
68 |
69 | if [ -z "$JAVA_HOME" ] ; then
70 | if [ -r /etc/gentoo-release ] ; then
71 | JAVA_HOME=`java-config --jre-home`
72 | fi
73 | fi
74 |
75 | if [ -z "$M2_HOME" ] ; then
76 | ## resolve links - $0 may be a link to maven's home
77 | PRG="$0"
78 |
79 | # need this for relative symlinks
80 | while [ -h "$PRG" ] ; do
81 | ls=`ls -ld "$PRG"`
82 | link=`expr "$ls" : '.*-> \(.*\)$'`
83 | if expr "$link" : '/.*' > /dev/null; then
84 | PRG="$link"
85 | else
86 | PRG="`dirname "$PRG"`/$link"
87 | fi
88 | done
89 |
90 | saveddir=`pwd`
91 |
92 | M2_HOME=`dirname "$PRG"`/..
93 |
94 | # make it fully qualified
95 | M2_HOME=`cd "$M2_HOME" && pwd`
96 |
97 | cd "$saveddir"
98 | # echo Using m2 at $M2_HOME
99 | fi
100 |
101 | # For Cygwin, ensure paths are in UNIX format before anything is touched
102 | if $cygwin ; then
103 | [ -n "$M2_HOME" ] &&
104 | M2_HOME=`cygpath --unix "$M2_HOME"`
105 | [ -n "$JAVA_HOME" ] &&
106 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
107 | [ -n "$CLASSPATH" ] &&
108 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
109 | fi
110 |
111 | # For Mingw, ensure paths are in UNIX format before anything is touched
112 | if $mingw ; then
113 | [ -n "$M2_HOME" ] &&
114 | M2_HOME="`(cd "$M2_HOME"; pwd)`"
115 | [ -n "$JAVA_HOME" ] &&
116 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
117 | # TODO classpath?
118 | fi
119 |
120 | if [ -z "$JAVA_HOME" ]; then
121 | javaExecutable="`which javac`"
122 | if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
123 | # readlink(1) is not available as standard on Solaris 10.
124 | readLink=`which readlink`
125 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
126 | if $darwin ; then
127 | javaHome="`dirname \"$javaExecutable\"`"
128 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
129 | else
130 | javaExecutable="`readlink -f \"$javaExecutable\"`"
131 | fi
132 | javaHome="`dirname \"$javaExecutable\"`"
133 | javaHome=`expr "$javaHome" : '\(.*\)/bin'`
134 | JAVA_HOME="$javaHome"
135 | export JAVA_HOME
136 | fi
137 | fi
138 | fi
139 |
140 | if [ -z "$JAVACMD" ] ; then
141 | if [ -n "$JAVA_HOME" ] ; then
142 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
143 | # IBM's JDK on AIX uses strange locations for the executables
144 | JAVACMD="$JAVA_HOME/jre/sh/java"
145 | else
146 | JAVACMD="$JAVA_HOME/bin/java"
147 | fi
148 | else
149 | JAVACMD="`which java`"
150 | fi
151 | fi
152 |
153 | if [ ! -x "$JAVACMD" ] ; then
154 | echo "Error: JAVA_HOME is not defined correctly." >&2
155 | echo " We cannot execute $JAVACMD" >&2
156 | exit 1
157 | fi
158 |
159 | if [ -z "$JAVA_HOME" ] ; then
160 | echo "Warning: JAVA_HOME environment variable is not set."
161 | fi
162 |
163 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
164 |
165 | # traverses directory structure from process work directory to filesystem root
166 | # first directory with .mvn subdirectory is considered project base directory
find_maven_basedir() {

  # Fail fast when no starting path was supplied by the caller.
  if [ -z "$1" ]
  then
    echo "Path not specified to find_maven_basedir"
    return 1
  fi

  # Walk upward from $1 toward the filesystem root; the first directory
  # containing a .mvn subdirectory wins. If none is found, basedir stays
  # as the original $1.
  basedir="$1"
  wdir="$1"
  while [ "$wdir" != '/' ] ; do
    if [ -d "$wdir"/.mvn ] ; then
      basedir=$wdir
      break
    fi
    # workaround for JBEAP-8937 (on Solaris 10/Sparc)
    if [ -d "${wdir}" ]; then
      wdir=`cd "$wdir/.."; pwd`
    fi
    # end of workaround
  done
  # Result is written to stdout so callers capture it via command substitution.
  echo "${basedir}"
}
190 |
191 | # concatenates all lines of a file
concat_lines() {
  # Print the file named by $1 with runs of newlines squeezed to single
  # spaces, turning a multi-line config file into one line of arguments.
  # Silently prints nothing (status 0) when the file does not exist.
  if [ -f "$1" ]; then
    echo "$(tr -s '\n' ' ' < "$1")"
  fi
}
197 |
198 | BASE_DIR=`find_maven_basedir "$(pwd)"`
199 | if [ -z "$BASE_DIR" ]; then
200 | exit 1;
201 | fi
202 |
##########################################################################################
# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
# This allows using the maven wrapper in projects that prohibit checking in binary data.
##########################################################################################
if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
    if [ "$MVNW_VERBOSE" = true ]; then
      echo "Found .mvn/wrapper/maven-wrapper.jar"
    fi
else
    if [ "$MVNW_VERBOSE" = true ]; then
      echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
    fi
    # Default download location; a wrapperUrl key in maven-wrapper.properties overrides it.
    jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
    while IFS="=" read key value; do
      case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
      esac
    done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
    if [ "$MVNW_VERBOSE" = true ]; then
      echo "Downloading from: $jarUrl"
    fi
    wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"

    # Fetch preference order: wget, then curl, then compile-and-run the bundled
    # MavenWrapperDownloader with the local JDK.
    if command -v wget > /dev/null; then
        if [ "$MVNW_VERBOSE" = true ]; then
          echo "Found wget ... using wget"
        fi
        wget "$jarUrl" -O "$wrapperJarPath"
    elif command -v curl > /dev/null; then
        if [ "$MVNW_VERBOSE" = true ]; then
          echo "Found curl ... using curl"
        fi
        curl -o "$wrapperJarPath" "$jarUrl"
    else
        if [ "$MVNW_VERBOSE" = true ]; then
          echo "Falling back to using Java to download"
        fi
        javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
        if [ -e "$javaClass" ]; then
            if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
                if [ "$MVNW_VERBOSE" = true ]; then
                  echo " - Compiling MavenWrapperDownloader.java ..."
                fi
                # Compiling the Java class
                ("$JAVA_HOME/bin/javac" "$javaClass")
            fi
            if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
                # Running the downloader
                if [ "$MVNW_VERBOSE" = true ]; then
                  echo " - Running MavenWrapperDownloader.java ..."
                fi
                # NOTE(review): MAVEN_PROJECTBASEDIR is only exported later in this
                # script, so it is likely empty here — TODO confirm against upstream.
                ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
            fi
        fi
    fi
fi
##########################################################################################
# End of extension
##########################################################################################
261 |
# Expose the base dir to the wrapper JVM; an explicit MAVEN_BASEDIR wins over detection.
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
if [ "$MVNW_VERBOSE" = true ]; then
  echo $MAVEN_PROJECTBASEDIR
fi
# Prepend per-project JVM flags from .mvn/jvm.config (empty when the file is absent).
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"

# For Cygwin, switch paths to Windows format before running java
if $cygwin; then
  [ -n "$M2_HOME" ] &&
    M2_HOME=`cygpath --path --windows "$M2_HOME"`
  [ -n "$JAVA_HOME" ] &&
    JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
  [ -n "$CLASSPATH" ] &&
    CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
  [ -n "$MAVEN_PROJECTBASEDIR" ] &&
    MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
fi

WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain

# Replace this shell with the wrapper JVM; "$@" forwards the user's mvn arguments.
exec "$JAVACMD" \
  $MAVEN_OPTS \
  -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
  "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
  ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
287 |
--------------------------------------------------------------------------------
/mvnw.cmd:
--------------------------------------------------------------------------------
1 | @REM ----------------------------------------------------------------------------
2 | @REM Licensed to the Apache Software Foundation (ASF) under one
3 | @REM or more contributor license agreements. See the NOTICE file
4 | @REM distributed with this work for additional information
5 | @REM regarding copyright ownership. The ASF licenses this file
6 | @REM to you under the Apache License, Version 2.0 (the
7 | @REM "License"); you may not use this file except in compliance
8 | @REM with the License. You may obtain a copy of the License at
9 | @REM
10 | @REM http://www.apache.org/licenses/LICENSE-2.0
11 | @REM
12 | @REM Unless required by applicable law or agreed to in writing,
13 | @REM software distributed under the License is distributed on an
14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | @REM KIND, either express or implied. See the License for the
16 | @REM specific language governing permissions and limitations
17 | @REM under the License.
18 | @REM ----------------------------------------------------------------------------
19 |
20 | @REM ----------------------------------------------------------------------------
21 | @REM Maven2 Start Up Batch script
22 | @REM
23 | @REM Required ENV vars:
24 | @REM JAVA_HOME - location of a JDK home dir
25 | @REM
26 | @REM Optional ENV vars
27 | @REM M2_HOME - location of maven2's installed home dir
28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
31 | @REM e.g. to debug Maven itself, use
32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
34 | @REM ----------------------------------------------------------------------------
35 |
36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
37 | @echo off
38 | @REM set title of command window
39 | title %0
40 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
41 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
42 |
43 | @REM set %HOME% to equivalent of $HOME
44 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
45 |
46 | @REM Execute a user defined script before this one
47 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
48 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending
49 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
50 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
51 | :skipRcPre
52 |
53 | @setlocal
54 |
55 | set ERROR_CODE=0
56 |
57 | @REM To isolate internal variables from possible post scripts, we use another setlocal
58 | @setlocal
59 |
60 | @REM ==== START VALIDATION ====
61 | if not "%JAVA_HOME%" == "" goto OkJHome
62 |
63 | echo.
64 | echo Error: JAVA_HOME not found in your environment. >&2
65 | echo Please set the JAVA_HOME variable in your environment to match the >&2
66 | echo location of your Java installation. >&2
67 | echo.
68 | goto error
69 |
70 | :OkJHome
71 | if exist "%JAVA_HOME%\bin\java.exe" goto init
72 |
73 | echo.
74 | echo Error: JAVA_HOME is set to an invalid directory. >&2
75 | echo JAVA_HOME = "%JAVA_HOME%" >&2
76 | echo Please set the JAVA_HOME variable in your environment to match the >&2
77 | echo location of your Java installation. >&2
78 | echo.
79 | goto error
80 |
81 | @REM ==== END VALIDATION ====
82 |
:init

@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
@REM Fallback to current working directory if not found.

set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir

@REM Remember the starting directory so it can be restored after the upward walk.
set EXEC_DIR=%CD%
set WDIR=%EXEC_DIR%
:findBaseDir
IF EXIST "%WDIR%"\.mvn goto baseDirFound
cd ..
@REM When "cd .." no longer changes %CD% we have reached the filesystem root.
IF "%WDIR%"=="%CD%" goto baseDirNotFound
set WDIR=%CD%
goto findBaseDir

:baseDirFound
set MAVEN_PROJECTBASEDIR=%WDIR%
cd "%EXEC_DIR%"
goto endDetectBaseDir

:baseDirNotFound
@REM No .mvn found anywhere above; fall back to the original working directory.
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
cd "%EXEC_DIR%"

:endDetectBaseDir
110 |
111 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
112 |
113 | @setlocal EnableExtensions EnableDelayedExpansion
114 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
115 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
116 |
117 | :endReadAdditionalConfig
118 |
119 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
120 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
121 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
122 |
123 | set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
124 | FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO (
125 | IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
126 | )
127 |
128 | @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
129 | @REM This allows using the maven wrapper in projects that prohibit checking in binary data.
130 | if exist %WRAPPER_JAR% (
131 | echo Found %WRAPPER_JAR%
132 | ) else (
133 | echo Couldn't find %WRAPPER_JAR%, downloading it ...
134 | echo Downloading from: %DOWNLOAD_URL%
135 | powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"
136 | echo Finished downloading %WRAPPER_JAR%
137 | )
138 | @REM End of extension
139 |
140 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
141 | if ERRORLEVEL 1 goto error
142 | goto end
143 |
144 | :error
145 | set ERROR_CODE=1
146 |
147 | :end
148 | @endlocal & set ERROR_CODE=%ERROR_CODE%
149 |
150 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
151 | @REM check for post script, once with legacy .bat ending and once with .cmd ending
152 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
153 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
154 | :skipRcPost
155 |
156 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
157 | if "%MAVEN_BATCH_PAUSE%" == "on" pause
158 |
159 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
160 |
161 | exit /B %ERROR_CODE%
162 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 4.0.0
4 |
5 | cricket.jmoore
6 | schema-registry-transfer-smt
7 | 0.2.1-SNAPSHOT
8 | schema-registry-transfer-smt
9 |
10 | A Kafka Connect Transform for copying Confluent Schema Registry schemas between different registries.
11 |
12 | https://github.com/cricket007/schema-registry-transfer-smt
13 | 2019
14 |
15 |
16 | Apache License 2.0
17 | https://github.com/cricket007/schema-registry-transfer-smt/LICENSE
18 | repo
19 |
20 |
21 |
22 |
23 | cricket007
24 | Jordan Moore
25 | https://github.com/cricket007
26 |
27 | Owner
28 |
29 |
30 |
31 |
32 | scm:git:https://github.com/cricket007/schema-registry-transfer-smt.git
33 | scm:git:git@github.com:cricket007/schema-registry-transfer-smt.git
34 | https://github.com/cricket007/schema-registry-transfer-smt
35 |
36 |
37 | github
38 | https://github.com/cricket007/schema-registry-transfer-smt/issues
39 |
40 |
41 |
42 | bintray
43 | bintray-releases
44 | https://api.bintray.com/maven/${env.BINTRAY_REPO_OWNER}/${env.BINTRAY_REPO}/${project.artifactId}/;publish=1
45 |
46 |
47 |
48 |
49 |
50 | confluent
51 | https://packages.confluent.io/maven
52 |
53 |
54 | repo.eclipse.org
55 | sensiNact Repository - Snapshots
56 | https://repo.eclipse.org/content/repositories/sensinact-snapshots/
57 |
58 | true
59 |
60 |
61 |
62 |
63 |
64 | UTF-8
65 | 1.8
66 | 1.8
67 |
68 | 1.7.25
69 | 2.1.0
70 | 5.1.0
71 | -cp1
72 | 2.9.7
73 | 1.9.13
74 |
75 | 1.20.0
76 |
77 | cricket.jmoore
78 |
79 |
80 |
81 |
82 |
83 |
84 | org.slf4j
85 | slf4j-api
86 | ${slf4j.version}
87 | provided
88 |
89 |
90 |
91 | org.apache.kafka
92 | kafka-clients
93 | ${kafka.version}${confluent.patch.version}
94 | provided
95 |
96 |
97 |
98 | org.apache.kafka
99 | connect-transforms
100 | ${kafka.version}${confluent.patch.version}
101 | provided
102 |
103 |
104 |
105 | org.apache.zookeeper
106 | zookeeper
107 | 3.4.13
108 | provided
109 |
110 |
111 | log4j
112 | log4j
113 |
114 |
115 | io.netty
116 | netty
117 |
118 |
119 | org.slf4j
120 | slf4j-log4j12
121 |
122 |
123 |
124 |
125 |
126 | com.101tec
127 | zkclient
128 | 0.10
129 | provided
130 |
131 |
132 |
133 | com.fasterxml.jackson.core
134 | jackson-databind
135 | ${jackson.version}
136 | provided
137 |
138 |
139 |
140 | org.codehaus.jackson
141 | jackson-core-asl
142 | ${jackson.asl.version}
143 | provided
144 |
145 |
146 |
147 | org.codehaus.jackson
148 | jackson-mapper-asl
149 | ${jackson.asl.version}
150 | provided
151 |
152 |
153 |
154 | com.thoughtworks.paranamer
155 | paranamer
156 | 2.7
157 | provided
158 |
159 |
160 |
161 | org.tukaani
162 | xz
163 | 1.5
164 | provided
165 |
166 |
167 |
168 | org.xerial.snappy
169 | snappy-java
170 | 1.1.7.1
171 | provided
172 |
173 |
174 |
175 | org.apache.commons
176 | commons-compress
177 | 1.8.1
178 | provided
179 |
180 |
181 |
182 |
183 | org.apache.avro
184 | avro
185 | 1.8.1
186 |
187 |
188 |
189 | io.confluent
190 | kafka-avro-serializer
191 | ${confluent.version}
192 |
193 |
194 |
195 | io.confluent
196 | kafka-schema-registry-client
197 | ${confluent.version}
198 |
199 |
200 | org.apache.kafka
201 | kafka-clients
202 |
203 |
204 | com.fasterxml.jackson.core
205 | jackson-databind
206 |
207 |
208 |
209 |
210 |
211 |
212 | org.junit.jupiter
213 | junit-jupiter-engine
214 | 5.3.1
215 | test
216 |
217 |
218 |
219 | org.slf4j
220 | log4j-over-slf4j
221 | ${slf4j.version}
222 | test
223 |
224 |
225 |
226 | ch.qos.logback
227 | logback-classic
228 | 1.2.3
229 | test
230 |
231 |
232 |
233 | com.github.tomakehurst
234 | wiremock
235 | 2.20.0
236 | test
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 | org.apache.maven.plugins
245 | maven-surefire-plugin
246 | 3.0.0-M3
247 |
248 |
249 |
250 |
251 |
252 | maven-release-plugin
253 | 2.5.3
254 |
255 |
256 | org.apache.maven.plugins
257 | maven-shade-plugin
258 | 3.2.1
259 |
260 | false
261 |
262 |
263 | *:*
264 |
265 | META-INF/maven/**
266 | META-INF/LICENSE*
267 | META-INF/NOTICE*
268 | META-INF/DEPENDENCIES
269 |
270 |
271 |
272 |
273 |
274 | io.confluent
275 | ${shade.prefix}.confluent
276 |
277 |
278 | org.apache.avro
279 | ${shade.prefix}.avro
280 |
281 |
282 | avro.shaded
283 | ${shade.prefix}.avroshaded
284 |
285 |
286 |
287 |
288 |
289 |
290 |
291 |
292 | package
293 |
294 | shade
295 |
296 |
297 |
298 |
299 |
300 | io.confluent
301 | kafka-connect-maven-plugin
302 | 0.11.1
303 |
304 | Avro Schema Transformer
305 |
306 | false
307 | true
308 |
309 | transform
310 |
311 | cricket007
312 |
313 | avro
314 |
315 | ${project.issueManagement.url}
316 | user
317 | Support provided through community involvement.
318 | true
319 |
320 |
321 |
322 | com.diffplug.spotless
323 | spotless-maven-plugin
324 | ${spotless.version}
325 |
326 |
327 |
328 | /* Licensed under Apache-2.0 */
329 |
330 |
331 |
332 |
333 |
334 |
335 |
336 | java,org,io.confluent,com,cricket.jmoore,
337 |
338 |
339 |
340 |
341 |
342 | process-sources
343 |
344 | apply
345 |
346 |
347 |
348 |
349 |
350 |
351 |
352 |
353 |
--------------------------------------------------------------------------------
/settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 | bintray
7 | ${env.BINTRAY_USER}
8 | ${env.BINTRAY_API_KEY}
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/src/main/java/cricket/jmoore/kafka/connect/transforms/ConnectSchemaUtil.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.kafka.connect.transforms;
3 |
4 | import org.apache.kafka.connect.data.Schema;
5 |
6 | public class ConnectSchemaUtil {
7 | public static boolean isBytesSchema(Schema connectSchema) {
8 | if (connectSchema == null) {
9 | return false;
10 | }
11 | return connectSchema.type() == Schema.BYTES_SCHEMA.type() ||
12 | connectSchema.type() == Schema.OPTIONAL_BYTES_SCHEMA.type();
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/src/main/java/cricket/jmoore/kafka/connect/transforms/SchemaRegistryTransfer.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.kafka.connect.transforms;
3 |
4 | import java.io.IOException;
5 | import java.nio.ByteBuffer;
6 | import java.util.HashMap;
7 | import java.util.List;
8 | import java.util.Map;
9 | import java.util.Objects;
10 | import java.util.Optional;
11 |
12 | import org.apache.kafka.common.cache.Cache;
13 | import org.apache.kafka.common.cache.LRUCache;
14 | import org.apache.kafka.common.cache.SynchronizedCache;
15 | import org.apache.kafka.common.config.ConfigDef;
16 | import org.apache.kafka.common.errors.SerializationException;
17 | import org.apache.kafka.connect.connector.ConnectRecord;
18 | import org.apache.kafka.connect.data.Schema;
19 | import org.apache.kafka.connect.errors.ConnectException;
20 | import org.apache.kafka.connect.transforms.Transformation;
21 | import org.apache.kafka.connect.transforms.util.NonEmptyListValidator;
22 | import org.apache.kafka.connect.transforms.util.SimpleConfig;
23 | import org.slf4j.Logger;
24 | import org.slf4j.LoggerFactory;
25 |
26 | import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
27 | import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
28 | import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
29 | import io.confluent.kafka.serializers.subject.TopicNameStrategy;
30 | import io.confluent.kafka.serializers.subject.strategy.SubjectNameStrategy;
31 |
32 | @SuppressWarnings("unused")
33 | public class SchemaRegistryTransfer> implements Transformation {
34 | public static final String OVERVIEW_DOC = "Inspect the Confluent KafkaAvroSerializer's wire-format header to copy schemas from one Schema Registry to another.";
35 | private static final Logger log = LoggerFactory.getLogger(SchemaRegistryTransfer.class);
36 |
37 | private static final byte MAGIC_BYTE = (byte) 0x0;
38 | // wire-format is magic byte + an integer, then data
39 | private static final short WIRE_FORMAT_PREFIX_LENGTH = 1 + (Integer.SIZE / Byte.SIZE);
40 |
41 | public static final ConfigDef CONFIG_DEF;
42 | public static final String SCHEMA_CAPACITY_CONFIG_DOC = "The maximum amount of schemas to be stored for each Schema Registry client.";
43 | public static final Integer SCHEMA_CAPACITY_CONFIG_DEFAULT = 100;
44 |
45 | public static final String SRC_PREAMBLE = "For source consumer's schema registry, ";
46 | public static final String SRC_SCHEMA_REGISTRY_CONFIG_DOC = "A list of addresses for the Schema Registry to copy from. The consumer's Schema Registry.";
47 | public static final String SRC_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DOC = SRC_PREAMBLE + AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE_DOC;
48 | public static final String SRC_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DEFAULT = AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE_DEFAULT;
49 | public static final String SRC_USER_INFO_CONFIG_DOC = SRC_PREAMBLE + AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_USER_INFO_DOC;
50 | public static final String SRC_USER_INFO_CONFIG_DEFAULT = AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_USER_INFO_DEFAULT;
51 |
52 | public static final String DEST_PREAMBLE = "For target producer's schema registry, ";
53 | public static final String DEST_SCHEMA_REGISTRY_CONFIG_DOC = "A list of addresses for the Schema Registry to copy to. The producer's Schema Registry.";
54 | public static final String DEST_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DOC = DEST_PREAMBLE + AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE_DOC;
55 | public static final String DEST_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DEFAULT = AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE_DEFAULT;
56 | public static final String DEST_USER_INFO_CONFIG_DOC = DEST_PREAMBLE + AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_USER_INFO_DOC;
57 | public static final String DEST_USER_INFO_CONFIG_DEFAULT = AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_USER_INFO_DEFAULT;
58 |
59 | public static final String TRANSFER_KEYS_CONFIG_DOC = "Whether or not to copy message key schemas between registries.";
60 | public static final Boolean TRANSFER_KEYS_CONFIG_DEFAULT = true;
61 | public static final String INCLUDE_HEADERS_CONFIG_DOC = "Whether or not to preserve the Kafka Connect Record headers.";
62 | public static final Boolean INCLUDE_HEADERS_CONFIG_DEFAULT = true;
63 |
64 | private CachedSchemaRegistryClient sourceSchemaRegistryClient;
65 | private CachedSchemaRegistryClient destSchemaRegistryClient;
66 | private SubjectNameStrategy subjectNameStrategy;
67 | private boolean transferKeys, includeHeaders;
68 |
69 | // caches from the source registry to the destination registry
70 | private Cache schemaCache;
71 |
72 | public SchemaRegistryTransfer() {
73 | }
74 |
75 | static {
76 | CONFIG_DEF = (new ConfigDef())
77 | .define(ConfigName.SRC_SCHEMA_REGISTRY_URL, ConfigDef.Type.LIST, ConfigDef.NO_DEFAULT_VALUE, new NonEmptyListValidator(), ConfigDef.Importance.HIGH, SRC_SCHEMA_REGISTRY_CONFIG_DOC)
78 | .define(ConfigName.DEST_SCHEMA_REGISTRY_URL, ConfigDef.Type.LIST, ConfigDef.NO_DEFAULT_VALUE, new NonEmptyListValidator(), ConfigDef.Importance.HIGH, DEST_SCHEMA_REGISTRY_CONFIG_DOC)
79 | .define(ConfigName.SRC_BASIC_AUTH_CREDENTIALS_SOURCE, ConfigDef.Type.STRING, SRC_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, SRC_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DOC)
80 | .define(ConfigName.SRC_USER_INFO, ConfigDef.Type.PASSWORD, SRC_USER_INFO_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, SRC_USER_INFO_CONFIG_DOC)
81 | .define(ConfigName.DEST_BASIC_AUTH_CREDENTIALS_SOURCE, ConfigDef.Type.STRING, DEST_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, DEST_BASIC_AUTH_CREDENTIALS_SOURCE_CONFIG_DOC)
82 | .define(ConfigName.DEST_USER_INFO, ConfigDef.Type.PASSWORD, DEST_USER_INFO_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, DEST_USER_INFO_CONFIG_DOC)
83 | .define(ConfigName.SCHEMA_CAPACITY, ConfigDef.Type.INT, SCHEMA_CAPACITY_CONFIG_DEFAULT, ConfigDef.Importance.LOW, SCHEMA_CAPACITY_CONFIG_DOC)
84 | .define(ConfigName.TRANSFER_KEYS, ConfigDef.Type.BOOLEAN, TRANSFER_KEYS_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, TRANSFER_KEYS_CONFIG_DOC)
85 | .define(ConfigName.INCLUDE_HEADERS, ConfigDef.Type.BOOLEAN, INCLUDE_HEADERS_CONFIG_DEFAULT, ConfigDef.Importance.MEDIUM, INCLUDE_HEADERS_CONFIG_DOC)
86 | ;
87 | // TODO: Other properties might be useful, e.g. the Subject Strategies
88 | }
89 |
90 | @Override
91 | public ConfigDef config() {
92 | return CONFIG_DEF;
93 | }
94 |
95 | @Override
96 | public void configure(Map props) {
97 | SimpleConfig config = new SimpleConfig(CONFIG_DEF, props);
98 |
99 | List sourceUrls = config.getList(ConfigName.SRC_SCHEMA_REGISTRY_URL);
100 | final Map sourceProps = new HashMap<>();
101 | sourceProps.put(AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE,
102 | "SRC_" + config.getString(ConfigName.SRC_BASIC_AUTH_CREDENTIALS_SOURCE));
103 | sourceProps.put(AbstractKafkaAvroSerDeConfig.USER_INFO_CONFIG,
104 | config.getPassword(ConfigName.SRC_USER_INFO)
105 | .value());
106 |
107 | List destUrls = config.getList(ConfigName.DEST_SCHEMA_REGISTRY_URL);
108 | final Map destProps = new HashMap<>();
109 | destProps.put(AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE,
110 | "DEST_" + config.getString(ConfigName.DEST_BASIC_AUTH_CREDENTIALS_SOURCE));
111 | destProps.put(AbstractKafkaAvroSerDeConfig.USER_INFO_CONFIG,
112 | config.getPassword(ConfigName.DEST_USER_INFO)
113 | .value());
114 |
115 | Integer schemaCapacity = config.getInt(ConfigName.SCHEMA_CAPACITY);
116 |
117 | this.schemaCache = new SynchronizedCache<>(new LRUCache<>(schemaCapacity));
118 | this.sourceSchemaRegistryClient = new CachedSchemaRegistryClient(sourceUrls, schemaCapacity, sourceProps);
119 | this.destSchemaRegistryClient = new CachedSchemaRegistryClient(destUrls, schemaCapacity, destProps);
120 |
121 | this.transferKeys = config.getBoolean(ConfigName.TRANSFER_KEYS);
122 | this.includeHeaders = config.getBoolean(ConfigName.INCLUDE_HEADERS);
123 |
124 | // TODO: Make the Strategy configurable, may be different for src and dest
125 | // Strategy for the -key and -value subjects
126 | this.subjectNameStrategy = new TopicNameStrategy();
127 | }
128 |
129 | @Override
130 | public R apply(R r) {
131 | final String topic = r.topic();
132 |
133 | // Transcribe the key's schema id
134 | final Object key = r.key();
135 | final Schema keySchema = r.keySchema();
136 |
137 | Object updatedKey = key;
138 | Optional destKeySchemaId;
139 | if (transferKeys) {
140 | if (ConnectSchemaUtil.isBytesSchema(keySchema) || key instanceof byte[]) {
141 | if (key == null) {
142 | log.trace("Passing through null record key.");
143 | } else {
144 | byte[] keyAsBytes = (byte[]) key;
145 | int keyByteLength = keyAsBytes.length;
146 | if (keyByteLength <= 5) {
147 | throw new SerializationException("Unexpected byte[] length " + keyByteLength + " for Avro record key.");
148 | }
149 | ByteBuffer b = ByteBuffer.wrap(keyAsBytes);
150 | destKeySchemaId = copySchema(b, topic, true);
151 | b.putInt(1, destKeySchemaId.orElseThrow(()
152 | -> new ConnectException("Transform failed. Unable to update record schema id. (isKey=true)")));
153 | updatedKey = b.array();
154 | }
155 | } else {
156 | throw new ConnectException("Transform failed. Record key does not have a byte[] schema.");
157 | }
158 | } else {
159 | log.trace("Skipping record key translation. {} has been to false. Keys will be passed as-is."
160 | , ConfigName.TRANSFER_KEYS);
161 | }
162 |
163 | // Transcribe the value's schema id
164 | final Object value = r.value();
165 | final Schema valueSchema = r.valueSchema();
166 |
167 | Object updatedValue = value;
168 | Optional destValueSchemaId;
169 | if (ConnectSchemaUtil.isBytesSchema(valueSchema) || value instanceof byte[]) {
170 | if (value == null) {
171 | log.trace("Passing through null record value");
172 | } else {
173 | byte[] valueAsBytes = (byte[]) value;
174 | int valueByteLength = valueAsBytes.length;
175 | if (valueByteLength <= 5) {
176 | throw new SerializationException("Unexpected byte[] length " + valueByteLength + " for Avro record value.");
177 | }
178 | ByteBuffer b = ByteBuffer.wrap(valueAsBytes);
179 | destValueSchemaId = copySchema(b, topic, false);
180 | b.putInt(1, destValueSchemaId.orElseThrow(()
181 | -> new ConnectException("Transform failed. Unable to update record schema id. (isKey=false)")));
182 | updatedValue = b.array();
183 | }
184 | } else {
185 | throw new ConnectException("Transform failed. Record value does not have a byte[] schema.");
186 | }
187 |
188 |
189 | return includeHeaders ?
190 | r.newRecord(topic, r.kafkaPartition(),
191 | keySchema, updatedKey,
192 | valueSchema, updatedValue,
193 | r.timestamp(),
194 | r.headers())
195 | :
196 | r.newRecord(topic, r.kafkaPartition(),
197 | keySchema, updatedKey,
198 | valueSchema, updatedValue,
199 | r.timestamp());
200 | }
201 |
202 | protected Optional copySchema(ByteBuffer buffer, String topic, boolean isKey) {
203 | SchemaAndId schemaAndDestId;
204 | if (buffer.get() == MAGIC_BYTE) {
205 | int sourceSchemaId = buffer.getInt();
206 |
207 | schemaAndDestId = schemaCache.get(sourceSchemaId);
208 | if (schemaAndDestId != null) {
209 | log.trace("Schema id {} has been seen before. Not registering with destination registry again.");
210 | } else { // cache miss
211 | log.trace("Schema id {} has not been seen before", sourceSchemaId);
212 | schemaAndDestId = new SchemaAndId();
213 | try {
214 | log.trace("Looking up schema id {} in source registry", sourceSchemaId);
215 | // Can't do getBySubjectAndId because that requires a Schema object for the strategy
216 | schemaAndDestId.schema = sourceSchemaRegistryClient.getById(sourceSchemaId);
217 | } catch (IOException | RestClientException e) {
218 | log.error(String.format("Unable to fetch source schema for id %d.", sourceSchemaId), e);
219 | throw new ConnectException(e);
220 | }
221 |
222 | try {
223 | log.trace("Registering schema {} to destination registry", schemaAndDestId.schema);
224 | // It could be possible that the destination naming strategy is different from the source
225 | String subjectName = subjectNameStrategy.subjectName(topic, isKey, schemaAndDestId.schema);
226 | schemaAndDestId.id = destSchemaRegistryClient.register(subjectName, schemaAndDestId.schema);
227 | schemaCache.put(sourceSchemaId, schemaAndDestId);
228 | } catch (IOException | RestClientException e) {
229 | log.error(String.format("Unable to register source schema id %d to destination registry.",
230 | sourceSchemaId), e);
231 | return Optional.empty();
232 | }
233 | }
234 | } else {
235 | throw new SerializationException("Unknown magic byte!");
236 | }
237 | return Optional.ofNullable(schemaAndDestId.id);
238 | }
239 |
240 | @Override
241 | public void close() {
242 | this.sourceSchemaRegistryClient = null;
243 | this.destSchemaRegistryClient = null;
244 | }
245 |
246 | interface ConfigName {
247 | String SRC_SCHEMA_REGISTRY_URL = "src." + AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
248 | String SRC_BASIC_AUTH_CREDENTIALS_SOURCE = "src." + AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE;
249 | String SRC_USER_INFO = "src." + AbstractKafkaAvroSerDeConfig.USER_INFO_CONFIG;
250 | String DEST_SCHEMA_REGISTRY_URL = "dest." + AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
251 | String DEST_BASIC_AUTH_CREDENTIALS_SOURCE = "dest." + AbstractKafkaAvroSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE;
252 | String DEST_USER_INFO = "dest." + AbstractKafkaAvroSerDeConfig.USER_INFO_CONFIG;
253 | String SCHEMA_CAPACITY = "schema.capacity";
254 | String TRANSFER_KEYS = "transfer.message.keys";
255 | String INCLUDE_HEADERS = "include.message.headers";
256 | }
257 |
258 | private static class SchemaAndId {
259 | private Integer id;
260 | private org.apache.avro.Schema schema;
261 |
262 | SchemaAndId() {
263 | }
264 |
265 | SchemaAndId(int id, org.apache.avro.Schema schema) {
266 | this.id = id;
267 | this.schema = schema;
268 | }
269 |
270 | @Override
271 | public boolean equals(Object o) {
272 | if (this == o) return true;
273 | if (o == null || getClass() != o.getClass()) return false;
274 | SchemaAndId schemaAndId = (SchemaAndId) o;
275 | return Objects.equals(id, schemaAndId.id) &&
276 | Objects.equals(schema, schemaAndId.schema);
277 | }
278 |
279 | @Override
280 | public int hashCode() {
281 | return Objects.hash(id, schema);
282 | }
283 |
284 | @Override
285 | public String toString() {
286 | return "SchemaAndId{" +
287 | "id=" + id +
288 | ", schema=" + schema +
289 | '}';
290 | }
291 | }
292 |
293 | }
294 |
--------------------------------------------------------------------------------
/src/main/java/cricket/jmoore/security/basicauth/DestSaslBasicAuthCredentialProvider.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.security.basicauth;
3 |
4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.SaslBasicAuthCredentialProvider;
5 |
6 | public class DestSaslBasicAuthCredentialProvider extends SaslBasicAuthCredentialProvider {
7 | @Override
8 | public String alias() {
9 | return "DEST_SASL_INHERIT";
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/src/main/java/cricket/jmoore/security/basicauth/DestUrlBasicAuthCredentialProvider.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.security.basicauth;
3 |
4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.UrlBasicAuthCredentialProvider;
5 |
6 | public class DestUrlBasicAuthCredentialProvider extends UrlBasicAuthCredentialProvider {
7 | @Override
8 | public String alias() {
9 | return "DEST_URL";
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/src/main/java/cricket/jmoore/security/basicauth/DestUserInfoCredentialProvider.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.security.basicauth;
3 |
4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.UserInfoCredentialProvider;
5 |
6 | public class DestUserInfoCredentialProvider extends UserInfoCredentialProvider
7 | {
8 | @Override
9 | public String alias() {
10 | return "DEST_USER_INFO";
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/src/main/java/cricket/jmoore/security/basicauth/SrcSaslBasicAuthCredentialProvider.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.security.basicauth;
3 |
4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.SaslBasicAuthCredentialProvider;
5 |
6 | public class SrcSaslBasicAuthCredentialProvider extends SaslBasicAuthCredentialProvider {
7 | @Override
8 | public String alias() {
9 | return "SRC_SASL_INHERIT";
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/src/main/java/cricket/jmoore/security/basicauth/SrcUrlBasicAuthCredentialProvider.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.security.basicauth;
3 |
4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.UrlBasicAuthCredentialProvider;
5 |
6 | public class SrcUrlBasicAuthCredentialProvider extends UrlBasicAuthCredentialProvider {
7 | @Override
8 | public String alias() {
9 | return "SRC_URL";
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/src/main/java/cricket/jmoore/security/basicauth/SrcUserInfoCredentialProvider.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.security.basicauth;
3 |
4 | import io.confluent.kafka.schemaregistry.client.security.basicauth.UserInfoCredentialProvider;
5 |
6 | public class SrcUserInfoCredentialProvider extends UserInfoCredentialProvider
7 | {
8 | @Override
9 | public String alias() {
10 | return "SRC_USER_INFO";
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/src/main/resources/META-INF/services/io.confluent.kafka.schemaregistry.client.security.basicauth.BasicAuthCredentialProvider:
--------------------------------------------------------------------------------
1 | cricket.jmoore.security.basicauth.DestSaslBasicAuthCredentialProvider
2 | cricket.jmoore.security.basicauth.DestUrlBasicAuthCredentialProvider
3 | cricket.jmoore.security.basicauth.DestUserInfoCredentialProvider
4 | cricket.jmoore.security.basicauth.SrcSaslBasicAuthCredentialProvider
5 | cricket.jmoore.security.basicauth.SrcUrlBasicAuthCredentialProvider
6 | cricket.jmoore.security.basicauth.SrcUserInfoCredentialProvider
--------------------------------------------------------------------------------
/src/test/java/cricket/jmoore/kafka/connect/transforms/Constants.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.kafka.connect.transforms;
3 |
/**
 * Shared string constants for the transform tests. Interface members are
 * implicitly {@code public static final}, so the redundant modifiers have
 * been dropped (Checkstyle RedundantModifier).
 */
public interface Constants {

    /** Test tag enabling basic HTTP auth on the mock *source* registry. */
    String USE_BASIC_AUTH_SOURCE_TAG = "useBasicAuthSource";

    /** Test tag enabling basic HTTP auth on the mock *destination* registry. */
    String USE_BASIC_AUTH_DEST_TAG = "useBasicAuthDest";

    // NOTE(review): USER_INFO / URL look like Confluent basic.auth.credentials.source
    // values — confirm against the transform's configuration handling.
    String USER_INFO_SOURCE = "USER_INFO";

    String URL_SOURCE = "URL";

    /** user:password fixture used by tests against the source registry. */
    String HTTP_AUTH_SOURCE_CREDENTIALS_FIXTURE = "sourceuser:sourcepass";

    /** user:password fixture used by tests against the destination registry. */
    String HTTP_AUTH_DEST_CREDENTIALS_FIXTURE = "destuser:destpass";
}
17 |
--------------------------------------------------------------------------------
/src/test/java/cricket/jmoore/kafka/connect/transforms/SchemaRegistryMock.java:
--------------------------------------------------------------------------------
1 | /* Licensed under Apache-2.0 */
2 | package cricket.jmoore.kafka.connect.transforms;
3 |
4 | import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
5 |
6 | import java.io.IOException;
7 | import java.util.List;
8 | import java.util.function.Function;
9 | import java.util.stream.Collectors;
10 | import java.util.stream.StreamSupport;
11 |
12 | import org.apache.avro.Schema;
13 | import org.junit.jupiter.api.extension.AfterEachCallback;
14 | import org.junit.jupiter.api.extension.BeforeEachCallback;
15 | import org.junit.jupiter.api.extension.ExtensionContext;
16 | import org.slf4j.Logger;
17 | import org.slf4j.LoggerFactory;
18 |
19 | import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
20 | import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
21 | import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
22 | import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
23 | import io.confluent.kafka.schemaregistry.client.rest.entities.Config;
24 | import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString;
25 | import io.confluent.kafka.schemaregistry.client.rest.entities.requests.RegisterSchemaRequest;
26 | import io.confluent.kafka.schemaregistry.client.rest.entities.requests.RegisterSchemaResponse;
27 | import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
28 | import io.confluent.kafka.serializers.subject.TopicNameStrategy;
29 | import io.confluent.kafka.serializers.subject.strategy.SubjectNameStrategy;
30 |
31 | import com.github.tomakehurst.wiremock.WireMockServer;
32 | import com.github.tomakehurst.wiremock.client.MappingBuilder;
33 | import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder;
34 | import com.github.tomakehurst.wiremock.client.WireMock;
35 | import com.github.tomakehurst.wiremock.common.FileSource;
36 | import com.github.tomakehurst.wiremock.core.WireMockConfiguration;
37 | import com.github.tomakehurst.wiremock.extension.Parameters;
38 | import com.github.tomakehurst.wiremock.extension.ResponseDefinitionTransformer;
39 | import com.github.tomakehurst.wiremock.http.Request;
40 | import com.github.tomakehurst.wiremock.http.ResponseDefinition;
41 | import com.github.tomakehurst.wiremock.stubbing.StubMapping;
42 | import com.google.common.base.Splitter;
43 | import com.google.common.collect.Iterables;
44 |
45 | /**
46 | *
The schema registry mock implements a few basic HTTP endpoints that are used by the Avro serdes.
47 | * In particular,
48 | *
49 | *
you can register a schema and
50 | *
retrieve a schema by id.
51 | *
52 | *
53 | *
Additionally, server-side mock can be toggled from its default authentication behavior (no authentication)
54 | * to a variant that requires basic HTTP Authentication using fixed credentials `username:password` by placing a
55 | * `@Tag(Constants.USE_BASIC_AUTH_SOURCE_TAG)` and/or `@Tag(Constants.USE_BASIC_AUTH_DEST_TAG)` annotation after
56 | * @Test annotation of any basic HTTP authentication dependent test code.
57 | *
58 | *
If you use the TestToplogy of the fluent Kafka Streams test, you don't have to interact with this class at
59 | * all.
60 | *
61 | *
Without the test framework, you can use the mock as follows: