├── .gitignore ├── Jenkinsfile ├── LICENSE ├── README.md ├── config └── PatternRename.conf ├── docs └── transformations.rst ├── pom.xml └── src ├── main └── java │ └── com │ └── github │ └── jcustenborder │ └── kafka │ └── connect │ └── transform │ └── common │ ├── AdjustPrecisionAndScale.java │ ├── AdjustPrecisionAndScaleConfig.java │ ├── BaseTransformation.java │ ├── BytesToString.java │ ├── BytesToStringConfig.java │ ├── ChangeCase.java │ ├── ChangeCaseConfig.java │ ├── ChangeTopicCase.java │ ├── ChangeTopicCaseConfig.java │ ├── ConversionHandler.java │ ├── Debug.java │ ├── ExtractNestedField.java │ ├── ExtractNestedFieldConfig.java │ ├── ExtractTimestamp.java │ ├── ExtractTimestampConfig.java │ ├── ExtractXPath.java │ ├── ExtractXPathConfig.java │ ├── HeaderToField.java │ ├── HeaderToFieldConfig.java │ ├── LowerCaseTopic.java │ ├── NormalizeSchema.java │ ├── PatternFilter.java │ ├── PatternFilterConfig.java │ ├── PatternMapString.java │ ├── PatternMapStringConfig.java │ ├── PatternRename.java │ ├── PatternRenameConfig.java │ ├── SchemaNameToTopic.java │ ├── SetMaximumPrecision.java │ ├── SetMaximumPrecisionConfig.java │ ├── SetNull.java │ ├── TimestampNow.java │ ├── TimestampNowField.java │ ├── TimestampNowFieldConfig.java │ ├── ToJSON.java │ ├── ToJSONConfig.java │ ├── ToLong.java │ ├── ToLongConfig.java │ ├── TopicNameToField.java │ ├── TopicNameToFieldConfig.java │ └── package-info.java └── test ├── java └── com │ └── github │ └── jcustenborder │ └── kafka │ └── connect │ └── transform │ └── common │ ├── AdjustPrecisionAndScaleTest.java │ ├── BytesToStringTest.java │ ├── ChangeCaseTest.java │ ├── ChangeTopicCaseTest.java │ ├── ConversionHandlerTest.java │ ├── DebugTest.java │ ├── DocumentationTest.java │ ├── ExtractNestedFieldTest.java │ ├── ExtractTimestampTest.java │ ├── ExtractXPathTest.java │ ├── GenericAssertions.java │ ├── HeaderToFieldConfigTest.java │ ├── HeaderToFieldTest.java │ ├── LowerCaseTopicTest.java │ ├── NormalizeSchemaTest.java │ ├── 
PatternFilterTest.java │ ├── PatternMapStringTest.java │ ├── PatternRenameTest.java │ ├── SchemaNameToTopicTest.java │ ├── SetMaximumPrecisionTest.java │ ├── SetNullTest.java │ ├── TimestampNowFieldTest.java │ ├── TimestampNowTest.java │ ├── ToJsonTest.java │ ├── ToLongTest.java │ ├── TopicNameToFieldTest.java │ └── TransformationTest.java └── resources ├── com └── github │ └── jcustenborder │ └── kafka │ └── connect │ └── transform │ └── common │ ├── BytesToString │ ├── bytes.json │ └── struct.json │ ├── ChangeCase │ └── simple.json │ ├── ChangeTopicCase │ ├── LOWER_CAMELToUPPER_UNDERSCORE.json │ ├── LOWER_HYPHENToLOWER_UNDERSCORE.json │ └── UPPER_UNDERSCOREToLOWER_CAMEL.json │ ├── ExtractNestedField │ └── simple.json │ ├── ExtractTimestamp │ └── simple.json │ ├── ExtractXPath │ ├── SOAPEnvelope1.xml │ └── Transaction.xml │ ├── HeaderToField │ ├── headertofield.json │ └── spooldir.json │ ├── LowerCaseTopic │ └── example.json │ ├── PatternRename │ └── simple.json │ ├── SchemaNameToTopic │ └── example.json │ ├── TimestampNow │ └── example.json │ ├── TimestampNowField │ └── insertTimestamp.json │ ├── ToJSON │ └── simple.json │ ├── ToLong │ └── simple.json │ ├── TopicNameToField │ └── simple.json │ └── foo └── logback.xml /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.ear 17 | *.zip 18 | *.tar.gz 19 | *.rar 20 | 21 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 22 | hs_err_pid* 23 | .okhttpcache 24 | .idea 25 | target 26 | -------------------------------------------------------------------------------- /Jenkinsfile: -------------------------------------------------------------------------------- 1 | #!groovy 2 | @Library('jenkins-pipeline') import 
com.github.jcustenborder.jenkins.pipeline.KafkaConnectPipeline 3 | 4 | def pipe = new KafkaConnectPipeline() 5 | pipe.execute() -------------------------------------------------------------------------------- /config/PatternRename.conf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcustenborder/kafka-connect-transform-common/1bde8223eca60dde8b23b896e8c1ed6d06409297/config/PatternRename.conf -------------------------------------------------------------------------------- /docs/transformations.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Common Transforms 3 | ================= 4 | 5 | The Common Transforms project is a collection of common transformations that can be use universally. 6 | 7 | .. toctree:: 8 | :maxdepth: 1 9 | :caption: Transformations: 10 | :hidden: 11 | :glob: 12 | 13 | transformations/* 14 | 15 | 16 | .. toctree:: 17 | :maxdepth: 0 18 | :caption: Schemas: 19 | :hidden: 20 | 21 | schemas 22 | info 23 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 19 | 23 | 4.0.0 24 | 25 | com.github.jcustenborder.kafka.connect 26 | kafka-connect-parent 27 | 3.3.1-1 28 | 29 | kafka-connect-transform-common 30 | 0.1.0-SNAPSHOT 31 | kafka-connect-transform-common 32 | https://github.com/jcustenborder/kafka-connect-transform-common 33 | 2017 34 | Common transformations for Kafka Connect. 
35 | 36 | 37 | Apache License 2.0 38 | https:/github.com/jcustenborder/kafka-connect-transform-common/LICENSE 39 | repo 40 | 41 | 42 | 43 | 44 | jcustenborder 45 | Jeremy Custenborder 46 | https://github.com/jcustenborder 47 | 48 | Committer 49 | 50 | 51 | 52 | 53 | scm:git:https://github.com/jcustenborder/kafka-connect-transform-common.git 54 | 55 | 56 | scm:git:git@github.com:jcustenborder/kafka-connect-transform-common.git 57 | 58 | https://github.com/jcustenborder/kafka-connect-transform-common 59 | 60 | 61 | github 62 | https://github.com/jcustenborder/kafka-connect-transform-common/issues 63 | 64 | 65 | 66 | org.apache.kafka 67 | connect-json 68 | 69 | 70 | com.github.jcustenborder.kafka.connect 71 | connect-utils-jackson 72 | 73 | 74 | org.reflections 75 | reflections 76 | 0.9.10 77 | test 78 | 79 | 80 | com.github.jcustenborder.kafka.connect 81 | connect-utils-testing-data 82 | ${connect-utils.version} 83 | test 84 | 85 | 86 | org.apache.ws.commons 87 | ws-commons-util 88 | 1.0.1 89 | 90 | 91 | xerces 92 | xercesImpl 93 | 2.12.1 94 | 95 | 96 | xml-apis 97 | xml-apis 98 | 1.4.01 99 | 100 | 101 | 102 | 103 | 104 | io.confluent 105 | kafka-connect-maven-plugin 106 | 107 | true 108 | https://jcustenborder.github.io/kafka-connect-documentation/ 109 | 110 | 111 | transform 112 | 113 | 114 | Common 115 | Transformation 116 | 117 | Kafka Connect Common Transformations 118 | ${pom.issueManagement.url} 119 | Support provided through community involvement. 120 | 121 | 122 | 123 | 124 | 125 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/BytesToString.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
/**
 * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jcustenborder.kafka.connect.transform.common;

import com.github.jcustenborder.kafka.connect.utils.config.Description;
import com.github.jcustenborder.kafka.connect.utils.config.DocumentationTip;
import com.github.jcustenborder.kafka.connect.utils.config.Title;
import com.google.common.base.Strings;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

/**
 * Transformation that decodes a byte-array key/value — or selected byte-array
 * fields of a struct key/value — into strings using a configurable charset.
 * The concrete {@link Key} and {@link Value} subclasses choose which side of
 * the record is processed.
 */
public abstract class BytesToString<R extends ConnectRecord<R>> extends BaseTransformation<R> {
  private static final Logger log = LoggerFactory.getLogger(BytesToString.class);

  @Override
  public ConfigDef config() {
    return BytesToStringConfig.config();
  }

  BytesToStringConfig config;

  @Override
  public void configure(Map<String, ?> settings) {
    this.config = new BytesToStringConfig(settings);
  }

  @Override
  public void close() {
    // No resources to release.
  }

  /**
   * Decodes the default value of a BYTES schema into a string.
   *
   * <p>The previous implementation cast the default straight to {@code String},
   * which threw a {@code ClassCastException} whenever a default was actually
   * present, because a BYTES schema's default is a byte array (or ByteBuffer).
   *
   * @param schema the input (BYTES) schema whose default should be decoded.
   * @return the decoded default, or null when the schema has no default.
   */
  private String decodeDefault(Schema schema) {
    final Object defaultValue = schema.defaultValue();
    if (defaultValue == null) {
      return null;
    }
    if (defaultValue instanceof byte[]) {
      return new String((byte[]) defaultValue, this.config.charset);
    }
    if (defaultValue instanceof ByteBuffer) {
      // Read through a slice so the source buffer's position is untouched.
      final ByteBuffer slice = ((ByteBuffer) defaultValue).slice();
      final byte[] bytes = new byte[slice.remaining()];
      slice.get(bytes);
      return new String(bytes, this.config.charset);
    }
    // NOTE(review): a BYTES default should be byte[] or ByteBuffer; fall back to
    // toString() rather than failing outright — confirm this case can occur.
    return defaultValue.toString();
  }

  @Override
  protected SchemaAndValue processBytes(R record, Schema inputSchema, byte[] input) {
    final Schema outputSchema = inputSchema.isOptional() ?
        Schema.OPTIONAL_STRING_SCHEMA : Schema.STRING_SCHEMA;
    // When the payload is null, fall back to the schema default — decoded with
    // the configured charset rather than cast (see decodeDefault).
    final String output = input != null
        ? new String(input, this.config.charset)
        : decodeDefault(inputSchema);
    return new SchemaAndValue(outputSchema, output);
  }

  // Cache of input schema -> rebuilt output schema; avoids rebuilding on every record.
  Map<Schema, Schema> schemaCache = new HashMap<>();

  @Override
  protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct input) {
    final Schema schema = this.schemaCache.computeIfAbsent(inputSchema, s -> {
      final SchemaBuilder builder = SchemaBuilder.struct();
      if (!Strings.isNullOrEmpty(inputSchema.name())) {
        builder.name(inputSchema.name());
      }
      if (inputSchema.isOptional()) {
        builder.optional();
      }

      for (Field field : inputSchema.fields()) {
        log.trace("processStruct() - processing '{}'", field.name());
        final Schema fieldSchema;
        if (this.config.fields.contains(field.name())) {
          // Configured fields switch from BYTES to STRING, preserving optionality.
          fieldSchema = field.schema().isOptional() ?
              Schema.OPTIONAL_STRING_SCHEMA :
              Schema.STRING_SCHEMA;
        } else {
          fieldSchema = field.schema();
        }
        builder.field(field.name(), fieldSchema);
      }
      return builder.build();
    });

    Struct struct = new Struct(schema);
    for (Field field : schema.fields()) {
      if (this.config.fields.contains(field.name())) {
        byte[] buffer = input.getBytes(field.name());
        // Decode the INPUT schema's default, not the rebuilt STRING schema's:
        // the rebuilt schema never carries a default, so the old code silently
        // substituted null for fields that did declare one.
        struct.put(
            field.name(),
            buffer != null
                ? new String(buffer, this.config.charset)
                : decodeDefault(inputSchema.field(field.name()).schema())
        );
      } else {
        struct.put(field.name(), input.get(field.name()));
      }
    }
    return new SchemaAndValue(schema, struct);
  }

  @Title("BytesToString(Key)")
  @Description("This transformation is used to convert a byte array to a string.")
  @DocumentationTip("This transformation is used to manipulate fields in the Key of the record.")
  public static class Key<R extends ConnectRecord<R>> extends BytesToString<R> {

    @Override
    public R apply(R r) {
      final SchemaAndValue transformed = process(r, r.keySchema(), r.key());

      return r.newRecord(
          r.topic(),
          r.kafkaPartition(),
          transformed.schema(),
          transformed.value(),
          r.valueSchema(),
          r.value(),
          r.timestamp()
      );
    }
  }

  @Title("BytesToString(Value)")
  @Description("This transformation is used to convert a byte array to a string.")
  public static class Value<R extends ConnectRecord<R>> extends BytesToString<R> {
    @Override
    public R apply(R r) {
      final SchemaAndValue transformed = process(r, r.valueSchema(), r.value());

      return r.newRecord(
          r.topic(),
          r.kafkaPartition(),
          r.keySchema(),
          r.key(),
          transformed.schema(),
          transformed.value(),
          r.timestamp()
      );
    }
  }
}
/**
 * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jcustenborder.kafka.connect.transform.common;

import com.github.jcustenborder.kafka.connect.utils.config.ConfigKeyBuilder;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;

import java.nio.charset.Charset;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Configuration for the {@code BytesToString} transformations: the charset used
 * to decode bytes and, for struct inputs, the set of field names to convert.
 */
public class BytesToStringConfig extends AbstractConfig {
  public static final String CHARSET_CONFIG = "charset";
  public static final String CHARSET_DOC = "The charset to use when creating the output string.";

  public static final String FIELD_CONFIG = "fields";
  public static final String FIELD_DOC = "The fields to transform.";

  // Charset used to decode byte arrays into strings.
  public final Charset charset;
  // Struct field names that should be converted from bytes to string.
  public final Set<String> fields;

  public BytesToStringConfig(Map<String, ?> settings) {
    super(config(), settings);
    this.charset = Charset.forName(getString(CHARSET_CONFIG));
    this.fields = new HashSet<>(getList(FIELD_CONFIG));
  }

  public static ConfigDef config() {
    ConfigDef definition = new ConfigDef();
    definition.define(
        ConfigKeyBuilder.of(CHARSET_CONFIG, ConfigDef.Type.STRING)
            .documentation(CHARSET_DOC)
            .defaultValue("UTF-8")
            .importance(ConfigDef.Importance.HIGH)
            .build()
    );
    definition.define(
        ConfigKeyBuilder.of(FIELD_CONFIG, ConfigDef.Type.LIST)
            .documentation(FIELD_DOC)
            .defaultValue(Collections.emptyList())
            .importance(ConfigDef.Importance.HIGH)
            .build()
    );
    return definition;
  }
}
/**
 * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jcustenborder.kafka.connect.transform.common;

import com.github.jcustenborder.kafka.connect.utils.config.ConfigUtils;
import com.github.jcustenborder.kafka.connect.utils.config.ValidEnum;
import com.google.common.base.CaseFormat;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;

import java.util.Map;

/**
 * Configuration for the {@code ChangeCase} transformation: the Guava
 * {@link CaseFormat} of the incoming field names and the format to convert them to.
 */
class ChangeCaseConfig extends AbstractConfig {
  // Source case format of the field names.
  public final CaseFormat from;
  // Target case format of the field names.
  public final CaseFormat to;

  // Shared description of the allowed CaseFormat constants, mirroring
  // ChangeTopicCaseConfig so the generated documentation is consistent.
  static final String CONSTANTS = "`LOWER_CAMEL` = Java variable naming convention, e.g., \"lowerCamel\". " +
      "`LOWER_HYPHEN` = Hyphenated variable naming convention, e.g., \"lower-hyphen\". " +
      "`LOWER_UNDERSCORE` = C++ variable naming convention, e.g., \"lower_underscore\". " +
      "`UPPER_CAMEL` = Java and C++ class naming convention, e.g., \"UpperCamel\". " +
      "`UPPER_UNDERSCORE` = Java and C++ constant naming convention, e.g., \"UPPER_UNDERSCORE\".";

  public static final String FROM_CONFIG = "from";
  // Previously "The format to move from " — vague, with a trailing space.
  static final String FROM_DOC = "The format of the incoming field names. " + CONSTANTS;
  public static final String TO_CONFIG = "to";
  // Previously the empty string, which produced blank generated documentation.
  static final String TO_DOC = "The format to change the field names to. " + CONSTANTS;

  public ChangeCaseConfig(Map<String, ?> originals) {
    super(config(), originals);
    this.from = ConfigUtils.getEnum(CaseFormat.class, this, FROM_CONFIG);
    this.to = ConfigUtils.getEnum(CaseFormat.class, this, TO_CONFIG);
  }

  public static ConfigDef config() {
    return new ConfigDef()
        .define(FROM_CONFIG, ConfigDef.Type.STRING, ConfigDef.NO_DEFAULT_VALUE, ValidEnum.of(CaseFormat.class), ConfigDef.Importance.HIGH, FROM_DOC)
        .define(TO_CONFIG, ConfigDef.Type.STRING, ConfigDef.NO_DEFAULT_VALUE, ValidEnum.of(CaseFormat.class), ConfigDef.Importance.HIGH, TO_DOC);
  }
}
/**
 * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jcustenborder.kafka.connect.transform.common;

import com.github.jcustenborder.kafka.connect.utils.config.Description;
import com.github.jcustenborder.kafka.connect.utils.config.DocumentationTip;
import com.github.jcustenborder.kafka.connect.utils.config.Title;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.transforms.Transformation;

import java.util.Map;

@Title("ChangeTopicCase")
@Description("This transformation is used to change the case of a topic.")
@DocumentationTip("This transformation will convert a topic name like 'TOPIC_NAME' to `topicName`, " +
    "or `topic_name`.")
public class ChangeTopicCase<R extends ConnectRecord<R>> implements Transformation<R> {
  ChangeTopicCaseConfig config;

  @Override
  public ConfigDef config() {
    return ChangeTopicCaseConfig.config();
  }

  @Override
  public void configure(Map<String, ?> settings) {
    this.config = new ChangeTopicCaseConfig(settings);
  }

  @Override
  public R apply(R record) {
    // Only the topic name is rewritten; every other part of the record passes
    // through untouched. CaseFormat.to performs the from -> to conversion.
    final String renamedTopic = this.config.from.to(this.config.to, record.topic());

    return record.newRecord(
        renamedTopic,
        record.kafkaPartition(),
        record.keySchema(),
        record.key(),
        record.valueSchema(),
        record.value(),
        record.timestamp()
    );
  }

  @Override
  public void close() {
    // No resources to release.
  }
}
/**
 * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jcustenborder.kafka.connect.transform.common;

import com.github.jcustenborder.kafka.connect.utils.config.ConfigUtils;
import com.github.jcustenborder.kafka.connect.utils.config.ValidEnum;
import com.google.common.base.CaseFormat;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;

import java.util.Map;

/**
 * Configuration for the {@code ChangeTopicCase} transformation: the Guava
 * {@link CaseFormat} of the incoming topic name and the format to convert it to.
 */
class ChangeTopicCaseConfig extends AbstractConfig {
  // Human-readable list of the allowed CaseFormat constants, appended to both
  // option descriptions in the generated documentation.
  static final String CONSTANTS =
      "`LOWER_CAMEL` = Java variable naming convention, e.g., \"lowerCamel\". " +
          "`LOWER_HYPHEN` = Hyphenated variable naming convention, e.g., \"lower-hyphen\". " +
          "`LOWER_UNDERSCORE` = C++ variable naming convention, e.g., \"lower_underscore\". " +
          "`UPPER_CAMEL` = Java and C++ class naming convention, e.g., \"UpperCamel\". " +
          "`UPPER_UNDERSCORE` = Java and C++ constant naming convention, e.g., \"UPPER_UNDERSCORE\".";

  public static final String FROM_CONFIG = "from";
  static final String FROM_DOC = "The format of the incoming topic name. " + CONSTANTS;
  public static final String TO_CONFIG = "to";
  static final String TO_DOC = "The format of the outgoing topic name. " + CONSTANTS;

  // Source case format of the topic name.
  public final CaseFormat from;
  // Target case format of the topic name.
  public final CaseFormat to;

  public ChangeTopicCaseConfig(Map<String, ?> originals) {
    super(config(), originals);
    this.from = ConfigUtils.getEnum(CaseFormat.class, this, FROM_CONFIG);
    this.to = ConfigUtils.getEnum(CaseFormat.class, this, TO_CONFIG);
  }

  public static ConfigDef config() {
    ConfigDef definition = new ConfigDef();
    definition.define(FROM_CONFIG, ConfigDef.Type.STRING, ConfigDef.NO_DEFAULT_VALUE, ValidEnum.of(CaseFormat.class), ConfigDef.Importance.HIGH, FROM_DOC);
    definition.define(TO_CONFIG, ConfigDef.Type.STRING, ConfigDef.NO_DEFAULT_VALUE, ValidEnum.of(CaseFormat.class), ConfigDef.Importance.HIGH, TO_DOC);
    return definition;
  }
}
/**
 * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jcustenborder.kafka.connect.transform.common;

import com.fasterxml.jackson.databind.SerializationFeature;
import com.github.jcustenborder.kafka.connect.utils.jackson.ObjectMapperFactory;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.Transformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Diagnostic pass-through transformation: logs each record's metadata, key and
 * value as pretty-printed JSON at INFO level, then returns the record unchanged.
 * Logging failures are caught so this transform can never break a pipeline.
 */
public class Debug<R extends ConnectRecord<R>> implements Transformation<R> {
  private static final Logger log = LoggerFactory.getLogger(Debug.class);

  // Record metadata common to every ConnectRecord.
  void addConnectRecord(Map<String, Object> debugContent, ConnectRecord<R> r) {
    debugContent.put("topic", r.topic());
    debugContent.put("kafkaPartition", r.kafkaPartition());
    debugContent.put("timestamp", r.timestamp());
  }

  // Fields only present on records flowing into a sink connector.
  void addSinkRecord(Map<String, Object> debugContent, SinkRecord r) {
    debugContent.put("timestampType", r.timestampType());
    debugContent.put("kafkaOffset", r.kafkaOffset());
  }

  // Fields only present on records produced by a source connector.
  void addSourceRecord(Map<String, Object> debugContent, SourceRecord r) {
    debugContent.put("sourcePartition", r.sourcePartition());
    debugContent.put("sourceOffset", r.sourceOffset());
  }

  void addKey(Map<String, Object> debugContent, R record) {
    debugContent.put("key", record.key());
  }

  void addValue(Map<String, Object> debugContent, R record) {
    debugContent.put("value", record.value());
  }

  @Override
  public R apply(R r) {
    try {
      final Map<String, Object> debugContent = new LinkedHashMap<>();
      addConnectRecord(debugContent, r);
      if (r instanceof SinkRecord) {
        addSinkRecord(debugContent, (SinkRecord) r);
      } else if (r instanceof SourceRecord) {
        addSourceRecord(debugContent, (SourceRecord) r);
      }
      addKey(debugContent, r);
      addValue(debugContent, r);
      log.info("\n{}", ObjectMapperFactory.INSTANCE.writeValueAsString(debugContent));
    } catch (Exception ex) {
      // Best-effort logging only: never let a debug failure break the pipeline.
      log.error("Exception while generating debug content.", ex);
    }
    return r;
  }

  @Override
  public ConfigDef config() {
    return new ConfigDef();
  }

  @Override
  public void close() {
    // No resources to release.
  }

  @Override
  public void configure(Map<String, ?> settings) {
    // Pretty-print the JSON so the log output is readable.
    ObjectMapperFactory.INSTANCE.configure(SerializationFeature.INDENT_OUTPUT, true);
  }
}
/**
 * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jcustenborder.kafka.connect.transform.common;

import com.github.jcustenborder.kafka.connect.utils.config.Description;
import com.github.jcustenborder.kafka.connect.utils.config.DocumentationTip;
import com.github.jcustenborder.kafka.connect.utils.config.Title;
import com.google.common.base.Strings;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.errors.DataException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * Transformation that copies a field from a nested (child) struct onto the
 * parent struct under a configurable output field name. The concrete
 * {@link Key} and {@link Value} subclasses choose which side of the record
 * is processed.
 */
public abstract class ExtractNestedField<R extends ConnectRecord<R>> extends BaseTransformation<R> {
  private static final Logger log = LoggerFactory.getLogger(ExtractNestedField.class);

  @Override
  public ConfigDef config() {
    return ExtractNestedFieldConfig.config();
  }

  @Override
  public void close() {
    // No resources to release.
  }

  ExtractNestedFieldConfig config;
  // Cache of input schema -> output schema (input fields + appended output field).
  Map<Schema, Schema> schemaCache;

  @Override
  public void configure(Map<String, ?> map) {
    this.config = new ExtractNestedFieldConfig(map);
    this.schemaCache = new HashMap<>();
  }

  @Override
  protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct input) {
    final Struct innerStruct = input.getStruct(this.config.outerFieldName);
    // Fail with an explicit message instead of the bare NullPointerException the
    // previous implementation threw deep inside the schema-cache lambda.
    if (innerStruct == null) {
      throw new DataException(
          String.format(
              "Field '%s' is null or missing; cannot extract '%s'.",
              this.config.outerFieldName,
              this.config.innerFieldName
          )
      );
    }
    final Schema outputSchema = this.schemaCache.computeIfAbsent(inputSchema, s -> {
      final Field innerField = innerStruct.schema().field(this.config.innerFieldName);
      if (innerField == null) {
        throw new DataException(
            String.format(
                "Field '%s' was not found on struct field '%s'.",
                this.config.innerFieldName,
                this.config.outerFieldName
            )
        );
      }
      final SchemaBuilder builder = SchemaBuilder.struct();
      if (!Strings.isNullOrEmpty(inputSchema.name())) {
        builder.name(inputSchema.name());
      }
      if (inputSchema.isOptional()) {
        builder.optional();
      }
      // Copy every input field, then append the extracted field at the end.
      for (Field inputField : inputSchema.fields()) {
        builder.field(inputField.name(), inputField.schema());
      }
      builder.field(this.config.outputFieldName, innerField.schema());
      return builder.build();
    });

    final Struct outputStruct = new Struct(outputSchema);
    for (Field inputField : inputSchema.fields()) {
      final Object value = input.get(inputField);
      outputStruct.put(inputField.name(), value);
    }
    final Object innerFieldValue = innerStruct.get(this.config.innerFieldName);
    outputStruct.put(this.config.outputFieldName, innerFieldValue);

    return new SchemaAndValue(outputSchema, outputStruct);
  }

  @Title("ExtractNestedField(Key)")
  @Description("This transformation is used to extract a field from a nested struct and append it " +
      "to the parent struct.")
  @DocumentationTip("This transformation is used to manipulate fields in the Key of the record.")
  public static class Key<R extends ConnectRecord<R>> extends ExtractNestedField<R> {

    @Override
    public R apply(R r) {
      final SchemaAndValue transformed = process(r, r.keySchema(), r.key());

      return r.newRecord(
          r.topic(),
          r.kafkaPartition(),
          transformed.schema(),
          transformed.value(),
          r.valueSchema(),
          r.value(),
          r.timestamp()
      );
    }
  }

  @Title("ExtractNestedField(Value)")
  @Description("This transformation is used to extract a field from a nested struct and append it " +
      "to the parent struct.")
  public static class Value<R extends ConnectRecord<R>> extends ExtractNestedField<R> {

    @Override
    public R apply(R r) {
      final SchemaAndValue transformed = process(r, r.valueSchema(), r.value());

      return r.newRecord(
          r.topic(),
          r.kafkaPartition(),
          r.keySchema(),
          r.key(),
          transformed.schema(),
          transformed.value(),
          r.timestamp()
      );
    }
  }
}
/**
 * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.jcustenborder.kafka.connect.transform.common;

import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;

import java.util.Map;

/**
 * Configuration for the {@code ExtractNestedField} transformations: the outer
 * (parent) struct field, the inner (child) field to extract, and the name of
 * the field the extracted value is written to on the parent struct.
 */
public class ExtractNestedFieldConfig extends AbstractConfig {
  // Name of the struct field on the parent that contains the child struct.
  public final String outerFieldName;
  // Name of the field on the child struct to extract.
  public final String innerFieldName;
  // Name of the field on the parent struct to place the extracted value into.
  public final String outputFieldName;

  public ExtractNestedFieldConfig(Map<String, ?> settings) {
    super(config(), settings);
    this.outerFieldName = getString(OUTER_FIELD_NAME_CONF);
    this.innerFieldName = getString(INNER_FIELD_NAME_CONF);
    this.outputFieldName = getString(OUTPUT_FIELD_NAME_CONF);
  }

  public static final String OUTER_FIELD_NAME_CONF = "input.outer.field.name";
  // Typo fixed: "wanted the extract" -> "wanted to extract".
  static final String OUTER_FIELD_NAME_DOC = "The field on the parent struct containing the child struct. " +
      "For example if you wanted to extract `address.state` you would use `address`.";
  public static final String INNER_FIELD_NAME_CONF = "input.inner.field.name";
  // Typo fixed: "wanted the extract" -> "wanted to extract".
  static final String INNER_FIELD_NAME_DOC = "The field on the child struct containing the field to be extracted. " +
      "For example if you wanted to extract `address.state` you would use `state`.";
  public static final String OUTPUT_FIELD_NAME_CONF = "output.field.name";
  static final String OUTPUT_FIELD_NAME_DOC = "The field to place the extracted value into.";

  public static ConfigDef config() {
    return new ConfigDef()
        .define(OUTER_FIELD_NAME_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, OUTER_FIELD_NAME_DOC)
        .define(INNER_FIELD_NAME_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, INNER_FIELD_NAME_DOC)
        .define(OUTPUT_FIELD_NAME_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, OUTPUT_FIELD_NAME_DOC);
  }
}
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 20 | import com.google.common.base.Preconditions; 21 | import org.apache.kafka.common.config.ConfigDef; 22 | import org.apache.kafka.connect.connector.ConnectRecord; 23 | import org.apache.kafka.connect.data.Field; 24 | import org.apache.kafka.connect.data.Schema; 25 | import org.apache.kafka.connect.data.SchemaAndValue; 26 | import org.apache.kafka.connect.data.Struct; 27 | import org.apache.kafka.connect.data.Timestamp; 28 | import org.apache.kafka.connect.errors.DataException; 29 | import org.apache.kafka.connect.transforms.Transformation; 30 | import org.slf4j.Logger; 31 | import org.slf4j.LoggerFactory; 32 | 33 | import java.util.Date; 34 | import java.util.Map; 35 | 36 | @Title("ExtractTimestamp") 37 | @Description("This transformation is used to use a field from the input data to override the timestamp for the record.") 38 | public abstract class ExtractTimestamp> implements Transformation { 39 | private static final Logger log = LoggerFactory.getLogger(ExtractTimestamp.class); 40 | public ExtractTimestampConfig config; 41 | 42 | protected long process(SchemaAndValue schemaAndValue) { 43 | final long result; 44 | if (schemaAndValue.value() instanceof Struct) { 45 | result = processStruct(schemaAndValue); 46 | } else if (schemaAndValue.value() instanceof Map) { 47 | result = processMap(schemaAndValue); 48 | } else { 49 | throw new UnsupportedOperationException(); 50 | } 51 | return result; 52 | } 53 | 54 | private long processMap(SchemaAndValue schemaAndValue) { 55 | Preconditions.checkState(schemaAndValue.value() instanceof Map, "value must be a map."); 56 | final Map input = (Map) schemaAndValue.value(); 57 | final Object inputValue = input.get(this.config.fieldName); 58 | final long result; 59 | 60 | if (inputValue instanceof 
Date) { 61 | final Date inputDate = (Date) inputValue; 62 | result = inputDate.getTime(); 63 | } else if (inputValue instanceof Long) { 64 | result = (long) inputValue; 65 | } else if (null == inputValue) { 66 | throw new DataException( 67 | String.format("Field '%s' cannot be null.", this.config.fieldName) 68 | ); 69 | } else { 70 | throw new DataException( 71 | String.format("Cannot convert %s to timestamp.", inputValue.getClass().getName()) 72 | ); 73 | } 74 | 75 | return result; 76 | } 77 | 78 | private long processStruct(SchemaAndValue schemaAndValue) { 79 | final Struct inputStruct = (Struct) schemaAndValue.value(); 80 | final Field inputField = schemaAndValue.schema().field(this.config.fieldName); 81 | 82 | if (null == inputField) { 83 | throw new DataException( 84 | String.format("Schema does not have field '{}'", this.config.fieldName) 85 | ); 86 | } 87 | 88 | final Schema fieldSchema = inputField.schema(); 89 | final long result; 90 | if (Schema.Type.INT64 == fieldSchema.type()) { 91 | final Object fieldValue = inputStruct.get(inputField); 92 | 93 | if (null == fieldValue) { 94 | throw new DataException( 95 | String.format("Field '%s' cannot be null.", this.config.fieldName) 96 | ); 97 | } 98 | 99 | if (Timestamp.LOGICAL_NAME.equals(fieldSchema.name())) { 100 | final Date date = (Date) fieldValue; 101 | result = date.getTime(); 102 | } else { 103 | final long timestamp = (long) fieldValue; 104 | result = timestamp; 105 | } 106 | } else { 107 | throw new DataException( 108 | String.format("Schema '{}' is not supported.", inputField.schema()) 109 | ); 110 | } 111 | 112 | return result; 113 | } 114 | 115 | 116 | @Override 117 | public ConfigDef config() { 118 | return ExtractTimestampConfig.config(); 119 | } 120 | 121 | @Override 122 | public void close() { 123 | 124 | } 125 | 126 | @Override 127 | public void configure(Map settings) { 128 | this.config = new ExtractTimestampConfig(settings); 129 | } 130 | 131 | 132 | public static class Key> extends 
ExtractTimestamp { 133 | 134 | @Override 135 | public R apply(R r) { 136 | final long timestamp = process(new SchemaAndValue(r.valueSchema(), r.value())); 137 | return r.newRecord( 138 | r.topic(), 139 | r.kafkaPartition(), 140 | r.keySchema(), 141 | r.key(), 142 | r.valueSchema(), 143 | r.value(), 144 | timestamp 145 | ); 146 | } 147 | } 148 | 149 | public static class Value> extends ExtractTimestamp { 150 | 151 | @Override 152 | public R apply(R r) { 153 | final long timestamp = process(new SchemaAndValue(r.valueSchema(), r.value())); 154 | return r.newRecord( 155 | r.topic(), 156 | r.kafkaPartition(), 157 | r.keySchema(), 158 | r.key(), 159 | r.valueSchema(), 160 | r.value(), 161 | timestamp 162 | ); 163 | } 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/ExtractTimestampConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import org.apache.kafka.common.config.AbstractConfig; 19 | import org.apache.kafka.common.config.ConfigDef; 20 | 21 | import java.util.Map; 22 | 23 | class ExtractTimestampConfig extends AbstractConfig { 24 | public final String fieldName; 25 | 26 | public static final String FIELD_NAME_CONFIG = "field.name"; 27 | public static final String FIELD_NAME_DOC = "The field to pull the timestamp from. This must be an int64 or a timestamp."; 28 | 29 | public ExtractTimestampConfig(Map originals) { 30 | super(config(), originals); 31 | this.fieldName = getString(FIELD_NAME_CONFIG); 32 | } 33 | 34 | public static ConfigDef config() { 35 | return new ConfigDef() 36 | .define(FIELD_NAME_CONFIG, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, FIELD_NAME_DOC); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/ExtractXPathConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import org.apache.kafka.common.config.AbstractConfig; 19 | import org.apache.kafka.common.config.ConfigDef; 20 | 21 | import java.util.List; 22 | import java.util.ArrayList; 23 | import java.util.Map; 24 | import java.util.Arrays; 25 | 26 | import org.slf4j.Logger; 27 | import org.slf4j.LoggerFactory; 28 | 29 | public class ExtractXPathConfig extends AbstractConfig { 30 | private static final Logger log = LoggerFactory.getLogger(ExtractXPathConfig.class); 31 | 32 | public final String inputField; 33 | public final String outputField; 34 | public final String xpath; 35 | public final boolean namespaceAware; 36 | public final List prefixes; 37 | public final List namespaces; 38 | 39 | public static final String IN_FIELD_CONFIG = "input.field"; 40 | public static final String IN_FIELD_DOC = "The input field containing the XML Document."; 41 | public static final String OUT_FIELD_CONFIG = "output.field"; 42 | public static final String OUT_FIELD_DOC = "The output field where the XML element matching the XPath will be placed."; 43 | public static final String NS_PREFIX_CONFIG = "ns.prefix"; 44 | public static final String NS_PREFIX_DOC = "A comma separated list of Namespace prefixes"; 45 | public static final String NS_LIST_CONFIG = "ns.namespace"; 46 | public static final String NS_LIST_DOC = "A comma separated list of Namespaces corresponding to the prefixes"; 47 | public static final String XPATH_CONFIG = "xpath"; 48 | public static final String XPATH_DOC = "The XPath to apply to extract an element from the Document"; 49 | 50 | 51 | 52 | public ExtractXPathConfig(Map settings) { 53 | super(config(), settings); 54 | this.inputField = getString(IN_FIELD_CONFIG); 55 | this.outputField = getString(OUT_FIELD_CONFIG); 56 | this.xpath = getString(XPATH_CONFIG); 57 | String prefixString = getString(NS_PREFIX_CONFIG); 58 | String namespaceString = getString(NS_LIST_CONFIG); 59 | if 
(prefixString == null || prefixString.trim().length() == 0) { 60 | this.namespaceAware = false; 61 | prefixes = new ArrayList(); 62 | namespaces = new ArrayList(); 63 | } else { 64 | this.namespaceAware = true; 65 | prefixes = Arrays.asList(prefixString.split(",")); 66 | namespaces = Arrays.asList(namespaceString.split(",")); 67 | if (namespaces.size() != prefixes.size()) { 68 | log.warn("The list of namespaces and corresponding prefixes are not the same length."); 69 | } 70 | } 71 | } 72 | 73 | public static ConfigDef config() { 74 | return new ConfigDef() 75 | .define(IN_FIELD_CONFIG, ConfigDef.Type.STRING, null, ConfigDef.Importance.HIGH, IN_FIELD_DOC) 76 | .define(OUT_FIELD_CONFIG, ConfigDef.Type.STRING, null, ConfigDef.Importance.HIGH, OUT_FIELD_DOC) 77 | .define(XPATH_CONFIG, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, XPATH_DOC) 78 | .define(NS_LIST_CONFIG, ConfigDef.Type.STRING, null, ConfigDef.Importance.LOW, NS_LIST_DOC) 79 | .define(NS_PREFIX_CONFIG, ConfigDef.Type.STRING, null, ConfigDef.Importance.LOW, NS_PREFIX_DOC); 80 | } 81 | 82 | } 83 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/HeaderToField.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 20 | import com.github.jcustenborder.kafka.connect.utils.data.SchemaBuilders; 21 | import com.github.jcustenborder.kafka.connect.utils.transformation.BaseKeyValueTransformation; 22 | import org.apache.kafka.common.config.ConfigDef; 23 | import org.apache.kafka.connect.connector.ConnectRecord; 24 | import org.apache.kafka.connect.data.Field; 25 | import org.apache.kafka.connect.data.Schema; 26 | import org.apache.kafka.connect.data.SchemaAndValue; 27 | import org.apache.kafka.connect.data.SchemaBuilder; 28 | import org.apache.kafka.connect.data.Struct; 29 | import org.slf4j.Logger; 30 | import org.slf4j.LoggerFactory; 31 | 32 | import java.util.ArrayList; 33 | import java.util.HashMap; 34 | import java.util.List; 35 | import java.util.Map; 36 | 37 | @Title("HeaderToField") 38 | @Description("This transformation is used to copy the value of a header to a field in the key or " + 39 | "value of the record.") 40 | public class HeaderToField> extends BaseKeyValueTransformation { 41 | private static final Logger log = LoggerFactory.getLogger(HeaderToField.class); 42 | 43 | HeaderToFieldConfig config; 44 | 45 | protected HeaderToField(boolean isKey) { 46 | super(isKey); 47 | } 48 | 49 | @Override 50 | public ConfigDef config() { 51 | return HeaderToFieldConfig.config(); 52 | } 53 | 54 | 55 | static class Conversion { 56 | public final Schema newSchema; 57 | public final List conversionHandlers; 58 | 59 | private Conversion(Schema newSchema, List conversionHandlers) { 60 | this.newSchema = newSchema; 61 | this.conversionHandlers = conversionHandlers; 62 | } 63 | 64 | public SchemaAndValue apply(ConnectRecord record, Struct input) { 65 | 
Struct result = new Struct(this.newSchema); 66 | for (Field field : input.schema().fields()) { 67 | String fieldName = field.name(); 68 | Object fieldValue = input.get(field); 69 | result.put(fieldName, fieldValue); 70 | } 71 | for (ConversionHandler handler : this.conversionHandlers) { 72 | handler.convert(record, result); 73 | } 74 | return new SchemaAndValue(this.newSchema, result); 75 | } 76 | 77 | public static Conversion of(Schema newSchema, List conversionHandlers) { 78 | return new Conversion(newSchema, conversionHandlers); 79 | } 80 | } 81 | 82 | 83 | Map schemaCache = new HashMap<>(); 84 | 85 | 86 | Conversion conversion(Schema schema) { 87 | return this.schemaCache.computeIfAbsent(schema, s -> { 88 | log.info("conversion() - Building new schema for {}", schema); 89 | 90 | SchemaBuilder builder = SchemaBuilders.of(schema); 91 | List handlers = new ArrayList<>(this.config.mappings.size()); 92 | for (HeaderToFieldConfig.HeaderToFieldMapping mapping : this.config.mappings) { 93 | log.trace("conversion() - adding field '{}' with schema {}", mapping.field, mapping.schema); 94 | builder.field(mapping.field, mapping.schema); 95 | ConversionHandler handler = ConversionHandler.of(mapping.schema, mapping.header, mapping.field); 96 | handlers.add(handler); 97 | } 98 | Schema newSchema = builder.build(); 99 | return Conversion.of(newSchema, handlers); 100 | }); 101 | } 102 | 103 | 104 | @Override 105 | protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct input) { 106 | Conversion conversion = conversion(inputSchema); 107 | return conversion.apply(record, input); 108 | } 109 | 110 | @Override 111 | public void close() { 112 | 113 | } 114 | 115 | @Override 116 | public void configure(Map map) { 117 | this.config = new HeaderToFieldConfig(map); 118 | } 119 | 120 | public static class Key> extends HeaderToField { 121 | public Key() { 122 | super(true); 123 | } 124 | } 125 | 126 | public static class Value> extends HeaderToField { 127 | public 
Value() { 128 | super(false); 129 | } 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/LowerCaseTopic.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 20 | import org.apache.kafka.common.config.ConfigDef; 21 | import org.apache.kafka.common.utils.SystemTime; 22 | import org.apache.kafka.common.utils.Time; 23 | import org.apache.kafka.connect.connector.ConnectRecord; 24 | import org.apache.kafka.connect.transforms.Transformation; 25 | 26 | import java.util.Map; 27 | 28 | @Title("LowerCaseTopic") 29 | @Description("This transformation is used to change a topic name to be all lower case.") 30 | public class LowerCaseTopic> implements Transformation { 31 | Time time = SystemTime.SYSTEM; 32 | 33 | @Override 34 | public R apply(R record) { 35 | return record.newRecord( 36 | record.topic().toLowerCase(), 37 | record.kafkaPartition(), 38 | record.keySchema(), 39 | record.key(), 40 | record.valueSchema(), 41 | record.value(), 42 
| record.timestamp(), 43 | record.headers() 44 | ); 45 | } 46 | 47 | @Override 48 | public ConfigDef config() { 49 | return new ConfigDef(); 50 | } 51 | 52 | @Override 53 | public void close() { 54 | 55 | } 56 | 57 | @Override 58 | public void configure(Map map) { 59 | 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/PatternFilter.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.DocumentationTip; 20 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 21 | import org.apache.kafka.common.config.ConfigDef; 22 | import org.apache.kafka.connect.connector.ConnectRecord; 23 | import org.apache.kafka.connect.data.Field; 24 | import org.apache.kafka.connect.data.Schema; 25 | import org.apache.kafka.connect.data.SchemaAndValue; 26 | import org.apache.kafka.connect.data.Struct; 27 | import org.apache.kafka.connect.transforms.Transformation; 28 | import org.slf4j.Logger; 29 | import org.slf4j.LoggerFactory; 30 | 31 | import java.util.Map; 32 | import java.util.regex.Matcher; 33 | 34 | public abstract class PatternFilter> implements Transformation { 35 | private static final Logger log = LoggerFactory.getLogger(PatternFilter.class); 36 | 37 | @Override 38 | public ConfigDef config() { 39 | return PatternFilterConfig.config(); 40 | } 41 | 42 | PatternFilterConfig config; 43 | 44 | @Override 45 | public void configure(Map settings) { 46 | this.config = new PatternFilterConfig(settings); 47 | } 48 | 49 | @Override 50 | public void close() { 51 | 52 | } 53 | 54 | R filter(R record, Struct struct) { 55 | for (Field field : struct.schema().fields()) { 56 | if (this.config.fields.contains(field.name())) { 57 | if (field.schema().type() == Schema.Type.STRING) { 58 | String input = struct.getString(field.name()); 59 | if (null != input) { 60 | Matcher matcher = this.config.pattern.matcher(input); 61 | if (matcher.matches()) { 62 | return null; 63 | } 64 | } 65 | } 66 | } 67 | } 68 | return record; 69 | } 70 | 71 | R filter(R record, Map map) { 72 | for (Object field : map.keySet()) { 73 | if (this.config.fields.contains(field)) { 74 | Object value = map.get(field); 75 | 76 | if (value instanceof String) { 77 | String input = 
(String) value; 78 | Matcher matcher = this.config.pattern.matcher(input); 79 | if (matcher.matches()) { 80 | return null; 81 | } 82 | } 83 | } 84 | } 85 | 86 | return record; 87 | } 88 | 89 | 90 | R filter(R record, final boolean key) { 91 | final SchemaAndValue input = key ? 92 | new SchemaAndValue(record.keySchema(), record.key()) : 93 | new SchemaAndValue(record.valueSchema(), record.value()); 94 | final R result; 95 | if (input.schema() != null) { 96 | if (Schema.Type.STRUCT == input.schema().type()) { 97 | result = filter(record, (Struct) input.value()); 98 | } else if (Schema.Type.MAP == input.schema().type()) { 99 | result = filter(record, (Map) input.value()); 100 | } else { 101 | result = record; 102 | } 103 | } else if (input.value() instanceof Map) { 104 | result = filter(record, (Map) input.value()); 105 | } else { 106 | result = record; 107 | } 108 | 109 | return result; 110 | } 111 | 112 | @Title("PatternFilter(Key)") 113 | @Description("This transformation is used to filter records based on a regular expression.") 114 | @DocumentationTip("This transformation is used to filter records based on fields in the Key of the record.") 115 | public static class Key> extends PatternFilter { 116 | @Override 117 | public R apply(R r) { 118 | return filter(r, true); 119 | } 120 | } 121 | 122 | @Title("PatternFilter(Value)") 123 | @Description("This transformation is used to filter records based on a regular expression.") 124 | @DocumentationTip("This transformation is used to filter records based on fields in the Value of the record.") 125 | public static class Value> extends PatternFilter { 126 | @Override 127 | public R apply(R r) { 128 | return filter(r, false); 129 | } 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/PatternFilterConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * 
Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.ConfigKeyBuilder; 19 | import com.github.jcustenborder.kafka.connect.utils.config.ConfigUtils; 20 | import com.github.jcustenborder.kafka.connect.utils.config.validators.Validators; 21 | import org.apache.kafka.common.config.AbstractConfig; 22 | import org.apache.kafka.common.config.ConfigDef; 23 | 24 | import java.util.Collections; 25 | import java.util.HashSet; 26 | import java.util.List; 27 | import java.util.Map; 28 | import java.util.Set; 29 | import java.util.regex.Pattern; 30 | 31 | public class PatternFilterConfig extends AbstractConfig { 32 | public final Pattern pattern; 33 | public final Set fields; 34 | 35 | public static final String PATTERN_CONFIG = "pattern"; 36 | public static final String PATTERN_DOC = "The regex to test the message with. 
"; 37 | 38 | public static final String FIELD_CONFIG = "fields"; 39 | public static final String FIELD_DOC = "The fields to transform."; 40 | 41 | 42 | public PatternFilterConfig(Map settings) { 43 | super(config(), settings); 44 | this.pattern = ConfigUtils.pattern(this, PATTERN_CONFIG); 45 | List fields = getList(FIELD_CONFIG); 46 | this.fields = new HashSet<>(fields); 47 | } 48 | 49 | public static ConfigDef config() { 50 | return new ConfigDef() 51 | .define( 52 | ConfigKeyBuilder.of(PATTERN_CONFIG, ConfigDef.Type.STRING) 53 | .documentation(PATTERN_DOC) 54 | .importance(ConfigDef.Importance.HIGH) 55 | .validator(Validators.pattern()) 56 | .build() 57 | ).define( 58 | ConfigKeyBuilder.of(FIELD_CONFIG, ConfigDef.Type.LIST) 59 | .documentation(FIELD_DOC) 60 | .defaultValue(Collections.emptyList()) 61 | .importance(ConfigDef.Importance.HIGH) 62 | .build() 63 | ); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/PatternMapStringConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.google.common.collect.ImmutableMap; 19 | import org.apache.kafka.common.config.AbstractConfig; 20 | import org.apache.kafka.common.config.ConfigDef; 21 | 22 | import java.util.Arrays; 23 | import java.util.HashMap; 24 | import java.util.List; 25 | import java.util.Map; 26 | import java.util.regex.Pattern; 27 | 28 | class PatternMapStringConfig extends AbstractConfig { 29 | 30 | public static final String SRC_FIELD_NAME_CONF = "src.field.name"; 31 | static final String SRC_FIELD_NAME_DOC = "The name of the input field"; 32 | 33 | public static final String DEST_FIELD_NAME_CONF = "dest.field.name"; 34 | static final String DEST_FIELD_NAME_DOC = "The name of the output field, set to the same as 'src.field.name' to replace a field value"; 35 | 36 | public static final String VALUE_PATTERN_CONF = "value.pattern"; 37 | static final String VALUE_PATTERN_DOC = "RegEx pattern which will be replaced"; 38 | 39 | public static final String VALUE_PATTERN_FLAGS_CONF = "value.pattern.flags"; 40 | static final String VALUE_PATTERN_FLAGS_DOC = ""; 41 | 42 | public static final String VALUE_REPLACEMENT_CONF = "value.replacement"; 43 | static final String VALUE_REPLACEMENT_DOC = "RegEx to generate output for each match"; 44 | 45 | 46 | public final Pattern pattern; 47 | public final String replacement; 48 | public final String srcfieldname; 49 | public final String destfieldname; 50 | 51 | public PatternMapStringConfig(Map parsedConfig) { 52 | super(config(), parsedConfig); 53 | final String pattern = getString(VALUE_PATTERN_CONF); 54 | final List flagList = getList(VALUE_PATTERN_FLAGS_CONF); 55 | int patternFlags = 0; 56 | for (final String f : flagList) { 57 | final int flag = FLAG_VALUES.get(f); 58 | patternFlags = patternFlags | flag; 59 | } 60 | this.pattern = Pattern.compile(pattern, patternFlags); 61 | this.replacement = getString(VALUE_REPLACEMENT_CONF); 62 | this.srcfieldname = 
getString(SRC_FIELD_NAME_CONF); 63 | this.destfieldname = getString(DEST_FIELD_NAME_CONF); 64 | } 65 | 66 | static final Map FLAG_VALUES; 67 | 68 | static { 69 | Map map = new HashMap<>(); 70 | map.put("UNICODE_CHARACTER_CLASS", 0x100); 71 | map.put("CANON_EQ", 0x80); 72 | map.put("UNICODE_CASE", 0x40); 73 | map.put("DOTALL", 0x20); 74 | map.put("LITERAL", 0x10); 75 | map.put("MULTILINE", 0x08); 76 | map.put("COMMENTS", 0x04); 77 | map.put("CASE_INSENSITIVE", 0x02); 78 | map.put("UNIX_LINES", 0x01); 79 | FLAG_VALUES = ImmutableMap.copyOf(map); 80 | } 81 | 82 | public static ConfigDef config() { 83 | 84 | 85 | return new ConfigDef() 86 | .define(VALUE_PATTERN_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, VALUE_PATTERN_DOC) 87 | .define(VALUE_PATTERN_FLAGS_CONF, ConfigDef.Type.LIST, Arrays.asList("CASE_INSENSITIVE"), ConfigDef.ValidList.in("UNICODE_CHARACTER_CLASS", "CANON_EQ", "UNICODE_CASE", "DOTALL", "LITERAL", "MULTILINE", "COMMENTS", "CASE_INSENSITIVE", "UNIX_LINES"), ConfigDef.Importance.LOW, VALUE_PATTERN_FLAGS_DOC) 88 | .define(VALUE_REPLACEMENT_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, VALUE_REPLACEMENT_DOC) 89 | .define(SRC_FIELD_NAME_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, SRC_FIELD_NAME_DOC) 90 | .define(DEST_FIELD_NAME_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, DEST_FIELD_NAME_DOC); 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/PatternRename.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.DocumentationTip; 20 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 21 | import org.apache.kafka.common.config.ConfigDef; 22 | import org.apache.kafka.connect.connector.ConnectRecord; 23 | import org.apache.kafka.connect.data.Field; 24 | import org.apache.kafka.connect.data.Schema; 25 | import org.apache.kafka.connect.data.SchemaAndValue; 26 | import org.apache.kafka.connect.data.SchemaBuilder; 27 | import org.apache.kafka.connect.data.Struct; 28 | import org.slf4j.Logger; 29 | import org.slf4j.LoggerFactory; 30 | 31 | import java.util.HashMap; 32 | import java.util.LinkedHashMap; 33 | import java.util.Map; 34 | import java.util.regex.Matcher; 35 | 36 | public abstract class PatternRename> extends BaseTransformation { 37 | private static final Logger log = LoggerFactory.getLogger(PatternRename.class); 38 | 39 | @Override 40 | public ConfigDef config() { 41 | return PatternRenameConfig.config(); 42 | } 43 | 44 | PatternRenameConfig config; 45 | 46 | @Override 47 | public void configure(Map settings) { 48 | this.config = new PatternRenameConfig(settings); 49 | } 50 | 51 | @Override 52 | public void close() { 53 | 54 | } 55 | 56 | @Override 57 | protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct inputStruct) { 58 | final SchemaBuilder outputSchemaBuilder = 
SchemaBuilder.struct(); 59 | outputSchemaBuilder.name(inputSchema.name()); 60 | outputSchemaBuilder.doc(inputSchema.doc()); 61 | if (null != inputSchema.defaultValue()) { 62 | outputSchemaBuilder.defaultValue(inputSchema.defaultValue()); 63 | } 64 | if (null != inputSchema.parameters() && !inputSchema.parameters().isEmpty()) { 65 | outputSchemaBuilder.parameters(inputSchema.parameters()); 66 | } 67 | if (inputSchema.isOptional()) { 68 | outputSchemaBuilder.optional(); 69 | } 70 | Map fieldMappings = new HashMap<>(inputSchema.fields().size()); 71 | for (final Field inputField : inputSchema.fields()) { 72 | log.trace("process() - Processing field '{}'", inputField.name()); 73 | final Matcher fieldMatcher = this.config.pattern.matcher(inputField.name()); 74 | final String outputFieldName; 75 | if (fieldMatcher.find()) { 76 | outputFieldName = fieldMatcher.replaceAll(this.config.replacement); 77 | } else { 78 | outputFieldName = inputField.name(); 79 | } 80 | log.trace("process() - Mapping field '{}' to '{}'", inputField.name(), outputFieldName); 81 | fieldMappings.put(inputField.name(), outputFieldName); 82 | outputSchemaBuilder.field(outputFieldName, inputField.schema()); 83 | } 84 | final Schema outputSchema = outputSchemaBuilder.build(); 85 | final Struct outputStruct = new Struct(outputSchema); 86 | for (Map.Entry entry : fieldMappings.entrySet()) { 87 | final String inputField = entry.getKey(), outputField = entry.getValue(); 88 | log.trace("process() - Copying '{}' to '{}'", inputField, outputField); 89 | final Object value = inputStruct.get(inputField); 90 | outputStruct.put(outputField, value); 91 | } 92 | return new SchemaAndValue(outputSchema, outputStruct); 93 | } 94 | 95 | @Override 96 | protected SchemaAndValue processMap(R record, Map input) { 97 | final Map outputMap = new LinkedHashMap<>(input.size()); 98 | 99 | for (final String inputFieldName : input.keySet()) { 100 | log.trace("process() - Processing field '{}'", inputFieldName); 101 | final Matcher 
fieldMatcher = this.config.pattern.matcher(inputFieldName); 102 | final String outputFieldName; 103 | if (fieldMatcher.find()) { 104 | outputFieldName = fieldMatcher.replaceAll(this.config.replacement); 105 | } else { 106 | outputFieldName = inputFieldName; 107 | } 108 | final Object value = input.get(inputFieldName); 109 | outputMap.put(outputFieldName, value); 110 | } 111 | return new SchemaAndValue(null, outputMap); 112 | } 113 | 114 | @Title("PatternRename(Key)") 115 | @Description("This transformation is used to rename fields in the key of an input struct based on a regular expression and a replacement string.") 116 | @DocumentationTip("This transformation is used to manipulate fields in the Key of the record.") 117 | public static class Key> extends PatternRename { 118 | 119 | @Override 120 | public R apply(R r) { 121 | final SchemaAndValue transformed = process(r, r.keySchema(), r.key()); 122 | 123 | return r.newRecord( 124 | r.topic(), 125 | r.kafkaPartition(), 126 | transformed.schema(), 127 | transformed.value(), 128 | r.valueSchema(), 129 | r.value(), 130 | r.timestamp() 131 | ); 132 | } 133 | } 134 | 135 | @Title("PatternRename(Value)") 136 | @Description("This transformation is used to rename fields in the value of an input struct based on a regular expression and a replacement string.") 137 | public static class Value> extends PatternRename { 138 | @Override 139 | public R apply(R r) { 140 | final SchemaAndValue transformed = process(r, r.valueSchema(), r.value()); 141 | 142 | return r.newRecord( 143 | r.topic(), 144 | r.kafkaPartition(), 145 | r.keySchema(), 146 | r.key(), 147 | transformed.schema(), 148 | transformed.value(), 149 | r.timestamp() 150 | ); 151 | } 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/PatternRenameConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | 
* Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.google.common.collect.ImmutableMap; 19 | import org.apache.kafka.common.config.AbstractConfig; 20 | import org.apache.kafka.common.config.ConfigDef; 21 | 22 | import java.util.Arrays; 23 | import java.util.HashMap; 24 | import java.util.List; 25 | import java.util.Map; 26 | import java.util.regex.Pattern; 27 | 28 | class PatternRenameConfig extends AbstractConfig { 29 | 30 | public static final String FIELD_PATTERN_CONF = "field.pattern"; 31 | static final String FIELD_PATTERN_DOC = ""; 32 | 33 | public static final String FIELD_PATTERN_FLAGS_CONF = "field.pattern.flags"; 34 | static final String FIELD_PATTERN_FLAGS_DOC = ""; 35 | 36 | public static final String FIELD_REPLACEMENT_CONF = "field.replacement"; 37 | static final String FIELD_REPLACEMENT_DOC = ""; 38 | 39 | 40 | public final Pattern pattern; 41 | public final String replacement; 42 | 43 | public PatternRenameConfig(Map parsedConfig) { 44 | super(config(), parsedConfig); 45 | final String pattern = getString(FIELD_PATTERN_CONF); 46 | final List flagList = getList(FIELD_PATTERN_FLAGS_CONF); 47 | int patternFlags = 0; 48 | for (final String f : flagList) { 49 | final int flag = FLAG_VALUES.get(f); 50 | patternFlags = patternFlags | flag; 51 | } 52 | this.pattern 
= Pattern.compile(pattern, patternFlags); 53 | this.replacement = getString(FIELD_REPLACEMENT_CONF); 54 | } 55 | 56 | static final Map FLAG_VALUES; 57 | 58 | static { 59 | Map map = new HashMap<>(); 60 | map.put("UNICODE_CHARACTER_CLASS", 0x100); 61 | map.put("CANON_EQ", 0x80); 62 | map.put("UNICODE_CASE", 0x40); 63 | map.put("DOTALL", 0x20); 64 | map.put("LITERAL", 0x10); 65 | map.put("MULTILINE", 0x08); 66 | map.put("COMMENTS", 0x04); 67 | map.put("CASE_INSENSITIVE", 0x02); 68 | map.put("UNIX_LINES", 0x01); 69 | FLAG_VALUES = ImmutableMap.copyOf(map); 70 | } 71 | 72 | public static ConfigDef config() { 73 | 74 | 75 | return new ConfigDef() 76 | .define(FIELD_PATTERN_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, FIELD_PATTERN_DOC) 77 | .define(FIELD_PATTERN_FLAGS_CONF, ConfigDef.Type.LIST, Arrays.asList("CASE_INSENSITIVE"), ConfigDef.ValidList.in("UNICODE_CHARACTER_CLASS", "CANON_EQ", "UNICODE_CASE", "DOTALL", "LITERAL", "MULTILINE", "COMMENTS", "CASE_INSENSITIVE", "UNIX_LINES"), ConfigDef.Importance.LOW, FIELD_PATTERN_FLAGS_DOC) 78 | .define(FIELD_REPLACEMENT_CONF, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, FIELD_REPLACEMENT_DOC); 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/SchemaNameToTopic.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2019 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
package com.github.jcustenborder.kafka.connect.transform.common;

import com.github.jcustenborder.kafka.connect.utils.config.Description;
import com.github.jcustenborder.kafka.connect.utils.config.Title;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.errors.DataException;
import org.apache.kafka.connect.transforms.Transformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;

@Title("SchemaNameToTopic")
@Description("This transformation is used to take the name from the schema for the key or value and" +
    " replace the topic with this value.")
public abstract class SchemaNameToTopic<R extends ConnectRecord<R>> implements Transformation<R> {
  private static final Logger log = LoggerFactory.getLogger(SchemaNameToTopic.class);

  @Override
  public ConfigDef config() {
    // No configuration options.
    return new ConfigDef();
  }

  @Override
  public void close() {
    // No resources to release.
  }

  @Override
  public void configure(Map<String, ?> map) {
    // Nothing to configure.
  }

  /**
   * Fails fast with a descriptive error instead of an NPE (or a null topic)
   * when the record cannot supply a schema name for the topic.
   *
   * @param schema the key or value schema of the record; may be null for schemaless data
   * @param side   "key" or "value", used only in the error message
   * @return the non-null schema name to use as the topic
   */
  static String requireSchemaName(org.apache.kafka.connect.data.Schema schema, String side) {
    if (null == schema || null == schema.name()) {
      throw new DataException(
          "The " + side + " schema and its name are required to set the topic. " +
              "Record has a schemaless or unnamed " + side + "."
      );
    }
    return schema.name();
  }

  public static class Key<R extends ConnectRecord<R>> extends SchemaNameToTopic<R> {
    @Override
    public R apply(R r) {

      return r.newRecord(
          requireSchemaName(r.keySchema(), "key"),
          r.kafkaPartition(),
          r.keySchema(),
          r.key(),
          r.valueSchema(),
          r.value(),
          r.timestamp(),
          r.headers()
      );
    }
  }


  public static class Value<R extends ConnectRecord<R>> extends SchemaNameToTopic<R> {
    @Override
    public R apply(R r) {

      return r.newRecord(
          requireSchemaName(r.valueSchema(), "value"),
          r.kafkaPartition(),
          r.keySchema(),
          r.key(),
          r.valueSchema(),
          r.value(),
          r.timestamp(),
          r.headers()
      );
    }
  }


}
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.ConfigKeyBuilder; 19 | import org.apache.kafka.common.config.AbstractConfig; 20 | import org.apache.kafka.common.config.ConfigDef; 21 | 22 | import java.util.Map; 23 | 24 | public class SetMaximumPrecisionConfig extends AbstractConfig { 25 | public final int maxPrecision; 26 | 27 | public SetMaximumPrecisionConfig(Map originals) { 28 | super(config(), originals); 29 | this.maxPrecision = getInt(MAX_PRECISION_CONFIG); 30 | } 31 | 32 | public static final String MAX_PRECISION_CONFIG = "precision.max"; 33 | static final String MAX_PRECISION_DOC = "The maximum precision allowed."; 34 | 35 | public static ConfigDef config() { 36 | return new ConfigDef() 37 | .define( 38 | ConfigKeyBuilder.of(MAX_PRECISION_CONFIG, ConfigDef.Type.INT) 39 | .documentation(MAX_PRECISION_DOC) 40 | .importance(ConfigDef.Importance.HIGH) 41 | .validator(ConfigDef.Range.between(1, 64)) 42 | .build() 43 | ); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/SetNull.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.DocumentationTip; 20 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 21 | import org.apache.kafka.common.config.ConfigDef; 22 | import org.apache.kafka.connect.connector.ConnectRecord; 23 | import org.slf4j.Logger; 24 | import org.slf4j.LoggerFactory; 25 | 26 | import java.util.Map; 27 | 28 | public abstract class SetNull> extends BaseTransformation { 29 | private static final Logger log = LoggerFactory.getLogger(SetNull.class); 30 | 31 | @Override 32 | public ConfigDef config() { 33 | return new ConfigDef(); 34 | } 35 | 36 | PatternRenameConfig config; 37 | 38 | @Override 39 | public void configure(Map settings) { 40 | 41 | 42 | } 43 | 44 | @Override 45 | public void close() { 46 | 47 | } 48 | 49 | @Title("SetNull(Key)") 50 | @Description("This transformation will will set the key and keySchema of a message to null.") 51 | @DocumentationTip("This transformation is used to manipulate the Key of the record.") 52 | public static class Key> extends SetNull { 53 | 54 | @Override 55 | public R apply(R r) { 56 | return r.newRecord( 57 | r.topic(), 58 | r.kafkaPartition(), 59 | null, 60 | null, 61 | r.valueSchema(), 62 | r.value(), 63 | r.timestamp() 64 | ); 65 | } 66 | } 67 | 68 | @Title("SetNull(Value)") 69 | @Description("This transformation will will set the value and valueSchema of a message to null.") 70 | @DocumentationTip("This transformation is used to manipulate the Value of the record.") 71 | public static class Value> extends SetNull { 72 | @Override 73 | public R apply(R r) { 74 | return r.newRecord( 75 | r.topic(), 76 | r.kafkaPartition(), 77 | r.keySchema(), 78 | r.key(), 79 | null, 80 | null, 81 | r.timestamp() 82 | ); 83 | } 84 | } 85 | } 86 | 
-------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/TimestampNow.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 20 | import org.apache.kafka.common.config.ConfigDef; 21 | import org.apache.kafka.common.utils.SystemTime; 22 | import org.apache.kafka.common.utils.Time; 23 | import org.apache.kafka.connect.connector.ConnectRecord; 24 | import org.apache.kafka.connect.transforms.Transformation; 25 | 26 | import java.util.Map; 27 | 28 | @Title("TimestampNow") 29 | @Description("This transformation is used to override the timestamp of the incoming record to the " + 30 | "time the record is being processed.") 31 | public class TimestampNow> implements Transformation { 32 | Time time = SystemTime.SYSTEM; 33 | 34 | @Override 35 | public R apply(R record) { 36 | return record.newRecord( 37 | record.topic(), 38 | record.kafkaPartition(), 39 | record.keySchema(), 40 | record.key(), 41 | record.valueSchema(), 42 | record.value(), 43 | time.milliseconds() 
44 | ); 45 | } 46 | 47 | @Override 48 | public ConfigDef config() { 49 | return new ConfigDef(); 50 | } 51 | 52 | @Override 53 | public void close() { 54 | 55 | } 56 | 57 | @Override 58 | public void configure(Map map) { 59 | 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/TimestampNowField.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 20 | import com.github.jcustenborder.kafka.connect.utils.data.SchemaBuilders; 21 | import com.github.jcustenborder.kafka.connect.utils.transformation.BaseKeyValueTransformation; 22 | import org.apache.kafka.common.config.ConfigDef; 23 | import org.apache.kafka.common.utils.Time; 24 | import org.apache.kafka.connect.connector.ConnectRecord; 25 | import org.apache.kafka.connect.data.Field; 26 | import org.apache.kafka.connect.data.Schema; 27 | import org.apache.kafka.connect.data.SchemaAndValue; 28 | import org.apache.kafka.connect.data.SchemaBuilder; 29 | import org.apache.kafka.connect.data.Struct; 30 | import org.apache.kafka.connect.data.Timestamp; 31 | 32 | import java.util.Collection; 33 | import java.util.Date; 34 | import java.util.HashMap; 35 | import java.util.LinkedHashMap; 36 | import java.util.Map; 37 | import java.util.stream.Collectors; 38 | 39 | @Title("TimestampNowField") 40 | @Description("This transformation is used to set a field with the current timestamp of the system running the " + 41 | "transformation.") 42 | public abstract class TimestampNowField> extends BaseKeyValueTransformation { 43 | private TimestampNowFieldConfig config; 44 | Time time = Time.SYSTEM; 45 | 46 | protected TimestampNowField(boolean isKey) { 47 | super(isKey); 48 | } 49 | 50 | public static class Key> extends TimestampNowField { 51 | public Key() { 52 | super(true); 53 | } 54 | } 55 | 56 | public static class Value> extends TimestampNowField { 57 | public Value() { 58 | super(false); 59 | } 60 | } 61 | 62 | @Override 63 | public void close() { 64 | 65 | } 66 | 67 | Map schemaCache = new HashMap<>(); 68 | 69 | static boolean isTimestampSchema(Schema schema) { 70 | return (Timestamp.SCHEMA.type() == schema.type() && 
Timestamp.SCHEMA.name().equals(schema.name())); 71 | } 72 | 73 | @Override 74 | protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct input) { 75 | Date timestamp = new Date(this.time.milliseconds()); 76 | 77 | Schema outputSchema = schemaCache.computeIfAbsent(inputSchema, schema -> { 78 | Collection replaceFields = schema.fields().stream() 79 | .filter(f -> this.config.fields.contains(f.name())) 80 | .filter(f -> !isTimestampSchema(f.schema())) 81 | .map(Field::name) 82 | .collect(Collectors.toList()); 83 | SchemaBuilder builder = SchemaBuilders.of(schema, replaceFields); 84 | this.config.fields.forEach(timestampField -> { 85 | Field existingField = builder.field(timestampField); 86 | if (null == existingField) { 87 | builder.field(timestampField, Timestamp.SCHEMA); 88 | } 89 | }); 90 | return builder.build(); 91 | }); 92 | 93 | Struct output = new Struct(outputSchema); 94 | inputSchema.fields().stream() 95 | .filter(f -> !this.config.fields.contains(f.name())) 96 | .forEach(f -> output.put(f.name(), input.get(f.name()))); 97 | this.config.fields.forEach(field -> output.put(field, timestamp)); 98 | return new SchemaAndValue(outputSchema, output); 99 | } 100 | 101 | @Override 102 | protected SchemaAndValue processMap(R record, Map input) { 103 | Map result = new LinkedHashMap<>(input); 104 | Date timestamp = new Date(this.time.milliseconds()); 105 | this.config.fields.forEach(field -> result.put(field, timestamp)); 106 | return new SchemaAndValue(null, result); 107 | } 108 | 109 | @Override 110 | public void configure(Map settings) { 111 | this.config = new TimestampNowFieldConfig(settings); 112 | } 113 | 114 | @Override 115 | public ConfigDef config() { 116 | return TimestampNowFieldConfig.config(); 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/TimestampNowFieldConfig.java: 
-------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.ConfigKeyBuilder; 19 | import com.github.jcustenborder.kafka.connect.utils.config.ConfigUtils; 20 | import org.apache.kafka.common.config.AbstractConfig; 21 | import org.apache.kafka.common.config.ConfigDef; 22 | 23 | import java.util.Map; 24 | import java.util.Set; 25 | 26 | class TimestampNowFieldConfig extends AbstractConfig { 27 | public static final String FIELDS_CONF = "fields"; 28 | public static final String FIELDS_DOC = "The field(s) that will be inserted with the timestamp of the system."; 29 | 30 | public final Set fields; 31 | 32 | public TimestampNowFieldConfig(Map originals) { 33 | super(config(), originals); 34 | this.fields = ConfigUtils.getSet(this, FIELDS_CONF); 35 | } 36 | 37 | public static ConfigDef config() { 38 | return new ConfigDef() 39 | .define( 40 | ConfigKeyBuilder.of(FIELDS_CONF, ConfigDef.Type.LIST) 41 | .documentation(FIELDS_DOC) 42 | .importance(ConfigDef.Importance.HIGH) 43 | .build() 44 | ); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- 
/**
 * Base transformation that serializes a structured key or value to JSON using
 * Kafka Connect's built-in JsonConverter. Struct and Map inputs are converted;
 * String and byte[] inputs pass through untouched. The output schema type
 * (STRING or BYTES) is chosen by {@link ToJSONConfig#OUTPUT_SCHEMA_CONFIG}.
 */
public abstract class ToJSON<R extends ConnectRecord<R>> extends BaseTransformation<R> {
  private static final Logger log = LoggerFactory.getLogger(ToJSON.class);


  ToJSONConfig config;

  @Override
  public ConfigDef config() {
    return ToJSONConfig.config();
  }

  @Override
  public void close() {
    // No resources to release.
  }

  // Converter reused across records; configured once in configure().
  JsonConverter converter = new JsonConverter();

  @Override
  public void configure(Map<String, ?> settings) {
    this.config = new ToJSONConfig(settings);
    // Clone so the caller's settings map is not mutated, then force the
    // converter's schemas.enable to the transformation's own setting.
    Map<String, Object> settingsClone = new LinkedHashMap<>(settings);
    settingsClone.put(ToJSONConfig.SCHEMAS_ENABLE_CONFIG, this.config.schemasEnable);
    // Second argument is the converter's isKey flag — false here; presumably
    // key/value distinction does not affect the JSON produced. TODO confirm.
    this.converter.configure(settingsClone, false);
  }

  // Strings are already serialized text: pass through unchanged.
  @Override
  protected SchemaAndValue processString(R record, Schema inputSchema, String input) {
    return new SchemaAndValue(inputSchema, input);
  }

  // Bytes are already serialized: pass through unchanged.
  @Override
  protected SchemaAndValue processBytes(R record, Schema inputSchema, byte[] input) {
    return new SchemaAndValue(inputSchema, input);
  }

  /**
   * Serializes {@code input} to JSON bytes via the converter, then wraps the
   * result as either a UTF-8 String or raw bytes depending on the configured
   * output schema type.
   *
   * @param inputSchema schema of the input, or null for schemaless maps
   * @param input       the Struct or Map to serialize
   * @return the JSON payload with an optional STRING or BYTES schema
   */
  SchemaAndValue schemaAndValue(Schema inputSchema, Object input) {
    // Topic name is irrelevant to JsonConverter's output; "dummy" satisfies the API.
    final byte[] buffer = this.converter.fromConnectData("dummy", inputSchema, input);
    final Schema schema;
    final Object value;

    switch (this.config.outputSchema) {
      case STRING:
        value = new String(buffer, Charsets.UTF_8);
        schema = Schema.OPTIONAL_STRING_SCHEMA;
        break;
      case BYTES:
        value = buffer;
        schema = Schema.OPTIONAL_BYTES_SCHEMA;
        break;
      default:
        // Config validation restricts outputSchema to STRING/BYTES, so this is
        // a defensive guard against future config changes.
        throw new UnsupportedOperationException(
            String.format(
                "Schema type (%s)'%s' is not supported.",
                ToJSONConfig.OUTPUT_SCHEMA_CONFIG,
                this.config.outputSchema
            )
        );
    }

    return new SchemaAndValue(schema, value);
  }

  @Override
  protected SchemaAndValue processMap(R record, Map<String, Object> input) {
    // Schemaless input: the converter is handed a null schema.
    return schemaAndValue(null, input);
  }

  @Override
  protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct input) {
    return schemaAndValue(inputSchema, input);
  }

  @Title("ToJson(Key)")
  @Description("This transformation is used to take structured data such as AVRO and output it as " +
      "JSON by way of the JsonConverter built into Kafka Connect.")
  @DocumentationTip("This transformation is used to manipulate fields in the Key of the record.")
  public static class Key<R extends ConnectRecord<R>> extends ToJSON<R> {

    @Override
    public R apply(R r) {
      final SchemaAndValue transformed = process(r, r.keySchema(), r.key());

      return r.newRecord(
          r.topic(),
          r.kafkaPartition(),
          transformed.schema(),
          transformed.value(),
          r.valueSchema(),
          r.value(),
          r.timestamp()
      );
    }
  }

  @Title("ToJson(Value)")
  @Description("This transformation is used to take structured data such as AVRO and output it as " +
      "JSON by way of the JsonConverter built into Kafka Connect.")
  public static class Value<R extends ConnectRecord<R>> extends ToJSON<R> {

    @Override
    public R apply(R r) {
      final SchemaAndValue transformed = process(r, r.valueSchema(), r.value());

      return r.newRecord(
          r.topic(),
          r.kafkaPartition(),
          r.keySchema(),
          r.key(),
          transformed.schema(),
          transformed.value(),
          r.timestamp()
      );
    }
  }

}
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.ConfigKeyBuilder; 19 | import com.github.jcustenborder.kafka.connect.utils.config.ConfigUtils; 20 | import org.apache.kafka.common.config.AbstractConfig; 21 | import org.apache.kafka.common.config.ConfigDef; 22 | import org.apache.kafka.connect.data.Schema; 23 | 24 | import java.util.Map; 25 | 26 | public class ToJSONConfig extends AbstractConfig { 27 | public final Schema.Type outputSchema; 28 | public final boolean schemasEnable; 29 | 30 | public static final String OUTPUT_SCHEMA_CONFIG = "output.schema.type"; 31 | public static final String OUTPUT_SCHEMA_DOC = "The connect schema type to output the converted JSON as."; 32 | public static final String SCHEMAS_ENABLE_CONFIG = "schemas.enable"; 33 | public static final String SCHEMAS_ENABLE_DOC = "Flag to determine if the JSON data should include the schema."; 34 | 35 | 36 | public ToJSONConfig(Map settings) { 37 | super(config(), settings); 38 | this.outputSchema = ConfigUtils.getEnum(Schema.Type.class, this, OUTPUT_SCHEMA_CONFIG); 39 | this.schemasEnable = getBoolean(SCHEMAS_ENABLE_CONFIG); 40 | } 41 | 42 | public static ConfigDef config() { 43 | return new ConfigDef() 44 | .define( 45 | ConfigKeyBuilder.of(OUTPUT_SCHEMA_CONFIG, ConfigDef.Type.STRING) 46 | .documentation(OUTPUT_SCHEMA_DOC) 47 | .defaultValue(Schema.Type.STRING.toString()) 48 | .validator( 49 | ConfigDef.ValidString.in( 50 | Schema.Type.STRING.toString(), 51 | Schema.Type.BYTES.toString() 52 | ) 53 | ) 54 | .importance(ConfigDef.Importance.MEDIUM) 55 | .build() 56 | ).define( 57 | ConfigKeyBuilder.of(SCHEMAS_ENABLE_CONFIG, ConfigDef.Type.BOOLEAN) 58 | .documentation(SCHEMAS_ENABLE_DOC) 59 | .defaultValue(false) 60 | .importance(ConfigDef.Importance.MEDIUM) 61 | .build() 62 | ); 63 | } 64 | 65 
| } 66 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/ToLong.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.DocumentationTip; 20 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 21 | import com.google.common.base.Strings; 22 | import org.apache.kafka.common.config.ConfigDef; 23 | import org.apache.kafka.connect.connector.ConnectRecord; 24 | import org.apache.kafka.connect.data.Field; 25 | import org.apache.kafka.connect.data.Schema; 26 | import org.apache.kafka.connect.data.SchemaAndValue; 27 | import org.apache.kafka.connect.data.SchemaBuilder; 28 | import org.apache.kafka.connect.data.Struct; 29 | import org.apache.kafka.connect.errors.DataException; 30 | import org.slf4j.Logger; 31 | import org.slf4j.LoggerFactory; 32 | 33 | import java.util.HashMap; 34 | import java.util.Map; 35 | 36 | public abstract class ToLong> extends BaseTransformation { 37 | private static final Logger log = LoggerFactory.getLogger(ToLong.class); 
38 | 39 | @Override 40 | public ConfigDef config() { 41 | return ToLongConfig.config(); 42 | } 43 | 44 | ToLongConfig config; 45 | 46 | @Override 47 | public void configure(Map settings) { 48 | this.config = new ToLongConfig(settings); 49 | } 50 | 51 | @Override 52 | public void close() { 53 | 54 | } 55 | 56 | Map schemaCache = new HashMap<>(); 57 | 58 | @Override 59 | protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct input) { 60 | final Schema schema = this.schemaCache.computeIfAbsent(inputSchema, s -> { 61 | final SchemaBuilder builder = SchemaBuilder.struct(); 62 | if (!Strings.isNullOrEmpty(inputSchema.name())) { 63 | builder.name(inputSchema.name()); 64 | } 65 | if (inputSchema.isOptional()) { 66 | builder.optional(); 67 | } 68 | 69 | for (Field field : inputSchema.fields()) { 70 | log.trace("processStruct() - processing '{}'", field.name()); 71 | final Schema fieldSchema; 72 | if (this.config.fields.contains(field.name())) { 73 | fieldSchema = field.schema().isOptional() ? 74 | Schema.OPTIONAL_INT64_SCHEMA : 75 | Schema.INT64_SCHEMA; 76 | } else { 77 | fieldSchema = field.schema(); 78 | } 79 | builder.field(field.name(), fieldSchema); 80 | } 81 | return builder.build(); 82 | }); 83 | 84 | Struct struct = new Struct(schema); 85 | for (Field field : schema.fields()) { 86 | final Object o = input.get(field.name()); 87 | if (this.config.fields.contains(field.name())) { 88 | if (null == o) { 89 | struct.put(field, null); 90 | } else if (o instanceof Number) { 91 | struct.put(field, ((Number) o).longValue()); 92 | } else { 93 | throw new DataException( 94 | String.format( 95 | "Field '%s' is not a number. 
%s", 96 | field.name(), 97 | o.getClass().getSimpleName() 98 | ) 99 | ); 100 | } 101 | } else { 102 | struct.put(field, o); 103 | } 104 | } 105 | return new SchemaAndValue(schema, struct); 106 | } 107 | 108 | @Title("ToLong(Key)") 109 | @Description("This transformation is used to convert a number to a long") 110 | @DocumentationTip("This transformation is used to manipulate fields in the Key of the record.") 111 | public static class Key> extends ToLong { 112 | 113 | @Override 114 | public R apply(R r) { 115 | final SchemaAndValue transformed = process(r, r.keySchema(), r.key()); 116 | 117 | return r.newRecord( 118 | r.topic(), 119 | r.kafkaPartition(), 120 | transformed.schema(), 121 | transformed.value(), 122 | r.valueSchema(), 123 | r.value(), 124 | r.timestamp() 125 | ); 126 | } 127 | } 128 | 129 | @Title("ToLong(Value)") 130 | @Description("This transformation is used to convert a number to a long") 131 | public static class Value> extends ToLong { 132 | @Override 133 | public R apply(R r) { 134 | final SchemaAndValue transformed = process(r, r.valueSchema(), r.value()); 135 | 136 | return r.newRecord( 137 | r.topic(), 138 | r.kafkaPartition(), 139 | r.keySchema(), 140 | r.key(), 141 | transformed.schema(), 142 | transformed.value(), 143 | r.timestamp() 144 | ); 145 | } 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/ToLongConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.ConfigKeyBuilder; 19 | import org.apache.kafka.common.config.AbstractConfig; 20 | import org.apache.kafka.common.config.ConfigDef; 21 | 22 | import java.util.Collections; 23 | import java.util.HashSet; 24 | import java.util.List; 25 | import java.util.Map; 26 | import java.util.Set; 27 | 28 | public class ToLongConfig extends AbstractConfig { 29 | public final Set fields; 30 | 31 | public static final String FIELD_CONFIG = "fields"; 32 | public static final String FIELD_DOC = "The fields to transform."; 33 | 34 | 35 | public ToLongConfig(Map settings) { 36 | super(config(), settings); 37 | List fields = getList(FIELD_CONFIG); 38 | this.fields = new HashSet<>(fields); 39 | } 40 | 41 | public static ConfigDef config() { 42 | return new ConfigDef() 43 | .define( 44 | ConfigKeyBuilder.of(FIELD_CONFIG, ConfigDef.Type.LIST) 45 | .documentation(FIELD_DOC) 46 | .defaultValue(Collections.emptyList()) 47 | .importance(ConfigDef.Importance.HIGH) 48 | .build() 49 | ); 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/TopicNameToField.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 
5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.Description; 19 | import com.github.jcustenborder.kafka.connect.utils.config.DocumentationTip; 20 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 21 | import org.apache.kafka.common.config.ConfigDef; 22 | import org.apache.kafka.connect.connector.ConnectRecord; 23 | import org.apache.kafka.connect.data.Field; 24 | import org.apache.kafka.connect.data.Schema; 25 | import org.apache.kafka.connect.data.SchemaAndValue; 26 | import org.apache.kafka.connect.data.SchemaBuilder; 27 | import org.apache.kafka.connect.data.Struct; 28 | 29 | import java.util.HashMap; 30 | import java.util.Map; 31 | 32 | public abstract class TopicNameToField> extends BaseTransformation { 33 | 34 | @Override 35 | public ConfigDef config() { 36 | return TopicNameToFieldConfig.config(); 37 | } 38 | 39 | @Override 40 | public void close() { 41 | 42 | } 43 | 44 | TopicNameToFieldConfig config; 45 | Schema schema; 46 | 47 | @Override 48 | public void configure(Map settings) { 49 | this.config = new TopicNameToFieldConfig(settings); 50 | this.schema = SchemaBuilder.string().doc("Topic name"); 51 | } 52 | 53 | @Override 54 | protected SchemaAndValue processMap(R record, Map input) { 55 | input.put(this.config.field, record.topic()); 56 | return new SchemaAndValue(null, input); 57 | } 58 | 59 | Map schemaLookup = new HashMap<>(); 
60 | 61 | @Override 62 | protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct input) { 63 | final Schema schema = this.schemaLookup.computeIfAbsent(inputSchema, s -> { 64 | SchemaBuilder builder = SchemaBuilder.struct() 65 | .name(inputSchema.name()) 66 | .doc(inputSchema.doc()) 67 | .version(inputSchema.version()); 68 | if (null != inputSchema.defaultValue()) { 69 | builder.defaultValue(inputSchema.defaultValue()); 70 | } 71 | if (null != inputSchema.parameters() && !inputSchema.parameters().isEmpty()) { 72 | builder.parameters(inputSchema.parameters()); 73 | } 74 | 75 | if (inputSchema.isOptional()) { 76 | builder.optional(); 77 | } 78 | for (Field field : inputSchema.fields()) { 79 | builder.field(field.name(), field.schema()); 80 | } 81 | builder.field(config.field, this.schema); 82 | return builder.build(); 83 | }); 84 | Struct struct = new Struct(schema); 85 | for (Field field : input.schema().fields()) { 86 | Object value = input.get(field.name()); 87 | struct.put(field.name(), value); 88 | } 89 | struct.put(this.config.field, record.topic()); 90 | return new SchemaAndValue(schema, struct); 91 | } 92 | 93 | @Title("TopicNameToField(Key)") 94 | @Description("This transformation is used to add the topic as a field.") 95 | @DocumentationTip("This transformation is used to manipulate fields in the Key of the record.") 96 | public static class Key> extends TopicNameToField { 97 | 98 | @Override 99 | public R apply(R r) { 100 | final SchemaAndValue transformed = process(r, r.keySchema(), r.key()); 101 | 102 | return r.newRecord( 103 | r.topic(), 104 | r.kafkaPartition(), 105 | transformed.schema(), 106 | transformed.value(), 107 | r.valueSchema(), 108 | r.value(), 109 | r.timestamp() 110 | ); 111 | } 112 | } 113 | 114 | @Title("TopicNameToField(Value)") 115 | @Description("This transformation is used to add the topic as a field.") 116 | public static class Value> extends TopicNameToField { 117 | @Override 118 | public R apply(R r) { 119 | 
final SchemaAndValue transformed = process(r, r.valueSchema(), r.value()); 120 | 121 | return r.newRecord( 122 | r.topic(), 123 | r.kafkaPartition(), 124 | r.keySchema(), 125 | r.key(), 126 | transformed.schema(), 127 | transformed.value(), 128 | r.timestamp() 129 | ); 130 | } 131 | } 132 | 133 | } 134 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/TopicNameToFieldConfig.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.config.ConfigKeyBuilder; 19 | import org.apache.kafka.common.config.AbstractConfig; 20 | import org.apache.kafka.common.config.ConfigDef; 21 | 22 | import java.util.Map; 23 | 24 | public class TopicNameToFieldConfig extends AbstractConfig { 25 | public final String field; 26 | 27 | public static final String FIELD_CONFIG = "field"; 28 | public static final String FIELD_DOC = "The field to insert the topic name."; 29 | 30 | 31 | public TopicNameToFieldConfig(Map settings) { 32 | super(config(), settings); 33 | this.field = getString(FIELD_CONFIG); 34 | } 35 | 36 | public static ConfigDef config() { 37 | return new ConfigDef() 38 | .define( 39 | ConfigKeyBuilder.of(FIELD_CONFIG, ConfigDef.Type.STRING) 40 | .documentation(FIELD_DOC) 41 | .importance(ConfigDef.Importance.HIGH) 42 | .build() 43 | ); 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /src/main/java/com/github/jcustenborder/kafka/connect/transform/common/package-info.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | @Introduction("\n" + 17 | "This project contains common transformations for every day use cases with Kafka Connect.") 18 | @Title("Common Transformations") 19 | @PluginName("kafka-connect-transform-common") 20 | @PluginOwner("jcustenborder") 21 | package com.github.jcustenborder.kafka.connect.transform.common; 22 | 23 | import com.github.jcustenborder.kafka.connect.utils.config.Introduction; 24 | import com.github.jcustenborder.kafka.connect.utils.config.Title; 25 | import com.github.jcustenborder.kafka.connect.utils.config.PluginName; 26 | import com.github.jcustenborder.kafka.connect.utils.config.PluginOwner; -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/AdjustPrecisionAndScaleTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.google.common.collect.ImmutableMap; 4 | import org.apache.kafka.connect.data.Decimal; 5 | import org.apache.kafka.connect.data.Schema; 6 | import org.apache.kafka.connect.data.SchemaBuilder; 7 | import org.apache.kafka.connect.data.Struct; 8 | import org.apache.kafka.connect.sink.SinkRecord; 9 | import org.junit.jupiter.api.Test; 10 | 11 | import java.math.BigDecimal; 12 | 13 | import static com.github.jcustenborder.kafka.connect.utils.AssertStruct.assertStruct; 14 | import static org.junit.jupiter.api.Assertions.assertNotNull; 15 | 16 | public class AdjustPrecisionAndScaleTest { 17 | SinkRecord record(Struct struct) { 18 | return new SinkRecord("test", 1, null, null, struct.schema(), struct, 1234L); 19 | } 20 | 21 | @Test 22 | public void noop() { 23 | Schema schema = SchemaBuilder.struct() 24 | .field("first", Schema.STRING_SCHEMA) 25 | .field("last", Schema.STRING_SCHEMA) 26 | .field("email", Schema.STRING_SCHEMA) 27 | .build(); 28 | Struct struct = new Struct(schema) 29 | .put("first", "test") 
30 | .put("last", "user") 31 | .put("email", "none@none.com"); 32 | SinkRecord record = record(struct); 33 | AdjustPrecisionAndScale.Value transform = new AdjustPrecisionAndScale.Value<>(); 34 | transform.configure( 35 | ImmutableMap.of(AdjustPrecisionAndScaleConfig.PRECISION_VALUE_CONFIG, 32) 36 | ); 37 | SinkRecord actual = transform.apply(record); 38 | assertNotNull(actual); 39 | assertStruct((Struct) record.value(), (Struct) actual.value()); 40 | } 41 | 42 | @Test 43 | public void convert() { 44 | final Schema inputSchema = SchemaBuilder.struct() 45 | .field("first", Decimal.schema(5)) 46 | .field( 47 | "second", 48 | Decimal.builder(5) 49 | .parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "0") 50 | .parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_SCALE_PROP, "127") 51 | .optional() 52 | .build() 53 | ) 54 | .field( 55 | "third", 56 | Decimal.builder(5) 57 | .parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "48") 58 | .parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_SCALE_PROP, "16") 59 | .optional() 60 | .build() 61 | ) 62 | .build(); 63 | final Struct inputStruct = new Struct(inputSchema) 64 | .put("first", BigDecimal.ONE) 65 | .put("second", null) 66 | .put("third", BigDecimal.ONE); 67 | final Schema expectedSchema = SchemaBuilder.struct() 68 | .field( 69 | "first", 70 | Decimal.builder(5) 71 | .parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "38") 72 | .parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_SCALE_PROP, "20") 73 | .build() 74 | ) 75 | .field( 76 | "second", 77 | Decimal.builder(5) 78 | .parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "38") 79 | .parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_SCALE_PROP, "20") 80 | .optional() 81 | .build() 82 | ) 83 | .field( 84 | "third", 85 | Decimal.builder(5) 86 | .parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "38") 87 | 
.parameter(AdjustPrecisionAndScale.CONNECT_AVRO_DECIMAL_SCALE_PROP, "16") 88 | .optional() 89 | .build() 90 | ) 91 | .build(); 92 | final Struct expectedStruct = new Struct(expectedSchema) 93 | .put("first", new BigDecimal("1.00000000000000000000")) 94 | .put("second", null) 95 | .put("third", new BigDecimal("1.0000000000000000")); 96 | 97 | 98 | SinkRecord record = record(inputStruct); 99 | AdjustPrecisionAndScale.Value transform = new AdjustPrecisionAndScale.Value<>(); 100 | transform.configure( 101 | ImmutableMap.of(AdjustPrecisionAndScaleConfig.PRECISION_VALUE_CONFIG , 38, 102 | AdjustPrecisionAndScaleConfig.PRECISION_MODE_CONFIG, "max", 103 | AdjustPrecisionAndScaleConfig.SCALE_VALUE_CONFIG, 20, 104 | AdjustPrecisionAndScaleConfig.SCALE_MODE_CONFIG, "max") 105 | ); 106 | 107 | 108 | SinkRecord actual = transform.apply(record); 109 | assertNotNull(actual); 110 | assertStruct(expectedStruct, (Struct) actual.value()); 111 | } 112 | 113 | } 114 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/BytesToStringTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.google.common.collect.ImmutableMap; 4 | import org.apache.kafka.connect.connector.ConnectRecord; 5 | import org.apache.kafka.connect.data.Schema; 6 | import org.apache.kafka.connect.data.SchemaBuilder; 7 | import org.apache.kafka.connect.data.Struct; 8 | import org.apache.kafka.connect.sink.SinkRecord; 9 | import org.apache.kafka.connect.transforms.Transformation; 10 | import org.junit.jupiter.api.Test; 11 | 12 | import java.io.UnsupportedEncodingException; 13 | 14 | import static com.github.jcustenborder.kafka.connect.utils.AssertSchema.assertSchema; 15 | import static org.junit.jupiter.api.Assertions.assertEquals; 16 | 17 | public abstract class BytesToStringTest extends 
TransformationTest { 18 | protected BytesToStringTest(boolean isKey) { 19 | super(isKey); 20 | } 21 | 22 | @Test 23 | public void struct() throws UnsupportedEncodingException { 24 | this.transformation.configure( 25 | ImmutableMap.of(BytesToStringConfig.FIELD_CONFIG, "bytes") 26 | ); 27 | final String expected = "this is a test"; 28 | Schema schema = SchemaBuilder.struct() 29 | .field("bytes", Schema.BYTES_SCHEMA) 30 | .build(); 31 | Struct struct = new Struct(schema) 32 | .put("bytes", expected.getBytes("UTF-8")); 33 | 34 | final SinkRecord inputRecord = new SinkRecord( 35 | "topic", 36 | 1, 37 | null, 38 | null, 39 | schema, 40 | struct, 41 | 1L 42 | ); 43 | 44 | SinkRecord outputRecord = this.transformation.apply(inputRecord); 45 | 46 | 47 | 48 | } 49 | 50 | @Test 51 | public void bytes() throws UnsupportedEncodingException { 52 | this.transformation.configure( 53 | ImmutableMap.of() 54 | ); 55 | final String expected = "this is a test"; 56 | final SinkRecord inputRecord = new SinkRecord( 57 | "topic", 58 | 1, 59 | null, 60 | null, 61 | Schema.BYTES_SCHEMA, 62 | expected.getBytes("UTF-8"), 63 | 1L 64 | ); 65 | 66 | SinkRecord outputRecord = this.transformation.apply(inputRecord); 67 | assertEquals(expected, outputRecord.value()); 68 | assertSchema(Schema.STRING_SCHEMA, outputRecord.valueSchema()); 69 | } 70 | 71 | @Test 72 | public void nullBytes() throws UnsupportedEncodingException { 73 | this.transformation.configure( 74 | ImmutableMap.of() 75 | ); 76 | final String expected = null; 77 | final SinkRecord inputRecord = new SinkRecord( 78 | "topic", 79 | 1, 80 | null, 81 | null, 82 | Schema.OPTIONAL_BYTES_SCHEMA, 83 | expected, 84 | 1L 85 | ); 86 | 87 | SinkRecord outputRecord = this.transformation.apply(inputRecord); 88 | assertEquals(expected, outputRecord.value()); 89 | assertSchema(Schema.OPTIONAL_STRING_SCHEMA, outputRecord.valueSchema()); 90 | } 91 | 92 | @Test 93 | public void nullStructFieldBytes() throws UnsupportedEncodingException { 94 | 
this.transformation.configure( 95 | ImmutableMap.of(BytesToStringConfig.FIELD_CONFIG, "bytes") 96 | ); 97 | final String expected = null; 98 | Schema schema = SchemaBuilder.struct() 99 | .field("bytes", Schema.OPTIONAL_BYTES_SCHEMA) 100 | .build(); 101 | Struct struct = new Struct(schema) 102 | .put("bytes", expected); 103 | 104 | final SinkRecord inputRecord = new SinkRecord( 105 | "topic", 106 | 1, 107 | null, 108 | null, 109 | schema, 110 | struct, 111 | 1L 112 | ); 113 | 114 | SinkRecord outputRecord = this.transformation.apply(inputRecord); 115 | } 116 | 117 | public static class ValueTest> extends BytesToStringTest { 118 | protected ValueTest() { 119 | super(false); 120 | } 121 | 122 | @Override 123 | protected Transformation create() { 124 | return new BytesToString.Value<>(); 125 | } 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/ChangeCaseTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.google.common.base.CaseFormat; 19 | import com.google.common.collect.ImmutableMap; 20 | import org.apache.kafka.connect.connector.ConnectRecord; 21 | import org.apache.kafka.connect.data.Schema; 22 | import org.apache.kafka.connect.data.SchemaBuilder; 23 | import org.apache.kafka.connect.data.Struct; 24 | import org.apache.kafka.connect.sink.SinkRecord; 25 | import org.apache.kafka.connect.transforms.Transformation; 26 | import org.junit.jupiter.api.Test; 27 | 28 | import java.util.ArrayList; 29 | import java.util.Collections; 30 | import java.util.function.Function; 31 | 32 | import static com.github.jcustenborder.kafka.connect.utils.AssertSchema.assertSchema; 33 | import static com.github.jcustenborder.kafka.connect.utils.AssertStruct.assertStruct; 34 | import static org.junit.jupiter.api.Assertions.assertNotNull; 35 | 36 | public abstract class ChangeCaseTest extends TransformationTest { 37 | protected ChangeCaseTest(boolean isKey) { 38 | super(isKey); 39 | } 40 | 41 | @Test 42 | public void test() { 43 | this.transformation.configure( 44 | ImmutableMap.of(ChangeCaseConfig.FROM_CONFIG, CaseFormat.UPPER_UNDERSCORE.toString(), 45 | ChangeCaseConfig.TO_CONFIG, CaseFormat.LOWER_UNDERSCORE.toString())); 46 | final Schema inputSchema = makeSchema(CaseFormat.UPPER_UNDERSCORE); 47 | final Schema expectedSchema = makeSchema(CaseFormat.LOWER_UNDERSCORE); 48 | 49 | final Struct inputStruct = makeStruct(inputSchema, CaseFormat.UPPER_UNDERSCORE); 50 | final Struct expectedStruct = makeStruct(expectedSchema, CaseFormat.LOWER_UNDERSCORE); 51 | 52 | final SinkRecord inputRecord = new SinkRecord("topic", 1, null, null, inputSchema, inputStruct, 1L); 53 | for (int i = 0; i < 50; i++) { 54 | final SinkRecord transformedRecord = this.transformation.apply(inputRecord); 55 | assertNotNull(transformedRecord, "transformedRecord should not be null."); 56 | assertSchema(expectedSchema, 
transformedRecord.valueSchema()); 57 | assertStruct(expectedStruct, (Struct) transformedRecord.value()); 58 | } 59 | } 60 | 61 | private Schema makeSchema(CaseFormat caseFormat) { 62 | final Function convert = s -> CaseFormat.LOWER_UNDERSCORE.to(caseFormat, s); 63 | return SchemaBuilder.struct().field(convert.apply("contacts"), 64 | SchemaBuilder.array(SchemaBuilder.struct() 65 | .field(convert.apply("contact"), 66 | SchemaBuilder.struct() 67 | .field(convert.apply("first_name"), Schema.STRING_SCHEMA) 68 | .field(convert.apply("last_name"), Schema.STRING_SCHEMA) 69 | .build() 70 | ).build()) 71 | ).build(); 72 | } 73 | 74 | private Struct makeStruct(Schema schema, CaseFormat caseFormat) { 75 | final Function convert = s -> CaseFormat.LOWER_UNDERSCORE.to(caseFormat, s); 76 | final Schema contacts = schema.fields().get(0).schema().valueSchema(); 77 | final Schema contact = contacts.fields().get(0).schema(); 78 | return new Struct(schema).put(convert.apply("contacts"), 79 | new ArrayList<>( 80 | Collections.singletonList( 81 | new Struct(contacts).put(convert.apply("contact"), 82 | new Struct(contact) 83 | .put(convert.apply("first_name"), "test") 84 | .put(convert.apply("last_name"), "user")) 85 | ) 86 | ) 87 | ); 88 | } 89 | 90 | public static class ValueTest> extends ChangeCaseTest { 91 | protected ValueTest() { 92 | super(false); 93 | } 94 | 95 | @Override 96 | protected Transformation create() { 97 | return new ChangeCase.Value<>(); 98 | } 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/ChangeTopicCaseTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.google.common.base.CaseFormat; 4 | import com.google.common.base.MoreObjects; 5 | import com.google.common.collect.ImmutableMap; 6 | import 
org.apache.kafka.connect.sink.SinkRecord; 7 | import org.apache.kafka.connect.transforms.Transformation; 8 | import org.junit.jupiter.api.DynamicTest; 9 | import org.junit.jupiter.api.Test; 10 | import org.junit.jupiter.api.TestFactory; 11 | 12 | import java.util.Arrays; 13 | import java.util.Map; 14 | import java.util.stream.Stream; 15 | 16 | import static org.junit.jupiter.api.Assertions.assertEquals; 17 | import static org.junit.jupiter.api.Assertions.assertNotNull; 18 | 19 | public class ChangeTopicCaseTest extends TransformationTest { 20 | protected ChangeTopicCaseTest() { 21 | super(false); 22 | } 23 | 24 | @Override 25 | protected Transformation create() { 26 | return new ChangeTopicCase<>(); 27 | } 28 | 29 | SinkRecord record(String topic) { 30 | return new SinkRecord( 31 | topic, 32 | 1, 33 | null, 34 | null, 35 | null, 36 | null, 37 | 12345L 38 | ); 39 | 40 | } 41 | 42 | static class TestCase { 43 | final CaseFormat from; 44 | final String input; 45 | final CaseFormat to; 46 | final String expected; 47 | 48 | TestCase(CaseFormat from, String input, CaseFormat to, String expected) { 49 | this.from = from; 50 | this.input = input; 51 | this.to = to; 52 | this.expected = expected; 53 | } 54 | 55 | @Override 56 | public String toString() { 57 | return MoreObjects.toStringHelper(this) 58 | .add("from", this.from) 59 | .add("to", this.to) 60 | .add("input", this.input) 61 | .add("expected", this.expected) 62 | .toString(); 63 | } 64 | } 65 | 66 | static TestCase of(CaseFormat from, String input, CaseFormat to, String expected) { 67 | return new TestCase(from, input, to, expected); 68 | } 69 | 70 | @TestFactory 71 | public Stream apply() { 72 | return Arrays.asList( 73 | of(CaseFormat.UPPER_UNDERSCORE, "TOPIC_NAME", CaseFormat.LOWER_CAMEL, "topicName"), 74 | of(CaseFormat.LOWER_CAMEL, "topicName", CaseFormat.UPPER_UNDERSCORE, "TOPIC_NAME"), 75 | of(CaseFormat.LOWER_HYPHEN, "topic-name", CaseFormat.LOWER_UNDERSCORE, "topic_name") 76 | ).stream() 77 | .map(t -> 
DynamicTest.dynamicTest(t.toString(), () -> { 78 | final Map settings = ImmutableMap.of( 79 | ChangeTopicCaseConfig.FROM_CONFIG, t.from.toString(), 80 | ChangeTopicCaseConfig.TO_CONFIG, t.to.toString() 81 | ); 82 | this.transformation.configure(settings); 83 | final SinkRecord input = record(t.input); 84 | final SinkRecord actual = this.transformation.apply(input); 85 | assertNotNull(actual, "actual should not be null."); 86 | assertEquals(t.expected, actual.topic(), "topic does not match."); 87 | })); 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/ConversionHandlerTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.github.jcustenborder.kafka.connect.utils.data.SchemaBuilders; 4 | import com.google.common.collect.LinkedListMultimap; 5 | import com.google.common.collect.Multimap; 6 | import org.apache.kafka.common.record.TimestampType; 7 | import org.apache.kafka.connect.data.Decimal; 8 | import org.apache.kafka.connect.data.Schema; 9 | import org.apache.kafka.connect.data.SchemaBuilder; 10 | import org.apache.kafka.connect.data.Struct; 11 | import org.apache.kafka.connect.data.Timestamp; 12 | import org.apache.kafka.connect.header.ConnectHeaders; 13 | import org.apache.kafka.connect.header.Headers; 14 | import org.apache.kafka.connect.sink.SinkRecord; 15 | import org.junit.jupiter.api.DynamicTest; 16 | import org.junit.jupiter.api.TestFactory; 17 | 18 | import java.math.BigDecimal; 19 | import java.math.BigInteger; 20 | import java.util.Date; 21 | import java.util.stream.Stream; 22 | 23 | import static org.junit.jupiter.api.Assertions.assertEquals; 24 | import static org.junit.jupiter.api.Assertions.assertNotNull; 25 | import static org.junit.jupiter.api.DynamicTest.dynamicTest; 26 | 27 | public class 
ConversionHandlerTest { 28 | @TestFactory 29 | public Stream parse() { 30 | Multimap tests = LinkedListMultimap.create(); 31 | tests.put(Schema.BOOLEAN_SCHEMA, true); 32 | tests.put(Schema.BOOLEAN_SCHEMA, false); 33 | tests.put(Schema.FLOAT32_SCHEMA, Float.MAX_VALUE); 34 | tests.put(Schema.FLOAT32_SCHEMA, Float.MIN_VALUE); 35 | tests.put(Schema.FLOAT64_SCHEMA, Double.MIN_VALUE); 36 | tests.put(Schema.FLOAT64_SCHEMA, Double.MAX_VALUE); 37 | tests.put(Schema.INT8_SCHEMA, Byte.MIN_VALUE); 38 | tests.put(Schema.INT8_SCHEMA, Byte.MAX_VALUE); 39 | tests.put(Schema.INT16_SCHEMA, Short.MIN_VALUE); 40 | tests.put(Schema.INT16_SCHEMA, Short.MAX_VALUE); 41 | tests.put(Schema.INT32_SCHEMA, Integer.MIN_VALUE); 42 | tests.put(Schema.INT32_SCHEMA, Integer.MAX_VALUE); 43 | tests.put(Schema.INT64_SCHEMA, Long.MIN_VALUE); 44 | tests.put(Schema.INT64_SCHEMA, Long.MAX_VALUE); 45 | tests.put(Timestamp.SCHEMA, new Date(0)); 46 | 47 | for (int i = 0; i < 20; i++) { 48 | Schema schema = Decimal.schema(i); 49 | tests.put(schema, BigDecimal.valueOf(Long.MAX_VALUE, i)); 50 | tests.put(schema, BigDecimal.valueOf(Long.MIN_VALUE, i)); 51 | } 52 | 53 | 54 | return tests.entries().stream() 55 | .map(e -> dynamicTest(String.format( 56 | "%s - %s", 57 | ConversionHandler.SchemaKey.of(e.getKey()), 58 | e.getValue() 59 | ), () -> { 60 | final Schema schema = e.getKey(); 61 | final Object expected = e.getValue(); 62 | final String headerName = "input"; 63 | final String fieldName = "output"; 64 | Headers inputHeaders = new ConnectHeaders(); 65 | inputHeaders.add(headerName, expected, schema); 66 | Schema inputSchema = SchemaBuilder.struct() 67 | .field("firstName", Schema.OPTIONAL_STRING_SCHEMA) 68 | .build(); 69 | Struct inputStruct = new Struct(inputSchema) 70 | .put("firstName", "Test"); 71 | SinkRecord inputRecord = new SinkRecord( 72 | "testing", 73 | 1, 74 | null, 75 | null, 76 | inputStruct.schema(), 77 | inputStruct, 78 | 12345L, 79 | 123412351L, 80 | TimestampType.NO_TIMESTAMP_TYPE, 81 | 
inputHeaders 82 | ); 83 | Schema outputSchema = SchemaBuilders.of(inputSchema) 84 | .field(fieldName, schema) 85 | .build(); 86 | Struct outputStruct = new Struct(outputSchema); 87 | ConversionHandler handler = ConversionHandler.of(schema, headerName, fieldName); 88 | assertNotNull(handler, "handler cannot be null."); 89 | handler.convert(inputRecord, outputStruct); 90 | Object actual = outputStruct.get(fieldName); 91 | assertEquals(expected, actual); 92 | })); 93 | 94 | 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/DebugTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.SinkRecordHelper; 19 | import com.google.common.collect.ImmutableMap; 20 | import org.apache.kafka.connect.data.Schema; 21 | import org.apache.kafka.connect.data.SchemaBuilder; 22 | import org.apache.kafka.connect.data.Struct; 23 | import org.apache.kafka.connect.sink.SinkRecord; 24 | import org.junit.jupiter.api.BeforeEach; 25 | import org.junit.jupiter.api.Test; 26 | 27 | public class DebugTest { 28 | Debug transform; 29 | 30 | @BeforeEach 31 | public void before() { 32 | this.transform = new Debug<>(); 33 | this.transform.configure(ImmutableMap.of()); 34 | } 35 | 36 | @Test 37 | public void apply() { 38 | Schema valueSchema = SchemaBuilder.struct() 39 | .name("foo") 40 | .field("firstName", Schema.STRING_SCHEMA) 41 | .field("lastName", Schema.STRING_SCHEMA) 42 | .build(); 43 | 44 | 45 | SinkRecord input = SinkRecordHelper.write("test", Schema.STRING_SCHEMA, "1234", valueSchema, new Struct(valueSchema).put("firstName", "adfs").put("lastName", "asdfas")); 46 | SinkRecord output = this.transform.apply(input); 47 | 48 | } 49 | 50 | 51 | } 52 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/DocumentationTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.github.jcustenborder.kafka.connect.utils.BaseDocumentationTest; 19 | 20 | public class DocumentationTest extends BaseDocumentationTest { 21 | 22 | 23 | } 24 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/ExtractNestedFieldTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.google.common.collect.ImmutableMap; 19 | import org.apache.kafka.connect.connector.ConnectRecord; 20 | import org.apache.kafka.connect.data.Schema; 21 | import org.apache.kafka.connect.data.SchemaBuilder; 22 | import org.apache.kafka.connect.data.Struct; 23 | import org.apache.kafka.connect.sink.SinkRecord; 24 | import org.apache.kafka.connect.transforms.Transformation; 25 | import org.junit.jupiter.api.Test; 26 | 27 | import static com.github.jcustenborder.kafka.connect.utils.AssertSchema.assertSchema; 28 | import static com.github.jcustenborder.kafka.connect.utils.AssertStruct.assertStruct; 29 | import static org.junit.jupiter.api.Assertions.assertNotNull; 30 | 31 | public abstract class ExtractNestedFieldTest extends TransformationTest { 32 | protected ExtractNestedFieldTest(boolean isKey) { 33 | super(isKey); 34 | } 35 | 36 | @Test 37 | public void test() { 38 | this.transformation.configure( 39 | ImmutableMap.of( 40 | ExtractNestedFieldConfig.INNER_FIELD_NAME_CONF, "state", 41 | ExtractNestedFieldConfig.OUTER_FIELD_NAME_CONF, "address", 42 | ExtractNestedFieldConfig.OUTPUT_FIELD_NAME_CONF, "state" 43 | ) 44 | ); 45 | 46 | final Schema innerSchema = SchemaBuilder.struct() 47 | .name("Address") 48 | .field("city", Schema.STRING_SCHEMA) 49 | .field("state", Schema.STRING_SCHEMA) 50 | .build(); 51 | final Schema inputSchema = SchemaBuilder.struct() 52 | .field("first_name", Schema.STRING_SCHEMA) 53 | .field("last_name", Schema.STRING_SCHEMA) 54 | .field("address", innerSchema) 55 | .build(); 56 | final Schema expectedSchema = SchemaBuilder.struct() 57 | .field("first_name", Schema.STRING_SCHEMA) 58 | .field("last_name", Schema.STRING_SCHEMA) 59 | .field("address", innerSchema) 60 | .field("state", Schema.STRING_SCHEMA) 61 | .build(); 62 | final Struct innerStruct = new Struct(innerSchema) 63 | .put("city", "Austin") 64 | .put("state", "tx"); 65 | final Struct 
inputStruct = new Struct(inputSchema) 66 | .put("first_name", "test") 67 | .put("last_name", "developer") 68 | .put("address", innerStruct); 69 | final Struct expectedStruct = new Struct(expectedSchema) 70 | .put("first_name", "test") 71 | .put("last_name", "developer") 72 | .put("address", innerStruct) 73 | .put("state", "tx"); 74 | 75 | final SinkRecord inputRecord = new SinkRecord( 76 | "topic", 77 | 1, 78 | null, 79 | null, 80 | inputSchema, 81 | inputStruct, 82 | 1L 83 | ); 84 | for (int i = 0; i < 50; i++) { 85 | final SinkRecord transformedRecord = this.transformation.apply(inputRecord); 86 | assertNotNull(transformedRecord, "transformedRecord should not be null."); 87 | assertSchema(expectedSchema, transformedRecord.valueSchema()); 88 | assertStruct(expectedStruct, (Struct) transformedRecord.value()); 89 | } 90 | } 91 | 92 | 93 | public static class ValueTest> extends ExtractNestedFieldTest { 94 | protected ValueTest() { 95 | super(false); 96 | } 97 | 98 | @Override 99 | protected Transformation create() { 100 | return new ExtractNestedField.Value(); 101 | } 102 | } 103 | 104 | } 105 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/ExtractTimestampTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.google.common.collect.ImmutableMap; 19 | import org.apache.kafka.connect.connector.ConnectRecord; 20 | import org.apache.kafka.connect.data.Schema; 21 | import org.apache.kafka.connect.data.SchemaBuilder; 22 | import org.apache.kafka.connect.data.Struct; 23 | import org.apache.kafka.connect.data.Timestamp; 24 | import org.apache.kafka.connect.sink.SinkRecord; 25 | import org.apache.kafka.connect.transforms.Transformation; 26 | import org.junit.jupiter.api.Test; 27 | 28 | import java.util.Date; 29 | import java.util.Map; 30 | 31 | import static org.junit.jupiter.api.Assertions.assertEquals; 32 | 33 | public abstract class ExtractTimestampTest extends TransformationTest { 34 | protected ExtractTimestampTest(boolean isKey) { 35 | super(isKey); 36 | } 37 | 38 | public static class ValueTest> extends ExtractTimestampTest { 39 | protected ValueTest() { 40 | super(false); 41 | } 42 | 43 | @Override 44 | protected Transformation create() { 45 | return new ExtractTimestamp.Value<>(); 46 | } 47 | } 48 | 49 | static final Date EXPECTED = new Date(1512164613123L); 50 | 51 | @Test 52 | public void schemaTimestamp() { 53 | this.transformation.configure( 54 | ImmutableMap.of(ExtractTimestampConfig.FIELD_NAME_CONFIG, "timestamp") 55 | ); 56 | final Schema schema = SchemaBuilder.struct() 57 | .field("timestamp", Timestamp.SCHEMA) 58 | .build(); 59 | final Struct struct = new Struct(schema) 60 | .put("timestamp", EXPECTED); 61 | final SinkRecord inputRecord = new SinkRecord( 62 | "topic", 63 | 1, 64 | null, 65 | null, 66 | schema, 67 | struct, 68 | 1L 69 | ); 70 | final SinkRecord transformedRecord = this.transformation.apply(inputRecord); 71 | assertEquals((Long) EXPECTED.getTime(), transformedRecord.timestamp(), "timestamp does not match."); 72 | } 73 | 74 | @Test 75 | 
public void schemaLong() { 76 | this.transformation.configure( 77 | ImmutableMap.of(ExtractTimestampConfig.FIELD_NAME_CONFIG, "timestamp") 78 | ); 79 | final Schema schema = SchemaBuilder.struct() 80 | .field("timestamp", Schema.INT64_SCHEMA) 81 | .build(); 82 | final Struct struct = new Struct(schema) 83 | .put("timestamp", EXPECTED.getTime()); 84 | final SinkRecord inputRecord = new SinkRecord( 85 | "topic", 86 | 1, 87 | null, 88 | null, 89 | schema, 90 | struct, 91 | 1L 92 | ); 93 | final SinkRecord transformedRecord = this.transformation.apply(inputRecord); 94 | assertEquals((Long) EXPECTED.getTime(), transformedRecord.timestamp(), "timestamp does not match."); 95 | } 96 | 97 | @Test 98 | public void schemalessDate() { 99 | this.transformation.configure( 100 | ImmutableMap.of(ExtractTimestampConfig.FIELD_NAME_CONFIG, "timestamp") 101 | ); 102 | final Map input = ImmutableMap.of( 103 | "timestamp", (Object) EXPECTED 104 | ); 105 | 106 | final SinkRecord inputRecord = new SinkRecord( 107 | "topic", 108 | 1, 109 | null, 110 | null, 111 | null, 112 | input, 113 | 1L 114 | ); 115 | final SinkRecord transformedRecord = this.transformation.apply(inputRecord); 116 | assertEquals((Long) EXPECTED.getTime(), transformedRecord.timestamp(), "timestamp does not match."); 117 | } 118 | 119 | @Test 120 | public void schemalessTimestamp() { 121 | this.transformation.configure( 122 | ImmutableMap.of(ExtractTimestampConfig.FIELD_NAME_CONFIG, "timestamp") 123 | ); 124 | final Map input = ImmutableMap.of( 125 | "timestamp", (Object) EXPECTED.getTime() 126 | ); 127 | 128 | final SinkRecord inputRecord = new SinkRecord( 129 | "topic", 130 | 1, 131 | null, 132 | null, 133 | null, 134 | input, 135 | 1L 136 | ); 137 | final SinkRecord transformedRecord = this.transformation.apply(inputRecord); 138 | assertEquals((Long) EXPECTED.getTime(), transformedRecord.timestamp(), "timestamp does not match."); 139 | } 140 | } 141 | 
-------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/GenericAssertions.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.google.common.base.Strings; 19 | import com.google.common.collect.MapDifference; 20 | import com.google.common.collect.Maps; 21 | 22 | import java.io.BufferedWriter; 23 | import java.io.IOException; 24 | import java.io.StringWriter; 25 | import java.io.Writer; 26 | import java.util.Map; 27 | import java.util.function.Supplier; 28 | 29 | import static org.junit.jupiter.api.Assertions.assertNotNull; 30 | import static org.junit.jupiter.api.Assertions.assertTrue; 31 | 32 | public class GenericAssertions { 33 | private GenericAssertions() { 34 | 35 | } 36 | 37 | static class MapDifferenceSupplier implements Supplier { 38 | final MapDifference mapDifference; 39 | final String method; 40 | 41 | public MapDifferenceSupplier(MapDifference mapDifference, String method) { 42 | this.mapDifference = mapDifference; 43 | this.method = method; 44 | } 45 | 46 | @Override 47 | public String get() { 48 | try (Writer w = new StringWriter()) { 49 | try (BufferedWriter writer = 
new BufferedWriter(w)) { 50 | writer.append(String.format("Map for actual.%s() does not match expected.%s().", this.method, this.method)); 51 | writer.newLine(); 52 | Map> differences = mapDifference.entriesDiffering(); 53 | if (!differences.isEmpty()) { 54 | writer.append("Keys with Differences"); 55 | writer.newLine(); 56 | for (Map.Entry> kvp : differences.entrySet()) { 57 | writer.append(" "); 58 | writer.append(kvp.getKey().toString()); 59 | writer.newLine(); 60 | 61 | writer.append(" expected:"); 62 | writer.append(kvp.getValue().leftValue().toString()); 63 | writer.newLine(); 64 | 65 | writer.append(" actual:"); 66 | writer.append(kvp.getValue().rightValue().toString()); 67 | writer.newLine(); 68 | } 69 | } 70 | 71 | Map entries = mapDifference.entriesOnlyOnLeft(); 72 | writeEntries(writer, "Only in expected map", entries); 73 | 74 | Map onlyInActual = mapDifference.entriesOnlyOnRight(); 75 | writeEntries(writer, "Only in actual map", onlyInActual); 76 | } 77 | return w.toString(); 78 | } catch (IOException ex) { 79 | throw new IllegalStateException(ex); 80 | } 81 | } 82 | 83 | private void writeEntries(BufferedWriter writer, String header, Map entries) throws IOException { 84 | if (!entries.isEmpty()) { 85 | writer.append(header); 86 | writer.newLine(); 87 | 88 | for (Map.Entry kvp : entries.entrySet()) { 89 | writer.append(" "); 90 | writer.append(kvp.getKey().toString()); 91 | writer.append(": "); 92 | writer.append(kvp.getValue().toString()); 93 | writer.newLine(); 94 | } 95 | writer.newLine(); 96 | } 97 | } 98 | } 99 | 100 | static void assertMap(Map expected, Map actual, String message) { 101 | if (null == expected && null == actual) { 102 | return; 103 | } 104 | 105 | String prefix = Strings.isNullOrEmpty(message) ? 
"" : message + ": "; 106 | assertNotNull(expected, prefix + "expected cannot be null"); 107 | assertNotNull(actual, prefix + "actual cannot be null"); 108 | MapDifference mapDifference = Maps.difference(expected, actual); 109 | assertTrue(mapDifference.areEqual(), new MapDifferenceSupplier(mapDifference, prefix)); 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/HeaderToFieldConfigTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.google.common.base.Joiner; 4 | import com.google.common.collect.ImmutableSet; 5 | import org.apache.kafka.connect.data.Date; 6 | import org.apache.kafka.connect.data.Decimal; 7 | import org.apache.kafka.connect.data.Schema; 8 | import org.apache.kafka.connect.data.SchemaBuilder; 9 | import org.apache.kafka.connect.data.Time; 10 | import org.apache.kafka.connect.data.Timestamp; 11 | import org.junit.jupiter.api.DynamicTest; 12 | import org.junit.jupiter.api.TestFactory; 13 | import org.slf4j.Logger; 14 | import org.slf4j.LoggerFactory; 15 | 16 | import java.util.Arrays; 17 | import java.util.LinkedHashMap; 18 | import java.util.Map; 19 | import java.util.Set; 20 | import java.util.stream.IntStream; 21 | import java.util.stream.Stream; 22 | 23 | import static org.junit.jupiter.api.Assertions.assertEquals; 24 | import static org.junit.jupiter.api.Assertions.assertNotNull; 25 | import static org.junit.jupiter.api.DynamicTest.dynamicTest; 26 | 27 | public class HeaderToFieldConfigTest { 28 | private static final Logger log = LoggerFactory.getLogger(HeaderToFieldConfigTest.class); 29 | 30 | void addTest(Schema schema, Map tests) { 31 | String schemaText = HeaderToFieldConfig.HeaderToFieldMapping.toString(schema); 32 | StringBuilder builder = new StringBuilder(); 33 | builder.append("foo:"); 34 | 
builder.append(schemaText); 35 | tests.put( 36 | builder.toString(), 37 | new HeaderToFieldConfig.HeaderToFieldMapping( 38 | "foo", 39 | schema, 40 | "foo" 41 | ) 42 | ); 43 | builder.append(":bar"); 44 | tests.put( 45 | builder.toString(), 46 | new HeaderToFieldConfig.HeaderToFieldMapping( 47 | "foo", 48 | schema, 49 | "bar" 50 | ) 51 | ); 52 | } 53 | 54 | @TestFactory 55 | public Stream parse() { 56 | Set skip = ImmutableSet.of(Schema.Type.ARRAY, Schema.Type.MAP, Schema.Type.STRUCT); 57 | Map tests = new LinkedHashMap<>(); 58 | 59 | addTest(Timestamp.builder().optional().build(), tests); 60 | addTest(Date.builder().optional().build(), tests); 61 | addTest(Time.builder().optional().build(), tests); 62 | IntStream.range(0, 50) 63 | .mapToObj(value -> Decimal.builder(value).optional().build()) 64 | .forEach(schema -> addTest(schema, tests)); 65 | 66 | 67 | Arrays.stream(Schema.Type.values()) 68 | .filter(type -> !skip.contains(type)) 69 | .forEach(type -> { 70 | Schema schema = SchemaBuilder.type(type) 71 | .optional() 72 | .build(); 73 | addTest(schema, tests); 74 | }); 75 | log.info("{}", Joiner.on('\n').join(tests.keySet())); 76 | 77 | 78 | return tests.entrySet().stream() 79 | .map(e -> dynamicTest(e.getKey(), () -> { 80 | final HeaderToFieldConfig.HeaderToFieldMapping expected = e.getValue(); 81 | final HeaderToFieldConfig.HeaderToFieldMapping actual = HeaderToFieldConfig.HeaderToFieldMapping.parse(e.getKey()); 82 | assertNotNull(actual, "actual should not be null."); 83 | assertEquals(expected, actual, "mappings do not match"); 84 | })); 85 | } 86 | 87 | } 88 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/HeaderToFieldTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.google.common.collect.ImmutableMap; 4 | import 
org.apache.kafka.common.record.TimestampType; 5 | import org.apache.kafka.connect.data.Schema; 6 | import org.apache.kafka.connect.data.SchemaBuilder; 7 | import org.apache.kafka.connect.data.Struct; 8 | import org.apache.kafka.connect.header.ConnectHeaders; 9 | import org.apache.kafka.connect.sink.SinkRecord; 10 | import org.apache.kafka.connect.transforms.Transformation; 11 | import org.junit.jupiter.api.BeforeEach; 12 | import org.junit.jupiter.api.Test; 13 | 14 | import java.io.IOException; 15 | 16 | import static com.github.jcustenborder.kafka.connect.utils.AssertStruct.assertStruct; 17 | import static org.junit.jupiter.api.Assertions.assertNotNull; 18 | 19 | public class HeaderToFieldTest { 20 | Transformation transformation; 21 | 22 | @BeforeEach 23 | public void before() { 24 | this.transformation = new HeaderToField.Value<>(); 25 | } 26 | 27 | 28 | @Test 29 | public void apply() throws IOException { 30 | this.transformation.configure( 31 | ImmutableMap.of(HeaderToFieldConfig.HEADER_MAPPINGS_CONF, "applicationId:STRING") 32 | ); 33 | 34 | ConnectHeaders inputHeaders = new ConnectHeaders(); 35 | inputHeaders.addString("applicationId", "testing"); 36 | 37 | Schema inputSchema = SchemaBuilder.struct() 38 | .field("firstName", Schema.OPTIONAL_STRING_SCHEMA) 39 | .field("lastName", Schema.OPTIONAL_STRING_SCHEMA) 40 | .build(); 41 | 42 | Struct inputStruct = new Struct(inputSchema) 43 | .put("firstName", "example") 44 | .put("lastName", "user"); 45 | 46 | Schema expectedSchema = SchemaBuilder.struct() 47 | .field("firstName", Schema.OPTIONAL_STRING_SCHEMA) 48 | .field("lastName", Schema.OPTIONAL_STRING_SCHEMA) 49 | .field("applicationId", Schema.OPTIONAL_STRING_SCHEMA) 50 | .build(); 51 | Struct expectedStruct = new Struct(expectedSchema) 52 | .put("firstName", "example") 53 | .put("lastName", "user") 54 | .put("applicationId", "testing"); 55 | 56 | SinkRecord inputRecord = new SinkRecord( 57 | "testing", 58 | 1, 59 | null, 60 | null, 61 | inputStruct.schema(), 
62 | inputStruct, 63 | 12345L, 64 | 123412351L, 65 | TimestampType.NO_TIMESTAMP_TYPE, 66 | inputHeaders 67 | ); 68 | 69 | SinkRecord actualRecord = this.transformation.apply(inputRecord); 70 | assertNotNull(actualRecord, "record should not be null."); 71 | assertStruct(expectedStruct, (Struct) actualRecord.value()); 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/LowerCaseTopicTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import org.apache.kafka.common.record.TimestampType; 4 | import org.apache.kafka.common.utils.Time; 5 | import org.apache.kafka.connect.sink.SinkRecord; 6 | import org.junit.jupiter.api.Test; 7 | 8 | import static org.junit.jupiter.api.Assertions.assertEquals; 9 | import static org.mockito.Mockito.mock; 10 | import static org.mockito.Mockito.times; 11 | import static org.mockito.Mockito.verify; 12 | import static org.mockito.Mockito.when; 13 | 14 | public class LowerCaseTopicTest { 15 | 16 | @Test 17 | public void test() { 18 | final SinkRecord input = new SinkRecord( 19 | "TeSt", 20 | 1, 21 | null, 22 | "", 23 | null, 24 | "", 25 | 1234123L, 26 | 12341312L, 27 | TimestampType.NO_TIMESTAMP_TYPE 28 | ); 29 | LowerCaseTopic transform = new LowerCaseTopic<>(); 30 | final SinkRecord actual = transform.apply(input); 31 | assertEquals("test", actual.topic(), "Topic should match."); 32 | } 33 | 34 | 35 | } 36 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/NormalizeSchemaTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.google.common.base.Strings; 4 | import 
org.apache.kafka.connect.data.Field; 5 | import org.apache.kafka.connect.data.Schema; 6 | import org.apache.kafka.connect.data.SchemaBuilder; 7 | import org.apache.kafka.connect.data.Struct; 8 | import org.apache.kafka.connect.sink.SinkRecord; 9 | import org.apache.kafka.connect.transforms.Transformation; 10 | import org.junit.jupiter.api.Test; 11 | 12 | import java.util.Arrays; 13 | import java.util.LinkedHashMap; 14 | import java.util.List; 15 | import java.util.Map; 16 | 17 | import static org.junit.jupiter.api.Assertions.assertEquals; 18 | import static org.junit.jupiter.api.Assertions.assertNotNull; 19 | 20 | public class NormalizeSchemaTest { 21 | 22 | SinkRecord exampleRecord(Schema schema) { 23 | Struct struct = new Struct(schema); 24 | for (Field field : schema.fields()) { 25 | struct.put(field, Strings.repeat("x", 50)); 26 | } 27 | return new SinkRecord( 28 | "test", 29 | 0, 30 | null, 31 | null, 32 | schema, 33 | struct, 34 | 1234L 35 | ); 36 | 37 | } 38 | 39 | Transformation transformation = new NormalizeSchema.Value<>(); 40 | 41 | Schema exampleSchema(List fieldNames, final int version) { 42 | SchemaBuilder builder = SchemaBuilder.struct() 43 | .name(this.getClass().getName()); 44 | for (String fieldName : fieldNames) { 45 | builder.field(fieldName, Schema.STRING_SCHEMA); 46 | } 47 | builder.version(version); 48 | return builder.build(); 49 | } 50 | 51 | @Test 52 | public void apply() { 53 | List> schemaFields = Arrays.asList( 54 | Arrays.asList("first_name"), 55 | Arrays.asList("first_name", "last_name"), 56 | Arrays.asList("first_name", "last_name", "email_address") 57 | ); 58 | int version = 0; 59 | 60 | Map schemaVersions = new LinkedHashMap<>(); 61 | for (List fieldNames : schemaFields) { 62 | schemaVersions.put(version, exampleSchema(fieldNames, version)); 63 | version++; 64 | } 65 | Integer latestVersion = schemaVersions.keySet().stream() 66 | .max(Integer::compareTo) 67 | .get(); 68 | Schema latestSchema = schemaVersions.get(latestVersion); 69 
| SinkRecord latestRecord = exampleRecord(latestSchema); 70 | SinkRecord output = this.transformation.apply(latestRecord); 71 | assertNotNull(output); 72 | assertEquals(latestVersion, output.valueSchema().version()); 73 | 74 | for (int i = 0; i < 50; i++) { 75 | int schemaVersion = i % schemaVersions.size(); 76 | Schema schema = schemaVersions.get(schemaVersion); 77 | SinkRecord input = exampleRecord(schema); 78 | output = this.transformation.apply(input); 79 | assertNotNull(output); 80 | assertEquals(latestVersion, output.valueSchema().version()); 81 | } 82 | schemaVersions.put(version, exampleSchema(Arrays.asList("first_name", "last_name", "email_address", "state"), version)); 83 | latestVersion = schemaVersions.keySet().stream() 84 | .max(Integer::compareTo) 85 | .get(); 86 | latestSchema = schemaVersions.get(latestVersion); 87 | latestRecord = exampleRecord(latestSchema); 88 | output = this.transformation.apply(latestRecord); 89 | 90 | for (int i = 0; i < 50; i++) { 91 | int schemaVersion = i % schemaVersions.size(); 92 | Schema schema = schemaVersions.get(schemaVersion); 93 | SinkRecord input = exampleRecord(schema); 94 | output = this.transformation.apply(input); 95 | assertNotNull(output); 96 | assertEquals(latestVersion, output.valueSchema().version()); 97 | } 98 | 99 | 100 | } 101 | 102 | 103 | } 104 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/PatternFilterTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.google.common.collect.ImmutableMap; 4 | import org.apache.kafka.connect.data.Schema; 5 | import org.apache.kafka.connect.data.SchemaBuilder; 6 | import org.apache.kafka.connect.data.Struct; 7 | import org.apache.kafka.connect.sink.SinkRecord; 8 | import org.junit.jupiter.api.BeforeEach; 9 | import 
org.junit.jupiter.api.Test; 10 | 11 | import static org.junit.jupiter.api.Assertions.assertNotNull; 12 | import static org.junit.jupiter.api.Assertions.assertNull; 13 | 14 | public class PatternFilterTest { 15 | public PatternFilter.Value transform; 16 | 17 | @BeforeEach 18 | public void before() { 19 | this.transform = new PatternFilter.Value(); 20 | this.transform.configure( 21 | ImmutableMap.of( 22 | PatternFilterConfig.FIELD_CONFIG, "input", 23 | PatternFilterConfig.PATTERN_CONFIG, "^filter$" 24 | ) 25 | ); 26 | } 27 | 28 | SinkRecord map(String value) { 29 | return new SinkRecord( 30 | "asdf", 31 | 1, 32 | null, 33 | null, 34 | null, 35 | ImmutableMap.of("input", value), 36 | 1234L 37 | ); 38 | } 39 | 40 | SinkRecord struct(String value) { 41 | Schema schema = SchemaBuilder.struct() 42 | .field("input", Schema.STRING_SCHEMA) 43 | .build(); 44 | Struct struct = new Struct(schema) 45 | .put("input", value); 46 | return new SinkRecord( 47 | "asdf", 48 | 1, 49 | null, 50 | null, 51 | schema, 52 | struct, 53 | 1234L 54 | ); 55 | } 56 | 57 | @Test 58 | public void filtered() { 59 | assertNull(this.transform.apply(struct("filter"))); 60 | assertNull(this.transform.apply(map("filter"))); 61 | } 62 | 63 | @Test 64 | public void notFiltered() { 65 | assertNotNull(this.transform.apply(struct("ok"))); 66 | assertNotNull(this.transform.apply(map("ok"))); 67 | } 68 | 69 | } 70 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/PatternRenameTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.google.common.collect.ImmutableMap; 19 | import org.apache.kafka.connect.connector.ConnectRecord; 20 | import org.apache.kafka.connect.data.Schema; 21 | import org.apache.kafka.connect.data.SchemaBuilder; 22 | import org.apache.kafka.connect.data.Struct; 23 | import org.apache.kafka.connect.sink.SinkRecord; 24 | import org.apache.kafka.connect.transforms.Transformation; 25 | import org.junit.jupiter.api.Test; 26 | 27 | import java.util.Map; 28 | 29 | import static com.github.jcustenborder.kafka.connect.transform.common.GenericAssertions.assertMap; 30 | import static com.github.jcustenborder.kafka.connect.utils.AssertSchema.assertSchema; 31 | import static com.github.jcustenborder.kafka.connect.utils.AssertStruct.assertStruct; 32 | import static org.junit.jupiter.api.Assertions.assertNotNull; 33 | 34 | public abstract class PatternRenameTest extends TransformationTest { 35 | final static String TOPIC = "test"; 36 | 37 | protected PatternRenameTest(boolean isKey) { 38 | super(isKey); 39 | } 40 | 41 | @Test 42 | public void schemaLess() { 43 | this.transformation.configure( 44 | ImmutableMap.of( 45 | PatternRenameConfig.FIELD_PATTERN_CONF, "\\.", 46 | PatternRenameConfig.FIELD_REPLACEMENT_CONF, "_" 47 | ) 48 | ); 49 | 50 | final Map input = ImmutableMap.of( 51 | "first.name", "example", 52 | "last.name", "user" 53 | ); 54 | final Map expected = ImmutableMap.of( 55 | "first_name", "example", 56 | "last_name", "user" 57 | ); 58 | 59 
| final Object key = isKey ? input : null; 60 | final Object value = isKey ? null : input; 61 | final Schema keySchema = null; 62 | final Schema valueSchema = null; 63 | 64 | final SinkRecord inputRecord = new SinkRecord( 65 | TOPIC, 66 | 1, 67 | keySchema, 68 | key, 69 | valueSchema, 70 | value, 71 | 1234L 72 | ); 73 | final SinkRecord outputRecord = this.transformation.apply(inputRecord); 74 | assertNotNull(outputRecord); 75 | final Map actual = (Map) (isKey ? outputRecord.key() : outputRecord.value()); 76 | assertMap(expected, actual, ""); 77 | } 78 | 79 | @Test 80 | public void prefixed() { 81 | this.transformation.configure( 82 | ImmutableMap.of( 83 | PatternRenameConfig.FIELD_PATTERN_CONF, "^prefixed", 84 | PatternRenameConfig.FIELD_REPLACEMENT_CONF, "" 85 | ) 86 | ); 87 | 88 | Schema inputSchema = SchemaBuilder.struct() 89 | .name("testing") 90 | .field("prefixedfirstname", Schema.STRING_SCHEMA) 91 | .field("prefixedlastname", Schema.STRING_SCHEMA); 92 | Struct inputStruct = new Struct(inputSchema) 93 | .put("prefixedfirstname", "example") 94 | .put("prefixedlastname", "user"); 95 | 96 | final Object key = isKey ? inputStruct : null; 97 | final Object value = isKey ? null : inputStruct; 98 | final Schema keySchema = isKey ? inputSchema : null; 99 | final Schema valueSchema = isKey ? null : inputSchema; 100 | 101 | final SinkRecord inputRecord = new SinkRecord( 102 | TOPIC, 103 | 1, 104 | keySchema, 105 | key, 106 | valueSchema, 107 | value, 108 | 1234L 109 | ); 110 | final SinkRecord outputRecord = this.transformation.apply(inputRecord); 111 | assertNotNull(outputRecord); 112 | 113 | final Schema actualSchema = isKey ? outputRecord.keySchema() : outputRecord.valueSchema(); 114 | final Struct actualStruct = (Struct) (isKey ? 
outputRecord.key() : outputRecord.value()); 115 | 116 | final Schema expectedSchema = SchemaBuilder.struct() 117 | .name("testing") 118 | .field("firstname", Schema.STRING_SCHEMA) 119 | .field("lastname", Schema.STRING_SCHEMA); 120 | Struct expectedStruct = new Struct(expectedSchema) 121 | .put("firstname", "example") 122 | .put("lastname", "user"); 123 | 124 | assertSchema(expectedSchema, actualSchema); 125 | assertStruct(expectedStruct, actualStruct); 126 | } 127 | 128 | public static class KeyTest> extends PatternRenameTest { 129 | protected KeyTest() { 130 | super(true); 131 | } 132 | 133 | @Override 134 | protected Transformation create() { 135 | return new PatternRename.Key(); 136 | } 137 | } 138 | 139 | public static class ValueTest> extends PatternRenameTest { 140 | protected ValueTest() { 141 | super(false); 142 | } 143 | 144 | @Override 145 | protected Transformation create() { 146 | return new PatternRename.Value(); 147 | } 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/SchemaNameToTopicTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.google.common.base.Strings; 4 | import org.apache.kafka.connect.data.Field; 5 | import org.apache.kafka.connect.data.Schema; 6 | import org.apache.kafka.connect.data.SchemaBuilder; 7 | import org.apache.kafka.connect.data.Struct; 8 | import org.apache.kafka.connect.sink.SinkRecord; 9 | import org.apache.kafka.connect.transforms.Transformation; 10 | import org.junit.jupiter.api.Test; 11 | 12 | import java.util.Arrays; 13 | import java.util.LinkedHashMap; 14 | import java.util.List; 15 | import java.util.Map; 16 | 17 | import static org.junit.jupiter.api.Assertions.assertEquals; 18 | import static org.junit.jupiter.api.Assertions.assertNotNull; 19 | 20 | public class 
SchemaNameToTopicTest { 21 | Transformation transformation = new SchemaNameToTopic.Value<>(); 22 | SinkRecord exampleRecord(Schema schema) { 23 | Struct struct = new Struct(schema); 24 | for (Field field : schema.fields()) { 25 | struct.put(field, Strings.repeat("x", 50)); 26 | } 27 | return new SinkRecord( 28 | "test", 29 | 0, 30 | null, 31 | null, 32 | schema, 33 | struct, 34 | 1234L 35 | ); 36 | 37 | } 38 | 39 | Schema exampleSchema(List fieldNames, final int version) { 40 | SchemaBuilder builder = SchemaBuilder.struct() 41 | .name(this.getClass().getName()); 42 | for (String fieldName : fieldNames) { 43 | builder.field(fieldName, Schema.STRING_SCHEMA); 44 | } 45 | builder.version(version); 46 | return builder.build(); 47 | } 48 | 49 | @Test 50 | public void apply() { 51 | Schema schema = SchemaBuilder.struct() 52 | .name("com.foo.bar.whatever.ASDF") 53 | .field("firstName", Schema.OPTIONAL_STRING_SCHEMA) 54 | .build(); 55 | SinkRecord input = exampleRecord(schema); 56 | SinkRecord actual = this.transformation.apply(input); 57 | assertNotNull(actual); 58 | assertEquals(schema.name(), actual.topic()); 59 | 60 | 61 | } 62 | 63 | 64 | } 65 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/SetMaximumPrecisionTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.google.common.collect.ImmutableMap; 4 | import org.apache.kafka.connect.data.Decimal; 5 | import org.apache.kafka.connect.data.Schema; 6 | import org.apache.kafka.connect.data.SchemaBuilder; 7 | import org.apache.kafka.connect.data.Struct; 8 | import org.apache.kafka.connect.sink.SinkRecord; 9 | import org.junit.jupiter.api.Test; 10 | 11 | import java.math.BigDecimal; 12 | 13 | import static com.github.jcustenborder.kafka.connect.utils.AssertStruct.assertStruct; 14 | import static 
org.junit.jupiter.api.Assertions.assertNotNull; 15 | 16 | public class SetMaximumPrecisionTest { 17 | SinkRecord record(Struct struct) { 18 | return new SinkRecord("test", 1, null, null, struct.schema(), struct, 1234L); 19 | } 20 | 21 | @Test 22 | public void noop() { 23 | Schema schema = SchemaBuilder.struct() 24 | .field("first", Schema.STRING_SCHEMA) 25 | .field("last", Schema.STRING_SCHEMA) 26 | .field("email", Schema.STRING_SCHEMA) 27 | .build(); 28 | Struct struct = new Struct(schema) 29 | .put("first", "test") 30 | .put("last", "user") 31 | .put("first", "none@none.com"); 32 | SinkRecord record = record(struct); 33 | SetMaximumPrecision.Value transform = new SetMaximumPrecision.Value<>(); 34 | transform.configure( 35 | ImmutableMap.of(SetMaximumPrecisionConfig.MAX_PRECISION_CONFIG, 32) 36 | ); 37 | SinkRecord actual = transform.apply(record); 38 | assertNotNull(actual); 39 | assertStruct((Struct) record.value(), (Struct) actual.value()); 40 | } 41 | 42 | @Test 43 | public void convert() { 44 | final Schema inputSchema = SchemaBuilder.struct() 45 | .field("first", Decimal.schema(5)) 46 | .field( 47 | "second", 48 | Decimal.builder(5) 49 | .parameter(SetMaximumPrecision.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "16") 50 | .optional() 51 | .build() 52 | ) 53 | .field( 54 | "third", 55 | Decimal.builder(5) 56 | .parameter(SetMaximumPrecision.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "48") 57 | .optional() 58 | .build() 59 | ) 60 | .build(); 61 | final Struct inputStruct = new Struct(inputSchema) 62 | .put("first", BigDecimal.ONE) 63 | .put("second", null) 64 | .put("third", BigDecimal.ONE); 65 | final Schema expectedSchema = SchemaBuilder.struct() 66 | .field( 67 | "first", 68 | Decimal.builder(5) 69 | .parameter(SetMaximumPrecision.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "32") 70 | .build() 71 | ) 72 | .field( 73 | "second", 74 | Decimal.builder(5) 75 | .parameter(SetMaximumPrecision.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "16") 76 | .optional() 77 | .build() 78 | ) 79 | 
.field( 80 | "third", 81 | Decimal.builder(5) 82 | .parameter(SetMaximumPrecision.CONNECT_AVRO_DECIMAL_PRECISION_PROP, "32") 83 | .optional() 84 | .build() 85 | ) 86 | .build(); 87 | final Struct expectedStruct = new Struct(expectedSchema) 88 | .put("first", BigDecimal.ONE) 89 | .put("second", null) 90 | .put("third", BigDecimal.ONE); 91 | 92 | 93 | SinkRecord record = record(inputStruct); 94 | SetMaximumPrecision.Value transform = new SetMaximumPrecision.Value<>(); 95 | transform.configure( 96 | ImmutableMap.of(SetMaximumPrecisionConfig.MAX_PRECISION_CONFIG, 32) 97 | ); 98 | 99 | 100 | SinkRecord actual = transform.apply(record); 101 | assertNotNull(actual); 102 | assertStruct(expectedStruct, (Struct) actual.value()); 103 | } 104 | 105 | } 106 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/SetNullTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import org.apache.kafka.common.record.TimestampType; 4 | import org.apache.kafka.common.utils.Time; 5 | import org.apache.kafka.connect.data.Schema; 6 | import org.apache.kafka.connect.sink.SinkRecord; 7 | import org.junit.jupiter.api.Test; 8 | 9 | import static org.junit.jupiter.api.Assertions.assertEquals; 10 | import static org.junit.jupiter.api.Assertions.assertNull; 11 | import static org.mockito.Mockito.mock; 12 | import static org.mockito.Mockito.times; 13 | import static org.mockito.Mockito.verify; 14 | import static org.mockito.Mockito.when; 15 | 16 | public class SetNullTest { 17 | 18 | @Test 19 | public void test() { 20 | final SinkRecord input = new SinkRecord( 21 | "test", 22 | 1, 23 | Schema.STRING_SCHEMA, 24 | "key", 25 | null, 26 | "", 27 | 1234123L, 28 | 12341312L, 29 | TimestampType.NO_TIMESTAMP_TYPE 30 | ); 31 | final Long expectedTimestamp = 1537808219123L; 32 | SetNull transform = 
new SetNull.Key<>(); 33 | final SinkRecord actual = transform.apply(input); 34 | assertNull(actual.key(), "key should be null."); 35 | assertNull(actual.keySchema(), "keySchema should be null."); 36 | } 37 | 38 | 39 | } 40 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/TimestampNowTest.java: -------------------------------------------------------------------------------- 1 | package com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import org.apache.kafka.common.record.TimestampType; 4 | import org.apache.kafka.common.utils.Time; 5 | import org.apache.kafka.connect.sink.SinkRecord; 6 | import org.junit.jupiter.api.Test; 7 | 8 | import static org.junit.jupiter.api.Assertions.assertEquals; 9 | import static org.mockito.Mockito.mock; 10 | import static org.mockito.Mockito.times; 11 | import static org.mockito.Mockito.verify; 12 | import static org.mockito.Mockito.when; 13 | 14 | public class TimestampNowTest { 15 | 16 | @Test 17 | public void test() { 18 | final SinkRecord input = new SinkRecord( 19 | "test", 20 | 1, 21 | null, 22 | "", 23 | null, 24 | "", 25 | 1234123L, 26 | 12341312L, 27 | TimestampType.NO_TIMESTAMP_TYPE 28 | ); 29 | final Long expectedTimestamp = 1537808219123L; 30 | TimestampNow transform = new TimestampNow<>(); 31 | transform.time = mock(Time.class); 32 | when(transform.time.milliseconds()).thenReturn(expectedTimestamp); 33 | final SinkRecord actual = transform.apply(input); 34 | assertEquals(expectedTimestamp, actual.timestamp(), "Timestamp should match."); 35 | verify(transform.time, times(1)).milliseconds(); 36 | } 37 | 38 | 39 | } 40 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/ToJsonTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy 
Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.google.common.collect.ImmutableMap; 19 | import org.apache.kafka.connect.connector.ConnectRecord; 20 | import org.apache.kafka.connect.data.Schema; 21 | import org.apache.kafka.connect.data.SchemaBuilder; 22 | import org.apache.kafka.connect.data.Struct; 23 | import org.apache.kafka.connect.sink.SinkRecord; 24 | import org.apache.kafka.connect.transforms.Transformation; 25 | import org.junit.jupiter.api.Test; 26 | 27 | import java.util.LinkedHashMap; 28 | import java.util.Map; 29 | 30 | import static org.junit.jupiter.api.Assertions.assertEquals; 31 | import static org.junit.jupiter.api.Assertions.assertNotNull; 32 | 33 | public abstract class ToJsonTest extends TransformationTest { 34 | protected ToJsonTest(boolean isKey) { 35 | super(isKey); 36 | } 37 | 38 | @Test 39 | public void struct() { 40 | this.transformation.configure(ImmutableMap.of()); 41 | final Schema inputSchema = SchemaBuilder.struct() 42 | .field("FIRST_NAME", Schema.STRING_SCHEMA) 43 | .field("LAST_NAME", Schema.STRING_SCHEMA) 44 | .build(); 45 | final Schema expectedSchema = SchemaBuilder.struct() 46 | .field("first_name", Schema.STRING_SCHEMA) 47 | .field("last_name", Schema.STRING_SCHEMA) 48 | .build(); 49 | final Struct inputStruct = new Struct(inputSchema) 50 | 
.put("FIRST_NAME", "test") 51 | .put("LAST_NAME", "user"); 52 | final Struct expectedStruct = new Struct(expectedSchema) 53 | .put("first_name", "test") 54 | .put("last_name", "user"); 55 | 56 | final SinkRecord inputRecord = new SinkRecord( 57 | "topic", 58 | 1, 59 | null, 60 | null, 61 | inputSchema, 62 | inputStruct, 63 | 1L 64 | ); 65 | 66 | final SinkRecord transformedRecord = this.transformation.apply(inputRecord); 67 | assertNotNull(transformedRecord, "transformedRecord should not be null."); 68 | } 69 | 70 | @Test 71 | public void map() { 72 | this.transformation.configure(ImmutableMap.of()); 73 | Map input = new LinkedHashMap<>(); 74 | input.put("FIRST_NAME", "test"); 75 | input.put("LAST_NAME", "user"); 76 | 77 | final SinkRecord inputRecord = new SinkRecord( 78 | "topic", 79 | 1, 80 | null, 81 | null, 82 | null, 83 | input, 84 | 1L 85 | ); 86 | 87 | final SinkRecord transformedRecord = this.transformation.apply(inputRecord); 88 | assertNotNull(transformedRecord, "transformedRecord should not be null."); 89 | } 90 | 91 | 92 | @Test 93 | public void ignoreNonStruct() { 94 | final SinkRecord inputRecord = new SinkRecord( 95 | "topic", 96 | 1, 97 | null, 98 | null, 99 | null, 100 | "", 101 | 1L 102 | ); 103 | 104 | SinkRecord outputRecord = this.transformation.apply(inputRecord); 105 | assertEquals(inputRecord.key(), outputRecord.key()); 106 | assertEquals(inputRecord.value(), outputRecord.value()); 107 | } 108 | 109 | 110 | public static class ValueTest> extends ToJsonTest { 111 | protected ValueTest() { 112 | super(false); 113 | } 114 | 115 | @Override 116 | protected Transformation create() { 117 | return new ToJSON.Value<>(); 118 | } 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/ToLongTest.java: -------------------------------------------------------------------------------- 1 | package 
com.github.jcustenborder.kafka.connect.transform.common; 2 | 3 | import com.github.jcustenborder.kafka.connect.utils.data.SchemaKey; 4 | import com.google.common.collect.ImmutableMap; 5 | import org.apache.kafka.connect.connector.ConnectRecord; 6 | import org.apache.kafka.connect.data.Decimal; 7 | import org.apache.kafka.connect.data.Schema; 8 | import org.apache.kafka.connect.data.Schema.Type; 9 | import org.apache.kafka.connect.data.SchemaAndValue; 10 | import org.apache.kafka.connect.data.SchemaBuilder; 11 | import org.apache.kafka.connect.data.Struct; 12 | import org.apache.kafka.connect.sink.SinkRecord; 13 | import org.apache.kafka.connect.transforms.Transformation; 14 | import org.junit.jupiter.api.BeforeEach; 15 | import org.junit.jupiter.api.DynamicTest; 16 | import org.junit.jupiter.api.TestFactory; 17 | 18 | import java.math.BigDecimal; 19 | import java.util.Arrays; 20 | import java.util.List; 21 | import java.util.stream.Stream; 22 | 23 | import static com.github.jcustenborder.kafka.connect.utils.AssertStruct.assertStruct; 24 | import static com.github.jcustenborder.kafka.connect.utils.SinkRecordHelper.write; 25 | import static com.github.jcustenborder.kafka.connect.utils.StructHelper.struct; 26 | import static org.junit.jupiter.api.Assertions.assertNotNull; 27 | import static org.junit.jupiter.api.Assertions.assertTrue; 28 | import static org.junit.jupiter.api.DynamicTest.dynamicTest; 29 | 30 | public abstract class ToLongTest extends TransformationTest { 31 | protected ToLongTest(boolean isKey) { 32 | super(isKey); 33 | } 34 | 35 | @BeforeEach 36 | public void asdf() { 37 | this.transformation.configure( 38 | ImmutableMap.of(ToLongConfig.FIELD_CONFIG, "value") 39 | ); 40 | } 41 | 42 | @TestFactory 43 | public Stream apply() { 44 | List inputs = Arrays.asList( 45 | new SchemaAndValue(Schema.FLOAT32_SCHEMA, Float.MAX_VALUE), 46 | new SchemaAndValue(Schema.FLOAT64_SCHEMA, Double.MAX_VALUE), 47 | new SchemaAndValue(Schema.INT8_SCHEMA, Byte.MAX_VALUE), 48 | 
new SchemaAndValue(Schema.INT16_SCHEMA, Short.MAX_VALUE), 49 | new SchemaAndValue(Schema.INT32_SCHEMA, Integer.MAX_VALUE), 50 | new SchemaAndValue(Schema.INT64_SCHEMA, Long.MAX_VALUE), 51 | new SchemaAndValue(Decimal.schema(2), BigDecimal.valueOf(1234231, 2)) 52 | ); 53 | return inputs.stream().map(i -> dynamicTest(SchemaKey.of(i.schema()).toString(), () -> { 54 | 55 | final Schema valueSchema = SchemaBuilder.struct() 56 | .name("value") 57 | .field("name", Schema.STRING_SCHEMA) 58 | .field("value", i.schema()) 59 | .build(); 60 | final Struct value = new Struct(valueSchema) 61 | .put("name", "testing") 62 | .put("value", i.value()); 63 | 64 | final SinkRecord input = write( 65 | "test", 66 | struct("key", 67 | "id", Type.INT64, false, 1234L 68 | ), 69 | value 70 | ); 71 | final Struct expectedStruct = struct("value", 72 | "name", Type.STRING, false, "testing", 73 | "value", Type.INT64, false, ((Number) i.value()).longValue() 74 | ); 75 | 76 | SinkRecord output = this.transformation.apply(input); 77 | assertNotNull(output, "output cannot be null."); 78 | assertNotNull(output.value(), "output.value() cannot be null."); 79 | assertTrue(output.value() instanceof Struct, "output.value() should be a struct."); 80 | assertNotNull(output.valueSchema(), "output.valueSchema() cannot be null."); 81 | assertStruct(expectedStruct, (Struct) output.value()); 82 | })); 83 | } 84 | 85 | public static class ValueTest> extends ToLongTest { 86 | protected ValueTest() { 87 | super(false); 88 | } 89 | 90 | @Override 91 | protected Transformation create() { 92 | return new ToLong.Value<>(); 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/TopicNameToFieldTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache 
License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import com.google.common.collect.ImmutableMap; 19 | import org.apache.kafka.connect.connector.ConnectRecord; 20 | import org.apache.kafka.connect.data.Schema; 21 | import org.apache.kafka.connect.data.SchemaAndValue; 22 | import org.apache.kafka.connect.data.SchemaBuilder; 23 | import org.apache.kafka.connect.data.Struct; 24 | import org.apache.kafka.connect.sink.SinkRecord; 25 | import org.apache.kafka.connect.transforms.Transformation; 26 | import org.junit.jupiter.api.Test; 27 | 28 | import static com.github.jcustenborder.kafka.connect.utils.SinkRecordHelper.write; 29 | import static org.junit.jupiter.api.Assertions.assertEquals; 30 | import static org.junit.jupiter.api.Assertions.assertNotNull; 31 | import static org.junit.jupiter.api.Assertions.assertTrue; 32 | 33 | public abstract class TopicNameToFieldTest extends TransformationTest { 34 | 35 | 36 | protected TopicNameToFieldTest(boolean isKey) { 37 | super(isKey); 38 | } 39 | 40 | public static class ValueTest> extends TopicNameToFieldTest { 41 | protected ValueTest() { 42 | super(false); 43 | } 44 | 45 | @Override 46 | protected Transformation create() { 47 | return new TopicNameToField.Value<>(); 48 | } 49 | } 50 | 51 | @Test 52 | public void struct() { 53 | Schema schema = SchemaBuilder.struct() 54 | .field("test", Schema.STRING_SCHEMA) 55 | .build(); 56 | 57 | 58 | 
SchemaAndValue input = new SchemaAndValue(schema, new Struct(schema).put("test", "test")); 59 | SinkRecord record = write("testing", new SchemaAndValue(Schema.STRING_SCHEMA, "foo"), input); 60 | 61 | this.transformation.configure( 62 | ImmutableMap.of(TopicNameToFieldConfig.FIELD_CONFIG, "topicName") 63 | ); 64 | SinkRecord actual = this.transformation.apply(record); 65 | assertNotNull(actual, "actual cannot be null."); 66 | assertTrue(actual.value() instanceof Struct, "value() should be a struct."); 67 | Struct struct = (Struct) actual.value(); 68 | assertEquals("testing", struct.getString("topicName")); 69 | 70 | 71 | 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /src/test/java/com/github/jcustenborder/kafka/connect/transform/common/TransformationTest.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright © 2017 Jeremy Custenborder (jcustenborder@gmail.com) 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.github.jcustenborder.kafka.connect.transform.common; 17 | 18 | import org.apache.kafka.connect.sink.SinkRecord; 19 | import org.apache.kafka.connect.transforms.Transformation; 20 | import org.junit.jupiter.api.BeforeEach; 21 | 22 | public abstract class TransformationTest { 23 | final boolean isKey; 24 | final static String TOPIC = "test"; 25 | 26 | 27 | protected TransformationTest(boolean isKey) { 28 | this.isKey = isKey; 29 | } 30 | 31 | protected abstract Transformation create(); 32 | 33 | Transformation transformation; 34 | 35 | @BeforeEach 36 | public void before() { 37 | this.transformation = create(); 38 | } 39 | 40 | 41 | } 42 | -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/BytesToString/bytes.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "topic", 4 | "kafkaPartition" : 1, 5 | "valueSchema" : { 6 | "type" : "BYTES", 7 | "isOptional" : false 8 | }, 9 | "value" : "dGhpcyBpcyBhIHRlc3Q=", 10 | "timestampType" : "NO_TIMESTAMP_TYPE", 11 | "offset" : 1, 12 | "headers" : [ ] 13 | }, 14 | "description" : "This example converts the `bytes` field from a byte array to a string.", 15 | "name" : "Struct", 16 | "config" : { }, 17 | "childClass" : "Value" 18 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/BytesToString/struct.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "topic", 4 | "kafkaPartition" : 1, 5 | "valueSchema" : { 6 | "type" : "STRUCT", 7 | "isOptional" : false, 8 | "fieldSchemas" : { 9 | "bytes" : { 10 | "type" : "BYTES", 11 | "isOptional" : false 12 | } 13 | } 14 | }, 15 | "value" : { 16 | "schema" : { 17 | "type" : "STRUCT", 18 | "isOptional" : false, 19 | 
"fieldSchemas" : { 20 | "bytes" : { 21 | "type" : "BYTES", 22 | "isOptional" : false 23 | } 24 | } 25 | }, 26 | "fieldValues" : [ { 27 | "name" : "bytes", 28 | "schema" : { 29 | "type" : "BYTES", 30 | "isOptional" : false 31 | }, 32 | "storage" : "dGhpcyBpcyBhIHRlc3Q=" 33 | } ] 34 | }, 35 | "timestampType" : "NO_TIMESTAMP_TYPE", 36 | "offset" : 1, 37 | "headers" : [ ] 38 | }, 39 | "description" : "This example converts the `bytes` field from a byte array to a string.", 40 | "name" : "Struct", 41 | "config" : { 42 | "fields" : "bytes" 43 | }, 44 | "childClass" : "Value" 45 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ChangeCase/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "topic", 4 | "kafkaPartition" : 1, 5 | "valueSchema" : { 6 | "type" : "STRUCT", 7 | "isOptional" : false, 8 | "fieldSchemas" : { 9 | "FIRST_NAME" : { 10 | "type" : "STRING", 11 | "isOptional" : false 12 | }, 13 | "LAST_NAME" : { 14 | "type" : "STRING", 15 | "isOptional" : false 16 | } 17 | } 18 | }, 19 | "value" : { 20 | "schema" : { 21 | "type" : "STRUCT", 22 | "isOptional" : false, 23 | "fieldSchemas" : { 24 | "FIRST_NAME" : { 25 | "type" : "STRING", 26 | "isOptional" : false 27 | }, 28 | "LAST_NAME" : { 29 | "type" : "STRING", 30 | "isOptional" : false 31 | } 32 | } 33 | }, 34 | "fieldValues" : [ { 35 | "name" : "FIRST_NAME", 36 | "schema" : { 37 | "type" : "STRING", 38 | "isOptional" : false 39 | }, 40 | "storage" : "test" 41 | }, { 42 | "name" : "LAST_NAME", 43 | "schema" : { 44 | "type" : "STRING", 45 | "isOptional" : false 46 | }, 47 | "storage" : "user" 48 | } ] 49 | }, 50 | "timestampType" : "NO_TIMESTAMP_TYPE", 51 | "offset" : 1, 52 | "headers" : [ ] 53 | }, 54 | "description" : "This example will convert the field names of a schema from upper underscore to lower underscore.", 55 | "name" : 
"Upper underscore to lower underscore", 56 | "config" : { 57 | "from" : "UPPER_UNDERSCORE", 58 | "to" : "LOWER_UNDERSCORE" 59 | }, 60 | "childClass" : "Value" 61 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ChangeTopicCase/LOWER_CAMELToUPPER_UNDERSCORE.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "topicName", 4 | "kafkaPartition" : 1, 5 | "timestampType" : "NO_TIMESTAMP_TYPE", 6 | "offset" : 12345, 7 | "headers" : [ ] 8 | }, 9 | "description" : "This example will convert the topic name from lower camel case to upper underscore case.", 10 | "name" : "LOWER_CAMEL to UPPER_UNDERSCORE", 11 | "config" : { 12 | "from" : "LOWER_CAMEL", 13 | "to" : "UPPER_UNDERSCORE" 14 | } 15 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ChangeTopicCase/LOWER_HYPHENToLOWER_UNDERSCORE.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "topic-name", 4 | "kafkaPartition" : 1, 5 | "timestampType" : "NO_TIMESTAMP_TYPE", 6 | "offset" : 12345, 7 | "headers" : [ ] 8 | }, 9 | "description" : "This example will convert the topic name from lower hyphen case to lower underscore case.", 10 | "name" : "LOWER_HYPHEN to LOWER_UNDERSCORE", 11 | "config" : { 12 | "from" : "LOWER_HYPHEN", 13 | "to" : "LOWER_UNDERSCORE" 14 | } 15 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ChangeTopicCase/UPPER_UNDERSCOREToLOWER_CAMEL.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "TOPIC_NAME", 4 | "kafkaPartition" : 1, 5 | "timestampType" : 
"NO_TIMESTAMP_TYPE", 6 | "offset" : 12345, 7 | "headers" : [ ] 8 | }, 9 | "description" : "This example will convert the topic name from upper underscore case to lower camel case.", 10 | "name" : "UPPER_UNDERSCORE to LOWER_CAMEL", 11 | "config" : { 12 | "from" : "UPPER_UNDERSCORE", 13 | "to" : "LOWER_CAMEL" 14 | } 15 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ExtractNestedField/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "topic", 4 | "kafkaPartition" : 1, 5 | "valueSchema" : { 6 | "type" : "STRUCT", 7 | "isOptional" : false, 8 | "fieldSchemas" : { 9 | "first_name" : { 10 | "type" : "STRING", 11 | "isOptional" : false 12 | }, 13 | "last_name" : { 14 | "type" : "STRING", 15 | "isOptional" : false 16 | }, 17 | "address" : { 18 | "name" : "Address", 19 | "type" : "STRUCT", 20 | "isOptional" : false, 21 | "fieldSchemas" : { 22 | "city" : { 23 | "type" : "STRING", 24 | "isOptional" : false 25 | }, 26 | "state" : { 27 | "type" : "STRING", 28 | "isOptional" : false 29 | } 30 | } 31 | } 32 | } 33 | }, 34 | "value" : { 35 | "schema" : { 36 | "type" : "STRUCT", 37 | "isOptional" : false, 38 | "fieldSchemas" : { 39 | "first_name" : { 40 | "type" : "STRING", 41 | "isOptional" : false 42 | }, 43 | "last_name" : { 44 | "type" : "STRING", 45 | "isOptional" : false 46 | }, 47 | "address" : { 48 | "name" : "Address", 49 | "type" : "STRUCT", 50 | "isOptional" : false, 51 | "fieldSchemas" : { 52 | "city" : { 53 | "type" : "STRING", 54 | "isOptional" : false 55 | }, 56 | "state" : { 57 | "type" : "STRING", 58 | "isOptional" : false 59 | } 60 | } 61 | } 62 | } 63 | }, 64 | "fieldValues" : [ { 65 | "name" : "first_name", 66 | "schema" : { 67 | "type" : "STRING", 68 | "isOptional" : false 69 | }, 70 | "storage" : "test" 71 | }, { 72 | "name" : "last_name", 73 | "schema" : { 74 | 
"type" : "STRING", 75 | "isOptional" : false 76 | }, 77 | "storage" : "developer" 78 | }, { 79 | "name" : "address", 80 | "schema" : { 81 | "name" : "Address", 82 | "type" : "STRUCT", 83 | "isOptional" : false, 84 | "fieldSchemas" : { 85 | "city" : { 86 | "type" : "STRING", 87 | "isOptional" : false 88 | }, 89 | "state" : { 90 | "type" : "STRING", 91 | "isOptional" : false 92 | } 93 | } 94 | }, 95 | "struct" : { 96 | "schema" : { 97 | "name" : "Address", 98 | "type" : "STRUCT", 99 | "isOptional" : false, 100 | "fieldSchemas" : { 101 | "city" : { 102 | "type" : "STRING", 103 | "isOptional" : false 104 | }, 105 | "state" : { 106 | "type" : "STRING", 107 | "isOptional" : false 108 | } 109 | } 110 | }, 111 | "fieldValues" : [ { 112 | "name" : "city", 113 | "schema" : { 114 | "type" : "STRING", 115 | "isOptional" : false 116 | }, 117 | "storage" : "Austin" 118 | }, { 119 | "name" : "state", 120 | "schema" : { 121 | "type" : "STRING", 122 | "isOptional" : false 123 | }, 124 | "storage" : "tx" 125 | } ] 126 | } 127 | } ] 128 | }, 129 | "timestampType" : "NO_TIMESTAMP_TYPE", 130 | "offset" : 1, 131 | "headers" : [ ] 132 | }, 133 | "description" : "This example takes the value of `address.state` and copies it to a field on the `state` field.", 134 | "name" : "Example", 135 | "config" : { 136 | "input.inner.field.name" : "state", 137 | "input.outer.field.name" : "address", 138 | "output.field.name" : "state" 139 | }, 140 | "childClass" : "Value" 141 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ExtractTimestamp/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "topic", 4 | "kafkaPartition" : 1, 5 | "value" : { 6 | "timestamp" : 1512164613123 7 | }, 8 | "timestampType" : "NO_TIMESTAMP_TYPE", 9 | "offset" : 1, 10 | "headers" : [ ] 11 | }, 12 | "description" : "This example takes the 
value of the `timestamp` field and uses it for the record timestamp.", 13 | "name" : "Example", 14 | "config" : { 15 | "field.name" : "timestamp" 16 | }, 17 | "childClass" : "Value" 18 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ExtractXPath/SOAPEnvelope1.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 12345 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ExtractXPath/Transaction.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 12345 4 | -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/HeaderToField/headertofield.json: -------------------------------------------------------------------------------- 1 | { 2 | "input": { 3 | "topic": "testing", 4 | "kafkaPartition": 1, 5 | "valueSchema": { 6 | "type": "STRUCT", 7 | "isOptional": false, 8 | "fieldSchemas": { 9 | "firstName": { 10 | "type": "STRING", 11 | "isOptional": true 12 | }, 13 | "lastName": { 14 | "type": "STRING", 15 | "isOptional": true 16 | } 17 | } 18 | }, 19 | "value": { 20 | "schema": { 21 | "type": "STRUCT", 22 | "isOptional": false, 23 | "fieldSchemas": { 24 | "firstName": { 25 | "type": "STRING", 26 | "isOptional": true 27 | }, 28 | "lastName": { 29 | "type": "STRING", 30 | "isOptional": true 31 | } 32 | } 33 | }, 34 | "fieldValues": [ 35 | { 36 | "name": "firstName", 37 | "schema": { 38 | "type": "STRING", 39 | "isOptional": true 40 | }, 41 | "storage": "example" 42 | }, 43 | { 44 | "name": "lastName", 45 | "schema": { 46 | "type": "STRING", 47 | "isOptional": true 48 | }, 49 | "storage": "user" 50 | } 51 | ] 52 | }, 53 | 
"timestamp": 123412351, 54 | "timestampType": "NO_TIMESTAMP_TYPE", 55 | "offset": 12345, 56 | "headers": [ 57 | { 58 | "name": "applicationId", 59 | "schema": { 60 | "type": "STRING", 61 | "isOptional": false 62 | }, 63 | "storage": "testing" 64 | } 65 | ] 66 | }, 67 | "description": "The following example takes the value from the `applicationId` header and appends it as a new field to the value of the message.", 68 | "name": "Header to field", 69 | "config": { 70 | "header.mappings": "applicationId:STRING" 71 | }, 72 | "childClass": "Value" 73 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/HeaderToField/spooldir.json: -------------------------------------------------------------------------------- 1 | { 2 | "input": { 3 | "topic": "testing", 4 | "kafkaPartition": 1, 5 | "valueSchema": { 6 | "type": "STRUCT", 7 | "isOptional": false, 8 | "fieldSchemas": { 9 | "firstName": { 10 | "type": "STRING", 11 | "isOptional": true 12 | }, 13 | "lastName": { 14 | "type": "STRING", 15 | "isOptional": true 16 | } 17 | } 18 | }, 19 | "value": { 20 | "schema": { 21 | "type": "STRUCT", 22 | "isOptional": false, 23 | "fieldSchemas": { 24 | "firstName": { 25 | "type": "STRING", 26 | "isOptional": true 27 | }, 28 | "lastName": { 29 | "type": "STRING", 30 | "isOptional": true 31 | } 32 | } 33 | }, 34 | "fieldValues": [ 35 | { 36 | "name": "firstName", 37 | "schema": { 38 | "type": "STRING", 39 | "isOptional": true 40 | }, 41 | "storage": "example" 42 | }, 43 | { 44 | "name": "lastName", 45 | "schema": { 46 | "type": "STRING", 47 | "isOptional": true 48 | }, 49 | "storage": "user" 50 | } 51 | ] 52 | }, 53 | "timestamp": 123412351, 54 | "timestampType": "NO_TIMESTAMP_TYPE", 55 | "offset": 12345, 56 | "headers": [ 57 | { 58 | "name": "file.path", 59 | "schema": { 60 | "type": "STRING", 61 | "isOptional": false 62 | }, 63 | "storage": "/tmp/input/test1.csv" 64 | }, 65 | { 66 | 
"name": "file.name", 67 | "schema": { 68 | "type": "STRING", 69 | "isOptional": false 70 | }, 71 | "storage": "test1.csv" 72 | }, 73 | { 74 | "name": "file.last.modified", 75 | "schema": { 76 | "type": "INT64", 77 | "isOptional": false, 78 | "name": "org.apache.kafka.connect.data.Timestamp" 79 | }, 80 | "storage": 1610656447123 81 | } 82 | ] 83 | }, 84 | "description": "The following example takes the output from the Spooldir connector copies headers for the metadata to fields in the value.", 85 | "name": "Spooldir metadata", 86 | "config": { 87 | "header.mappings": "file.path:STRING:file_path,file.name:STRING:file_name,file.last.modified:INT64(Timestamp):file_last_modified" 88 | }, 89 | "childClass": "Value" 90 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/LowerCaseTopic/example.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "TestTopic", 4 | "kafkaPartition" : 1, 5 | "key" : "", 6 | "value" : "", 7 | "timestamp" : 12341312, 8 | "timestampType" : "NO_TIMESTAMP_TYPE", 9 | "offset" : 1234123, 10 | "headers" : [ ] 11 | }, 12 | "description" : "This example will change the topic name to be all lower case.", 13 | "name" : "Example", 14 | "config" : { } 15 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/PatternRename/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "test", 4 | "kafkaPartition" : 1, 5 | "valueSchema" : { 6 | "name" : "testing", 7 | "type" : "STRUCT", 8 | "isOptional" : false, 9 | "fieldSchemas" : { 10 | "prefixedfirstname" : { 11 | "type" : "STRING", 12 | "isOptional" : false 13 | }, 14 | "prefixedlastname" : { 15 | "type" : "STRING", 16 | "isOptional" : false 17 | } 18 | } 19 
| }, 20 | "value" : { 21 | "schema" : { 22 | "name" : "testing", 23 | "type" : "STRUCT", 24 | "isOptional" : false, 25 | "fieldSchemas" : { 26 | "prefixedfirstname" : { 27 | "type" : "STRING", 28 | "isOptional" : false 29 | }, 30 | "prefixedlastname" : { 31 | "type" : "STRING", 32 | "isOptional" : false 33 | } 34 | } 35 | }, 36 | "fieldValues" : [ { 37 | "name" : "prefixedfirstname", 38 | "schema" : { 39 | "type" : "STRING", 40 | "isOptional" : false 41 | }, 42 | "storage" : "example" 43 | }, { 44 | "name" : "prefixedlastname", 45 | "schema" : { 46 | "type" : "STRING", 47 | "isOptional" : false 48 | }, 49 | "storage" : "user" 50 | } ] 51 | }, 52 | "timestampType" : "NO_TIMESTAMP_TYPE", 53 | "offset" : 1234, 54 | "headers" : [ ] 55 | }, 56 | "description" : "This example takes fields that are prefixed with `prefixed` and removes the prefix.", 57 | "name" : "Example", 58 | "config" : { 59 | "field.pattern" : "^prefixed", 60 | "field.replacement" : "" 61 | }, 62 | "childClass" : "Value" 63 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/SchemaNameToTopic/example.json: -------------------------------------------------------------------------------- 1 | { 2 | "title" : "Simple", 3 | "input" : { 4 | "topic" : "test", 5 | "kafkaPartition" : 0, 6 | "valueSchema" : { 7 | "name" : "com.foo.bar.whatever.ASDF", 8 | "type" : "STRUCT", 9 | "isOptional" : false, 10 | "fieldSchemas" : { 11 | "firstName" : { 12 | "type" : "STRING", 13 | "isOptional" : true 14 | } 15 | } 16 | }, 17 | "value" : { 18 | "schema" : { 19 | "name" : "com.foo.bar.whatever.ASDF", 20 | "type" : "STRUCT", 21 | "isOptional" : false, 22 | "fieldSchemas" : { 23 | "firstName" : { 24 | "type" : "STRING", 25 | "isOptional" : true 26 | } 27 | } 28 | }, 29 | "fieldValues" : [ { 30 | "name" : "firstName", 31 | "schema" : { 32 | "type" : "STRING", 33 | "isOptional" : true 34 | }, 35 | "storage" : 
"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" 36 | } ] 37 | }, 38 | "timestampType" : "NO_TIMESTAMP_TYPE", 39 | "offset" : 1234, 40 | "headers" : [ ] 41 | }, 42 | "description" : "This example copies the schema name from the value of the record and replaces the topic with the name of the schema.", 43 | "name" : "Simple", 44 | "config" : { }, 45 | "childClass" : "Value" 46 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/TimestampNow/example.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "test", 4 | "kafkaPartition" : 1, 5 | "key" : "", 6 | "value" : "", 7 | "timestamp" : 12341312, 8 | "timestampType" : "NO_TIMESTAMP_TYPE", 9 | "offset" : 1234123, 10 | "headers" : [ ] 11 | }, 12 | "description" : "This example replaces the timestamp on a record with the current timestamp.", 13 | "name" : "Example", 14 | "config" : { } 15 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/TimestampNowField/insertTimestamp.json: -------------------------------------------------------------------------------- 1 | { 2 | "input": { 3 | "topic": "testing", 4 | "kafkaPartition": 1, 5 | "valueSchema": { 6 | "type": "STRUCT", 7 | "isOptional": false, 8 | "fieldSchemas": { 9 | "firstName": { 10 | "type": "STRING", 11 | "isOptional": true 12 | }, 13 | "lastName": { 14 | "type": "STRING", 15 | "isOptional": true 16 | } 17 | } 18 | }, 19 | "value": { 20 | "schema": { 21 | "type": "STRUCT", 22 | "isOptional": false, 23 | "fieldSchemas": { 24 | "firstName": { 25 | "type": "STRING", 26 | "isOptional": true 27 | }, 28 | "lastName": { 29 | "type": "STRING", 30 | "isOptional": true 31 | } 32 | } 33 | }, 34 | "fieldValues": [ 35 | { 36 | "name": "firstName", 37 | "schema": { 38 | "type": "STRING", 39 | 
"isOptional": true 40 | }, 41 | "storage": "example" 42 | }, 43 | { 44 | "name": "lastName", 45 | "schema": { 46 | "type": "STRING", 47 | "isOptional": true 48 | }, 49 | "storage": "user" 50 | } 51 | ] 52 | }, 53 | "timestamp": 123412351, 54 | "timestampType": "NO_TIMESTAMP_TYPE", 55 | "offset": 12345, 56 | "headers": [] 57 | }, 58 | "description": "The following example will insert a field `timestamp` into the value of the record.", 59 | "name": "Insert timestamp field", 60 | "config": { 61 | "fields": "timestamp" 62 | }, 63 | "childClass": "Value" 64 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ToJSON/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "topic", 4 | "kafkaPartition" : 1, 5 | "valueSchema" : { 6 | "type" : "STRUCT", 7 | "isOptional" : false, 8 | "fieldSchemas" : { 9 | "FIRST_NAME" : { 10 | "type" : "STRING", 11 | "isOptional" : false 12 | }, 13 | "LAST_NAME" : { 14 | "type" : "STRING", 15 | "isOptional" : false 16 | } 17 | } 18 | }, 19 | "value" : { 20 | "schema" : { 21 | "type" : "STRUCT", 22 | "isOptional" : false, 23 | "fieldSchemas" : { 24 | "FIRST_NAME" : { 25 | "type" : "STRING", 26 | "isOptional" : false 27 | }, 28 | "LAST_NAME" : { 29 | "type" : "STRING", 30 | "isOptional" : false 31 | } 32 | } 33 | }, 34 | "fieldValues" : [ { 35 | "name" : "FIRST_NAME", 36 | "schema" : { 37 | "type" : "STRING", 38 | "isOptional" : false 39 | }, 40 | "storage" : "test" 41 | }, { 42 | "name" : "LAST_NAME", 43 | "schema" : { 44 | "type" : "STRING", 45 | "isOptional" : false 46 | }, 47 | "storage" : "user" 48 | } ] 49 | }, 50 | "timestampType" : "NO_TIMESTAMP_TYPE", 51 | "offset" : 1, 52 | "headers" : [ ] 53 | }, 54 | "description" : "This example takes an input struct and converts it to JSON.", 55 | "name" : "Example", 56 | "config" : { }, 57 | "childClass" : "Value" 
58 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/ToLong/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "test", 4 | "kafkaPartition" : 1, 5 | "keySchema" : { 6 | "name" : "key", 7 | "type" : "STRUCT", 8 | "isOptional" : false, 9 | "fieldSchemas" : { 10 | "id" : { 11 | "type" : "INT64", 12 | "isOptional" : false 13 | } 14 | } 15 | }, 16 | "key" : { 17 | "schema" : { 18 | "name" : "key", 19 | "type" : "STRUCT", 20 | "isOptional" : false, 21 | "fieldSchemas" : { 22 | "id" : { 23 | "type" : "INT64", 24 | "isOptional" : false 25 | } 26 | } 27 | }, 28 | "fieldValues" : [ { 29 | "name" : "id", 30 | "schema" : { 31 | "type" : "INT64", 32 | "isOptional" : false 33 | }, 34 | "storage" : 1234 35 | } ] 36 | }, 37 | "valueSchema" : { 38 | "name" : "value", 39 | "type" : "STRUCT", 40 | "isOptional" : false, 41 | "fieldSchemas" : { 42 | "name" : { 43 | "type" : "STRING", 44 | "isOptional" : false 45 | }, 46 | "value" : { 47 | "name" : "org.apache.kafka.connect.data.Decimal", 48 | "type" : "BYTES", 49 | "version" : 1, 50 | "parameters" : { 51 | "scale" : "2" 52 | }, 53 | "isOptional" : false 54 | } 55 | } 56 | }, 57 | "value" : { 58 | "schema" : { 59 | "name" : "value", 60 | "type" : "STRUCT", 61 | "isOptional" : false, 62 | "fieldSchemas" : { 63 | "name" : { 64 | "type" : "STRING", 65 | "isOptional" : false 66 | }, 67 | "value" : { 68 | "name" : "org.apache.kafka.connect.data.Decimal", 69 | "type" : "BYTES", 70 | "version" : 1, 71 | "parameters" : { 72 | "scale" : "2" 73 | }, 74 | "isOptional" : false 75 | } 76 | } 77 | }, 78 | "fieldValues" : [ { 79 | "name" : "name", 80 | "schema" : { 81 | "type" : "STRING", 82 | "isOptional" : false 83 | }, 84 | "storage" : "testing" 85 | }, { 86 | "name" : "value", 87 | "schema" : { 88 | "name" : "org.apache.kafka.connect.data.Decimal", 89 | "type" 
: "BYTES", 90 | "version" : 1, 91 | "parameters" : { 92 | "scale" : "2" 93 | }, 94 | "isOptional" : false 95 | }, 96 | "storage" : 12342.31 97 | } ] 98 | }, 99 | "timestamp" : 1530286549123, 100 | "timestampType" : "CREATE_TIME", 101 | "offset" : 91283741, 102 | "headers" : [ ] 103 | }, 104 | "description" : "This example takes the `value` field and converts it to a long.", 105 | "name" : "Example", 106 | "config" : { 107 | "fields" : "value" 108 | }, 109 | "childClass" : "Value" 110 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/TopicNameToField/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "topic" : "testing", 4 | "kafkaPartition" : 1, 5 | "keySchema" : { 6 | "type" : "STRING", 7 | "isOptional" : false 8 | }, 9 | "key" : "foo", 10 | "valueSchema" : { 11 | "type" : "STRUCT", 12 | "isOptional" : false, 13 | "fieldSchemas" : { 14 | "test" : { 15 | "type" : "STRING", 16 | "isOptional" : false 17 | } 18 | } 19 | }, 20 | "value" : { 21 | "schema" : { 22 | "type" : "STRUCT", 23 | "isOptional" : false, 24 | "fieldSchemas" : { 25 | "test" : { 26 | "type" : "STRING", 27 | "isOptional" : false 28 | } 29 | } 30 | }, 31 | "fieldValues" : [ { 32 | "name" : "test", 33 | "schema" : { 34 | "type" : "STRING", 35 | "isOptional" : false 36 | }, 37 | "storage" : "test" 38 | } ] 39 | }, 40 | "timestamp" : 1530286549123, 41 | "timestampType" : "CREATE_TIME", 42 | "offset" : 91283741, 43 | "headers" : [ ] 44 | }, 45 | "description" : "This example takes the topic and adds it to the `topicName` field of the value.", 46 | "name" : "Example", 47 | "config" : { 48 | "field" : "topicName" 49 | }, 50 | "childClass" : "Value" 51 | } -------------------------------------------------------------------------------- /src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/foo: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcustenborder/kafka-connect-transform-common/1bde8223eca60dde8b23b896e8c1ed6d06409297/src/test/resources/com/github/jcustenborder/kafka/connect/transform/common/foo -------------------------------------------------------------------------------- /src/test/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | --------------------------------------------------------------------------------